using System; using System.Collections; using System.Collections.Generic; using System.Collections.Specialized; using System.ComponentModel.DataAnnotations; using System.Globalization; using System.Reflection; using System.Runtime.Serialization; using System.Web.Http; using System.Web.Http.Description; using System.Xml.Serialization; using Newtonsoft.Json; namespace matcher.Areas.HelpPage.ModelDescriptions { /// <summary> /// Generates model descriptions for given types. /// </summary> public class ModelDescriptionGenerator { // Modify this to support more data annotation attributes. private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>> { { typeof(RequiredAttribute), a => "Required" }, { typeof(RangeAttribute), a => { RangeAttribute range = (RangeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum); } }, { typeof(MaxLengthAttribute), a => { MaxLengthAttribute maxLength = (MaxLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length); } }, { typeof(MinLengthAttribute), a => { MinLengthAttribute minLength = (MinLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length); } }, { typeof(StringLengthAttribute), a => { StringLengthAttribute strLength = (StringLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength); } }, { typeof(DataTypeAttribute), a => { DataTypeAttribute dataType = (DataTypeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString()); } }, { typeof(RegularExpressionAttribute), a => { RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern); } }, }; // Modify this to add more default documentations. 
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string> { { typeof(Int16), "integer" }, { typeof(Int32), "integer" }, { typeof(Int64), "integer" }, { typeof(UInt16), "unsigned integer" }, { typeof(UInt32), "unsigned integer" }, { typeof(UInt64), "unsigned integer" }, { typeof(Byte), "byte" }, { typeof(Char), "character" }, { typeof(SByte), "signed byte" }, { typeof(Uri), "URI" }, { typeof(Single), "decimal number" }, { typeof(Double), "decimal number" }, { typeof(Decimal), "decimal number" }, { typeof(String), "string" }, { typeof(Guid), "globally unique identifier" }, { typeof(TimeSpan), "time interval" }, { typeof(DateTime), "date" }, { typeof(DateTimeOffset), "date" }, { typeof(Boolean), "boolean" }, }; private Lazy<IModelDocumentationProvider> _documentationProvider; public ModelDescriptionGenerator(HttpConfiguration config) { if (config == null) { throw new ArgumentNullException("config"); } _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider); GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase); } public Dictionary<string, ModelDescription> GeneratedModels { get; private set; } private IModelDocumentationProvider DocumentationProvider { get { return _documentationProvider.Value; } } public ModelDescription GetOrCreateModelDescription(Type modelType) { if (modelType == null) { throw new ArgumentNullException("modelType"); } Type underlyingType = Nullable.GetUnderlyingType(modelType); if (underlyingType != null) { modelType = underlyingType; } ModelDescription modelDescription; string modelName = ModelNameHelper.GetModelName(modelType); if (GeneratedModels.TryGetValue(modelName, out modelDescription)) { if (modelType != modelDescription.ModelType) { throw new InvalidOperationException( String.Format( CultureInfo.CurrentCulture, "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. 
" + "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.", modelName, modelDescription.ModelType.FullName, modelType.FullName)); } return modelDescription; } if (DefaultTypeDocumentation.ContainsKey(modelType)) { return GenerateSimpleTypeModelDescription(modelType); } if (modelType.IsEnum) { return GenerateEnumTypeModelDescription(modelType); } if (modelType.IsGenericType) { Type[] genericArguments = modelType.GetGenericArguments(); if (genericArguments.Length == 1) { Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments); if (enumerableType.IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, genericArguments[0]); } } if (genericArguments.Length == 2) { Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments); if (dictionaryType.IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]); } Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments); if (keyValuePairType.IsAssignableFrom(modelType)) { return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]); } } } if (modelType.IsArray) { Type elementType = modelType.GetElementType(); return GenerateCollectionModelDescription(modelType, elementType); } if (modelType == typeof(NameValueCollection)) { return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string)); } if (typeof(IDictionary).IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object)); } if (typeof(IEnumerable).IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, typeof(object)); } return GenerateComplexTypeModelDescription(modelType); } // Change this to provide different name for the member. private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute) { JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>(); if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName)) { return jsonProperty.PropertyName; } if (hasDataContractAttribute) { DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>(); if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name)) { return dataMember.Name; } } return member.Name; } private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute) { JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>(); XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>(); IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>(); NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>(); ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>(); bool hasMemberAttribute = member.DeclaringType.IsEnum ? 
member.GetCustomAttribute<EnumMemberAttribute>() != null : member.GetCustomAttribute<DataMemberAttribute>() != null; // Display member only if all the followings are true: // no JsonIgnoreAttribute // no XmlIgnoreAttribute // no IgnoreDataMemberAttribute // no NonSerializedAttribute // no ApiExplorerSettingsAttribute with IgnoreApi set to true // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute return jsonIgnore == null && xmlIgnore == null && ignoreDataMember == null && nonSerialized == null && (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) && (!hasDataContractAttribute || hasMemberAttribute); } private string CreateDefaultDocumentation(Type type) { string documentation; if (DefaultTypeDocumentation.TryGetValue(type, out documentation)) { return documentation; } if (DocumentationProvider != null) { documentation = DocumentationProvider.GetDocumentation(type); } return documentation; } private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel) { List<ParameterAnnotation> annotations = new List<ParameterAnnotation>(); IEnumerable<Attribute> attributes = property.GetCustomAttributes(); foreach (Attribute attribute in attributes) { Func<object, string> textGenerator; if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator)) { annotations.Add( new ParameterAnnotation { AnnotationAttribute = attribute, Documentation = textGenerator(attribute) }); } } // Rearrange the annotations annotations.Sort((x, y) => { // Special-case RequiredAttribute so that it shows up on top if (x.AnnotationAttribute is RequiredAttribute) { return -1; } if (y.AnnotationAttribute is RequiredAttribute) { return 1; } // Sort the rest based on alphabetic order of the documentation return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase); }); foreach (ParameterAnnotation annotation in annotations) { propertyModel.Annotations.Add(annotation); } } private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType) { ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType); if (collectionModelDescription != null) { return new CollectionModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, ElementDescription = collectionModelDescription }; } return null; } private ModelDescription GenerateComplexTypeModelDescription(Type modelType) { ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(complexModelDescription.Name, complexModelDescription); bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance); foreach (PropertyInfo property in properties) { if (ShouldDisplayMember(property, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(property, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(property); } GenerateAnnotations(property, propertyModel); complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType); } } FieldInfo[] fields = 
modelType.GetFields(BindingFlags.Public | BindingFlags.Instance); foreach (FieldInfo field in fields) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(field, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(field); } complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType); } } return complexModelDescription; } private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new DictionaryModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType) { EnumTypeModelDescription enumDescription = new EnumTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static)) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { EnumValueDescription enumValue = new EnumValueDescription { Name = field.Name, Value = field.GetRawConstantValue().ToString() }; if (DocumentationProvider != null) { enumValue.Documentation = DocumentationProvider.GetDocumentation(field); } enumDescription.Values.Add(enumValue); } } GeneratedModels.Add(enumDescription.Name, enumDescription); return enumDescription; } private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new KeyValuePairModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private ModelDescription GenerateSimpleTypeModelDescription(Type modelType) { SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription); return simpleModelDescription; } } }
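// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the original file): a minimal illustration
// of how the HelpPage area typically drives ModelDescriptionGenerator. One
// generator is created per HttpConfiguration, and descriptions are cached by
// model name, so repeated calls for the same type are cheap. The Name and
// Documentation members used below are assumed from the standard HelpPage
// template's ModelDescription class.
// ---------------------------------------------------------------------------
namespace matcher.Areas.HelpPage.ModelDescriptions
{
    using System;
    using System.Web.Http;

    internal static class ModelDescriptionGeneratorUsageSketch
    {
        public static void Demo(HttpConfiguration config)
        {
            ModelDescriptionGenerator generator = new ModelDescriptionGenerator(config);

            // Simple types resolve through DefaultTypeDocumentation; complex types are
            // reflected over and registered in generator.GeneratedModels.
            ModelDescription description = generator.GetOrCreateModelDescription(typeof(Guid));
            Console.WriteLine("{0}: {1}", description.Name, description.Documentation);
        }
    }
}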
/* * Copyright (c) 2009 Jim Radford http://www.jimradford.com * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Text; using System.Windows.Forms; using Microsoft.Win32; using System.Web; using SuperPutty.Data; using SuperPutty.Utils; using SuperPutty.Gui; using log4net; namespace SuperPutty { public partial class dlgEditSession : Form { private static readonly ILog Log = LogManager.GetLogger(typeof(dlgEditSession)); public delegate bool SessionNameValidationHandler(string name, out string error); private SessionData Session; private String OldHostname; private bool isInitialized = false; private ImageListPopup imgPopup = null; public dlgEditSession(SessionData session, ImageList iconList) { Session = session; InitializeComponent(); // get putty saved settings from the registry to populate // the dropdown PopulatePuttySettings(); if (!String.IsNullOrEmpty(Session.SessionName)) { this.Text = "Edit session: " + session.SessionName; this.textBoxSessionName.Text = Session.SessionName; this.textBoxHostname.Text = Session.Host; this.textBoxPort.Text = Session.Port.ToString(); this.textBoxExtraArgs.Text = Session.ExtraArgs; this.textBoxUsername.Text = Session.Username; switch (Session.Proto) { case ConnectionProtocol.Raw: radioButtonRaw.Checked = true; break; case ConnectionProtocol.Rlogin: radioButtonRlogin.Checked = true; break; case ConnectionProtocol.Serial: radioButtonSerial.Checked = true; break; case ConnectionProtocol.SSH: radioButtonSSH.Checked = true; break; case ConnectionProtocol.Telnet: radioButtonTelnet.Checked = true; break; case ConnectionProtocol.Cygterm: radioButtonCygterm.Checked = true; break; case ConnectionProtocol.Mintty: radioButtonMintty.Checked = true; break; default: radioButtonSSH.Checked = true; break; } comboBoxPuttyProfile.DropDownStyle = ComboBoxStyle.DropDownList; foreach(String settings in this.comboBoxPuttyProfile.Items){ if (settings == session.PuttySession) { this.comboBoxPuttyProfile.SelectedItem = settings; break; } } this.buttonSave.Enabled = true; } else { this.Text = "Create new session"; radioButtonSSH.Checked = true; this.buttonSave.Enabled = false; } // Setup icon chooser this.buttonImageSelect.ImageList = iconList; this.buttonImageSelect.ImageKey = string.IsNullOrEmpty(Session.ImageKey) ? 
SessionTreeview.ImageKeySession : Session.ImageKey; this.toolTip.SetToolTip(this.buttonImageSelect, buttonImageSelect.ImageKey); this.isInitialized = true; } protected override void OnLoad(EventArgs e) { base.OnLoad(e); this.BeginInvoke(new MethodInvoker(delegate { this.textBoxSessionName.Focus(); })); } private void PopulatePuttySettings() { foreach (String sessionName in PuttyDataHelper.GetSessionNames()) { comboBoxPuttyProfile.Items.Add(sessionName); } comboBoxPuttyProfile.SelectedItem = PuttyDataHelper.SessionDefaultSettings; } private void buttonCancel_Click(object sender, EventArgs e) { DialogResult = DialogResult.Cancel; } private void buttonSave_Click(object sender, EventArgs e) { Session.SessionName = textBoxSessionName.Text.Trim(); Session.PuttySession = comboBoxPuttyProfile.Text.Trim(); Session.Host = textBoxHostname.Text.Trim(); Session.ExtraArgs = textBoxExtraArgs.Text.Trim(); Session.Port = int.Parse(textBoxPort.Text.Trim()); Session.Username = textBoxUsername.Text.Trim(); Session.SessionId = SessionData.CombineSessionIds(SessionData.GetSessionParentId(Session.SessionId), Session.SessionName); Session.ImageKey = buttonImageSelect.ImageKey; for (int i = 0; i < groupBox1.Controls.Count; i++) { RadioButton rb = (RadioButton)groupBox1.Controls[i]; if (rb.Checked) { Session.Proto = (ConnectionProtocol)rb.Tag; } } DialogResult = DialogResult.OK; } /// <summary> /// Special UI handling for cygterm or mintty sessions /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void radioButtonCygterm_CheckedChanged(object sender, EventArgs e) { string host = this.textBoxHostname.Text; bool isLocalShell = this.radioButtonCygterm.Checked || this.radioButtonMintty.Checked; this.textBoxPort.Enabled = !isLocalShell; this.textBoxExtraArgs.Enabled = !isLocalShell; this.textBoxUsername.Enabled = !isLocalShell; if (isLocalShell) { if (String.IsNullOrEmpty(host) || !host.StartsWith(CygtermStartInfo.LocalHost)) { OldHostname = this.textBoxHostname.Text; this.textBoxHostname.Text = CygtermStartInfo.LocalHost; } } } private void radioButtonRaw_CheckedChanged(object sender, EventArgs e) { if (this.radioButtonRaw.Checked && this.isInitialized) { if (!string.IsNullOrEmpty(OldHostname)) { this.textBoxHostname.Text = OldHostname; OldHostname = null; } } } private void radioButtonTelnet_CheckedChanged(object sender, EventArgs e) { if (this.radioButtonTelnet.Checked && this.isInitialized) { if (!string.IsNullOrEmpty(OldHostname)) { this.textBoxHostname.Text = OldHostname; OldHostname = null; } this.textBoxPort.Text = "23"; } } private void radioButtonRlogin_CheckedChanged(object sender, EventArgs e) { if (this.radioButtonRlogin.Checked && this.isInitialized) { if (!string.IsNullOrEmpty(OldHostname)) { this.textBoxHostname.Text = OldHostname; OldHostname = null; } this.textBoxPort.Text = "513"; } } private void radioButtonSSH_CheckedChanged(object sender, EventArgs e) { if (this.radioButtonSSH.Checked && this.isInitialized) { if (!string.IsNullOrEmpty(OldHostname)) { this.textBoxHostname.Text = OldHostname; OldHostname = null; } this.textBoxPort.Text = "22"; } } public static int GetDefaultPort(ConnectionProtocol protocol) { int port = 22; switch (protocol) { case ConnectionProtocol.Raw: break; case ConnectionProtocol.Rlogin: port = 513; break; case ConnectionProtocol.Serial: break; case ConnectionProtocol.Telnet: port = 23; break; } return port; } #region Icon private void buttonImageSelect_Click(object sender, EventArgs e) { if (this.imgPopup == null) { // TODO: ImageList is 
null on initial installation and will throw a nullreference exception when creating a new session and trying to select an image. int n = buttonImageSelect.ImageList.Images.Count; int x = (int) Math.Floor(Math.Sqrt(n)) + 1; int cols = x; int rows = x; imgPopup = new ImageListPopup(); imgPopup.BackgroundColor = Color.FromArgb(241, 241, 241); imgPopup.BackgroundOverColor = Color.FromArgb(102, 154, 204); imgPopup.Init(this.buttonImageSelect.ImageList, 8, 8, cols, rows); imgPopup.ItemClick += new ImageListPopupEventHandler(this.OnItemClicked); } Point pt = PointToScreen(new Point(buttonImageSelect.Left, buttonImageSelect.Bottom)); imgPopup.Show(pt.X + 2, pt.Y); } private void OnItemClicked(object sender, ImageListPopupEventArgs e) { if (imgPopup == sender) { buttonImageSelect.ImageKey = e.SelectedItem; this.toolTip.SetToolTip(this.buttonImageSelect, buttonImageSelect.ImageKey); } } #endregion #region Validation Logic public SessionNameValidationHandler SessionNameValidator { get; set; } private void textBoxSessionName_Validating(object sender, CancelEventArgs e) { if (this.SessionNameValidator != null) { string error; if (!this.SessionNameValidator(this.textBoxSessionName.Text, out error)) { e.Cancel = true; this.SetError(this.textBoxSessionName, error ?? "Invalid Session Name"); } } } private void textBoxSessionName_Validated(object sender, EventArgs e) { this.SetError(this.textBoxSessionName, String.Empty); } private void textBoxPort_Validating(object sender, CancelEventArgs e) { int val; if (!Int32.TryParse(this.textBoxPort.Text, out val)) { e.Cancel = true; this.SetError(this.textBoxPort, "Invalid Port"); } } private void textBoxPort_Validated(object sender, EventArgs e) { this.SetError(this.textBoxPort, String.Empty); } private void textBoxHostname_Validating(object sender, CancelEventArgs e) { if (string.IsNullOrEmpty((string)this.comboBoxPuttyProfile.SelectedItem) && string.IsNullOrEmpty(this.textBoxHostname.Text.Trim())) { if (sender == this.textBoxHostname) { this.SetError(this.textBoxHostname, "A host name must be specified if a Putty Session Profile is not selected"); } else if (sender == this.comboBoxPuttyProfile) { this.SetError(this.comboBoxPuttyProfile, "A Putty Session Profile must be selected if a Host Name is not provided"); } } else { this.SetError(this.textBoxHostname, String.Empty); this.SetError(this.comboBoxPuttyProfile, String.Empty); } } private void comboBoxPuttyProfile_Validating(object sender, CancelEventArgs e) { this.textBoxHostname_Validating(sender, e); } private void comboBoxPuttyProfile_SelectedIndexChanged(object sender, EventArgs e) { this.ValidateChildren(ValidationConstraints.ImmediateChildren); } void SetError(Control control, string error) { this.errorProvider.SetError(control, error); this.EnableDisableSaveButton(); } void EnableDisableSaveButton() { this.buttonSave.Enabled = ( this.errorProvider.GetError(this.textBoxSessionName) == String.Empty && this.errorProvider.GetError(this.textBoxHostname) == String.Empty && this.errorProvider.GetError(this.textBoxPort) == String.Empty && this.errorProvider.GetError(this.comboBoxPuttyProfile) == String.Empty); } #endregion } }
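// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the original file): shows how the session
// tree typically opens dlgEditSession — construct it with the SessionData to
// edit plus the shared icon ImageList, attach a name validator, and rely on
// buttonSave_Click to write the fields back into the SessionData on OK. The
// validation message below is a placeholder.
// ---------------------------------------------------------------------------
namespace SuperPutty
{
    using System.Windows.Forms;
    using SuperPutty.Data;

    internal static class EditSessionUsageSketch
    {
        public static bool Demo(SessionData session, ImageList iconList)
        {
            using (dlgEditSession dialog = new dlgEditSession(session, iconList))
            {
                // Reject empty names; the dialog surfaces the message via its ErrorProvider
                // and keeps the Save button disabled until the error is cleared.
                dialog.SessionNameValidator = delegate(string name, out string error)
                {
                    error = string.IsNullOrEmpty(name) ? "A session name is required" : null;
                    return error == null;
                };

                // On OK the SessionData instance has already been updated in place.
                return dialog.ShowDialog() == DialogResult.OK;
            }
        }
    }
}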
using UnityEngine; using System; using System.Collections; using System.Collections.Generic; using System.Runtime.InteropServices; using SessionMMiniJSON; /*! * SessionM service implementation. Implements and provides access (via SessionM.GetInstance()) to the SessionM service. * The SessionM Class is a MonoBehaviour singleton. Drop the game object in your scene and set it up instead of trying to instantiate it via code. * Put the SessionM object in your project as early as possible. The object will survive loads, so there's never a reason to put it in more than one place in your scenes. */ public class SessionM : MonoBehaviour { private ISessionMCallback callback; private static SessionM instance; /*! iOS Application ID linked to SampleApp game object. */ public string iosAppId; /*! Android Application ID linked to SampleApp game object. */ public string androidAppId; /*! iOS debug log level linked to SampleApp game object. */ public LogLevel logLevel; /*! Returns the SessionM singleton instance. */ public static SessionM GetInstance() { if(instance == null) { SessionM existingSessionM = GameObject.FindObjectOfType<SessionM>(); if(existingSessionM == null) { Debug.LogError("There is no SessionM GameObject set up in the scene. Please add one and set it up as per the SessionM Plug-In Documentation."); return null; } existingSessionM.SetSessionMNative(); instance = existingSessionM; } return instance; } /*! * The SessionM service region used to determine request routes - must be set before starting session. Default value is ServiceRegion.USA. * * Note: using SetServerType will cause serviceRegion to be set to ServiceRegion.Custom */ public static ServiceRegion serviceRegion = ServiceRegion.USA; /*! The server that the SessionM plug-in routes requests to - must be set before starting session. Default value is 'https://api.sessionm.com'. */ public static string serverURL = "https://api.sessionm.com"; /*! Determines whether the user's achievements list will be updated automatically. Default value is false. */ public static bool shouldAutoUpdateAchievementsList = false; /*! Determines whether access to the messages API is enabled. Default value is false. */ public static bool shouldEnableMessages = false; /*! Determines whether the session is started automatically when the SessionM game object is activated. Default value is true. */ public static bool shouldAutoStartSession = true; /*! Call this method before starting the session to set the service region. Do not use with SetServerType. */ public static void SetServiceRegion(ServiceRegion region) { serviceRegion = region; } /*! Call this method before starting the session to set the server URL. Do not use with SetServiceRegion. */ public static void SetServerType(string url) { serviceRegion = ServiceRegion.Custom; serverURL = url; } /*! Sets whether the user's achievements list will be updated automatically. Default value is false. */ public static void SetShouldAutoUpdateAchievementsList(bool shouldAutoUpdate) { shouldAutoUpdateAchievementsList = shouldAutoUpdate; } /*! Sets whether access to the messages API is enabled. Default value is false. */ public static void SetMessagesEnabled(bool shouldEnable) { shouldEnableMessages = shouldEnable; } /*! Determines whether the session is started automatically when the SessionM game object is activated. Default value is true. */ public static void SetSessionAutoStartEnabled(bool autoStartEnabled) { shouldAutoStartSession = autoStartEnabled; } /*! Returns shouldAutoStartSession. 
*/ public static bool IsSessionAutoStartEnabled() { return shouldAutoStartSession; } private ISessionM sessionMNative; /*! * Instantiates the appropiate native interface to be used for the current platform. * * iOS: ISessionM_IOS * Android: ISessionM_Android * All others: ISessionM_Dummy (the dummy simply catches all calls coming into SessionM from unsupported platforms) * * If you need to modify how SessionM is interacting with either iOS or Android natively, please look in the respective interface class. */ public ISessionM SessionMNative { get { return sessionMNative; } } /*! * Returns SessionM's current session state. * * Can be: Stopped, Started Online, Started Offline */ public SessionState GetSessionState() { return sessionMNative.GetSessionState(); } /*! Manually starts a session with the specified application API key. */ public void StartSession(string appKey) { sessionMNative.StartSession(appKey); } /*! Returns user's number of unclaimed achievements. */ public int GetUnclaimedAchievementCount() { return sessionMNative.GetUnclaimedAchievementCount(); } /*! Returns current user data. */ public UserData GetUserData() { UserData userData = null; string userDataJSON = null; userDataJSON = sessionMNative.GetUser(); if(userDataJSON == null) { return null; } userData = GetUserData(userDataJSON); return userData; } /*! Sends a request to the server to log in the user with the specified email and password. Returns whether the request can be sent. */ public bool LogInUserWithEmail(string email, string password) { return sessionMNative.LogInUserWithEmail(email, password); } /*! Logs out the current user. */ public void LogOutUser() { sessionMNative.LogOutUser(); } /*! Sends a request to the server to sign up the user with the specified parameters. Returns whether the request can be sent. */ public bool SignUpUser(string email, string password, string birthYear, string gender, string zipCode) { return sessionMNative.SignUpUser(email, password, birthYear, gender, zipCode); } /*! Sets current user opt-out status locally. */ public void SetUserOptOutStatus(bool status){ sessionMNative.SetUserOptOutStatus(status); } /*! Manually updates the user's achievements list. */ public void UpdateAchievementsList() { sessionMNative.UpdateAchievementsList(); } /*! Returns current unclaimed achievement data. */ public AchievementData GetUnclaimedAchievementData() { IAchievementData achievementData = null; string achievementJSON = null; achievementJSON = sessionMNative.GetUnclaimedAchievementData(); if(achievementJSON == null) { return null; } achievementData = GetAchievementData(achievementJSON); return achievementData as AchievementData; } /*! Logs an event for the specified achievement action. */ public void LogAction(string action) { sessionMNative.LogAction(action); } /*! Logs multiple events for the specified achievement action. */ public void LogAction(string action, int count) { sessionMNative.LogAction(action, count); } /*! Logs multiple events for the specified achievement action while supplying additional developer-defined data that is associated with the action. */ public void LogAction(string action, int count, Dictionary<string, object> payloads) { sessionMNative.LogAction(action, count, payloads); } /*! Presents UI activity of specified type. */ public bool PresentActivity(ActivityType type) { return sessionMNative.PresentActivity(type); } /*! Returns whether an activity of the specified type is available. 
*/ public bool IsActivityAvailable(ActivityType type) { return sessionMNative.IsActivityAvailable(type); } /*! Displays the rewards portal. */ public bool ShowPortal() { return PresentActivity(ActivityType.Portal); } /*! Returns the version number of the SessionM Unity Plug-in. */ public string GetSDKVersion() { return sessionMNative.GetSDKVersion(); } /*! This method is deprecated. Please use GetOffers instead. */ public Reward[] GetRewards() { return GetRewardData(sessionMNative.GetRewards()); } /*! Returns a list of campaign messages. */ public string GetMessagesList() { return sessionMNative.GetMessagesList(); } /*! Returns the current iOS debug log level. */ public LogLevel GetLogLevel() { return sessionMNative.GetLogLevel(); } /*! Sets the iOS debug log level. For Android, use logcat instead. */ public void SetLogLevel(LogLevel level) { sessionMNative.SetLogLevel(level); } /*! Returns whether a UI activity is currently presented. */ public bool IsActivityPresented() { return sessionMNative.IsActivityPresented(); } /*! Sets session metadata to be sent on session start. */ public void SetMetaData(string data, string key) { sessionMNative.SetMetaData(data, key); } /*! Sends a request to the server to authenticate a user with the specified OAuth token string from the specified provider. Returns whether request can be sent. */ public bool AuthenticateWithToken(string provider, string token) { return sessionMNative.AuthenticateWithToken(provider, token); } /*! Call this method before starting the session to set the app key. */ public void SetAppKey(string appKey) { sessionMNative.SetAppKey(appKey); } /*! Notifies the SessionM SDK that a custom achievement has been presented. */ public void NotifyPresented() { sessionMNative.NotifyPresented(); } /*! Notifies the SessionM SDK that a custom achievement has been dismissed. */ public void NotifyDismissed() { sessionMNative.NotifyDismissed(); } /*! Notifies the SessionM SDK that a custom achievement has been claimed. */ public void NotifyClaimed() { sessionMNative.NotifyClaimed(); } /*! Dismisses currently presented UI activity. */ public void DismissActivity() { sessionMNative.DismissActivity(); } /*! Presents list of tiers the user can reach. Note: this method is deprecated. */ public void PresentTierList() { sessionMNative.PresentTierList(); } /*! Returns the list of tiers the user can reach. */ public Tier[] GetTiers() { return GetTierData(sessionMNative.GetTiers()); } /*! Returns multiplier bonus applied to points received by a user in the current application. */ public double GetApplicationMultiplier() { return sessionMNative.GetApplicationMultiplier(); } /*! Sends a request to the server to update the cached list of offers that the user can redeem. Offers are returned in NotifyOffersUpdated callback. */ public void UpdateOffers() { sessionMNative.UpdateOffers(); } /*! Returns the cached list of offers that the user can redeem. */ public Offer[] GetOffers() { return GetOfferData(sessionMNative.GetOffers()); } /*! Sends a request to the server to fetch the data for the content with the specified ID (external IDs are developer-defined). Content data is returned in NotifyContentFetched callback. */ public void FetchContent(string contentID, bool isExternalID) { sessionMNative.FetchContent(contentID, isExternalID); } /*! Sets the object to use for executing Unity callback implementations. */ public void SetCallback(ISessionMCallback callback) { sessionMNative.SetCallback(callback); } /*! Returns callback object. 
*/ public ISessionMCallback GetCallback() { return sessionMNative.GetCallback(); } // Unity Lifecycle private void Awake() { SetSessionMNative(); GameObject.DontDestroyOnLoad(this.gameObject); instance = this; SetLogLevel (logLevel); } private void SetSessionMNative() { if(sessionMNative != null) return; //Assign the appropiate Native Class to handle method calls here. #if UNITY_EDITOR sessionMNative = new ISessionM_Dummy(); #elif UNITY_IOS sessionMNative = new ISessionM_iOS(this); #elif UNITY_ANDROID sessionMNative = new ISessionM_Android(this); #else sessionMNative = new ISessionM_Dummy(); #endif } /*! This is a useful method you can call whenever you need to parse a JSON string into the IAchievementData custom class. */ public static IAchievementData GetAchievementData(string jsonString) { Dictionary<string, object> achievementDict = Json.Deserialize(jsonString) as Dictionary<string,object>; long mpointValue = (Int64)achievementDict["mpointValue"]; long timesEarned = (Int64)achievementDict["timesEarned"]; long unclaimedCount = (Int64)achievementDict["unclaimedCount"]; long distance = (Int64)achievementDict["distance"]; bool isCustom = (bool)achievementDict["isCustom"]; string identifier = (string)achievementDict["identifier"]; string importID = (string)achievementDict["importID"]; string instructions = (string)achievementDict["instructions"]; string achievementIconURL = (string)achievementDict["achievementIconURL"]; string action = (string)achievementDict["action"]; string name = (string)achievementDict["name"]; string message = (string)achievementDict["message"]; string limitText = (string)achievementDict["limitText"]; DateTime lastEarnedDate = new DateTime((Int64)achievementDict["lastEarnedDate"], DateTimeKind.Utc); IAchievementData achievementData = new AchievementData(identifier, importID, instructions, achievementIconURL, action, name, message, limitText, (int)mpointValue, isCustom, lastEarnedDate, (int)timesEarned, (int)unclaimedCount, (int)distance); return achievementData; } private static UserData GetUserData(string jsonString) { Dictionary<string, object> userDict = Json.Deserialize(jsonString) as Dictionary<string, object>; bool isOptedOut = (bool)userDict["isOptedOut"]; bool isRegistered = (bool)userDict["isRegistered"]; bool isLoggedIn = (bool)userDict["isLoggedIn"]; long userPointBalance = (Int64)userDict["getPointBalance"]; long userTierPointBalance = (Int64)userDict["getTierPointBalance"]; long unclaimedAchievementCount = (Int64)userDict["getUnclaimedAchievementCount"]; long unclaimedAchievementValue = (Int64)userDict["getUnclaimedAchievementValue"]; string achievementsJSON = (string)userDict["getAchievementsJSON"]; string[] achievementsJSONArray = UnpackJSONArray(achievementsJSON); AchievementData[] achievementsArray = new AchievementData[achievementsJSONArray.Length]; for(int i = 0; i < achievementsJSONArray.Length; i++) { string achievement = achievementsJSONArray[i]; if(achievement == "") { break; } achievementsArray[i] = GetAchievementData(achievement) as AchievementData; } List<AchievementData> achievements = new List<AchievementData>(achievementsArray); string achievementsListJSON = (string)userDict["getAchievementsListJSON"]; string[] achievementsListJSONArray = UnpackJSONArray(achievementsListJSON); AchievementData[] achievementsListArray = new AchievementData[achievementsListJSONArray.Length]; for(int i = 0; i < achievementsListJSONArray.Length; i++) { string achievement = achievementsListJSONArray[i]; if(achievement == "") { break; } achievementsListArray[i] = 
GetAchievementData(achievement) as AchievementData; } List<AchievementData> achievementsList = new List<AchievementData>(achievementsListArray); string tierIdentifier = (string)userDict["getTierIdentifier"]; string tierName = (string)userDict["getTierName"]; string tierPercentage = (string)userDict["getTierPercentage"]; string tierAnniversaryDate = (string)userDict["getTierAnniversaryDate"]; string startTier = (string)userDict["getStartTier"]; UserData userData = new UserData(isOptedOut, isRegistered, isLoggedIn, (int)userPointBalance, userTierPointBalance, (int)unclaimedAchievementCount, (int)unclaimedAchievementValue, achievements, achievementsList, tierIdentifier, tierName, tierPercentage, tierAnniversaryDate, startTier); return userData; } private static Tier[] GetTierData(string jsonString) { List<object> dictList = Json.Deserialize(jsonString) as List<object>; Tier[] tierArray = new Tier[dictList.Count]; for(int i = 0; i < dictList.Count; i++) { Dictionary<string, object> dict = dictList[i] as Dictionary<string, object>; string tier = (string)GetValueOrDefault<string, object>(dict, "tier", ""); string name = (string)GetValueOrDefault<string, object>(dict, "name", ""); string instructions = (string)GetValueOrDefault<string, object>(dict, "instructions", ""); double multiplier = Convert.ToDouble(GetValueOrDefault<string, object>(dict, "multiplier", "0.0")); int startValue = Convert.ToInt32(GetValueOrDefault<string, object>(dict, "start_value", "0")); int endValue = Convert.ToInt32(GetValueOrDefault<string, object>(dict, "end_value", "0")); double securePercent = Convert.ToDouble(GetValueOrDefault<string, object>(dict, "maintenance_percent", "0.0")); int requiredPoints = Convert.ToInt32(GetValueOrDefault<string, object>(dict, "required_points", "0")); tierArray[i] = new Tier(tier, name, instructions, multiplier, startValue, endValue, securePercent, requiredPoints); } return tierArray; } private static Reward[] GetRewardData(string jsonString) { List<object> dictList = Json.Deserialize(jsonString) as List<object>; Reward[] rewardArray = new Reward[dictList.Count]; for (int i = 0; i < dictList.Count; i++) { Dictionary<string, object> dict = dictList[i] as Dictionary<string, object>; long id = (System.Int64) dict["id"]; string name = (string) dict["name"]; long points = (System.Int64) dict["points"]; string imageURL = (string)dict["image"]; string type = (string) dict["type"]; string expiresAt = (string) dict["expires_at"]; string url = (string) dict["url"]; string tier = (string)dict ["tier"]; rewardArray[i] = new Reward ((int)id, name, (int)points, imageURL, url, tier, type, expiresAt); } return rewardArray; } private static Offer[] GetOfferData(string jsonString) { List<object> dictList = Json.Deserialize(jsonString) as List<object>; Offer[] offerArray = new Offer[dictList.Count]; for (int i = 0; i < dictList.Count; i++) { Dictionary<string, object> dict = dictList[i] as Dictionary<string, object>; Offer offer = new Offer(dict); offerArray[i] = offer; } return offerArray; } private static string[] UnpackJSONArray(string json) { string[] separatorArray = new string[] {"__"}; string[] JSONArray = json.Split(separatorArray, StringSplitOptions.None); return JSONArray; } private static TValue GetValueOrDefault<TKey, TValue>(Dictionary<TKey, TValue> dict, TKey key, TValue defaultVal) { TValue val; return dict.TryGetValue(key, out val) ? val : defaultVal; } }
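// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the original file): the typical calling
// pattern from game code — fetch the scene singleton, inspect the session
// state, and log a developer-defined achievement action. The action name used
// here is a placeholder.
// ---------------------------------------------------------------------------
using UnityEngine;

public class SessionMUsageSketch : MonoBehaviour
{
    private void Start()
    {
        SessionM sessionM = SessionM.GetInstance();
        if (sessionM == null)
        {
            return; // No SessionM game object in the scene; GetInstance() already logged an error.
        }

        Debug.Log("SessionM state: " + sessionM.GetSessionState());
        sessionM.LogAction("level_completed"); // hypothetical action name
        Debug.Log("Unclaimed achievements: " + sessionM.GetUnclaimedAchievementCount());
    }
}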
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using Validation; namespace System.Collections.Immutable { /// <content> /// Contains the inner <see cref="ImmutableHashSet{T}.Builder"/> class. /// </content> [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "Ignored")] public sealed partial class ImmutableHashSet<T> { /// <summary> /// A hash set that mutates with little or no memory allocations, /// can produce and/or build on immutable hash set instances very efficiently. /// </summary> /// <remarks> /// <para> /// While <see cref="ImmutableHashSet{T}.Union(IEnumerable{T})"/> and other bulk change methods /// already provide fast bulk change operations on the collection, this class allows /// multiple combinations of changes to be made to a set with equal efficiency. /// </para> /// <para> /// Instance members of this class are <em>not</em> thread-safe. /// </para> /// </remarks> [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "Ignored")] [SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "Ignored")] [DebuggerDisplay("Count = {Count}")] public sealed class Builder : IReadOnlyCollection<T>, ISet<T> { /// <summary> /// The root of the binary tree that stores the collection. Contents are typically not entirely frozen. /// </summary> private SortedInt32KeyNode<HashBucket> _root = SortedInt32KeyNode<HashBucket>.EmptyNode; /// <summary> /// The equality comparer. /// </summary> private IEqualityComparer<T> _equalityComparer; /// <summary> /// The number of elements in this collection. /// </summary> private int _count; /// <summary> /// Caches an immutable instance that represents the current state of the collection. /// </summary> /// <value>Null if no immutable view has been created for the current version.</value> private ImmutableHashSet<T> _immutable; /// <summary> /// A number that increments every time the builder changes its contents. /// </summary> private int _version; /// <summary> /// Initializes a new instance of the <see cref="ImmutableHashSet{T}.Builder"/> class. /// </summary> /// <param name="set">The set.</param> internal Builder(ImmutableHashSet<T> set) { Requires.NotNull(set, "set"); _root = set._root; _count = set._count; _equalityComparer = set._equalityComparer; _immutable = set; } #region ISet<T> Properties /// <summary> /// Gets the number of elements contained in the <see cref="ICollection{T}"/>. /// </summary> /// <returns>The number of elements contained in the <see cref="ICollection{T}"/>.</returns> public int Count { get { return _count; } } /// <summary> /// Gets a value indicating whether the <see cref="ICollection{T}"/> is read-only. /// </summary> /// <returns>true if the <see cref="ICollection{T}"/> is read-only; otherwise, false.</returns> bool ICollection<T>.IsReadOnly { get { return false; } } #endregion /// <summary> /// Gets or sets the key comparer. /// </summary> /// <value> /// The key comparer. 
/// </value> public IEqualityComparer<T> KeyComparer { get { return _equalityComparer; } set { Requires.NotNull(value, "value"); if (value != _equalityComparer) { var result = Union(this, new MutationInput(SortedInt32KeyNode<HashBucket>.EmptyNode, value, 0)); _immutable = null; _equalityComparer = value; this.Root = result.Root; _count = result.Count; // whether the offset or absolute, since the base is 0, it's no difference. } } } /// <summary> /// Gets the current version of the contents of this builder. /// </summary> internal int Version { get { return _version; } } /// <summary> /// Gets the initial data to pass to a query or mutation method. /// </summary> private MutationInput Origin { get { return new MutationInput(this.Root, _equalityComparer, _count); } } /// <summary> /// Gets or sets the root of this data structure. /// </summary> private SortedInt32KeyNode<HashBucket> Root { get { return _root; } set { // We *always* increment the version number because some mutations // may not create a new value of root, although the existing root // instance may have mutated. _version++; if (_root != value) { _root = value; // Clear any cached value for the immutable view since it is now invalidated. _immutable = null; } } } #region Public methods /// <summary> /// Returns an enumerator that iterates through the collection. /// </summary> /// <returns> /// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection. /// </returns> public Enumerator GetEnumerator() { return new Enumerator(_root, this); } /// <summary> /// Creates an immutable hash set based on the contents of this instance. /// </summary> /// <returns>An immutable set.</returns> /// <remarks> /// This method is an O(n) operation, and approaches O(1) time as the number of /// actual mutations to the set since the last call to this method approaches 0. /// </remarks> public ImmutableHashSet<T> ToImmutable() { // Creating an instance of ImmutableSortedMap<T> with our root node automatically freezes our tree, // ensuring that the returned instance is immutable. Any further mutations made to this builder // will clone (and unfreeze) the spine of modified nodes until the next time this method is invoked. if (_immutable == null) { _immutable = ImmutableHashSet<T>.Wrap(_root, _equalityComparer, _count); } return _immutable; } #endregion #region ISet<T> Methods /// <summary> /// Adds the specified item. /// </summary> /// <param name="item">The item.</param> /// <returns>True if the item did not already belong to the collection.</returns> public bool Add(T item) { var result = ImmutableHashSet<T>.Add(item, this.Origin); this.Apply(result); return result.Count != 0; } /// <summary> /// Removes the first occurrence of a specific object from the <see cref="ICollection{T}"/>. /// </summary> /// <param name="item">The object to remove from the <see cref="ICollection{T}"/>.</param> /// <returns> /// true if <paramref name="item"/> was successfully removed from the <see cref="ICollection{T}"/>; otherwise, false. This method also returns false if <paramref name="item"/> is not found in the original <see cref="ICollection{T}"/>. /// </returns> /// <exception cref="NotSupportedException">The <see cref="ICollection{T}"/> is read-only.</exception> public bool Remove(T item) { var result = ImmutableHashSet<T>.Remove(item, this.Origin); this.Apply(result); return result.Count != 0; } /// <summary> /// Determines whether the <see cref="ICollection{T}"/> contains a specific value. 
/// </summary> /// <param name="item">The object to locate in the <see cref="ICollection{T}"/>.</param> /// <returns> /// true if <paramref name="item"/> is found in the <see cref="ICollection{T}"/>; otherwise, false. /// </returns> public bool Contains(T item) { return ImmutableHashSet<T>.Contains(item, this.Origin); } /// <summary> /// Removes all items from the <see cref="ICollection{T}"/>. /// </summary> /// <exception cref="NotSupportedException">The <see cref="ICollection{T}"/> is read-only. </exception> public void Clear() { _count = 0; this.Root = SortedInt32KeyNode<HashBucket>.EmptyNode; } /// <summary> /// Removes all elements in the specified collection from the current set. /// </summary> /// <param name="other">The collection of items to remove from the set.</param> public void ExceptWith(IEnumerable<T> other) { var result = ImmutableHashSet<T>.Except(other, _equalityComparer, _root); this.Apply(result); } /// <summary> /// Modifies the current set so that it contains only elements that are also in a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> public void IntersectWith(IEnumerable<T> other) { var result = ImmutableHashSet<T>.Intersect(other, this.Origin); this.Apply(result); } /// <summary> /// Determines whether the current set is a proper (strict) subset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a correct subset of other; otherwise, false.</returns> public bool IsProperSubsetOf(IEnumerable<T> other) { return ImmutableHashSet<T>.IsProperSubsetOf(other, this.Origin); } /// <summary> /// Determines whether the current set is a proper (strict) superset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a superset of other; otherwise, false.</returns> public bool IsProperSupersetOf(IEnumerable<T> other) { return ImmutableHashSet<T>.IsProperSupersetOf(other, this.Origin); } /// <summary> /// Determines whether the current set is a subset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a subset of other; otherwise, false.</returns> public bool IsSubsetOf(IEnumerable<T> other) { return ImmutableHashSet<T>.IsSubsetOf(other, this.Origin); } /// <summary> /// Determines whether the current set is a superset of a specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is a superset of other; otherwise, false.</returns> public bool IsSupersetOf(IEnumerable<T> other) { return ImmutableHashSet<T>.IsSupersetOf(other, this.Origin); } /// <summary> /// Determines whether the current set overlaps with the specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set and other share at least one common element; otherwise, false.</returns> public bool Overlaps(IEnumerable<T> other) { return ImmutableHashSet<T>.Overlaps(other, this.Origin); } /// <summary> /// Determines whether the current set and the specified collection contain the same elements. 
/// </summary> /// <param name="other">The collection to compare to the current set.</param> /// <returns>true if the current set is equal to other; otherwise, false.</returns> public bool SetEquals(IEnumerable<T> other) { if (object.ReferenceEquals(this, other)) { return true; } return ImmutableHashSet<T>.SetEquals(other, this.Origin); } /// <summary> /// Modifies the current set so that it contains only elements that are present either in the current set or in the specified collection, but not both. /// </summary> /// <param name="other">The collection to compare to the current set.</param> public void SymmetricExceptWith(IEnumerable<T> other) { var result = ImmutableHashSet<T>.SymmetricExcept(other, this.Origin); this.Apply(result); } /// <summary> /// Modifies the current set so that it contains all elements that are present in both the current set and in the specified collection. /// </summary> /// <param name="other">The collection to compare to the current set.</param> public void UnionWith(IEnumerable<T> other) { var result = ImmutableHashSet<T>.Union(other, this.Origin); this.Apply(result); } #endregion #region ICollection<T> Members /// <summary> /// Adds an item to the <see cref="ICollection{T}"/>. /// </summary> /// <param name="item">The object to add to the <see cref="ICollection{T}"/>.</param> /// <exception cref="NotSupportedException">The <see cref="ICollection{T}"/> is read-only.</exception> void ICollection<T>.Add(T item) { this.Add(item); } /// <summary> /// See the <see cref="ICollection{T}"/> interface. /// </summary> void ICollection<T>.CopyTo(T[] array, int arrayIndex) { Requires.NotNull(array, "array"); Requires.Range(arrayIndex >= 0, "arrayIndex"); Requires.Range(array.Length >= arrayIndex + this.Count, "arrayIndex"); foreach (T item in this) { array[arrayIndex++] = item; } } #endregion #region IEnumerable<T> Members /// <summary> /// Returns an enumerator that iterates through the collection. /// </summary> /// <returns> /// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection. /// </returns> IEnumerator<T> IEnumerable<T>.GetEnumerator() { return this.GetEnumerator(); } /// <summary> /// Returns an enumerator that iterates through a collection. /// </summary> /// <returns> /// An <see cref="IEnumerator"/> object that can be used to iterate through the collection. /// </returns> IEnumerator IEnumerable.GetEnumerator() { return this.GetEnumerator(); } #endregion /// <summary> /// Applies the result of some mutation operation to this instance. /// </summary> /// <param name="result">The result.</param> private void Apply(MutationResult result) { this.Root = result.Root; if (result.CountType == CountType.Adjustment) { _count += result.Count; } else { _count = result.Count; } } } } }
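// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the original file): the intended Builder
// workflow — batch several mutations without intermediate set allocations,
// then freeze the result with ToImmutable(), which is cheap when few changes
// have been made since the last call.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Immutable;

internal static class HashSetBuilderUsageSketch
{
    private static void Demo()
    {
        ImmutableHashSet<string>.Builder builder =
            ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase);

        builder.Add("alpha");
        builder.Add("ALPHA"); // no-op: equal to "alpha" under the ordinal-ignore-case comparer
        builder.UnionWith(new[] { "beta", "gamma" });
        builder.Remove("beta");

        ImmutableHashSet<string> set = builder.ToImmutable();
        Console.WriteLine(set.Count); // 2 ("alpha" and "gamma")
    }
}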
using System; using System.Collections.Generic; using System.Threading.Tasks; namespace Bridge { [Bridge.Convention(Member = Bridge.ConventionMember.Field | Bridge.ConventionMember.Method, Notation = Bridge.Notation.CamelCase)] [External] [Name("Bridge")] public static class Script { public static extern object Apply(object obj, object values); public static extern T Apply<T>(T obj, object values); public static extern bool IsDefined(object obj); public static extern bool IsArray(object obj); public static extern T[] ToArray<T>(IEnumerable<T> items); public static extern T Identity<T>(T arg, params object[] args); /// <summary> /// Emit a return statement /// </summary> /// <param name="obj">An object to return.</param> [Template("return {0}")] public static extern void Return(object obj); /// <summary> /// The delete operator removes a property from an object. /// </summary> /// <param name="obj">The name of an object, or an expression evaluating to an object.</param> /// <returns>true for all cases except when the property is an own non-configurable property, in which case, false is returned in non-strict mode.</returns> [Template("delete {0}")] public static extern bool Delete(object obj); /// <summary> /// The delete operator removes a property from an object. /// </summary> /// <param name="obj">The name of an object, or an expression evaluating to an object.</param> /// <param name="prop">The property to delete.</param> /// <returns>true for all cases except when the property is an own non-configurable property, in which case, false is returned in non-strict mode.</returns> [Template("delete {0}[{1}]")] public static extern bool Delete(object obj, string prop); [Template("Bridge.is({0}, {1})")] public static extern bool Is(object type, string typeName); [Template("Bridge.copy({0}, {1}, {2})")] public static extern object Copy(object to, object from, string[] keys); [Template("Bridge.copy({0}, {1}, {2})")] public static extern object Copy(object to, object from, string keys); [Template("Bridge.copy({0}, {1}, {2}, {3})")] public static extern object Copy(object to, object from, string[] keys, bool toIf); [Template("Bridge.copy({0}, {1}, {2}, {3})")] public static extern object Copy(object to, object from, string keys, bool toIf); [Template("Bridge.ns({0}, {1})")] public static extern object NS(string ns, object scope); [Template("Bridge.ns({0})")] public static extern object NS(string ns); [Template("Bridge.getHashCode({0})")] public static extern int GetHashCode(object obj); [Template("Bridge.getDefaultValue({0})")] public static extern T GetDefaultValue<T>(Type type); [Template("Bridge.getDefaultValue({0})")] public static extern object GetDefaultValue(Type type); /// <summary> /// Checks if the specified object is undefined. The object passed in should be a local variable, and not a member of a class (to avoid potential script warnings). /// </summary> /// <param name="obj">The object to test against undefined.</param> /// <returns>true if the object is undefined; false otherwise.</returns> [Template("{0} === undefined")] public static extern bool IsUndefined(object obj); /// <summary> /// Checks if the object has a value. /// </summary> /// <param name="obj">The object to test if there is a value.</param> /// <returns>true if the object has a value; false otherwise.</returns> [Template("Bridge.hasValue({0})")] public static extern bool HasValue(object obj); /// <summary> /// Checks if the specified object is null. 
/// </summary> /// <param name="obj">The object to test against null.</param> /// <returns>true if the object is null; false otherwise.</returns> [Template("{0} === null")] public static extern bool IsNull(object obj); /// <summary> /// Converts an object into a boolean. /// </summary> /// <param name="obj">The object to convert.</param> /// <returns>true if the object is not null, zero, empty string or undefined.</returns> [Template("!!{0}")] public static extern bool Boolean(object obj); /// <summary> /// Generate <c>member in obj</c>. /// </summary> /// <param name="obj">The object to test against.</param> /// <param name="member">The member to check if in the object.</param> /// <returns>true if member in object; false otherwise.</returns> [Template("{member} in {obj}")] public static extern bool In(object obj, string member); /// <summary> /// Invoke a method on an object /// </summary> /// <param name="obj">The object to invoke the method against.</param> /// <param name="name">The method to invoke.</param> /// <param name="args">The arguments passed into the method.</param> /// <returns></returns> [Template("{obj}[{name}]({*args})")] public static extern object InvokeMethod(object obj, string name, params object[] args); /// <summary> /// Inject javascript code /// </summary> /// <typeparam name="T"></typeparam> /// <param name="code"></param> /// <param name="args"></param> /// <returns></returns> [Template] public static extern T Write<T>(string code, params object[] args); /// <summary> /// Inject javascript code /// </summary> /// <param name="code"></param> /// <param name="args"></param> /// <returns></returns> [Template] public static extern void Write(string code, params object[] args); /// <summary> /// The global undefined property represents the value undefined. /// </summary> [Template("undefined")] public static readonly object Undefined; /// <summary> /// The global NaN property is a value representing Not-A-Number. /// </summary> [Template("NaN")] public static readonly object NaN; /// <summary> /// The global Infinity property is a numeric value representing infinity. /// </summary> [Template("Infinity")] public static readonly object Infinity; [Template("debugger")] public static extern void Debugger(); /// <summary> /// The eval() method evaluates JavaScript code represented as a string. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="expression">A string representing a JavaScript expression, statement, or sequence of statements. The expression can include variables and properties of existing objects.</param> /// <returns></returns> [Template("eval({0})")] public static extern T Eval<T>(string expression); /// <summary> /// The eval() method evaluates JavaScript code represented as a string. /// </summary> /// <param name="expression">A string representing a JavaScript expression, statement, or sequence of statements. The expression can include variables and properties of existing objects.</param> /// <returns></returns> [Template("eval({0})")] public static extern void Eval(string expression); /// <summary> /// The global isFinite() function determines whether the passed value is a finite number. If needed, the parameter is first converted to a number. 
/// </summary> /// <param name="testValue">The value to be tested for finiteness.</param> /// <returns></returns> [Template("isFinite({0})")] public static extern bool IsFinite(object testValue); /// <summary> /// Parses a string argument and returns a floating point number corresponding to double .Net type. /// </summary> /// <param name="value">A string that represents the value you want to parse.</param> /// <returns>Parsed floating point number with type corresponding to double .Net type</returns> [Template("parseFloat({0})")] public static extern double ParseFloat(string value); /// <summary> /// The parseInt() function parses a string argument and returns an integer of the specified radix or base. /// </summary> /// <param name="value">The value to parse. If string is not a string, then it is converted to one. Leading whitespace in the string is ignored.</param> /// <returns></returns> [Template("parseInt({0})")] public static extern int ParseInt(string value); /// <summary> /// The parseInt() function parses a string argument and returns an integer of the specified radix or base. /// </summary> /// <param name="value">The value to parse. If string is not a string, then it is converted to one. Leading whitespace in the string is ignored.</param> /// <param name="radix">An integer that represents the radix of the above mentioned string. Always specify this parameter to eliminate reader confusion and to guarantee predictable behavior. Different implementations produce different results when a radix is not specified.</param> /// <returns></returns> [Template("parseInt({0}, {1})")] public static extern int ParseInt(string value, int radix); /// <summary> /// The isNaN() function determines whether a value is NaN or not. Be careful, this function is broken. You may be interested in Number.isNaN() as defined in ECMAScript 6 or you can use typeof to determine if the value is Not-A-Number. /// </summary> /// <param name="testValue">The value to be tested.</param> /// <returns></returns> [Template("isNaN({0})")] public static extern bool IsNaN(object testValue); /// <summary> /// The decodeURI() function decodes a Uniform Resource Identifier (URI) previously created by encodeURI or by a similar routine. /// </summary> /// <param name="encodedURI">A complete, encoded Uniform Resource Identifier.</param> /// <returns></returns> [Template("decodeURI({0})")] public static extern string DecodeURI(string encodedURI); /// <summary> /// The decodeURIComponent() method decodes a Uniform Resource Identifier (URI) component previously created by encodeURIComponent or by a similar routine. /// </summary> /// <param name="encodedURI">An encoded component of a Uniform Resource Identifier.</param> /// <returns></returns> [Template("decodeURIComponent({0})")] public static extern string DecodeURIComponent(string encodedURI); /// <summary> /// The encodeURI() method encodes a Uniform Resource Identifier (URI) by replacing each instance of certain characters by one, two, three, or four escape sequences representing the UTF-8 encoding of the character (will only be four escape sequences for characters composed of two "surrogate" characters). 
/// </summary> /// <param name="uri">A complete Uniform Resource Identifier.</param> /// <returns></returns> [Template("encodeURI({0})")] public static extern string EncodeURI(string uri); /// <summary> /// The encodeURIComponent() method encodes a Uniform Resource Identifier (URI) component by replacing each instance of certain characters by one, two, three, or four escape sequences representing the UTF-8 encoding of the character (will only be four escape sequences for characters composed of two "surrogate" characters). /// </summary> /// <param name="component">A component of a URI.</param> /// <returns></returns> [Template("encodeURIComponent({0})")] public static extern string EncodeURIComponent(string component); [Template("(typeof {0})")] public static extern string TypeOf(object obj); [Template("({obj} instanceof {type})")] public static extern bool InstanceOf(object obj, Type type); [Template("this")] public static extern T This<T>(); [Template("(Bridge.caller[0] || this)")] public static extern T Caller<T>(); [Template("{scope:raw}[{name}] = {value}")] public static extern void Set(object scope, string name, object value); [Template("{name:raw} = {value}")] public static extern void Set(string name, object value); [Template("{name:raw}")] public static extern object Get(string name); [Template("{scope:raw}[{name}]")] public static extern object Get(object scope, string name); [Template("{name:raw}")] public static extern T Get<T>(string name); [Template("{scope:raw}[{name}]")] public static extern T Get<T>(object scope, string name); [Template("{name:raw}({args})")] public static extern void Call(string name, params object[] args); [Template("{name:raw}()")] public static extern void Call(string name); [Template("{name:raw}({args})")] public static extern T Call<T>(string name, params object[] args); [Template("{name:raw}()")] public static extern T Call<T>(string name); [GlobalTarget("Bridge.global")] public new static extern dynamic ToDynamic(); [Template("({a} === {b})")] public static extern bool StrictEquals(object a, object b); [Template("{init}({t})")] public static extern T CallFor<T>(T t, Func<T, T> init); [Template("{init}({t})")] public static extern Task<T> AsyncCallFor<T>(T t, Func<T, Task<T>> init); [Template("({name:tmp} = {t})")] [Unbox(false)] public static extern T ToTemp<T>(string name, T t); [Template("{name:gettmp}")] [Unbox(false)] public static extern T FromTemp<T>(string name); [Template("{name:gettmp}")] [Unbox(false)] public static extern T FromTemp<T>(string name, T t); [Template("{action:body}")] public static extern object FromLambda(Action action); [Template("{o:plain}")] public static extern T ToPlainObject<T>(T o); [Template("{o:plain}")] public static extern T ToObjectLiteral<T>(T o); /// <summary> /// Runs the function in a try/catch statement /// </summary> /// <param name="fn">Function to run</param> /// <returns>Return either function result or false in case of catch</returns> [Template("Bridge.safe({fn})")] public static extern bool SafeFunc(Func<bool> fn); [Template("Bridge.isNode")] public static readonly bool IsNode; [Template("Bridge.Deconstruct({obj}, {t1})")] public extern static void Deconstruct<T1>(object obj, out T1 t1); [Template("Bridge.Deconstruct({obj}, {t1}, {t2})")] public extern static void Deconstruct<T1, T2>(object obj, out T1 t1, out T2 t2); [Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3})")] public extern static void Deconstruct<T1, T2, T3>(object obj, out T1 t1, out T2 t2, out T3 t3); 
[Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3}, {t4})")] public extern static void Deconstruct<T1, T2, T3, T4>(object obj, out T1 t1, out T2 t2, out T3 t3, out T4 t4); [Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3}, {t4}, {t5})")] public extern static void Deconstruct<T1, T2, T3, T4, T5>(object obj, out T1 t1, out T2 t2, out T3 t3, out T4 t4, out T5 t5); [Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3}, {t4}, {t5}, {t6})")] public extern static void Deconstruct<T1, T2, T3, T4, T5, T6>(object obj, out T1 t1, out T2 t2, out T3 t3, out T4 t4, out T5 t5, out T6 t6); [Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3}, {t4}, {t5}, {t6}, {t7})")] public extern static void Deconstruct<T1, T2, T3, T4, T5, T6, T7>(object obj, out T1 t1, out T2 t2, out T3 t3, out T4 t4, out T5 t5, out T6 t6, out T7 t7); [Template("Bridge.Deconstruct({obj}, {t1}, {t2}, {t3}, {t4}, {t5}, {t6}, {t7}, {rest})")] public extern static void Deconstruct<T1, T2, T3, T4, T5, T6, T7, TRest>(object obj, out T1 t1, out T2 t2, out T3 t3, out T4 t4, out T5 t5, out T6 t6, out T7 t7, out TRest rest); [Name("_")] [Unbox(false)] public static object Discard; } }
// ------------------------------------- // Domain : IBT / Realtime.co // Author : Nicholas Ventimiglia // Product : Messaging and Storage // Published : 2014 // ------------------------------------- using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Text; namespace Realtime.LITJson { internal enum Condition { InArray, InObject, NotAProperty, Property, Value } internal class WriterContext { public int Count; public bool InArray; public bool InObject; public bool ExpectingValue; public int Padding; } public class JsonWriter { #region Fields private static NumberFormatInfo number_format; private WriterContext context; private Stack<WriterContext> ctx_stack; private bool has_reached_end; private char[] hex_seq; private int indentation; private int indent_value; private StringBuilder inst_string_builder; private bool pretty_print; private bool validate; private TextWriter writer; #endregion #region Properties public int IndentValue { get { return indent_value; } set { indentation = (indentation / indent_value) * value; indent_value = value; } } public bool PrettyPrint { get { return pretty_print; } set { pretty_print = value; } } public TextWriter TextWriter { get { return writer; } } public bool Validate { get { return validate; } set { validate = value; } } #endregion #region Constructors static JsonWriter () { number_format = NumberFormatInfo.InvariantInfo; } public JsonWriter () { inst_string_builder = new StringBuilder (); writer = new StringWriter (inst_string_builder); Init (); } public JsonWriter (StringBuilder sb) : this (new StringWriter (sb)) { } public JsonWriter (TextWriter writer) { if (writer == null) throw new ArgumentNullException ("writer"); this.writer = writer; Init (); } #endregion #region Private Methods private void DoValidation (Condition cond) { if (! context.ExpectingValue) context.Count++; if (! validate) return; if (has_reached_end) throw new JsonException ( "A complete JSON symbol has already been written"); switch (cond) { case Condition.InArray: if (! context.InArray) throw new JsonException ( "Can't close an array here"); break; case Condition.InObject: if (! context.InObject || context.ExpectingValue) throw new JsonException ( "Can't close an object here"); break; case Condition.NotAProperty: if (context.InObject && ! context.ExpectingValue) throw new JsonException ( "Expected a property"); break; case Condition.Property: if (! context.InObject || context.ExpectingValue) throw new JsonException ( "Can't add a property here"); break; } } private void Init () { has_reached_end = false; hex_seq = new char[4]; indentation = 0; indent_value = 4; pretty_print = false; validate = true; ctx_stack = new Stack<WriterContext> (); context = new WriterContext (); ctx_stack.Push (context); } private static void IntToHex (int n, char[] hex) { int num; for (int i = 0; i < 4; i++) { num = n % 16; if (num < 10) hex[3 - i] = (char) ('0' + num); else hex[3 - i] = (char) ('A' + (num - 10)); n >>= 4; } } private void Indent () { if (pretty_print) indentation += indent_value; } private void Put (string str) { if (pretty_print && ! context.ExpectingValue) for (int i = 0; i < indentation; i++) writer.Write (' '); writer.Write (str); } private void PutNewline () { PutNewline (true); } private void PutNewline (bool add_comma) { if (add_comma && ! context.ExpectingValue && context.Count > 1) writer.Write (','); if (pretty_print && ! 
context.ExpectingValue) writer.Write ('\n'); } private void PutString (string str) { Put (String.Empty); writer.Write ('"'); int n = str.Length; for (int i = 0; i < n; i++) { switch (str[i]) { case '\n': writer.Write ("\\n"); continue; case '\r': writer.Write ("\\r"); continue; case '\t': writer.Write ("\\t"); continue; case '"': case '\\': writer.Write ('\\'); writer.Write (str[i]); continue; case '\f': writer.Write ("\\f"); continue; case '\b': writer.Write ("\\b"); continue; } if ((int) str[i] >= 32 && (int) str[i] <= 126) { writer.Write (str[i]); continue; } // Default, turn into a \uXXXX sequence IntToHex ((int) str[i], hex_seq); writer.Write ("\\u"); writer.Write (hex_seq); } writer.Write ('"'); } private void Unindent () { if (pretty_print) indentation -= indent_value; } #endregion public override string ToString () { if (inst_string_builder == null) return String.Empty; return inst_string_builder.ToString (); } public void Reset () { has_reached_end = false; ctx_stack.Clear (); context = new WriterContext (); ctx_stack.Push (context); if (inst_string_builder != null) inst_string_builder.Remove (0, inst_string_builder.Length); } public void Write (bool boolean) { DoValidation (Condition.Value); PutNewline (); Put (boolean ? "true" : "false"); context.ExpectingValue = false; } public void Write (decimal number) { DoValidation (Condition.Value); PutNewline (); Put (Convert.ToString (number, number_format)); context.ExpectingValue = false; } public void Write (double number) { DoValidation (Condition.Value); PutNewline (); string str = Convert.ToString (number, number_format); Put (str); if (str.IndexOf ('.') == -1 && str.IndexOf ('E') == -1) writer.Write (".0"); context.ExpectingValue = false; } public void Write (int number) { DoValidation (Condition.Value); PutNewline (); Put (Convert.ToString (number, number_format)); context.ExpectingValue = false; } public void Write (long number) { DoValidation (Condition.Value); PutNewline (); Put (Convert.ToString (number, number_format)); context.ExpectingValue = false; } public void Write (string str) { DoValidation (Condition.Value); PutNewline (); if (str == null) Put ("null"); else PutString (str); context.ExpectingValue = false; } //[CLSCompliant(false)] public void Write (ulong number) { DoValidation (Condition.Value); PutNewline (); Put (Convert.ToString (number, number_format)); context.ExpectingValue = false; } public void WriteArrayEnd () { DoValidation (Condition.InArray); PutNewline (false); ctx_stack.Pop (); if (ctx_stack.Count == 1) has_reached_end = true; else { context = ctx_stack.Peek (); context.ExpectingValue = false; } Unindent (); Put ("]"); } public void WriteArrayStart () { DoValidation (Condition.NotAProperty); PutNewline (); Put ("["); context = new WriterContext (); context.InArray = true; ctx_stack.Push (context); Indent (); } public void WriteObjectEnd () { DoValidation (Condition.InObject); PutNewline (false); ctx_stack.Pop (); if (ctx_stack.Count == 1) has_reached_end = true; else { context = ctx_stack.Peek (); context.ExpectingValue = false; } Unindent (); Put ("}"); } public void WriteObjectStart () { DoValidation (Condition.NotAProperty); PutNewline (); Put ("{"); context = new WriterContext (); context.InObject = true; ctx_stack.Push (context); Indent (); } public void WritePropertyName (string property_name) { DoValidation (Condition.Property); PutNewline (); PutString (property_name); if (pretty_print) { if (property_name.Length > context.Padding) context.Padding = property_name.Length; for (int i = 
context.Padding - property_name.Length; i >= 0; i--) writer.Write (' '); writer.Write (": "); } else writer.Write (':'); context.ExpectingValue = true; } } }
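For reference, a short usage sketch of the JsonWriter above (not part of the original file): it builds a small JSON object with pretty-printing enabled and reads the result back from the writer's internal buffer. The sample property names and values are illustrative.

using System;
using Realtime.LITJson;

public static class JsonWriterExample
{
    public static void Main()
    {
        // The parameterless constructor buffers output in an internal StringBuilder,
        // so ToString() returns the generated JSON text.
        var writer = new JsonWriter { PrettyPrint = true };

        writer.WriteObjectStart();
        writer.WritePropertyName("name");
        writer.Write("sensor-1");
        writer.WritePropertyName("enabled");
        writer.Write(true);
        writer.WritePropertyName("readings");
        writer.WriteArrayStart();
        writer.Write(1.5);
        writer.Write(2.25);
        writer.WriteArrayEnd();
        writer.WriteObjectEnd();

        Console.WriteLine(writer.ToString());
    }
}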
// This file was created automatically, do not modify the contents of this file. // ReSharper disable InvalidXmlDocComment // ReSharper disable InconsistentNaming // ReSharper disable CheckNamespace // ReSharper disable MemberCanBePrivate.Global using System; using System.Runtime.InteropServices; // Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\Camera\CameraModifier.h:22 namespace UnrealEngine { [ManageType("ManageCameraModifier")] public partial class ManageCameraModifier : UCameraModifier, IManageWrapper { public ManageCameraModifier(IntPtr adress) : base(adress) { } #region DLLInmport [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_DisableModifier(IntPtr self, bool bImmediate); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_EnableModifier(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_ToggleModifier(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_UpdateAlpha(IntPtr self, float deltaTime); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_BeginDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_FinishDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_MarkAsEditorOnlySubobject(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostCDOContruct(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostEditImport(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostInitProperties(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostLoad(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostNetReceive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostRepNotifies(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PostSaveRoot(IntPtr self, bool bCleanupIsRequired); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PreDestroyFromReplication(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_PreNetReceive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern 
void E__Supper__UCameraModifier_ShutdownAfterError(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_CreateCluster(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UCameraModifier_OnClusterMarkedAsPendingKill(IntPtr self); #endregion #region Methods /// <summary> /// Disables this modifier. /// </summary> /// <param name="bImmediate">true to disable with no blend out, false (default) to allow blend out</param> public override void DisableModifier(bool bImmediate) => E__Supper__UCameraModifier_DisableModifier(this, bImmediate); /// <summary> /// Enables this modifier. /// </summary> public override void EnableModifier() => E__Supper__UCameraModifier_EnableModifier(this); /// <summary> /// Toggles the disabled/enabled state of this modifier. /// </summary> public override void ToggleModifier() => E__Supper__UCameraModifier_ToggleModifier(this); /// <summary> /// Responsible for updating alpha blend value. /// </summary> /// <param name="deltaTime">Amount of time since last update</param> public override void UpdateAlpha(float deltaTime) => E__Supper__UCameraModifier_UpdateAlpha(this, deltaTime); /// <summary> /// Called before destroying the object. This is called immediately upon deciding to destroy the object, to allow the object to begin an /// <para>asynchronous cleanup process. </para> /// </summary> public override void BeginDestroy() => E__Supper__UCameraModifier_BeginDestroy(this); /// <summary> /// Called to finish destroying the object. After UObject::FinishDestroy is called, the object's memory should no longer be accessed. /// <para>@warning Because properties are destroyed here, Super::FinishDestroy() should always be called at the end of your child class's FinishDestroy() method, rather than at the beginning. </para> /// </summary> public override void FinishDestroy() => E__Supper__UCameraModifier_FinishDestroy(this); /// <summary> /// Called during subobject creation to mark this component as editor only, which causes it to get stripped in packaged builds /// </summary> public override void MarkAsEditorOnlySubobject() => E__Supper__UCameraModifier_MarkAsEditorOnlySubobject(this); /// <summary> /// Called after the C++ constructor has run on the CDO for a class. This is an obscure routine used to deal with the recursion /// <para>in the construction of the default materials </para> /// </summary> public override void PostCDOContruct() => E__Supper__UCameraModifier_PostCDOContruct(this); /// <summary> /// Called after importing property values for this object (paste, duplicate or .t3d import) /// <para>Allow the object to perform any cleanup for properties which shouldn't be duplicated or </para> /// are unsupported by the script serialization /// </summary> public override void PostEditImport() => E__Supper__UCameraModifier_PostEditImport(this); /// <summary> /// Called after the C++ constructor and after the properties have been initialized, including those loaded from config. /// <para>This is called before any serialization or other setup has happened. </para> /// </summary> public override void PostInitProperties() => E__Supper__UCameraModifier_PostInitProperties(this); /// <summary> /// Do any object-specific cleanup required immediately after loading an object.
/// <para>This is not called for newly-created objects, and by default will always execute on the game thread. </para> /// </summary> public override void PostLoad() => E__Supper__UCameraModifier_PostLoad(this); /// <summary> /// Called right after receiving a bunch /// </summary> public override void PostNetReceive() => E__Supper__UCameraModifier_PostNetReceive(this); /// <summary> /// Called right after calling all OnRep notifies (called even when there are no notifies) /// </summary> public override void PostRepNotifies() => E__Supper__UCameraModifier_PostRepNotifies(this); /// <summary> /// Called from within SavePackage on the passed in base/root object. /// <para>This function is called after the package has been saved and can perform cleanup. </para> /// </summary> /// <param name="bCleanupIsRequired">Whether PreSaveRoot dirtied state that needs to be cleaned up</param> public override void PostSaveRoot(bool bCleanupIsRequired) => E__Supper__UCameraModifier_PostSaveRoot(this, bCleanupIsRequired); /// <summary> /// Called right before being marked for destruction due to network replication /// </summary> public override void PreDestroyFromReplication() => E__Supper__UCameraModifier_PreDestroyFromReplication(this); /// <summary> /// Called right before receiving a bunch /// </summary> public override void PreNetReceive() => E__Supper__UCameraModifier_PreNetReceive(this); /// <summary> /// After a critical error, perform any mission-critical cleanup, such as restoring the video mode or releasing hardware resources. /// </summary> public override void ShutdownAfterError() => E__Supper__UCameraModifier_ShutdownAfterError(this); /// <summary> /// Called after PostLoad to create UObject cluster /// </summary> public override void CreateCluster() => E__Supper__UCameraModifier_CreateCluster(this); /// <summary> /// Called during Garbage Collection to perform additional cleanup when the cluster is about to be destroyed due to PendingKill flag being set on it. /// </summary> public override void OnClusterMarkedAsPendingKill() => E__Supper__UCameraModifier_OnClusterMarkedAsPendingKill(this); #endregion public static implicit operator IntPtr(ManageCameraModifier self) { return self?.NativePointer ?? IntPtr.Zero; } public static implicit operator ManageCameraModifier(ObjectPointerDescription PtrDesc) { return NativeManager.GetWrapper<ManageCameraModifier>(PtrDesc); } } }
namespace iControl { using System.Xml.Serialization; using System.Web.Services; using System.ComponentModel; using System.Web.Services.Protocols; using System; using System.Diagnostics; /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] [System.Web.Services.WebServiceBindingAttribute(Name="Networking.BWPriorityGroupBinding", Namespace="urn:iControl")] public partial class NetworkingBWPriorityGroup : iControlInterface { public NetworkingBWPriorityGroup() { this.Url = "https://url_to_service"; } //======================================================================= // Operations //======================================================================= //----------------------------------------------------------------------- // add_priority_class //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void add_priority_class( string [] groups, string [] [] classes, long [] [] percentages ) { this.Invoke("add_priority_class", new object [] { groups, classes, percentages}); } public System.IAsyncResult Beginadd_priority_class(string [] groups,string [] [] classes,long [] [] percentages, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("add_priority_class", new object[] { groups, classes, percentages}, callback, asyncState); } public void Endadd_priority_class(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // create //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void create( string [] groups, CommonEnabledState [] states ) { this.Invoke("create", new object [] { groups, states}); } public System.IAsyncResult Begincreate(string [] groups,CommonEnabledState [] states, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("create", new object[] { groups, states}, callback, asyncState); } public void Endcreate(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // delete_all_priority_groups //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void delete_all_priority_groups( ) { this.Invoke("delete_all_priority_groups", new object [0]); } public System.IAsyncResult Begindelete_all_priority_groups(System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("delete_all_priority_groups", new object[0], callback, asyncState); } public void Enddelete_all_priority_groups(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // 
delete_priority_group //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void delete_priority_group( string [] groups ) { this.Invoke("delete_priority_group", new object [] { groups}); } public System.IAsyncResult Begindelete_priority_group(string [] groups, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("delete_priority_group", new object[] { groups}, callback, asyncState); } public void Enddelete_priority_group(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // get_description //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public string [] get_description( string [] groups ) { object [] results = this.Invoke("get_description", new object [] { groups}); return ((string [])(results[0])); } public System.IAsyncResult Beginget_description(string [] groups, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_description", new object[] { groups}, callback, asyncState); } public string [] Endget_description(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((string [])(results[0])); } //----------------------------------------------------------------------- // get_list //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public string [] get_list( ) { object [] results = this.Invoke("get_list", new object [0]); return ((string [])(results[0])); } public System.IAsyncResult Beginget_list(System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_list", new object[0], callback, asyncState); } public string [] Endget_list(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((string [])(results[0])); } //----------------------------------------------------------------------- // get_priority_class //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public string [] [] get_priority_class( string [] groups ) { object [] results = this.Invoke("get_priority_class", new object [] { groups}); return ((string [] [])(results[0])); } public System.IAsyncResult Beginget_priority_class(string [] groups, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_priority_class", new object[] { groups}, callback, asyncState); } public string [] [] 
Endget_priority_class(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((string [] [])(results[0])); } //----------------------------------------------------------------------- // get_priority_class_description //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public string [] [] get_priority_class_description( string [] groups, string [] [] classes ) { object [] results = this.Invoke("get_priority_class_description", new object [] { groups, classes}); return ((string [] [])(results[0])); } public System.IAsyncResult Beginget_priority_class_description(string [] groups,string [] [] classes, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_priority_class_description", new object[] { groups, classes}, callback, asyncState); } public string [] [] Endget_priority_class_description(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((string [] [])(results[0])); } //----------------------------------------------------------------------- // get_priority_class_weight_percentage //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public long [] [] get_priority_class_weight_percentage( string [] groups, string [] [] classes ) { object [] results = this.Invoke("get_priority_class_weight_percentage", new object [] { groups, classes}); return ((long [] [])(results[0])); } public System.IAsyncResult Beginget_priority_class_weight_percentage(string [] groups,string [] [] classes, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_priority_class_weight_percentage", new object[] { groups, classes}, callback, asyncState); } public long [] [] Endget_priority_class_weight_percentage(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((long [] [])(results[0])); } //----------------------------------------------------------------------- // get_version //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] [return: System.Xml.Serialization.SoapElementAttribute("return")] public string get_version( ) { object [] results = this.Invoke("get_version", new object [] { }); return ((string)(results[0])); } public System.IAsyncResult Beginget_version(System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("get_version", new object[] { }, callback, asyncState); } public string Endget_version(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); return ((string)(results[0])); } //----------------------------------------------------------------------- // remove_all_priority_classes //----------------------------------------------------------------------- 
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void remove_all_priority_classes( string [] groups ) { this.Invoke("remove_all_priority_classes", new object [] { groups}); } public System.IAsyncResult Beginremove_all_priority_classes(string [] groups, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("remove_all_priority_classes", new object[] { groups}, callback, asyncState); } public void Endremove_all_priority_classes(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // remove_priority_class //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void remove_priority_class( string [] groups, string [] [] classes ) { this.Invoke("remove_priority_class", new object [] { groups, classes}); } public System.IAsyncResult Beginremove_priority_class(string [] groups,string [] [] classes, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("remove_priority_class", new object[] { groups, classes}, callback, asyncState); } public void Endremove_priority_class(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // set_description //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void set_description( string [] groups, string [] descriptions ) { this.Invoke("set_description", new object [] { groups, descriptions}); } public System.IAsyncResult Beginset_description(string [] groups,string [] descriptions, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("set_description", new object[] { groups, descriptions}, callback, asyncState); } public void Endset_description(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // set_priority_class_description //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void set_priority_class_description( string [] groups, string [] [] classes, string [] [] descriptions ) { this.Invoke("set_priority_class_description", new object [] { groups, classes, descriptions}); } public System.IAsyncResult Beginset_priority_class_description(string [] groups,string [] [] classes,string [] [] descriptions, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("set_priority_class_description", new object[] { groups, classes, descriptions}, callback, asyncState); } public void Endset_priority_class_description(System.IAsyncResult asyncResult) { object [] results = 
this.EndInvoke(asyncResult); } //----------------------------------------------------------------------- // set_priority_class_weight_percentage //----------------------------------------------------------------------- [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/BWPriorityGroup", RequestNamespace="urn:iControl:Networking/BWPriorityGroup", ResponseNamespace="urn:iControl:Networking/BWPriorityGroup")] public void set_priority_class_weight_percentage( string [] groups, string [] [] classes, long [] [] percentages ) { this.Invoke("set_priority_class_weight_percentage", new object [] { groups, classes, percentages}); } public System.IAsyncResult Beginset_priority_class_weight_percentage(string [] groups,string [] [] classes,long [] [] percentages, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("set_priority_class_weight_percentage", new object[] { groups, classes, percentages}, callback, asyncState); } public void Endset_priority_class_weight_percentage(System.IAsyncResult asyncResult) { object [] results = this.EndInvoke(asyncResult); } } //======================================================================= // Enums //======================================================================= //======================================================================= // Structs //======================================================================= }
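The generated proxy above exposes each iControl operation as a synchronous method plus a Begin/End asynchronous pair. A hypothetical usage sketch follows (not part of the generated file); the endpoint URL, the group and class names, and the CommonEnabledState.STATE_ENABLED value are assumptions, and authentication setup on the iControlInterface base class is omitted because that type is not shown here.

using System;
using iControl;

public class BwPriorityGroupExample
{
    public static void Main()
    {
        var bw = new NetworkingBWPriorityGroup();

        // Assumed management endpoint; the generated constructor only sets a placeholder URL.
        bw.Url = "https://bigip.example.com/iControl/iControlPortal.cgi";
        // Credentials/session setup would come from the iControlInterface base class (not shown here).

        // Create one bandwidth priority group and attach a weighted priority class to it.
        // STATE_ENABLED is assumed from the iControl Common::EnabledState enumeration defined elsewhere.
        bw.create(new[] { "pg_example" }, new[] { CommonEnabledState.STATE_ENABLED });
        bw.add_priority_class(
            new[] { "pg_example" },
            new[] { new[] { "class_gold" } },
            new[] { new long[] { 60 } });

        // List all configured priority groups.
        foreach (string group in bw.get_list())
        {
            Console.WriteLine(group);
        }
    }
}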
using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; namespace Greatbone { public static class DataUtility { /// <summary> /// Used in both client and server to parse received content into model. /// </summary> public static ISource ParseContent(string ctyp, byte[] buffer, int length, Type typ = null) { if (string.IsNullOrEmpty(ctyp)) return null; if (ctyp.StartsWith("application/x-www-form-urlencoded")) { return new FormParser(buffer, length).Parse(); } if (ctyp.StartsWith("multipart/form-data; boundary=")) { return new FormMpParser(buffer, length, ctyp.Substring(30)).Parse(); } if (ctyp.StartsWith("application/json")) { return new JsonParser(buffer, length).Parse(); } if (ctyp.StartsWith("application/xml")) { return new XmlParser(buffer, length).Parse(); } if (ctyp.StartsWith("text/")) { if (typ == typeof(JObj) || typ == typeof(JArr)) { return new JsonParser(buffer, length).Parse(); } else if (typ == typeof(XElem)) { return new XmlParser(buffer, length).Parse(); } else { Text text = new Text(); for (int i = 0; i < length; i++) { text.Accept(buffer[i]); } return text; } } return null; } public static M StringTo<M>(string v) where M : class, ISource { Type t = typeof(M); if (t == typeof(JArr) || t == typeof(JObj)) { return new JsonParser(v).Parse() as M; } else if (t == typeof(XElem)) { return new XmlParser(v).Parse() as M; } else if (t == typeof(Form)) { return new FormParser(v).Parse() as M; } return null; } public static D StringToObject<D>(string v, byte proj = 0x0f) where D : IData, new() { JObj jo = (JObj) new JsonParser(v).Parse(); return jo.ToObject<D>(proj); } public static D[] StringToArray<D>(string v, byte proj = 0x0f) where D : IData, new() { JArr ja = (JArr) new JsonParser(v).Parse(); return ja.ToArray<D>(proj); } public static string ToString<D>(D v, byte proj = 0x0f) where D : IData { JsonContent cnt = new JsonContent(false, 4 * 1024); try { cnt.Put(null, v, proj); return cnt.ToString(); } finally { BufferUtility.Return(cnt); // return buffer to pool } } public static string ToString<D>(D[] v, byte proj = 0x0f) where D : IData { JsonContent cnt = new JsonContent(false, 4 * 1024); try { cnt.Put(null, v, proj); return cnt.ToString(); } finally { BufferUtility.Return(cnt); // return buffer to pool } } public static string ToString<D>(List<D> v, byte proj = 0x0f) where D : IData { JsonContent cnt = new JsonContent(false, 4 * 1024); try { cnt.Put(null, v, proj); return cnt.ToString(); } finally { BufferUtility.Return(cnt); // return buffer to pool } } public static T FileTo<T>(string file) where T : class, ISource { try { byte[] bytes = File.ReadAllBytes(file); Type t = typeof(T); if (t == typeof(JArr) || t == typeof(JObj)) { return new JsonParser(bytes, bytes.Length).Parse() as T; } else if (t == typeof(XElem)) { return new XmlParser(bytes, bytes.Length).Parse() as T; } else if (t == typeof(Form)) { return new FormParser(bytes, bytes.Length).Parse() as T; } } catch (Exception ex) { Debug.WriteLine(ex.Message); } return null; } public static D FileToObject<D>(string file, byte proj = 0x0f) where D : IData, new() { try { byte[] bytes = File.ReadAllBytes(file); JObj jo = (JObj) new JsonParser(bytes, bytes.Length).Parse(); if (jo != null) { return jo.ToObject<D>(proj); } } catch (Exception ex) { Debug.WriteLine(ex.Message); } return default; } public static D[] FileToArray<D>(string file, byte proj = 0x0f) where D : IData, new() { try { var bytes = File.ReadAllBytes(file); var ja = (JArr) new JsonParser(bytes, bytes.Length).Parse(); if (ja != 
null) { return ja.ToArray<D>(proj); } } catch (Exception ex) { Debug.WriteLine(ex.Message); } return null; } public static Map<K, D> FileToMap<K, D>(string file, byte proj = 0x0f, Func<D, K> keyer = null, Predicate<K> toper = null) where D : IData, new() { try { var bytes = File.ReadAllBytes(file); var ja = (JArr) new JsonParser(bytes, bytes.Length).Parse(); if (ja != null) { return ja.ToMap(proj, keyer, toper); } } catch (Exception ex) { Debug.WriteLine(ex.Message); } return null; } } }
// <copyright file="ChromeOptions.cs" company="WebDriver Committers"> // Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IO; using OpenQA.Selenium.Remote; namespace OpenQA.Selenium.Chrome { /// <summary> /// Class to manage options specific to <see cref="ChromeDriver"/> /// </summary> /// <remarks> /// Used with ChromeDriver.exe v17.0.963.0 and higher. /// </remarks> /// <example> /// <code> /// ChromeOptions options = new ChromeOptions(); /// options.AddExtensions("\path\to\extension.crx"); /// options.BinaryLocation = "\path\to\chrome"; /// </code> /// <para></para> /// <para>For use with ChromeDriver:</para> /// <para></para> /// <code> /// ChromeDriver driver = new ChromeDriver(options); /// </code> /// <para></para> /// <para>For use with RemoteWebDriver:</para> /// <para></para> /// <code> /// RemoteWebDriver driver = new RemoteWebDriver(new Uri("http://localhost:4444/wd/hub"), options.ToCapabilities()); /// </code> /// </example> public class ChromeOptions : DriverOptions { /// <summary> /// Gets the name of the capability used to store Chrome options in /// a <see cref="DesiredCapabilities"/> object. 
/// </summary> public static readonly string Capability = "goog:chromeOptions"; private const string BrowserNameValue = "chrome"; private const string ArgumentsChromeOption = "args"; private const string BinaryChromeOption = "binary"; private const string ExtensionsChromeOption = "extensions"; private const string LocalStateChromeOption = "localState"; private const string PreferencesChromeOption = "prefs"; private const string DetachChromeOption = "detach"; private const string DebuggerAddressChromeOption = "debuggerAddress"; private const string ExcludeSwitchesChromeOption = "excludeSwitches"; private const string MinidumpPathChromeOption = "minidumpPath"; private const string MobileEmulationChromeOption = "mobileEmulation"; private const string PerformanceLoggingPreferencesChromeOption = "perfLoggingPrefs"; private const string WindowTypesChromeOption = "windowTypes"; private bool leaveBrowserRunning; private string binaryLocation; private string debuggerAddress; private string minidumpPath; private List<string> arguments = new List<string>(); private List<string> extensionFiles = new List<string>(); private List<string> encodedExtensions = new List<string>(); private List<string> excludedSwitches = new List<string>(); private List<string> windowTypes = new List<string>(); private Dictionary<string, object> additionalCapabilities = new Dictionary<string, object>(); private Dictionary<string, object> additionalChromeOptions = new Dictionary<string, object>(); private Dictionary<string, object> userProfilePreferences; private Dictionary<string, object> localStatePreferences; private string mobileEmulationDeviceName; private ChromeMobileEmulationDeviceSettings mobileEmulationDeviceSettings; private ChromePerformanceLoggingPreferences perfLoggingPreferences; public ChromeOptions() : base() { this.BrowserName = BrowserNameValue; this.AddKnownCapabilityName(ChromeOptions.Capability, "current ChromeOptions class instance"); this.AddKnownCapabilityName(CapabilityType.LoggingPreferences, "SetLoggingPreference method"); this.AddKnownCapabilityName(ChromeOptions.ArgumentsChromeOption, "AddArguments method"); this.AddKnownCapabilityName(ChromeOptions.BinaryChromeOption, "BinaryLocation property"); this.AddKnownCapabilityName(ChromeOptions.ExtensionsChromeOption, "AddExtensions method"); this.AddKnownCapabilityName(ChromeOptions.LocalStateChromeOption, "AddLocalStatePreference method"); this.AddKnownCapabilityName(ChromeOptions.PreferencesChromeOption, "AddUserProfilePreference method"); this.AddKnownCapabilityName(ChromeOptions.DetachChromeOption, "LeaveBrowserRunning property"); this.AddKnownCapabilityName(ChromeOptions.DebuggerAddressChromeOption, "DebuggerAddress property"); this.AddKnownCapabilityName(ChromeOptions.ExcludeSwitchesChromeOption, "AddExcludedArgument property"); this.AddKnownCapabilityName(ChromeOptions.MinidumpPathChromeOption, "MinidumpPath property"); this.AddKnownCapabilityName(ChromeOptions.MobileEmulationChromeOption, "EnableMobileEmulation method"); this.AddKnownCapabilityName(ChromeOptions.PerformanceLoggingPreferencesChromeOption, "PerformanceLoggingPreferences property"); this.AddKnownCapabilityName(ChromeOptions.WindowTypesChromeOption, "AddWindowTypes method"); } /// <summary> /// Gets or sets the location of the Chrome browser's binary executable file. 
/// </summary> public string BinaryLocation { get { return this.binaryLocation; } set { this.binaryLocation = value; } } /// <summary> /// Gets or sets a value indicating whether Chrome should be left running after the /// ChromeDriver instance is exited. Defaults to <see langword="false"/>. /// </summary> public bool LeaveBrowserRunning { get { return this.leaveBrowserRunning; } set { this.leaveBrowserRunning = value; } } /// <summary> /// Gets the list of arguments appended to the Chrome command line as a string array. /// </summary> public ReadOnlyCollection<string> Arguments { get { return this.arguments.AsReadOnly(); } } /// <summary> /// Gets the list of extensions to be installed as an array of base64-encoded strings. /// </summary> public ReadOnlyCollection<string> Extensions { get { List<string> allExtensions = new List<string>(this.encodedExtensions); foreach (string extensionFile in this.extensionFiles) { byte[] extensionByteArray = File.ReadAllBytes(extensionFile); string encodedExtension = Convert.ToBase64String(extensionByteArray); allExtensions.Add(encodedExtension); } return allExtensions.AsReadOnly(); } } /// <summary> /// Gets or sets the address of a Chrome debugger server to connect to. /// Should be of the form "{hostname|IP address}:port". /// </summary> public string DebuggerAddress { get { return this.debuggerAddress; } set { this.debuggerAddress = value; } } /// <summary> /// Gets or sets the directory in which to store minidump files. /// </summary> public string MinidumpPath { get { return this.minidumpPath; } set { this.minidumpPath = value; } } /// <summary> /// Gets or sets the performance logging preferences for the driver. /// </summary> public ChromePerformanceLoggingPreferences PerformanceLoggingPreferences { get { return this.perfLoggingPreferences; } set { this.perfLoggingPreferences = value; } } /// <summary> /// Adds a single argument to the list of arguments to be appended to the Chrome.exe command line. /// </summary> /// <param name="argument">The argument to add.</param> public void AddArgument(string argument) { if (string.IsNullOrEmpty(argument)) { throw new ArgumentException("argument must not be null or empty", "argument"); } this.AddArguments(argument); } /// <summary> /// Adds arguments to be appended to the Chrome.exe command line. /// </summary> /// <param name="argumentsToAdd">An array of arguments to add.</param> public void AddArguments(params string[] argumentsToAdd) { this.AddArguments(new List<string>(argumentsToAdd)); } /// <summary> /// Adds arguments to be appended to the Chrome.exe command line. /// </summary> /// <param name="argumentsToAdd">An <see cref="IEnumerable{T}"/> object of arguments to add.</param> public void AddArguments(IEnumerable<string> argumentsToAdd) { if (argumentsToAdd == null) { throw new ArgumentNullException("argumentsToAdd", "argumentsToAdd must not be null"); } this.arguments.AddRange(argumentsToAdd); } /// <summary> /// Adds a single argument to be excluded from the list of arguments passed by default /// to the Chrome.exe command line by chromedriver.exe. /// </summary> /// <param name="argument">The argument to exclude.</param> public void AddExcludedArgument(string argument) { if (string.IsNullOrEmpty(argument)) { throw new ArgumentException("argument must not be null or empty", "argument"); } this.AddExcludedArguments(argument); } /// <summary> /// Adds arguments to be excluded from the list of arguments passed by default /// to the Chrome.exe command line by chromedriver.exe. 
/// </summary> /// <param name="argumentsToExclude">An array of arguments to exclude.</param> public void AddExcludedArguments(params string[] argumentsToExclude) { this.AddExcludedArguments(new List<string>(argumentsToExclude)); } /// <summary> /// Adds arguments to be excluded from the list of arguments passed by default /// to the Chrome.exe command line by chromedriver.exe. /// </summary> /// <param name="argumentsToExclude">An <see cref="IEnumerable{T}"/> object of arguments to exclude.</param> public void AddExcludedArguments(IEnumerable<string> argumentsToExclude) { if (argumentsToExclude == null) { throw new ArgumentNullException("argumentsToExclude", "argumentsToExclude must not be null"); } this.excludedSwitches.AddRange(argumentsToExclude); } /// <summary> /// Adds a path to a packed Chrome extension (.crx file) to the list of extensions /// to be installed in the instance of Chrome. /// </summary> /// <param name="pathToExtension">The full path to the extension to add.</param> public void AddExtension(string pathToExtension) { if (string.IsNullOrEmpty(pathToExtension)) { throw new ArgumentException("pathToExtension must not be null or empty", "pathToExtension"); } this.AddExtensions(pathToExtension); } /// <summary> /// Adds a list of paths to packed Chrome extensions (.crx files) to be installed /// in the instance of Chrome. /// </summary> /// <param name="extensions">An array of full paths to the extensions to add.</param> public void AddExtensions(params string[] extensions) { this.AddExtensions(new List<string>(extensions)); } /// <summary> /// Adds a list of paths to packed Chrome extensions (.crx files) to be installed /// in the instance of Chrome. /// </summary> /// <param name="extensions">An <see cref="IEnumerable{T}"/> of full paths to the extensions to add.</param> public void AddExtensions(IEnumerable<string> extensions) { if (extensions == null) { throw new ArgumentNullException("extensions", "extensions must not be null"); } foreach (string extension in extensions) { if (!File.Exists(extension)) { throw new FileNotFoundException("No extension found at the specified path", extension); } this.extensionFiles.Add(extension); } } /// <summary> /// Adds a base64-encoded string representing a Chrome extension to the list of extensions /// to be installed in the instance of Chrome. /// </summary> /// <param name="extension">A base64-encoded string representing the extension to add.</param> public void AddEncodedExtension(string extension) { if (string.IsNullOrEmpty(extension)) { throw new ArgumentException("extension must not be null or empty", "extension"); } this.AddExtensions(extension); } /// <summary> /// Adds a list of base64-encoded strings representing Chrome extensions to the list of extensions /// to be installed in the instance of Chrome. /// </summary> /// <param name="extensions">An array of base64-encoded strings representing the extensions to add.</param> public void AddEncodedExtensions(params string[] extensions) { this.AddEncodedExtensions(new List<string>(extensions)); } /// <summary> /// Adds a list of base64-encoded strings representing Chrome extensions to be installed /// in the instance of Chrome. 
/// </summary> /// <param name="extensions">An <see cref="IEnumerable{T}"/> of base64-encoded strings /// representing the extensions to add.</param> public void AddEncodedExtensions(IEnumerable<string> extensions) { if (extensions == null) { throw new ArgumentNullException("extensions", "extensions must not be null"); } foreach (string extension in extensions) { // Run the extension through the base64 converter to test that the // string is not malformed. try { Convert.FromBase64String(extension); } catch (FormatException ex) { throw new WebDriverException("Could not properly decode the base64 string", ex); } this.encodedExtensions.Add(extension); } } /// <summary> /// Adds a preference for the user-specific profile or "user data directory." /// If the specified preference already exists, it will be overwritten. /// </summary> /// <param name="preferenceName">The name of the preference to set.</param> /// <param name="preferenceValue">The value of the preference to set.</param> public void AddUserProfilePreference(string preferenceName, object preferenceValue) { if (this.userProfilePreferences == null) { this.userProfilePreferences = new Dictionary<string, object>(); } this.userProfilePreferences[preferenceName] = preferenceValue; } /// <summary> /// Adds a preference for the local state file in the user's data directory for Chrome. /// If the specified preference already exists, it will be overwritten. /// </summary> /// <param name="preferenceName">The name of the preference to set.</param> /// <param name="preferenceValue">The value of the preference to set.</param> public void AddLocalStatePreference(string preferenceName, object preferenceValue) { if (this.localStatePreferences == null) { this.localStatePreferences = new Dictionary<string, object>(); } this.localStatePreferences[preferenceName] = preferenceValue; } /// <summary> /// Allows the Chrome browser to emulate a mobile device. /// </summary> /// <param name="deviceName">The name of the device to emulate. The device name must be a /// valid device name from the Chrome DevTools Emulation panel.</param> /// <remarks>Specifying an invalid device name will not throw an exeption, but /// will generate an error in Chrome when the driver starts. To unset mobile /// emulation, call this method with <see langword="null"/> as the argument.</remarks> public void EnableMobileEmulation(string deviceName) { this.mobileEmulationDeviceSettings = null; this.mobileEmulationDeviceName = deviceName; } /// <summary> /// Allows the Chrome browser to emulate a mobile device. /// </summary> /// <param name="deviceSettings">The <see cref="ChromeMobileEmulationDeviceSettings"/> /// object containing the settings of the device to emulate.</param> /// <exception cref="ArgumentException">Thrown if the device settings option does /// not have a user agent string set.</exception> /// <remarks>Specifying an invalid device name will not throw an exeption, but /// will generate an error in Chrome when the driver starts. 
To unset mobile /// emulation, call this method with <see langword="null"/> as the argument.</remarks> public void EnableMobileEmulation(ChromeMobileEmulationDeviceSettings deviceSettings) { this.mobileEmulationDeviceName = null; if (deviceSettings != null && string.IsNullOrEmpty(deviceSettings.UserAgent)) { throw new ArgumentException("Device settings must include a user agent string.", "deviceSettings"); } this.mobileEmulationDeviceSettings = deviceSettings; } /// <summary> /// Adds a type of window that will be listed in the list of window handles /// returned by the Chrome driver. /// </summary> /// <param name="windowType">The name of the window type to add.</param> /// <remarks>This method can be used to allow the driver to access {webview} /// elements by adding "webview" as a window type.</remarks> public void AddWindowType(string windowType) { if (string.IsNullOrEmpty(windowType)) { throw new ArgumentException("windowType must not be null or empty", "windowType"); } this.AddWindowTypes(windowType); } /// <summary> /// Adds a list of window types that will be listed in the list of window handles /// returned by the Chrome driver. /// </summary> /// <param name="windowTypesToAdd">An array of window types to add.</param> public void AddWindowTypes(params string[] windowTypesToAdd) { this.AddWindowTypes(new List<string>(windowTypesToAdd)); } /// <summary> /// Adds a list of window types that will be listed in the list of window handles /// returned by the Chrome driver. /// </summary> /// <param name="windowTypesToAdd">An <see cref="IEnumerable{T}"/> of window types to add.</param> public void AddWindowTypes(IEnumerable<string> windowTypesToAdd) { if (windowTypesToAdd == null) { throw new ArgumentNullException("windowTypesToAdd", "windowTypesToAdd must not be null"); } this.windowTypes.AddRange(windowTypesToAdd); } /// <summary> /// Provides a means to add additional capabilities not yet added as type safe options /// for the Chrome driver. /// </summary> /// <param name="capabilityName">The name of the capability to add.</param> /// <param name="capabilityValue">The value of the capability to add.</param> /// <exception cref="ArgumentException"> /// thrown when attempting to add a capability for which there is already a type safe option, or /// when <paramref name="capabilityName"/> is <see langword="null"/> or the empty string. /// </exception> /// <remarks>Calling <see cref="AddAdditionalCapability(string, object)"/> /// where <paramref name="capabilityName"/> has already been added will overwrite the /// existing value with the new value in <paramref name="capabilityValue"/>. /// Also, by default, calling this method adds capabilities to the options object passed to /// chromedriver.exe.</remarks> public override void AddAdditionalCapability(string capabilityName, object capabilityValue) { // Add the capability to the chromeOptions object by default. This is to handle // the 80% case where the chromedriver team adds a new option in chromedriver.exe // and the bindings have not yet had a type safe option added. this.AddAdditionalCapability(capabilityName, capabilityValue, false); } /// <summary> /// Provides a means to add additional capabilities not yet added as type safe options /// for the Chrome driver. 
/// </summary> /// <param name="capabilityName">The name of the capability to add.</param> /// <param name="capabilityValue">The value of the capability to add.</param> /// <param name="isGlobalCapability">Indicates whether the capability is to be set as a global /// capability for the driver instead of a Chrome-specific option.</param> /// <exception cref="ArgumentException"> /// thrown when attempting to add a capability for which there is already a type safe option, or /// when <paramref name="capabilityName"/> is <see langword="null"/> or the empty string. /// </exception> /// <remarks>Calling <see cref="AddAdditionalCapability(string, object, bool)"/> /// where <paramref name="capabilityName"/> has already been added will overwrite the /// existing value with the new value in <paramref name="capabilityValue"/></remarks> public void AddAdditionalCapability(string capabilityName, object capabilityValue, bool isGlobalCapability) { if (this.IsKnownCapabilityName(capabilityName)) { string typeSafeOptionName = this.GetTypeSafeOptionName(capabilityName); string message = string.Format(CultureInfo.InvariantCulture, "There is already an option for the {0} capability. Please use the {1} instead.", capabilityName, typeSafeOptionName); throw new ArgumentException(message, "capabilityName"); } if (string.IsNullOrEmpty(capabilityName)) { throw new ArgumentException("Capability name may not be null an empty string.", "capabilityName"); } if (isGlobalCapability) { this.additionalCapabilities[capabilityName] = capabilityValue; } else { this.additionalChromeOptions[capabilityName] = capabilityValue; } } /// <summary> /// Returns DesiredCapabilities for Chrome with these options included as /// capabilities. This does not copy the options. Further changes will be /// reflected in the returned capabilities. 
/// </summary> /// <returns>The DesiredCapabilities for Chrome with these options.</returns> public override ICapabilities ToCapabilities() { Dictionary<string, object> chromeOptions = this.BuildChromeOptionsDictionary(); DesiredCapabilities capabilities = this.GenerateDesiredCapabilities(false); capabilities.SetCapability(ChromeOptions.Capability, chromeOptions); Dictionary<string, object> loggingPreferences = this.GenerateLoggingPreferencesDictionary(); if (loggingPreferences != null) { capabilities.SetCapability(CapabilityType.LoggingPreferences, loggingPreferences); } foreach (KeyValuePair<string, object> pair in this.additionalCapabilities) { capabilities.SetCapability(pair.Key, pair.Value); } return capabilities; } private Dictionary<string, object> BuildChromeOptionsDictionary() { Dictionary<string, object> chromeOptions = new Dictionary<string, object>(); if (this.Arguments.Count > 0) { chromeOptions[ArgumentsChromeOption] = this.Arguments; } if (!string.IsNullOrEmpty(this.binaryLocation)) { chromeOptions[BinaryChromeOption] = this.binaryLocation; } ReadOnlyCollection<string> extensions = this.Extensions; if (extensions.Count > 0) { chromeOptions[ExtensionsChromeOption] = extensions; } if (this.localStatePreferences != null && this.localStatePreferences.Count > 0) { chromeOptions[LocalStateChromeOption] = this.localStatePreferences; } if (this.userProfilePreferences != null && this.userProfilePreferences.Count > 0) { chromeOptions[PreferencesChromeOption] = this.userProfilePreferences; } if (this.leaveBrowserRunning) { chromeOptions[DetachChromeOption] = this.leaveBrowserRunning; } if (!string.IsNullOrEmpty(this.debuggerAddress)) { chromeOptions[DebuggerAddressChromeOption] = this.debuggerAddress; } if (this.excludedSwitches.Count > 0) { chromeOptions[ExcludeSwitchesChromeOption] = this.excludedSwitches; } if (!string.IsNullOrEmpty(this.minidumpPath)) { chromeOptions[MinidumpPathChromeOption] = this.minidumpPath; } if (!string.IsNullOrEmpty(this.mobileEmulationDeviceName) || this.mobileEmulationDeviceSettings != null) { chromeOptions[MobileEmulationChromeOption] = this.GenerateMobileEmulationSettingsDictionary(); } if (this.perfLoggingPreferences != null) { chromeOptions[PerformanceLoggingPreferencesChromeOption] = this.GeneratePerformanceLoggingPreferencesDictionary(); } if (this.windowTypes.Count > 0) { chromeOptions[WindowTypesChromeOption] = this.windowTypes; } foreach (KeyValuePair<string, object> pair in this.additionalChromeOptions) { chromeOptions.Add(pair.Key, pair.Value); } return chromeOptions; } private Dictionary<string, object> GeneratePerformanceLoggingPreferencesDictionary() { Dictionary<string, object> perfLoggingPrefsDictionary = new Dictionary<string, object>(); perfLoggingPrefsDictionary["enableNetwork"] = this.perfLoggingPreferences.IsCollectingNetworkEvents; perfLoggingPrefsDictionary["enablePage"] = this.perfLoggingPreferences.IsCollectingPageEvents; string tracingCategories = this.perfLoggingPreferences.TracingCategories; if (!string.IsNullOrEmpty(tracingCategories)) { perfLoggingPrefsDictionary["traceCategories"] = tracingCategories; } perfLoggingPrefsDictionary["bufferUsageReportingInterval"] = Convert.ToInt64(this.perfLoggingPreferences.BufferUsageReportingInterval.TotalMilliseconds); return perfLoggingPrefsDictionary; } private Dictionary<string, object> GenerateMobileEmulationSettingsDictionary() { Dictionary<string, object> mobileEmulationSettings = new Dictionary<string, object>(); if (!string.IsNullOrEmpty(this.mobileEmulationDeviceName)) { 
mobileEmulationSettings["deviceName"] = this.mobileEmulationDeviceName; } else if (this.mobileEmulationDeviceSettings != null) { mobileEmulationSettings["userAgent"] = this.mobileEmulationDeviceSettings.UserAgent; Dictionary<string, object> deviceMetrics = new Dictionary<string, object>(); deviceMetrics["width"] = this.mobileEmulationDeviceSettings.Width; deviceMetrics["height"] = this.mobileEmulationDeviceSettings.Height; deviceMetrics["pixelRatio"] = this.mobileEmulationDeviceSettings.PixelRatio; if (!this.mobileEmulationDeviceSettings.EnableTouchEvents) { deviceMetrics["touch"] = this.mobileEmulationDeviceSettings.EnableTouchEvents; } mobileEmulationSettings["deviceMetrics"] = deviceMetrics; } return mobileEmulationSettings; } } }
using System; using System.IO; using System.Linq; using System.Threading.Tasks; using Baseline; using Marten.Exceptions; using Marten.Schema; using Weasel.Postgresql; namespace Marten.Storage { internal class TenantSchema: IDocumentSchema { private readonly StorageFeatures _features; private readonly Tenant _tenant; public TenantSchema(StoreOptions options, Tenant tenant) { _features = options.Storage; _tenant = tenant; StoreOptions = options; DdlRules = options.Advanced.DdlRules; } public StoreOptions StoreOptions { get; } public DdlRules DdlRules { get; } public void WriteDatabaseCreationScriptFile(string filename) { var sql = ToDatabaseScript(); new FileSystem().WriteStringToFile(filename, sql); } public void WriteDatabaseCreationScriptByType(string directory) { var system = new FileSystem(); system.DeleteDirectory(directory); system.CreateDirectory(directory); var features = _features.AllActiveFeatures(_tenant).ToArray(); writeDatabaseSchemaGenerationScript(directory, system, features); foreach (var feature in features) { var file = directory.AppendPath(feature.Identifier + ".sql"); DdlRules.WriteTemplatedFile(file, (r, w) => { feature.Write(r, w); }); } } private void writeDatabaseSchemaGenerationScript(string directory, FileSystem system, IFeatureSchema[] schemaObjects) { var allSchemaNames = StoreOptions.Storage.AllSchemaNames(); var script = DatabaseSchemaGenerator.GenerateScript(StoreOptions, allSchemaNames); var writer = new StringWriter(); if (script.IsNotEmpty()) { writer.WriteLine(script); writer.WriteLine(); } foreach (var feature in schemaObjects) { writer.WriteLine($"\\i {feature.Identifier}.sql"); } var filename = directory.AppendPath("all.sql"); system.WriteStringToFile(filename, writer.ToString()); } public async Task<SchemaMigration> CreateMigrationAsync() { var @objects = _features.AllActiveFeatures(_tenant).SelectMany(x => x.Objects).ToArray(); using var conn = _tenant.CreateConnection(); await conn.OpenAsync().ConfigureAwait(false); return await SchemaMigration.Determine(conn, @objects).ConfigureAwait(false); } public string ToDatabaseScript() { var writer = new StringWriter(); StoreOptions.Advanced.DdlRules.WriteScript(writer, (r, w) => { var allSchemaNames = StoreOptions.Storage.AllSchemaNames(); DatabaseSchemaGenerator.WriteSql(StoreOptions, allSchemaNames, w); foreach (var feature in _features.AllActiveFeatures(_tenant)) { feature.Write(r, w); } }); return writer.ToString(); } public async Task WriteMigrationFileAsync(string filename) { if (!Path.IsPathRooted(filename)) { filename = AppContext.BaseDirectory.AppendPath(filename); } var patch = await CreateMigrationAsync().ConfigureAwait(false); DdlRules.WriteTemplatedFile(filename, (r, w) => { patch.WriteAllUpdates(w, r, AutoCreate.All); }); var dropFile = SchemaMigration.ToDropFileName(filename); DdlRules.WriteTemplatedFile(dropFile, (r, w) => { patch.WriteAllRollbacks(w, r); }); } public async Task AssertDatabaseMatchesConfigurationAsync() { var patch = await CreateMigrationAsync().ConfigureAwait(false); if (patch.Difference != SchemaPatchDifference.None) { throw new SchemaValidationException(patch.UpdateSql); } } public async Task ApplyAllConfiguredChangesToDatabaseAsync(AutoCreate? withCreateSchemaObjects = null) { var defaultAutoCreate = StoreOptions.AutoCreateSchemaObjects != AutoCreate.None ? 
StoreOptions.AutoCreateSchemaObjects : AutoCreate.CreateOrUpdate; var patch = await CreateMigrationAsync().ConfigureAwait(false); if (patch.Difference == SchemaPatchDifference.None) return; using var conn = _tenant.CreateConnection(); await conn.OpenAsync().ConfigureAwait(false); try { var martenLogger = StoreOptions.Logger(); await patch.ApplyAll(conn, DdlRules, withCreateSchemaObjects ?? defaultAutoCreate, sql => martenLogger.SchemaChange(sql)).ConfigureAwait(false); _tenant.MarkAllFeaturesAsChecked(); } catch (Exception e) { throw new MartenSchemaException("All Configured Changes", patch.UpdateSql, e); } } public async Task<SchemaMigration> CreateMigrationAsync(Type documentType) { var mapping = _features.MappingFor(documentType); using var conn = _tenant.CreateConnection(); await conn.OpenAsync().ConfigureAwait(false); var migration = await SchemaMigration.Determine(conn, mapping.Schema.Objects).ConfigureAwait(false); return migration; } public async Task WriteMigrationFileByTypeAsync(string directory) { var system = new FileSystem(); system.DeleteDirectory(directory); system.CreateDirectory(directory); var features = _features.AllActiveFeatures(_tenant).ToArray(); writeDatabaseSchemaGenerationScript(directory, system, features); using var conn = _tenant.CreateConnection(); await conn.OpenAsync().ConfigureAwait(false); foreach (var feature in features) { var migration = await SchemaMigration.Determine(conn, feature.Objects).ConfigureAwait(false); if (migration.Difference == SchemaPatchDifference.None) { continue; } var file = directory.AppendPath(feature.Identifier + ".sql"); DdlRules.WriteTemplatedFile(file, (r, w) => { migration.WriteAllUpdates(w, r, AutoCreate.CreateOrUpdate); }); } } } }
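// Usage sketch (illustrative, not part of the original file): exercising the
// IDocumentSchema surface that TenantSchema implements above. The assumption that the
// document store exposes this object through a Schema property is based on Marten's
// public API; the file names are placeholders.
using System.Threading.Tasks;
using Marten;

internal static class TenantSchemaUsageSketch
{
    public static async Task SynchronizeAsync(IDocumentStore store)
    {
        var schema = store.Schema;

        // Dump the full DDL for every configured feature to a single file.
        schema.WriteDatabaseCreationScriptFile("all-objects.sql");

        // Or write only the delta between the configuration and the live database
        // (a matching *.drop.sql rollback file is produced as well).
        await schema.WriteMigrationFileAsync("schema-patch.sql");

        // Apply outstanding changes directly, honoring the configured AutoCreate mode.
        await schema.ApplyAllConfiguredChangesToDatabaseAsync();

        // Or fail fast when the database drifts from the configured schema.
        await schema.AssertDatabaseMatchesConfigurationAsync();
    }
}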
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using Microsoft.NetFramework.CSharp.Analyzers; using Microsoft.NetFramework.VisualBasic.Analyzers; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.CSharp; using Test.Utilities; using Xunit; namespace Microsoft.NetFramework.Analyzers.UnitTests { public partial class DoNotUseInsecureXSLTScriptExecutionAnalyzerTests : DiagnosticAnalyzerTestBase { private const string CA3076RuleId = DoNotUseInsecureXSLTScriptExecutionAnalyzer<SyntaxKind>.RuleId; private readonly string _CA3076LoadInsecureInputMessage = MicrosoftNetFrameworkAnalyzersResources.XslCompiledTransformLoadInsecureInputMessage; protected override DiagnosticAnalyzer GetBasicDiagnosticAnalyzer() { return new BasicDoNotUseInsecureXSLTScriptExecutionAnalyzer(); } protected override DiagnosticAnalyzer GetCSharpDiagnosticAnalyzer() { return new CSharpDoNotUseInsecureXSLTScriptExecutionAnalyzer(); } private DiagnosticResult GetCA3076LoadCSharpResultAt(int line, int column, string name) { return GetCSharpResultAt(line, column, CA3076RuleId, string.Format(_CA3076LoadInsecureInputMessage, name)); } private DiagnosticResult GetCA3076LoadBasicResultAt(int line, int column, string name) { return GetBasicResultAt(line, column, CA3076RuleId, string.Format(_CA3076LoadInsecureInputMessage, name)); } [Fact] public void UseXslCompiledTransformLoadSecureOverload1ShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; using System.Xml.XPath; namespace TestNamespace { class TestClass { private static void TestMethod(IXPathNavigable stylesheet) { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheet); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Imports System.Xml.XPath Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheet As IXPathNavigable) Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheet) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload1InTryBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml.Xsl; using System.Xml.XPath; namespace TestNamespace { class TestClass { private static void TestMethod(IXPathNavigable stylesheet) { try { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheet); } catch { throw; } finally { } } } }" ); VerifyBasic(@" Imports System.Xml.Xsl Imports System.Xml.XPath Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheet As IXPathNavigable) Try Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheet) Catch Throw Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload1InCatchBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml.Xsl; using System.Xml.XPath; namespace TestNamespace { class TestClass { private static void TestMethod(IXPathNavigable stylesheet) { try { } catch { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheet); } finally { } } } }" ); VerifyBasic(@" Imports System.Xml.Xsl Imports System.Xml.XPath Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheet As IXPathNavigable) Try Catch Dim xslCompiledTransform As New 
XslCompiledTransform() xslCompiledTransform.Load(stylesheet) Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload1InFinallyBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml.Xsl; using System.Xml.XPath; namespace TestNamespace { class TestClass { private static void TestMethod(IXPathNavigable stylesheet) { try { } catch { throw; } finally { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheet); } } } }" ); VerifyBasic(@" Imports System.Xml.Xsl Imports System.Xml.XPath Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheet As IXPathNavigable) Try Catch Throw Finally Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheet) End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload2ShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(String stylesheetUri) { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheetUri); } } }" ); VerifyBasic(@" Imports System Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheetUri As [String]) Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheetUri) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload2InTryBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(String stylesheetUri) { try { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheetUri); } catch { throw; } finally { } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheetUri As [String]) Try Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheetUri) Catch Throw Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload2InCatchBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(String stylesheetUri) { try { } catch { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheetUri); } finally { } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(stylesheetUri As [String]) Try Catch Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheetUri) Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload2InFinallyBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(String stylesheetUri) { try { } catch { throw; } finally { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(stylesheetUri); } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub 
TestMethod(stylesheetUri As [String]) Try Catch Throw Finally Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(stylesheetUri) End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload3ShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(Type compiledStylesheet) { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(compiledStylesheet); } } }" ); VerifyBasic(@" Imports System Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(compiledStylesheet As Type) Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(compiledStylesheet) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload3InTryBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(Type compiledStylesheet) { try { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(compiledStylesheet); } catch { throw; } finally { } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(compiledStylesheet As Type) Try Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(compiledStylesheet) Catch Throw Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload3InCatchBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(Type compiledStylesheet) { try { } catch { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(compiledStylesheet); } finally { } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(compiledStylesheet As Type) Try Catch Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(compiledStylesheet) Finally End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSecureOverload3InFinallyBlockShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod(Type compiledStylesheet) { try { } catch { throw; } finally { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); xslCompiledTransform.Load(compiledStylesheet); } } } }" ); VerifyBasic(@" Imports System Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod(compiledStylesheet As Type) Try Catch Throw Finally Dim xslCompiledTransform As New XslCompiledTransform() xslCompiledTransform.Load(compiledStylesheet) End Try End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadTrustedXsltAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = System.Xml.Xsl.XsltSettings.TrustedXslt; var resolver = new XmlUrlResolver(); 
xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(14, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = System.Xml.Xsl.XsltSettings.TrustedXslt Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(11, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadTrustedXsltAndNullResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = System.Xml.Xsl.XsltSettings.TrustedXslt; xslCompiledTransform.Load(""testStylesheet"", settings, null); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = System.Xml.Xsl.XsltSettings.TrustedXslt xslCompiledTransform.Load("""", settings, Nothing) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadTrustedSourceAndSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = System.Xml.Xsl.XsltSettings.TrustedXslt; var resolver = new XmlSecureResolver(new XmlUrlResolver(), """"); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = System.Xml.Xsl.XsltSettings.TrustedXslt Dim resolver = New XmlSecureResolver(New XmlUrlResolver(), """") xslCompiledTransform.Load(""testStylesheet"", settings, resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadDefaultAndNonSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadDefaultAndSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(); xslCompiledTransform.Load(""testStylesheet"", settings, null); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass 
Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() xslCompiledTransform.Load("""", settings, Nothing) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadDefaultPropertyAndNonSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = XsltSettings.Default; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = XsltSettings.[Default] Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadEnableScriptAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings() { EnableScript = true }; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(14, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() With { _ .EnableScript = True _ } Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(13, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadSetEnableScriptToTrueAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(); settings.EnableScript = true; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(15, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() settings.EnableScript = True Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(12, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadEnableDocumentFunctionAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings() { EnableDocumentFunction = true }; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(14, 13, "TestMethod") ); VerifyBasic(@" Imports 
System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() With { _ .EnableDocumentFunction = True _ } Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(13, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadSetEnableDocumentFunctionToTrueAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(); settings.EnableDocumentFunction = true; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(15, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() settings.EnableDocumentFunction = True Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(12, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadSetEnableDocumentFunctionToTrueAndSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings() { EnableDocumentFunction = true }; var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, null); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() With { _ .EnableDocumentFunction = True _ } Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, Nothing) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadSetEnableScriptPropertyToTrueAndSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(); settings.EnableScript = true; xslCompiledTransform.Load(""testStylesheet"", settings, null); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings() settings.EnableScript = True xslCompiledTransform.Load("""", settings, Nothing) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadConstructSettingsWithTrueParamAndNonSecureResolverShouldGenerateDiagnostic1() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(true, false); var resolver = new XmlUrlResolver(); 
xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(14, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings(True, False) Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(11, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadConstructSettingsWithTrueParamAndNonSecureResolverShouldGenerateDiagnostic2() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(false, true); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }", GetCA3076LoadCSharpResultAt(14, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings(False, True) Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace", GetCA3076LoadBasicResultAt(11, 13, "TestMethod") ); } [Fact] public void UseXslCompiledTransformLoadConstructSettingsWithFalseParamsAndNonSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var settings = new XsltSettings(false, false); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", settings, resolver); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim settings = New XsltSettings(False, False) Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", settings, resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadNullSettingsAndNonSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", null, resolver); } } }" ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", Nothing, resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadDefaultAsArgumentAndNonSecureResolverShouldNotGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", XsltSettings.Default, resolver); } } }" ); VerifyBasic(@" Imports System.Xml 
Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", XsltSettings.[Default], resolver) End Sub End Class End Namespace"); } [Fact] public void UseXslCompiledTransformLoadTrustedXsltAsArgumentAndNonSecureResolverShouldGenerateDiagnostic() { VerifyCSharp(@" using System.Xml; using System.Xml.Xsl; namespace TestNamespace { class TestClass { private static void TestMethod() { XslCompiledTransform xslCompiledTransform = new XslCompiledTransform(); var resolver = new XmlUrlResolver(); xslCompiledTransform.Load(""testStylesheet"", XsltSettings.TrustedXslt, resolver); } } }", GetCA3076LoadInsecureConstructedCSharpResultAt(13, 13, "TestMethod") ); VerifyBasic(@" Imports System.Xml Imports System.Xml.Xsl Namespace TestNamespace Class TestClass Private Shared Sub TestMethod() Dim xslCompiledTransform As New XslCompiledTransform() Dim resolver = New XmlUrlResolver() xslCompiledTransform.Load("""", XsltSettings.TrustedXslt, resolver) End Sub End Class End Namespace", GetCA3076LoadInsecureConstructedBasicResultAt(10, 13, "TestMethod") ); } } }
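// Illustrative sketch (not one of the original test cases): the secure and insecure
// XslCompiledTransform.Load patterns that the CA3076 tests above exercise. The
// stylesheet URI is a placeholder.
using System.Xml;
using System.Xml.Xsl;

internal static class XslCompiledTransformLoadSketch
{
    public static void LoadWithDefaultSettings(string stylesheetUri)
    {
        var transform = new XslCompiledTransform();

        // Not flagged: XsltSettings.Default disables script and the document() function,
        // so even a non-secure resolver is acceptable to the analyzer.
        transform.Load(stylesheetUri, XsltSettings.Default, new XmlUrlResolver());
    }

    public static void LoadWithTrustedSettings(string stylesheetUri)
    {
        var transform = new XslCompiledTransform();

        // Flagged by CA3076: TrustedXslt enables script and document(), and the
        // XmlUrlResolver may resolve arbitrary external resources.
        transform.Load(stylesheetUri, XsltSettings.TrustedXslt, new XmlUrlResolver());
    }
}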
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using NUnit.Framework; using osu.Framework.Allocation; using osu.Framework.Graphics; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Shapes; using osu.Game.Graphics.Containers.Markdown; using osu.Game.Overlays; namespace osu.Game.Tests.Visual.UserInterface { public class TestSceneOsuMarkdownContainer : OsuTestScene { private OsuMarkdownContainer markdownContainer; [Cached] private readonly OverlayColourProvider overlayColour = new OverlayColourProvider(OverlayColourScheme.Orange); [SetUp] public void Setup() => Schedule(() => { Children = new Drawable[] { new Box { Colour = overlayColour.Background5, RelativeSizeAxes = Axes.Both, }, new BasicScrollContainer { RelativeSizeAxes = Axes.Both, Padding = new MarginPadding(20), Child = markdownContainer = new OsuMarkdownContainer { RelativeSizeAxes = Axes.X, AutoSizeAxes = Axes.Y } } }; }); [Test] public void TestEmphases() { AddStep("Emphases", () => { markdownContainer.Text = @"_italic with underscore_ *italic with asterisk* __bold with underscore__ **bold with asterisk** *__italic with asterisk, bold with underscore__* _**italic with underscore, bold with asterisk**_"; }); } [Test] public void TestHeading() { AddStep("Add Heading", () => { markdownContainer.Text = @"# Header 1 ## Header 2 ### Header 3 #### Header 4 ##### Header 5"; }); } [Test] public void TestLink() { AddStep("Add Link", () => { markdownContainer.Text = "[Welcome to osu!](https://osu.ppy.sh)"; }); } [Test] public void TestLinkWithInlineText() { AddStep("Add Link with inline text", () => { markdownContainer.Text = "Hey, [welcome to osu!](https://osu.ppy.sh) Please enjoy the game."; }); } [Test] public void TestLinkWithTitle() { AddStep("Add Link with title", () => { markdownContainer.Text = "[wikipedia](https://www.wikipedia.org \"The Free Encyclopedia\")"; }); } [Test] public void TestAutoLink() { AddStep("Add autolink", () => { markdownContainer.Text = "<https://discord.gg/ppy>"; }); } [Test] public void TestInlineCode() { AddStep("Add inline code", () => { markdownContainer.Text = "This is `inline code` text"; }); } [Test] public void TestParagraph() { AddStep("Add paragraph", () => { markdownContainer.Text = @"first paragraph second paragraph third paragraph"; }); } [Test] public void TestFencedCodeBlock() { AddStep("Add Code Block", () => { markdownContainer.Text = @"```markdown # Markdown code block This is markdown code block. ```"; }); } [Test] public void TestSeparator() { AddStep("Add Separator", () => { markdownContainer.Text = @"Line above --- Line below"; }); } [Test] public void TestQuote() { AddStep("Add quote", () => { markdownContainer.Text = @"> Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur."; }); } [Test] public void TestTable() { AddStep("Add Table", () => { markdownContainer.Text = @"| Left Aligned | Center Aligned | Right Aligned | | :------------------- | :--------------------: | ---------------------:| | Long Align Left Text | Long Align Center Text | Long Align Right Text | | Align Left | Align Center | Align Right | | Left | Center | Right |"; }); } [Test] public void TestUnorderedList() { AddStep("Add Unordered List", () => { markdownContainer.Text = @"- First item level 1 - Second item level 1 - First item level 2 - First item level 3 - Second item level 3 - Third item level 3 - First item level 4 - Second item level 4 - Third item level 4 - Second item level 2 - Third item level 2 - Third item level 1"; }); } [Test] public void TestOrderedList() { AddStep("Add Ordered List", () => { markdownContainer.Text = @"1. First item level 1 2. Second item level 1 1. First item level 2 1. First item level 3 2. Second item level 3 3. Third item level 3 1. First item level 4 2. Second item level 4 3. Third item level 4 2. Second item level 2 3. Third item level 2 3. Third item level 1"; }); } [Test] public void TestLongMixedList() { AddStep("Add long mixed list", () => { markdownContainer.Text = @"1. The osu! World Cup is a country-based team tournament played on the osu! game mode. - While this competition is planned as a 4 versus 4 setup, this may change depending on the number of incoming registrations. 2. Beatmap scoring is based on Score V2. 3. The beatmaps for each round will be announced by the map selectors in advance on the Sunday before the actual matches take place. Only these beatmaps will be used during the respective matches. - One beatmap will be a tiebreaker beatmap. This beatmap will only be played in case of a tie. **The only exception to this is the Qualifiers pool.** 4. The match schedule will be settled by the Tournament Management (see the [scheduling instructions](#scheduling-instructions)). 5. If no staff or referee is available, the match will be postponed. 6. Use of the Visual Settings to alter background dim or disable beatmap elements like storyboards and skins are allowed. 7. If the beatmap ends in a draw, the map will be nullified and replayed. 8. If a player disconnects, their scores will not be counted towards their team's total. - Disconnects within 30 seconds or 25% of the beatmap length (whichever happens first) after beatmap begin can be aborted and/or rematched. This is up to the referee's discretion. 9. Beatmaps cannot be reused in the same match unless the map was nullified. 10. If less than the minimum required players attend, the maximum time the match can be postponed is 10 minutes. 11. Exchanging players during a match is allowed without limitations. - **If a map rematch is required, exchanging players is not allowed. With the referee's discretion, an exception can be made if the previous roster is unavailable to play.** 12. Lag is not a valid reason to nullify a beatmap. 13. All players are supposed to keep the match running fluently and without delays. Penalties can be issued to the players if they cause excessive match delays. 14. If a player disconnects between maps and the team cannot provide a replacement, the match can be delayed 10 minutes at maximum. 15. All players and referees must be treated with respect. Instructions of the referees and tournament Management are to be followed. 
Decisions labeled as final are not to be objected. 16. Disrupting the match by foul play, insulting and provoking other players or referees, delaying the match or other deliberate inappropriate misbehavior is strictly prohibited. 17. The multiplayer chatrooms are subject to the [osu! community rules](/wiki/Rules). - Breaking the chat rules will result in a silence. Silenced players can not participate in multiplayer matches and must be exchanged for the time being. 18. **The seeding method will be revealed after all the teams have played their Qualifier rounds.** 19. Unexpected incidents are handled by the tournament management. Referees may allow higher tolerance depending on the circumstances. This is up to their discretion. 20. Penalties for violating the tournament rules may include: - Exclusion of specific players for one beatmap - Exclusion of specific players for an entire match - Declaring the match as Lost by Default - Disqualification from the entire tournament - Disqualification from the current and future official tournaments until appealed - Any modification of these rules will be announced."; }); } } }
using System; using System.Diagnostics; using Fairweather.Service; namespace Common { /// <summary> /// This structure contains the payment amounts received on /// a particular day. /// </summary> [DebuggerStepThrough] public struct Day_Totals { public Day_Totals(DateTime day, decimal cash, decimal credit, decimal gift, decimal cheques, decimal post_dated) : this() { this.Day = day.Date; this.Cash = cash; this.Credit = credit; this.Gift = gift; this.Cheques = cheques; this.Post_Dated = post_dated; } public DateTime Day { get; set; } public decimal Cash { get; set; } public decimal Credit { get; set; } public decimal Gift { get; set; } public decimal Cheques { get; set; } public decimal Post_Dated { get; set; } public decimal Total { get { var ret = Cash + Credit + Gift + Cheques + Post_Dated; return ret; } } /// <summary> /// Adds two Day_Totals instances' payment details pairwise and /// uses the information to create a new instance. /// When allow_different_dates is false, throws ApplicationException if the two instances refer to /// different dates. /// The new instance's date is "left.Day". /// </summary> public static Day_Totals Add(Day_Totals left, Day_Totals right, bool allow_different_dates) { if (!allow_different_dates) (left.Day == right.Day).tiff(); var ret = new Day_Totals(left.Day, left.Cash + right.Cash, left.Credit + right.Credit, left.Gift + right.Gift, left.Cheques + right.Cheques, left.Post_Dated + right.Post_Dated); return ret; } /// <summary> /// Adds two Day_Totals instances' payment details pairwise and /// uses the information to create a new instance. /// Throws ApplicationException if the two instances refer to /// different dates. /// </summary> public static Day_Totals operator +(Day_Totals left, Day_Totals right) { return Add(left, right, false); } #region Boilerplate /* Boilerplate */ public override string ToString() { string ret = ""; ret += "cash = " + this.Cash; ret += ", "; ret += "credit = " + this.Credit; ret += ", "; ret += "gift = " + this.Gift; ret += ", "; ret += "cheques = " + this.Cheques; ret += ", "; ret += "post_dated = " + this.Post_Dated; ret = "{Day_Totals: " + ret + "}"; return ret; } public bool Equals(Day_Totals obj2) { if (!this.Cash.Equals(obj2.Cash)) return false; if (!this.Credit.Equals(obj2.Credit)) return false; if (!this.Gift.Equals(obj2.Gift)) return false; if (!this.Cheques.Equals(obj2.Cheques)) return false; if (!this.Post_Dated.Equals(obj2.Post_Dated)) return false; return true; } public override bool Equals(object obj2) { var ret = (obj2 != null && obj2 is Day_Totals); if (ret) ret = this.Equals((Day_Totals)obj2); return ret; } public static bool operator ==(Day_Totals left, Day_Totals right) { var ret = left.Equals(right); return ret; } public static bool operator !=(Day_Totals left, Day_Totals right) { var ret = !left.Equals(right); return ret; } public override int GetHashCode() { unchecked { int ret = 23; int temp; ret *= 31; temp = this.Cash.GetHashCode(); ret += temp; ret *= 31; temp = this.Credit.GetHashCode(); ret += temp; ret *= 31; temp = this.Gift.GetHashCode(); ret += temp; ret *= 31; temp = this.Cheques.GetHashCode(); ret += temp; ret *= 31; temp = this.Post_Dated.GetHashCode(); ret += temp; return ret; } } #endregion } }
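// Usage sketch (illustrative, not part of the original file): combining two Day_Totals
// values. The dates and amounts are made up for the example.
using System;
using Common;

internal static class DayTotalsUsageSketch
{
    public static void Demo()
    {
        var morning = new Day_Totals(new DateTime(2024, 1, 15), cash: 120m, credit: 80m, gift: 0m, cheques: 25m, post_dated: 0m);
        var evening = new Day_Totals(new DateTime(2024, 1, 15), cash: 60m, credit: 140m, gift: 10m, cheques: 0m, post_dated: 50m);

        // Same date, so the overloaded + operator applies; it throws if the dates differ.
        var wholeDay = morning + evening;
        Console.WriteLine(wholeDay.Total); // 485

        // Totals for different dates must opt in explicitly; the result keeps the left date.
        var nextDay = new Day_Totals(new DateTime(2024, 1, 16), 30m, 0m, 0m, 0m, 0m);
        var running = Day_Totals.Add(wholeDay, nextDay, allow_different_dates: true);
        Console.WriteLine(running.Day); // the left operand's date
    }
}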
// Copyright 2017, Google LLC All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using Google.Api.Gax; using System; using System.Linq; namespace Google.Cloud.Bigtable.Admin.V2 { /// <summary> /// Resource name for the 'project' resource. /// </summary> public sealed partial class ProjectName : IResourceName, IEquatable<ProjectName> { private static readonly PathTemplate s_template = new PathTemplate("projects/{project}"); /// <summary> /// Parses the given project resource name in string form into a new /// <see cref="ProjectName"/> instance. /// </summary> /// <param name="projectName">The project resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="ProjectName"/> if successful.</returns> public static ProjectName Parse(string projectName) { GaxPreconditions.CheckNotNull(projectName, nameof(projectName)); TemplatedResourceName resourceName = s_template.ParseName(projectName); return new ProjectName(resourceName[0]); } /// <summary> /// Tries to parse the given project resource name in string form into a new /// <see cref="ProjectName"/> instance. /// </summary> /// <remarks> /// This method still throws <see cref="ArgumentNullException"/> if <paramref name="projectName"/> is null, /// as this would usually indicate a programming error rather than a data error. /// </remarks> /// <param name="projectName">The project resource name in string form. Must not be <c>null</c>.</param> /// <param name="result">When this method returns, the parsed <see cref="ProjectName"/>, /// or <c>null</c> if parsing fails.</param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string projectName, out ProjectName result) { GaxPreconditions.CheckNotNull(projectName, nameof(projectName)); TemplatedResourceName resourceName; if (s_template.TryParseName(projectName, out resourceName)) { result = new ProjectName(resourceName[0]); return true; } else { result = null; return false; } } /// <summary> /// Constructs a new instance of the <see cref="ProjectName"/> resource name class /// from its component parts. /// </summary> /// <param name="projectId">The project ID. Must not be <c>null</c>.</param> public ProjectName(string projectId) { ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId)); } /// <summary> /// The project ID. Never <c>null</c>. /// </summary> public string ProjectId { get; } /// <inheritdoc /> public ResourceNameKind Kind => ResourceNameKind.Simple; /// <inheritdoc /> public override string ToString() => s_template.Expand(ProjectId); /// <inheritdoc /> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc /> public override bool Equals(object obj) => Equals(obj as ProjectName); /// <inheritdoc /> public bool Equals(ProjectName other) => ToString() == other?.ToString(); /// <inheritdoc /> public static bool operator ==(ProjectName a, ProjectName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? 
false); /// <inheritdoc /> public static bool operator !=(ProjectName a, ProjectName b) => !(a == b); } /// <summary> /// Resource name for the 'instance' resource. /// </summary> public sealed partial class InstanceName : IResourceName, IEquatable<InstanceName> { private static readonly PathTemplate s_template = new PathTemplate("projects/{project}/instances/{instance}"); /// <summary> /// Parses the given instance resource name in string form into a new /// <see cref="InstanceName"/> instance. /// </summary> /// <param name="instanceName">The instance resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="InstanceName"/> if successful.</returns> public static InstanceName Parse(string instanceName) { GaxPreconditions.CheckNotNull(instanceName, nameof(instanceName)); TemplatedResourceName resourceName = s_template.ParseName(instanceName); return new InstanceName(resourceName[0], resourceName[1]); } /// <summary> /// Tries to parse the given instance resource name in string form into a new /// <see cref="InstanceName"/> instance. /// </summary> /// <remarks> /// This method still throws <see cref="ArgumentNullException"/> if <paramref name="instanceName"/> is null, /// as this would usually indicate a programming error rather than a data error. /// </remarks> /// <param name="instanceName">The instance resource name in string form. Must not be <c>null</c>.</param> /// <param name="result">When this method returns, the parsed <see cref="InstanceName"/>, /// or <c>null</c> if parsing fails.</param> /// <returns><c>true</c> if the name was parsed succssfully; <c>false</c> otherwise.</returns> public static bool TryParse(string instanceName, out InstanceName result) { GaxPreconditions.CheckNotNull(instanceName, nameof(instanceName)); TemplatedResourceName resourceName; if (s_template.TryParseName(instanceName, out resourceName)) { result = new InstanceName(resourceName[0], resourceName[1]); return true; } else { result = null; return false; } } /// <summary> /// Constructs a new instance of the <see cref="InstanceName"/> resource name class /// from its component parts. /// </summary> /// <param name="projectId">The project ID. Must not be <c>null</c>.</param> /// <param name="instanceId">The instance ID. Must not be <c>null</c>.</param> public InstanceName(string projectId, string instanceId) { ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId)); InstanceId = GaxPreconditions.CheckNotNull(instanceId, nameof(instanceId)); } /// <summary> /// The project ID. Never <c>null</c>. /// </summary> public string ProjectId { get; } /// <summary> /// The instance ID. Never <c>null</c>. /// </summary> public string InstanceId { get; } /// <inheritdoc /> public ResourceNameKind Kind => ResourceNameKind.Simple; /// <inheritdoc /> public override string ToString() => s_template.Expand(ProjectId, InstanceId); /// <inheritdoc /> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc /> public override bool Equals(object obj) => Equals(obj as InstanceName); /// <inheritdoc /> public bool Equals(InstanceName other) => ToString() == other?.ToString(); /// <inheritdoc /> public static bool operator ==(InstanceName a, InstanceName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false); /// <inheritdoc /> public static bool operator !=(InstanceName a, InstanceName b) => !(a == b); } /// <summary> /// Resource name for the 'cluster' resource. 
/// </summary> public sealed partial class ClusterName : IResourceName, IEquatable<ClusterName> { private static readonly PathTemplate s_template = new PathTemplate("projects/{project}/instances/{instance}/clusters/{cluster}"); /// <summary> /// Parses the given cluster resource name in string form into a new /// <see cref="ClusterName"/> instance. /// </summary> /// <param name="clusterName">The cluster resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="ClusterName"/> if successful.</returns> public static ClusterName Parse(string clusterName) { GaxPreconditions.CheckNotNull(clusterName, nameof(clusterName)); TemplatedResourceName resourceName = s_template.ParseName(clusterName); return new ClusterName(resourceName[0], resourceName[1], resourceName[2]); } /// <summary> /// Tries to parse the given cluster resource name in string form into a new /// <see cref="ClusterName"/> instance. /// </summary> /// <remarks> /// This method still throws <see cref="ArgumentNullException"/> if <paramref name="clusterName"/> is null, /// as this would usually indicate a programming error rather than a data error. /// </remarks> /// <param name="clusterName">The cluster resource name in string form. Must not be <c>null</c>.</param> /// <param name="result">When this method returns, the parsed <see cref="ClusterName"/>, /// or <c>null</c> if parsing fails.</param> /// <returns><c>true</c> if the name was parsed succssfully; <c>false</c> otherwise.</returns> public static bool TryParse(string clusterName, out ClusterName result) { GaxPreconditions.CheckNotNull(clusterName, nameof(clusterName)); TemplatedResourceName resourceName; if (s_template.TryParseName(clusterName, out resourceName)) { result = new ClusterName(resourceName[0], resourceName[1], resourceName[2]); return true; } else { result = null; return false; } } /// <summary> /// Constructs a new instance of the <see cref="ClusterName"/> resource name class /// from its component parts. /// </summary> /// <param name="projectId">The project ID. Must not be <c>null</c>.</param> /// <param name="instanceId">The instance ID. Must not be <c>null</c>.</param> /// <param name="clusterId">The cluster ID. Must not be <c>null</c>.</param> public ClusterName(string projectId, string instanceId, string clusterId) { ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId)); InstanceId = GaxPreconditions.CheckNotNull(instanceId, nameof(instanceId)); ClusterId = GaxPreconditions.CheckNotNull(clusterId, nameof(clusterId)); } /// <summary> /// The project ID. Never <c>null</c>. /// </summary> public string ProjectId { get; } /// <summary> /// The instance ID. Never <c>null</c>. /// </summary> public string InstanceId { get; } /// <summary> /// The cluster ID. Never <c>null</c>. /// </summary> public string ClusterId { get; } /// <inheritdoc /> public ResourceNameKind Kind => ResourceNameKind.Simple; /// <inheritdoc /> public override string ToString() => s_template.Expand(ProjectId, InstanceId, ClusterId); /// <inheritdoc /> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc /> public override bool Equals(object obj) => Equals(obj as ClusterName); /// <inheritdoc /> public bool Equals(ClusterName other) => ToString() == other?.ToString(); /// <inheritdoc /> public static bool operator ==(ClusterName a, ClusterName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? 
false); /// <inheritdoc /> public static bool operator !=(ClusterName a, ClusterName b) => !(a == b); } /// <summary> /// Resource name for the 'location' resource. /// </summary> public sealed partial class LocationName : IResourceName, IEquatable<LocationName> { private static readonly PathTemplate s_template = new PathTemplate("projects/{project}/locations/{location}"); /// <summary> /// Parses the given location resource name in string form into a new /// <see cref="LocationName"/> instance. /// </summary> /// <param name="locationName">The location resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="LocationName"/> if successful.</returns> public static LocationName Parse(string locationName) { GaxPreconditions.CheckNotNull(locationName, nameof(locationName)); TemplatedResourceName resourceName = s_template.ParseName(locationName); return new LocationName(resourceName[0], resourceName[1]); } /// <summary> /// Tries to parse the given location resource name in string form into a new /// <see cref="LocationName"/> instance. /// </summary> /// <remarks> /// This method still throws <see cref="ArgumentNullException"/> if <paramref name="locationName"/> is null, /// as this would usually indicate a programming error rather than a data error. /// </remarks> /// <param name="locationName">The location resource name in string form. Must not be <c>null</c>.</param> /// <param name="result">When this method returns, the parsed <see cref="LocationName"/>, /// or <c>null</c> if parsing fails.</param> /// <returns><c>true</c> if the name was parsed succssfully; <c>false</c> otherwise.</returns> public static bool TryParse(string locationName, out LocationName result) { GaxPreconditions.CheckNotNull(locationName, nameof(locationName)); TemplatedResourceName resourceName; if (s_template.TryParseName(locationName, out resourceName)) { result = new LocationName(resourceName[0], resourceName[1]); return true; } else { result = null; return false; } } /// <summary> /// Constructs a new instance of the <see cref="LocationName"/> resource name class /// from its component parts. /// </summary> /// <param name="projectId">The project ID. Must not be <c>null</c>.</param> /// <param name="locationId">The location ID. Must not be <c>null</c>.</param> public LocationName(string projectId, string locationId) { ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId)); LocationId = GaxPreconditions.CheckNotNull(locationId, nameof(locationId)); } /// <summary> /// The project ID. Never <c>null</c>. /// </summary> public string ProjectId { get; } /// <summary> /// The location ID. Never <c>null</c>. /// </summary> public string LocationId { get; } /// <inheritdoc /> public ResourceNameKind Kind => ResourceNameKind.Simple; /// <inheritdoc /> public override string ToString() => s_template.Expand(ProjectId, LocationId); /// <inheritdoc /> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc /> public override bool Equals(object obj) => Equals(obj as LocationName); /// <inheritdoc /> public bool Equals(LocationName other) => ToString() == other?.ToString(); /// <inheritdoc /> public static bool operator ==(LocationName a, LocationName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false); /// <inheritdoc /> public static bool operator !=(LocationName a, LocationName b) => !(a == b); } /// <summary> /// Resource name for the 'table' resource. 
/// </summary> public sealed partial class TableName : IResourceName, IEquatable<TableName> { private static readonly PathTemplate s_template = new PathTemplate("projects/{project}/instances/{instance}/tables/{table}"); /// <summary> /// Parses the given table resource name in string form into a new /// <see cref="TableName"/> instance. /// </summary> /// <param name="tableName">The table resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="TableName"/> if successful.</returns> public static TableName Parse(string tableName) { GaxPreconditions.CheckNotNull(tableName, nameof(tableName)); TemplatedResourceName resourceName = s_template.ParseName(tableName); return new TableName(resourceName[0], resourceName[1], resourceName[2]); } /// <summary> /// Tries to parse the given table resource name in string form into a new /// <see cref="TableName"/> instance. /// </summary> /// <remarks> /// This method still throws <see cref="ArgumentNullException"/> if <paramref name="tableName"/> is null, /// as this would usually indicate a programming error rather than a data error. /// </remarks> /// <param name="tableName">The table resource name in string form. Must not be <c>null</c>.</param> /// <param name="result">When this method returns, the parsed <see cref="TableName"/>, /// or <c>null</c> if parsing fails.</param> /// <returns><c>true</c> if the name was parsed succssfully; <c>false</c> otherwise.</returns> public static bool TryParse(string tableName, out TableName result) { GaxPreconditions.CheckNotNull(tableName, nameof(tableName)); TemplatedResourceName resourceName; if (s_template.TryParseName(tableName, out resourceName)) { result = new TableName(resourceName[0], resourceName[1], resourceName[2]); return true; } else { result = null; return false; } } /// <summary> /// Constructs a new instance of the <see cref="TableName"/> resource name class /// from its component parts. /// </summary> /// <param name="projectId">The project ID. Must not be <c>null</c>.</param> /// <param name="instanceId">The instance ID. Must not be <c>null</c>.</param> /// <param name="tableId">The table ID. Must not be <c>null</c>.</param> public TableName(string projectId, string instanceId, string tableId) { ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId)); InstanceId = GaxPreconditions.CheckNotNull(instanceId, nameof(instanceId)); TableId = GaxPreconditions.CheckNotNull(tableId, nameof(tableId)); } /// <summary> /// The project ID. Never <c>null</c>. /// </summary> public string ProjectId { get; } /// <summary> /// The instance ID. Never <c>null</c>. /// </summary> public string InstanceId { get; } /// <summary> /// The table ID. Never <c>null</c>. /// </summary> public string TableId { get; } /// <inheritdoc /> public ResourceNameKind Kind => ResourceNameKind.Simple; /// <inheritdoc /> public override string ToString() => s_template.Expand(ProjectId, InstanceId, TableId); /// <inheritdoc /> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc /> public override bool Equals(object obj) => Equals(obj as TableName); /// <inheritdoc /> public bool Equals(TableName other) => ToString() == other?.ToString(); /// <inheritdoc /> public static bool operator ==(TableName a, TableName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? 
false); /// <inheritdoc /> public static bool operator !=(TableName a, TableName b) => !(a == b); } public partial class Cluster { /// <summary> /// <see cref="ClusterName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public ClusterName ClusterName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.ClusterName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } /// <summary> /// <see cref="LocationName"/>-typed view over the <see cref="Location"/> resource name property. /// </summary> public LocationName LocationAsLocationName { get { return string.IsNullOrEmpty(Location) ? null : Google.Cloud.Bigtable.Admin.V2.LocationName.Parse(Location); } set { Location = value != null ? value.ToString() : ""; } } } public partial class CreateClusterRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public InstanceName ParentAsInstanceName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class CreateInstanceRequest { /// <summary> /// <see cref="ProjectName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public ProjectName ParentAsProjectName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.ProjectName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class CreateTableRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public InstanceName ParentAsInstanceName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class DeleteClusterRequest { /// <summary> /// <see cref="ClusterName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public ClusterName ClusterName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.ClusterName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class DeleteInstanceRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public InstanceName InstanceName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class DeleteTableRequest { /// <summary> /// <see cref="TableName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public TableName TableName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.TableName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class DropRowRangeRequest { /// <summary> /// <see cref="TableName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public TableName TableName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.TableName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class GetClusterRequest { /// <summary> /// <see cref="ClusterName"/>-typed view over the <see cref="Name"/> resource name property. 
/// </summary> public ClusterName ClusterName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.ClusterName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class GetInstanceRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public InstanceName InstanceName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class GetTableRequest { /// <summary> /// <see cref="TableName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public TableName TableName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.TableName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class Instance { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public InstanceName InstanceName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class ListClustersRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public InstanceName ParentAsInstanceName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class ListInstancesRequest { /// <summary> /// <see cref="ProjectName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public ProjectName ParentAsProjectName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.ProjectName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class ListTablesRequest { /// <summary> /// <see cref="InstanceName"/>-typed view over the <see cref="Parent"/> resource name property. /// </summary> public InstanceName ParentAsInstanceName { get { return string.IsNullOrEmpty(Parent) ? null : Google.Cloud.Bigtable.Admin.V2.InstanceName.Parse(Parent); } set { Parent = value != null ? value.ToString() : ""; } } } public partial class ModifyColumnFamiliesRequest { /// <summary> /// <see cref="TableName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public TableName TableName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.TableName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } public partial class Table { /// <summary> /// <see cref="TableName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public TableName TableName { get { return string.IsNullOrEmpty(Name) ? null : Google.Cloud.Bigtable.Admin.V2.TableName.Parse(Name); } set { Name = value != null ? value.ToString() : ""; } } } }
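A minimal usage sketch of the generated resource name classes above, assuming placeholder project/instance/table IDs; the ResourceNameUsageSketch class and Demo method are illustrative only and are not part of the generated file.

using System;
using Google.Cloud.Bigtable.Admin.V2;

internal static class ResourceNameUsageSketch
{
    internal static void Demo()
    {
        // Compose a table name from its parts; ToString() expands the path template.
        TableName table = new TableName("my-project", "my-instance", "my-table");
        Console.WriteLine(table); // projects/my-project/instances/my-instance/tables/my-table

        // Parse throws on malformed input; TryParse returns false instead
        // (but still throws ArgumentNullException for null, as documented above).
        TableName parsed = TableName.Parse("projects/my-project/instances/my-instance/tables/my-table");
        Console.WriteLine(parsed.Equals(table)); // True

        if (!TableName.TryParse("not-a-table-name", out TableName ignored))
        {
            Console.WriteLine("TryParse rejected the malformed name.");
        }

        // The request partials expose typed views over their underlying string properties,
        // so callers never hand-format resource paths.
        GetTableRequest request = new GetTableRequest { TableName = parsed };
        Console.WriteLine(request.Name); // same expanded string form as above
    }
}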
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/ // Portions Copyright 2000-2004 Jonathan de Halleux // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.IO; using System.Text; using Gallio.Properties; namespace Gallio.Runtime.ConsoleSupport { ///<summary> /// Responsible for creating formatted command-line output. ///</summary> public class CommandLineOutput { private const int LeftMargin = 2; private const int HangingIndent = 19; private const int Gutter = 2; private readonly TextWriter output; private int lineLength; /// <summary> /// Initializes a new instance of CommandLineOutput. /// </summary> ///<param name="console">The console.</param> public CommandLineOutput(IRichConsole console) : this(console.Out, console.Width) { } /// <summary> /// Initializes a new instance of CommandLineOutput that outputs to the specified stream. /// </summary> /// <param name="output">The text writer to write to.</param> public CommandLineOutput(TextWriter output) { this.output = output; lineLength = 80; } /// <summary> /// Initializes a new instance of CommandLineOutput that outputs to the specified stream. /// </summary> /// <param name="output">The text writer to write to.</param> /// <param name="width">The output width.</param> public CommandLineOutput(TextWriter output, int width) { this.output = output; lineLength = width; } ///<summary> /// The output stream. ///</summary> public TextWriter Output { get { return output; } } ///<summary> /// Maximum line length allowed before the text will be wrapped. ///</summary> public int LineLength { get { return lineLength; } set { lineLength = value; } } ///<summary> /// Prints out a new line. ///</summary> public void NewLine() { output.WriteLine(); } /// <summary> /// Outputs text with the specified indentation. /// </summary> /// <param name="text">Text to output, possibly including newlines.</param> /// <param name="indentation">Number of blank spaces to indent each line.</param> public void PrintText(string text, int indentation) { PrintText(text, indentation, indentation); }
/// <summary> /// Outputs text with the specified indentation. /// </summary> /// <param name="text">Text to output, possibly including newlines.</param> /// <param name="indentation">Number of blank spaces to indent all but the first line.</param> /// <param name="firstLineIndent">Number of blank spaces to indent the first line.</param> public void PrintText(string text, int indentation, int firstLineIndent) { int currentIndentation = firstLineIndent; text = text.Trim(); while (text.Length != 0) { int maxLength = lineLength - currentIndentation - 1; int pos = text.IndexOf('\n'); if (pos < 0 || pos > maxLength) { if (text.Length <= maxLength) { output.Write(Space(currentIndentation)); output.WriteLine(text); break; } else { pos = text.LastIndexOf(' ', maxLength); if (pos < 0) pos = maxLength; } } output.Write(Space(currentIndentation)); output.WriteLine(text.Substring(0, pos).TrimEnd()); if (pos == text.Length - 1) break; text = text.Substring(pos + 1).TrimStart(' ', '\t', '\r'); // exclude \n currentIndentation = indentation; } } /// <summary> /// Prints help for a specified argument. /// </summary> /// <param name="prefix">The argument prefix, such as "/", or null or empty if none.</param> /// <param name="longName">The argument's long name, or null or empty if none.</param> /// <param name="shortName">The argument's short name, or null or empty if none.</param> /// <param name="description">The argument's description, or null or empty if none.</param> /// <param name="valueLabel">The argument's value label such as "path", or null or empty if none.</param> /// <param name="valueType">The argument's value type, or null if none.</param> public void PrintArgumentHelp(string prefix, string longName, string shortName, string description, string valueLabel, Type valueType) { StringBuilder argumentHelp = new StringBuilder(); if (!string.IsNullOrEmpty(prefix)) argumentHelp.Append(prefix); if (! string.IsNullOrEmpty(longName)) { argumentHelp.Append(longName); if (! string.IsNullOrEmpty(valueLabel)) argumentHelp.Append(':'); } if (! string.IsNullOrEmpty(valueLabel)) { argumentHelp.Append('<'); argumentHelp.Append(valueLabel); argumentHelp.Append('>'); } if (argumentHelp.Length > HangingIndent - Gutter) argumentHelp.Append('\n'); else argumentHelp.Append(Space(HangingIndent - argumentHelp.Length)); if (! string.IsNullOrEmpty(description)) argumentHelp.Append(description); if (valueType != null && valueType.IsEnum) { argumentHelp.Append(@" "); AppendEnumerationValues(argumentHelp, valueType); } if (!string.IsNullOrEmpty(shortName)) { argumentHelp.Append(@" "); argumentHelp.AppendFormat(Resources.CommandLineOutput_ShortForm, (prefix ?? @"") + shortName); } PrintText(argumentHelp.ToString(), HangingIndent + LeftMargin, LeftMargin); } private static void AppendEnumerationValues(StringBuilder builder, Type valueType) { builder.Append(Resources.CommandLineOutput_AvailableOptions); string[] values = Enum.GetNames(valueType); for (int i = 0; i < values.Length; i++) { if (i != 0) builder.Append(@", "); builder.Append(@"'"); builder.Append(values[i]); builder.Append(@"'"); } builder.Append('.'); } private static string Space(int spaceCount) { return new string(' ', spaceCount); } } }
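A minimal sketch of how CommandLineOutput is typically driven, assuming an arbitrary StringWriter, a 40-column width, and made-up help text; the CommandLineOutputSketch class is illustrative only and not part of the Gallio source.

using System;
using System.IO;
using Gallio.Runtime.ConsoleSupport;

internal static class CommandLineOutputSketch
{
    internal static void Demo()
    {
        var writer = new StringWriter();
        var output = new CommandLineOutput(writer, 40); // wrap at 40 columns

        // First line indented by 2 spaces, continuation lines by 10; words are broken
        // at spaces so that no line exceeds LineLength - 1 characters.
        output.PrintText(
            "Runs the specified test assemblies and reports the results to the console.",
            indentation: 10,
            firstLineIndent: 2);

        Console.Write(writer.ToString());
    }
}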
namespace Azure.AI.AnomalyDetector { public partial class AnomalyDetectorClient { protected AnomalyDetectorClient() { } public AnomalyDetectorClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.AnomalyDetector.AnomalyDetectorClientOptions options = null) { } public AnomalyDetectorClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.AnomalyDetector.AnomalyDetectorClientOptions options = null) { } public virtual Azure.Response DeleteMultivariateModel(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response> DeleteMultivariateModelAsync(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response DetectAnomaly(System.Guid modelId, Azure.AI.AnomalyDetector.Models.DetectionRequest detectionRequest, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response> DetectAnomalyAsync(System.Guid modelId, Azure.AI.AnomalyDetector.Models.DetectionRequest detectionRequest, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response<Azure.AI.AnomalyDetector.Models.ChangePointDetectResponse> DetectChangePoint(Azure.AI.AnomalyDetector.Models.ChangePointDetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.AnomalyDetector.Models.ChangePointDetectResponse>> DetectChangePointAsync(Azure.AI.AnomalyDetector.Models.ChangePointDetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response<Azure.AI.AnomalyDetector.Models.EntireDetectResponse> DetectEntireSeries(Azure.AI.AnomalyDetector.Models.DetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.AnomalyDetector.Models.EntireDetectResponse>> DetectEntireSeriesAsync(Azure.AI.AnomalyDetector.Models.DetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response<Azure.AI.AnomalyDetector.Models.LastDetectResponse> DetectLastPoint(Azure.AI.AnomalyDetector.Models.DetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.AnomalyDetector.Models.LastDetectResponse>> DetectLastPointAsync(Azure.AI.AnomalyDetector.Models.DetectRequest body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response<System.IO.Stream> ExportModel(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<System.IO.Stream>> ExportModelAsync(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { 
throw null; } public virtual Azure.Response<Azure.AI.AnomalyDetector.Models.DetectionResult> GetDetectionResult(System.Guid resultId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.AnomalyDetector.Models.DetectionResult>> GetDetectionResultAsync(System.Guid resultId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response<Azure.AI.AnomalyDetector.Models.Model> GetMultivariateModel(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.AnomalyDetector.Models.Model>> GetMultivariateModelAsync(System.Guid modelId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Pageable<Azure.AI.AnomalyDetector.Models.ModelSnapshot> ListMultivariateModel(int? skip = default(int?), int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.AsyncPageable<Azure.AI.AnomalyDetector.Models.ModelSnapshot> ListMultivariateModelAsync(int? skip = default(int?), int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response TrainMultivariateModel(Azure.AI.AnomalyDetector.Models.ModelInfo modelRequest, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task<Azure.Response> TrainMultivariateModelAsync(Azure.AI.AnomalyDetector.Models.ModelInfo modelRequest, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } } public partial class AnomalyDetectorClientOptions : Azure.Core.ClientOptions { public AnomalyDetectorClientOptions(Azure.AI.AnomalyDetector.AnomalyDetectorClientOptions.ServiceVersion version = Azure.AI.AnomalyDetector.AnomalyDetectorClientOptions.ServiceVersion.V1_1_preview) { } public enum ServiceVersion { V1_1_preview = 1, } } } namespace Azure.AI.AnomalyDetector.Models { public enum AlignMode { Inner = 0, Outer = 1, } public partial class AlignPolicy { public AlignPolicy() { } public Azure.AI.AnomalyDetector.Models.AlignMode? AlignMode { get { throw null; } set { } } public Azure.AI.AnomalyDetector.Models.FillNAMethod? FillNAMethod { get { throw null; } set { } } public int? PaddingValue { get { throw null; } set { } } } public partial class AnomalyContributor { internal AnomalyContributor() { } public float? ContributionScore { get { throw null; } } public string Variable { get { throw null; } } } public static partial class AnomalyDetectorModelFactory { public static Azure.AI.AnomalyDetector.Models.AnomalyContributor AnomalyContributor(float? 
contributionScore = default(float?), string variable = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.AnomalyState AnomalyState(System.DateTimeOffset timestamp = default(System.DateTimeOffset), Azure.AI.AnomalyDetector.Models.AnomalyValue value = null, System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.ErrorResponse> errors = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.AnomalyValue AnomalyValue(System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.AnomalyContributor> contributors = null, bool isAnomaly = false, float severity = 0f, float? score = default(float?)) { throw null; } public static Azure.AI.AnomalyDetector.Models.ChangePointDetectResponse ChangePointDetectResponse(int? period = default(int?), System.Collections.Generic.IEnumerable<bool> isChangePoint = null, System.Collections.Generic.IEnumerable<float> confidenceScores = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.DetectionResult DetectionResult(System.Guid resultId = default(System.Guid), Azure.AI.AnomalyDetector.Models.DetectionResultSummary summary = null, System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.AnomalyState> results = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.DetectionResultSummary DetectionResultSummary(Azure.AI.AnomalyDetector.Models.DetectionStatus status = Azure.AI.AnomalyDetector.Models.DetectionStatus.Created, System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.ErrorResponse> errors = null, System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.VariableState> variableStates = null, Azure.AI.AnomalyDetector.Models.DetectionRequest setupInfo = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.DiagnosticsInfo DiagnosticsInfo(Azure.AI.AnomalyDetector.Models.ModelState modelState = null, System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.VariableState> variableStates = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.EntireDetectResponse EntireDetectResponse(int period = 0, System.Collections.Generic.IEnumerable<float> expectedValues = null, System.Collections.Generic.IEnumerable<float> upperMargins = null, System.Collections.Generic.IEnumerable<float> lowerMargins = null, System.Collections.Generic.IEnumerable<bool> isAnomaly = null, System.Collections.Generic.IEnumerable<bool> isNegativeAnomaly = null, System.Collections.Generic.IEnumerable<bool> isPositiveAnomaly = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.ErrorResponse ErrorResponse(string code = null, string message = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.LastDetectResponse LastDetectResponse(int period = 0, int suggestedWindow = 0, float expectedValue = 0f, float upperMargin = 0f, float lowerMargin = 0f, bool isAnomaly = false, bool isNegativeAnomaly = false, bool isPositiveAnomaly = false) { throw null; } public static Azure.AI.AnomalyDetector.Models.Model Model(System.Guid modelId = default(System.Guid), System.DateTimeOffset createdTime = default(System.DateTimeOffset), System.DateTimeOffset lastUpdatedTime = default(System.DateTimeOffset), Azure.AI.AnomalyDetector.Models.ModelInfo modelInfo = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.ModelInfo ModelInfo(int? 
slidingWindow = default(int?), Azure.AI.AnomalyDetector.Models.AlignPolicy alignPolicy = null, string source = null, System.DateTimeOffset startTime = default(System.DateTimeOffset), System.DateTimeOffset endTime = default(System.DateTimeOffset), string displayName = null, Azure.AI.AnomalyDetector.Models.ModelStatus? status = default(Azure.AI.AnomalyDetector.Models.ModelStatus?), System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.ErrorResponse> errors = null, Azure.AI.AnomalyDetector.Models.DiagnosticsInfo diagnosticsInfo = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.ModelSnapshot ModelSnapshot(System.Guid modelId = default(System.Guid), System.DateTimeOffset createdTime = default(System.DateTimeOffset), System.DateTimeOffset lastUpdatedTime = default(System.DateTimeOffset), Azure.AI.AnomalyDetector.Models.ModelStatus status = Azure.AI.AnomalyDetector.Models.ModelStatus.Created, string displayName = null, int variablesCount = 0) { throw null; } public static Azure.AI.AnomalyDetector.Models.ModelState ModelState(System.Collections.Generic.IEnumerable<int> epochIds = null, System.Collections.Generic.IEnumerable<float> trainLosses = null, System.Collections.Generic.IEnumerable<float> validationLosses = null, System.Collections.Generic.IEnumerable<float> latenciesInSeconds = null) { throw null; } public static Azure.AI.AnomalyDetector.Models.VariableState VariableState(string variable = null, float? filledNARatio = default(float?), int? effectiveCount = default(int?), System.DateTimeOffset? startTime = default(System.DateTimeOffset?), System.DateTimeOffset? endTime = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.ErrorResponse> errors = null) { throw null; } } public partial class AnomalyState { internal AnomalyState() { } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.ErrorResponse> Errors { get { throw null; } } public System.DateTimeOffset Timestamp { get { throw null; } } public Azure.AI.AnomalyDetector.Models.AnomalyValue Value { get { throw null; } } } public partial class AnomalyValue { internal AnomalyValue() { } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.AnomalyContributor> Contributors { get { throw null; } } public bool IsAnomaly { get { throw null; } } public float? Score { get { throw null; } } public float Severity { get { throw null; } } } public partial class ChangePointDetectRequest { public ChangePointDetectRequest(System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.TimeSeriesPoint> series, Azure.AI.AnomalyDetector.Models.TimeGranularity granularity) { } public int? CustomInterval { get { throw null; } set { } } public Azure.AI.AnomalyDetector.Models.TimeGranularity Granularity { get { throw null; } } public int? Period { get { throw null; } set { } } public System.Collections.Generic.IList<Azure.AI.AnomalyDetector.Models.TimeSeriesPoint> Series { get { throw null; } } public int? StableTrendWindow { get { throw null; } set { } } public float? Threshold { get { throw null; } set { } } } public partial class ChangePointDetectResponse { internal ChangePointDetectResponse() { } public System.Collections.Generic.IReadOnlyList<float> ConfidenceScores { get { throw null; } } public System.Collections.Generic.IReadOnlyList<bool> IsChangePoint { get { throw null; } } public int? 
Period { get { throw null; } } } public partial class DetectionRequest { public DetectionRequest(string source, System.DateTimeOffset startTime, System.DateTimeOffset endTime) { } public System.DateTimeOffset EndTime { get { throw null; } set { } } public string Source { get { throw null; } set { } } public System.DateTimeOffset StartTime { get { throw null; } set { } } } public partial class DetectionResult { internal DetectionResult() { } public System.Guid ResultId { get { throw null; } } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.AnomalyState> Results { get { throw null; } } public Azure.AI.AnomalyDetector.Models.DetectionResultSummary Summary { get { throw null; } } } public partial class DetectionResultSummary { internal DetectionResultSummary() { } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.ErrorResponse> Errors { get { throw null; } } public Azure.AI.AnomalyDetector.Models.DetectionRequest SetupInfo { get { throw null; } } public Azure.AI.AnomalyDetector.Models.DetectionStatus Status { get { throw null; } } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.VariableState> VariableStates { get { throw null; } } } public enum DetectionStatus { Created = 0, Running = 1, Ready = 2, Failed = 3, } public partial class DetectRequest { public DetectRequest(System.Collections.Generic.IEnumerable<Azure.AI.AnomalyDetector.Models.TimeSeriesPoint> series) { } public int? CustomInterval { get { throw null; } set { } } public Azure.AI.AnomalyDetector.Models.TimeGranularity? Granularity { get { throw null; } set { } } public float? MaxAnomalyRatio { get { throw null; } set { } } public int? Period { get { throw null; } set { } } public int? Sensitivity { get { throw null; } set { } } public System.Collections.Generic.IList<Azure.AI.AnomalyDetector.Models.TimeSeriesPoint> Series { get { throw null; } } } public partial class DiagnosticsInfo { internal DiagnosticsInfo() { } public Azure.AI.AnomalyDetector.Models.ModelState ModelState { get { throw null; } } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.VariableState> VariableStates { get { throw null; } } } public partial class EntireDetectResponse { internal EntireDetectResponse() { } public System.Collections.Generic.IReadOnlyList<float> ExpectedValues { get { throw null; } } public System.Collections.Generic.IReadOnlyList<bool> IsAnomaly { get { throw null; } } public System.Collections.Generic.IReadOnlyList<bool> IsNegativeAnomaly { get { throw null; } } public System.Collections.Generic.IReadOnlyList<bool> IsPositiveAnomaly { get { throw null; } } public System.Collections.Generic.IReadOnlyList<float> LowerMargins { get { throw null; } } public int Period { get { throw null; } } public System.Collections.Generic.IReadOnlyList<float> UpperMargins { get { throw null; } } } public partial class ErrorResponse { internal ErrorResponse() { } public string Code { get { throw null; } } public string Message { get { throw null; } } } public enum FillNAMethod { Previous = 0, Subsequent = 1, Linear = 2, Zero = 3, Pad = 4, NotFill = 5, } public partial class LastDetectResponse { internal LastDetectResponse() { } public float ExpectedValue { get { throw null; } } public bool IsAnomaly { get { throw null; } } public bool IsNegativeAnomaly { get { throw null; } } public bool IsPositiveAnomaly { get { throw null; } } public float LowerMargin { get { throw null; } } public int Period { get { throw null; } } public int 
SuggestedWindow { get { throw null; } } public float UpperMargin { get { throw null; } } } public partial class Model { internal Model() { } public System.DateTimeOffset CreatedTime { get { throw null; } } public System.DateTimeOffset LastUpdatedTime { get { throw null; } } public System.Guid ModelId { get { throw null; } } public Azure.AI.AnomalyDetector.Models.ModelInfo ModelInfo { get { throw null; } } } public partial class ModelInfo { public ModelInfo(string source, System.DateTimeOffset startTime, System.DateTimeOffset endTime) { } public Azure.AI.AnomalyDetector.Models.AlignPolicy AlignPolicy { get { throw null; } set { } } public Azure.AI.AnomalyDetector.Models.DiagnosticsInfo DiagnosticsInfo { get { throw null; } } public string DisplayName { get { throw null; } set { } } public System.DateTimeOffset EndTime { get { throw null; } set { } } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.ErrorResponse> Errors { get { throw null; } } public int? SlidingWindow { get { throw null; } set { } } public string Source { get { throw null; } set { } } public System.DateTimeOffset StartTime { get { throw null; } set { } } public Azure.AI.AnomalyDetector.Models.ModelStatus? Status { get { throw null; } } } public partial class ModelSnapshot { internal ModelSnapshot() { } public System.DateTimeOffset CreatedTime { get { throw null; } } public string DisplayName { get { throw null; } } public System.DateTimeOffset LastUpdatedTime { get { throw null; } } public System.Guid ModelId { get { throw null; } } public Azure.AI.AnomalyDetector.Models.ModelStatus Status { get { throw null; } } public int VariablesCount { get { throw null; } } } public partial class ModelState { internal ModelState() { } public System.Collections.Generic.IReadOnlyList<int> EpochIds { get { throw null; } } public System.Collections.Generic.IReadOnlyList<float> LatenciesInSeconds { get { throw null; } } public System.Collections.Generic.IReadOnlyList<float> TrainLosses { get { throw null; } } public System.Collections.Generic.IReadOnlyList<float> ValidationLosses { get { throw null; } } } public enum ModelStatus { Created = 0, Running = 1, Ready = 2, Failed = 3, } public enum TimeGranularity { Yearly = 0, Monthly = 1, Weekly = 2, Daily = 3, Hourly = 4, PerMinute = 5, PerSecond = 6, Microsecond = 7, None = 8, } public partial class TimeSeriesPoint { public TimeSeriesPoint(float value) { } public System.DateTimeOffset? Timestamp { get { throw null; } set { } } public float Value { get { throw null; } } } public partial class VariableState { internal VariableState() { } public int? EffectiveCount { get { throw null; } } public System.DateTimeOffset? EndTime { get { throw null; } } public System.Collections.Generic.IReadOnlyList<Azure.AI.AnomalyDetector.Models.ErrorResponse> Errors { get { throw null; } } public float? FilledNARatio { get { throw null; } } public System.DateTimeOffset? StartTime { get { throw null; } } public string Variable { get { throw null; } } } }
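A minimal sketch of calling the univariate entire-series detection surface listed above, assuming a placeholder endpoint and API key and a made-up daily series; the AnomalyDetectorSketch class and the specific values are illustrative only.

using System;
using System.Linq;
using Azure;
using Azure.AI.AnomalyDetector;
using Azure.AI.AnomalyDetector.Models;

internal static class AnomalyDetectorSketch
{
    internal static void Demo()
    {
        var client = new AnomalyDetectorClient(
            new Uri("https://<resource-name>.cognitiveservices.azure.com/"),
            new AzureKeyCredential("<api-key>"));

        // Build a 30-point daily series with one obvious spike; Timestamp is optional on TimeSeriesPoint.
        var start = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var series = Enumerable.Range(0, 30)
            .Select(i => new TimeSeriesPoint(i == 20 ? 100f : 1f) { Timestamp = start.AddDays(i) });

        var request = new DetectRequest(series) { Granularity = TimeGranularity.Daily };

        // DetectEntireSeries evaluates the whole series in one call and flags anomalies per point.
        Response<EntireDetectResponse> response = client.DetectEntireSeries(request);
        int anomalies = response.Value.IsAnomaly.Count(flag => flag);
        Console.WriteLine($"Detected {anomalies} anomalous points.");
    }
}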
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using Internal.Runtime.CompilerServices; namespace System { public static partial class Convert { /// <summary> /// Decode the span of UTF-16 encoded text represented as base 64 into binary data. /// If the input is not a multiple of 4, or contains illegal characters, it will decode as much as it can, to the largest possible multiple of 4. /// This invariant allows continuation of the parse with a slower, whitespace-tolerant algorithm. /// </summary> /// <param name="utf16">The input span which contains UTF-16 encoded text in base 64 that needs to be decoded.</param> /// <param name="bytes">The output span which contains the result of the operation, i.e. the decoded binary data.</param> /// <param name="consumed">The number of input characters consumed during the operation. This can be used to slice the input for subsequent calls, if necessary.</param> /// <param name="written">The number of bytes written into the output span. This can be used to slice the output for subsequent calls, if necessary.</param> /// <returns> /// - true - The entire input span was successfully parsed. /// - false - Only a part of the input span was successfully parsed. Failure causes may include embedded or trailing whitespace, /// other illegal Base64 characters, trailing characters after an encoding pad ('='), an input span whose length is not divisible by 4 /// or a destination span that's too small. <paramref name="consumed"/> and <paramref name="written"/> are set so that /// parsing can continue with a slower whitespace-tolerant algorithm. /// </returns> /// <remarks> /// This is a cut down version of the implementation of Base64.DecodeFromUtf8(), modified to accept UTF-16 chars and act as a fast-path /// helper for the Convert routines when the input string contains no whitespace. /// </remarks> private static bool TryDecodeFromUtf16(ReadOnlySpan<char> utf16, Span<byte> bytes, out int consumed, out int written) { ref char srcChars = ref MemoryMarshal.GetReference(utf16); ref byte destBytes = ref MemoryMarshal.GetReference(bytes); int srcLength = utf16.Length & ~0x3; // only decode input up to the closest multiple of 4. int destLength = bytes.Length; int sourceIndex = 0; int destIndex = 0; if (utf16.Length == 0) goto DoneExit; ref sbyte decodingMap = ref MemoryMarshal.GetReference(DecodingMap); // Last bytes could have padding characters, so process them separately and treat them as valid. const int skipLastChunk = 4; int maxSrcLength; if (destLength >= (srcLength >> 2) * 3) { maxSrcLength = srcLength - skipLastChunk; } else { // This should never overflow since destLength here is less than int.MaxValue / 4 * 3 (i.e.
1610612733) // Therefore, (destLength / 3) * 4 will always be less than 2147483641 maxSrcLength = (destLength / 3) * 4; } while (sourceIndex < maxSrcLength) { int result = Decode(ref Unsafe.Add(ref srcChars, sourceIndex), ref decodingMap); if (result < 0) goto InvalidExit; WriteThreeLowOrderBytes(ref Unsafe.Add(ref destBytes, destIndex), result); destIndex += 3; sourceIndex += 4; } if (maxSrcLength != srcLength - skipLastChunk) goto InvalidExit; // If input is less than 4 bytes, srcLength == sourceIndex == 0 // If input is not a multiple of 4, sourceIndex == srcLength != 0 if (sourceIndex == srcLength) { goto InvalidExit; } int i0 = Unsafe.Add(ref srcChars, srcLength - 4); int i1 = Unsafe.Add(ref srcChars, srcLength - 3); int i2 = Unsafe.Add(ref srcChars, srcLength - 2); int i3 = Unsafe.Add(ref srcChars, srcLength - 1); if (((i0 | i1 | i2 | i3) & 0xffffff00) != 0) goto InvalidExit; i0 = Unsafe.Add(ref decodingMap, i0); i1 = Unsafe.Add(ref decodingMap, i1); i0 <<= 18; i1 <<= 12; i0 |= i1; if (i3 != EncodingPad) { i2 = Unsafe.Add(ref decodingMap, i2); i3 = Unsafe.Add(ref decodingMap, i3); i2 <<= 6; i0 |= i3; i0 |= i2; if (i0 < 0) goto InvalidExit; if (destIndex > destLength - 3) goto InvalidExit; WriteThreeLowOrderBytes(ref Unsafe.Add(ref destBytes, destIndex), i0); destIndex += 3; } else if (i2 != EncodingPad) { i2 = Unsafe.Add(ref decodingMap, i2); i2 <<= 6; i0 |= i2; if (i0 < 0) goto InvalidExit; if (destIndex > destLength - 2) goto InvalidExit; Unsafe.Add(ref destBytes, destIndex) = (byte)(i0 >> 16); Unsafe.Add(ref destBytes, destIndex + 1) = (byte)(i0 >> 8); destIndex += 2; } else { if (i0 < 0) goto InvalidExit; if (destIndex > destLength - 1) goto InvalidExit; Unsafe.Add(ref destBytes, destIndex) = (byte)(i0 >> 16); destIndex++; } sourceIndex += 4; if (srcLength != utf16.Length) goto InvalidExit; DoneExit: consumed = sourceIndex; written = destIndex; return true; InvalidExit: consumed = sourceIndex; written = destIndex; Debug.Assert((consumed % 4) == 0); return false; } [MethodImpl(MethodImplOptions.AggressiveInlining)] private static int Decode(ref char encodedChars, ref sbyte decodingMap) { int i0 = encodedChars; int i1 = Unsafe.Add(ref encodedChars, 1); int i2 = Unsafe.Add(ref encodedChars, 2); int i3 = Unsafe.Add(ref encodedChars, 3); if (((i0 | i1 | i2 | i3) & 0xffffff00) != 0) return -1; // One or more chars falls outside the 00..ff range. This cannot be a valid Base64 character. 
i0 = Unsafe.Add(ref decodingMap, i0); i1 = Unsafe.Add(ref decodingMap, i1); i2 = Unsafe.Add(ref decodingMap, i2); i3 = Unsafe.Add(ref decodingMap, i3); i0 <<= 18; i1 <<= 12; i2 <<= 6; i0 |= i3; i1 |= i2; i0 |= i1; return i0; } [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void WriteThreeLowOrderBytes(ref byte destination, int value) { destination = (byte)(value >> 16); Unsafe.Add(ref destination, 1) = (byte)(value >> 8); Unsafe.Add(ref destination, 2) = (byte)value; } // Pre-computing this table using a custom string(s_characters) and GenerateDecodingMapAndVerify (found in tests) private static ReadOnlySpan<sbyte> DecodingMap => new sbyte[] // rely on C# compiler optimization to reference static data { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, // 62 is placed at index 43 (for +), 63 at index 47 (for /) 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1, // 52-61 are placed at index 48-57 (for 0-9), 64 at index 61 (for =) -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, // 0-25 are placed at index 65-90 (for A-Z) -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, // 26-51 are placed at index 97-122 (for a-z) -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // Bytes over 122 ('z') are invalid and cannot be decoded -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // Hence, padding the map with 255, which indicates invalid input -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, }; private const byte EncodingPad = (byte)'='; // '=', for padding } }
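TryDecodeFromUtf16 is a private fast path; applications reach it through the public Convert entry points. The sketch below shows those public APIs and the four-characters-to-three-bytes relationship; the Base64DecodeSketch class and sample data are arbitrary.

using System;

internal static class Base64DecodeSketch
{
    internal static void Demo()
    {
        // Every 4 base-64 characters decode to at most 3 bytes; '=' padding reduces the final count.
        string encoded = Convert.ToBase64String(new byte[] { 1, 2, 3, 4, 5 }); // "AQIDBAU="

        byte[] roundTripped = Convert.FromBase64String(encoded);
        Console.WriteLine(roundTripped.Length); // 5

        // TryFromBase64Chars reports failure instead of throwing when the input is malformed or the
        // destination is too small, mirroring the consumed/written contract of the private helper above.
        Span<byte> destination = stackalloc byte[6];
        if (Convert.TryFromBase64Chars(encoded.AsSpan(), destination, out int bytesWritten))
        {
            Console.WriteLine(bytesWritten); // 5
        }
    }
}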
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Xunit; namespace System.Diagnostics.TraceSourceTests { using Method = TestTraceListener.Method; public class TraceListenerClassTests { [Fact] public void NameTest() { var listener = new TestTraceListener(); listener.Name = null; Assert.Equal("", listener.Name); } [Fact] public void IndentLevelTest() { var listener = new TestTraceListener(); listener.IndentLevel = 0; Assert.Equal(0, listener.IndentLevel); listener.IndentLevel = 2; Assert.Equal(2, listener.IndentLevel); listener.IndentLevel = -1; Assert.Equal(0, listener.IndentLevel); } [Fact] public void IndentSizeTest() { var listener = new TestTraceListener(); listener.IndentSize = 0; Assert.Equal(0, listener.IndentSize); listener.IndentSize = 2; Assert.Equal(2, listener.IndentSize); Assert.Throws<ArgumentOutOfRangeException>(() => listener.IndentSize = -1); } [Fact] public void FilterTest() { var listener = new TestTraceListener(); listener.Filter = new SourceFilter("TestSource"); Assert.NotNull(listener.Filter); } [Fact] public void TraceOutputOptionsTest() { var listener = new TestTraceListener(); listener.TraceOutputOptions = TraceOptions.None; // NOTE: TraceOptions includes values for 0x01 and 0x20 in .NET Framework 4.5, but not in CoreFX // These assertions test for those missing values, and the exceptional condition that // maintains compatibility with 4.5 var missingValue = (TraceOptions)0x01; listener.TraceOutputOptions = missingValue; missingValue = (TraceOptions)0x20; listener.TraceOutputOptions = missingValue; var badValue = (TraceOptions)0x80; Assert.Throws<ArgumentOutOfRangeException>(() => listener.TraceOutputOptions = badValue); } [Fact] public void WriteObjectTest() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.Write((object)"Message"); Assert.Equal(0, listener.GetCallCount(Method.Write)); listener.Filter = new TestTraceFilter(true); listener.Write((object)null); Assert.Equal(0, listener.GetCallCount(Method.Write)); listener.Write((object)"Message"); Assert.Equal(1, listener.GetCallCount(Method.Write)); } [Fact] public void WriteCategoryTest() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.Write("Message", "Category"); Assert.Equal(0, listener.GetCallCount(Method.Write)); listener.Filter = new TestTraceFilter(true); listener.Write("Message", null); Assert.Equal(1, listener.GetCallCount(Method.Write)); listener.Write("Message", "Category"); Assert.Equal(2, listener.GetCallCount(Method.Write)); listener.Write(null, "Category"); Assert.Equal(3, listener.GetCallCount(Method.Write)); } [Fact] public void WriteCategoryTest2() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.Write((object)"Message", "Category"); Assert.Equal(0, listener.GetCallCount(Method.Write)); listener.Filter = new TestTraceFilter(true); listener.Write((object)"Message", null); Assert.Equal(1, listener.GetCallCount(Method.Write)); listener.Write((object)"Message", "Category"); Assert.Equal(2, listener.GetCallCount(Method.Write)); listener.Write((object)null, "Category"); Assert.Equal(3, listener.GetCallCount(Method.Write)); } [Fact] public void IndentTest() { var listener = new TestTextTraceListener(); listener.IndentLevel = 2; listener.IndentSize = 4; listener.Write("Message"); listener.Flush(); 
Assert.Equal(" Message", listener.Output); listener = new TestTextTraceListener(); listener.IndentLevel = 1; listener.IndentSize = 3; listener.Write("Message"); listener.Flush(); Assert.Equal(" Message", listener.Output); } [Fact] public void WriteLineObjectTest() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.WriteLine((object)"Message"); Assert.Equal(0, listener.GetCallCount(Method.WriteLine)); listener.Filter = new TestTraceFilter(true); // NOTE: Writing null will result in a newline being written listener.WriteLine((object)null); Assert.Equal(1, listener.GetCallCount(Method.WriteLine)); listener.WriteLine((object)"Message"); Assert.Equal(2, listener.GetCallCount(Method.WriteLine)); } [Fact] public void WriteLineCategoryTest() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.WriteLine("Message", "Category"); Assert.Equal(0, listener.GetCallCount(Method.WriteLine)); listener.Filter = new TestTraceFilter(true); listener.WriteLine("Message", null); Assert.Equal(1, listener.GetCallCount(Method.WriteLine)); listener.WriteLine("Message", "Category"); Assert.Equal(2, listener.GetCallCount(Method.WriteLine)); listener.WriteLine(null, "Category"); Assert.Equal(3, listener.GetCallCount(Method.WriteLine)); } [Fact] public void WriteLineCategoryTest2() { var listener = new TestTraceListener(); listener.Filter = new TestTraceFilter(false); listener.WriteLine((object)"Message", "Category"); Assert.Equal(0, listener.GetCallCount(Method.WriteLine)); listener.Filter = new TestTraceFilter(true); listener.WriteLine((object)"Message", null); Assert.Equal(1, listener.GetCallCount(Method.WriteLine)); listener.WriteLine((object)"Message", "Category"); Assert.Equal(2, listener.GetCallCount(Method.WriteLine)); listener.WriteLine((object)null, "Category"); Assert.Equal(3, listener.GetCallCount(Method.WriteLine)); } [Fact] public void TraceDataTest() { var cache = new TraceEventCache(); var listener = new TestTextTraceListener(); listener.Filter = new TestTraceFilter(false); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, new object()); Assert.Equal(0, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, new object()); var expected = 2; // header and message. Assert.Equal(expected, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, (object)null); Assert.Equal(expected, listener.WriteCount); } [Fact] public void TraceDataTest2() { var cache = new TraceEventCache(); var listener = new TestTextTraceListener(); listener.Filter = new TestTraceFilter(false); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, new object[0]); Assert.Equal(0, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, (object[])null); var expected = 2; // header and message. 
Assert.Equal(expected, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, "Arg1", "Arg2"); Assert.Equal(expected, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceData(cache, "Source", TraceEventType.Critical, 1, "Arg1", null); Assert.Equal(expected, listener.WriteCount); } [Fact] public void TraceEventTest() { var cache = new TraceEventCache(); var listener = new TestTextTraceListener(); listener.Filter = new TestTraceFilter(false); listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1); Assert.Equal(0, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1); var expected = 2; // header and message. Assert.Equal(expected, listener.WriteCount); } [Fact] public void TraceEventTest2() { var cache = new TraceEventCache(); var listener = new TestTextTraceListener(); listener.Filter = new TestTraceFilter(false); listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1, "Format", "arg1"); Assert.Equal(0, listener.WriteCount); listener = new TestTextTraceListener(); listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1, "Format", "arg1"); var expected = 2; // header and message. Assert.Equal(expected, listener.WriteCount); } [Theory] [InlineData(TraceOptions.None, 0)] [InlineData(TraceOptions.Timestamp, 1)] [InlineData(TraceOptions.ProcessId | TraceOptions.ThreadId, 2)] [InlineData(TraceOptions.DateTime | TraceOptions.Timestamp, 2)] public void WriteFooterTest(TraceOptions opts, int flagCount) { var cache = new TraceEventCache(); var listener = new TestTextTraceListener(); listener.TraceOutputOptions = opts; listener.Filter = new TestTraceFilter(false); listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1); Assert.Equal(0, listener.WriteCount); var baseExpected = 2; // header + message var expected = baseExpected; listener = new TestTextTraceListener(); listener.TraceOutputOptions = opts; listener.TraceEvent(null, "Source", TraceEventType.Critical, 1); Assert.Equal(expected, listener.WriteCount); // Two calls to write per flag, one call for writing the indent, one for the message. expected = baseExpected + flagCount * 2; listener = new TestTextTraceListener(); listener.TraceOutputOptions = opts; listener.TraceEvent(cache, "Source", TraceEventType.Critical, 1); Assert.Equal(expected, listener.WriteCount); } } }
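// --------------------------------------------------------------------------------
// Illustrative sketch (the real TestTraceFilter helper lives elsewhere in the test
// project and is not shown here): the tests above only need a TraceFilter whose
// ShouldTrace answer is fixed at construction time, roughly like this.
// --------------------------------------------------------------------------------
namespace System.Diagnostics.TraceSourceTests
{
    internal class FixedResultTraceFilter : TraceFilter
    {
        private readonly bool _shouldTrace;

        public FixedResultTraceFilter(bool shouldTrace)
        {
            _shouldTrace = shouldTrace;
        }

        // false => the listener suppresses Write/WriteLine/TraceEvent output entirely
        public override bool ShouldTrace(TraceEventCache cache, string source, TraceEventType eventType,
                                         int id, string formatOrMessage, object[] args, object data1, object[] data)
        {
            return _shouldTrace;
        }
    }
}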
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Text; using System.Windows.Forms; namespace PSTaskDialog { public partial class frmTaskDialog : Form { //-------------------------------------------------------------------------------- #region PRIVATE members //-------------------------------------------------------------------------------- eSysIcons m_mainIcon = eSysIcons.Question; eSysIcons m_footerIcon = eSysIcons.Warning; string m_mainInstruction = "Main Instruction Text"; int m_mainInstructionHeight = 0; Font m_mainInstructionFont = new Font("Arial", 11.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, (byte)0); List<RadioButton> m_radioButtonCtrls = new List<RadioButton>(); string m_radioButtons = ""; int m_initialRadioButtonIndex = 0; List<Button> m_cmdButtons = new List<Button>(); string m_commandButtons = ""; int m_commandButtonClicked = -1; int m_defaultButtonIndex = 0; Control m_focusControl = null; eTaskDialogButtons m_Buttons = eTaskDialogButtons.YesNoCancel; bool m_Expanded = false; bool m_isVista = false; #endregion //-------------------------------------------------------------------------------- #region PROPERTIES //-------------------------------------------------------------------------------- public eSysIcons MainIcon { get { return m_mainIcon; } set { m_mainIcon = value; } } public eSysIcons FooterIcon { get { return m_footerIcon; } set { m_footerIcon = value; } } public string Title { get { return this.Text; } set { this.Text = value; } } public string MainInstruction { get { return m_mainInstruction; } set { m_mainInstruction = value; this.Invalidate(); } } public string Content { get { return lbContent.Text; } set { lbContent.Text = value; } } public string ExpandedInfo { get { return lbExpandedInfo.Text; } set { lbExpandedInfo.Text = value; } } public string Footer { get { return lbFooter.Text; } set { lbFooter.Text = value; } } public int DefaultButtonIndex { get { return m_defaultButtonIndex; } set { m_defaultButtonIndex = value; } } public string RadioButtons { get { return m_radioButtons; } set { m_radioButtons = value; } } public int InitialRadioButtonIndex { get { return m_initialRadioButtonIndex; } set { m_initialRadioButtonIndex = value; } } public int RadioButtonIndex { get { foreach (RadioButton rb in m_radioButtonCtrls) if (rb.Checked) return (int)rb.Tag; return -1; } } public string CommandButtons { get { return m_commandButtons; } set { m_commandButtons = value; } } public int CommandButtonClickedIndex { get { return m_commandButtonClicked; } } public eTaskDialogButtons Buttons { get { return m_Buttons; } set { m_Buttons = value; } } public string VerificationText { get { return cbVerify.Text; } set { cbVerify.Text = value; } } public bool VerificationCheckBoxChecked { get { return cbVerify.Checked; } set { cbVerify.Checked = value; } } public bool Expanded { get { return m_Expanded; } set { m_Expanded = value; } } #endregion //-------------------------------------------------------------------------------- #region CONSTRUCTOR //-------------------------------------------------------------------------------- public frmTaskDialog() { InitializeComponent(); m_isVista = VistaTaskDialog.IsAvailableOnThisOS; if (!m_isVista && cTaskDialog.UseToolWindowOnXP) // <- shall we use the smaller toolbar? 
this.FormBorderStyle = FormBorderStyle.FixedToolWindow; MainInstruction = "Main Instruction"; Content = ""; ExpandedInfo = ""; Footer = ""; VerificationText = ""; } #endregion //-------------------------------------------------------------------------------- #region BuildForm // This is the main routine that should be called before .ShowDialog() //-------------------------------------------------------------------------------- bool m_formBuilt = false; public void BuildForm() { int form_height = 0; // Setup Main Instruction switch (m_mainIcon) { case eSysIcons.Information: imgMain.Image = SystemIcons.Information.ToBitmap(); break; case eSysIcons.Question: imgMain.Image = SystemIcons.Question.ToBitmap(); break; case eSysIcons.Warning: imgMain.Image = SystemIcons.Warning.ToBitmap(); break; case eSysIcons.Error: imgMain.Image = SystemIcons.Error.ToBitmap(); break; } //AdjustLabelHeight(lbMainInstruction); //pnlMainInstruction.Height = Math.Max(41, lbMainInstruction.Height + 16); if (m_mainInstructionHeight == 0) GetMainInstructionTextSizeF(); pnlMainInstruction.Height = Math.Max(41, m_mainInstructionHeight + 16); form_height += pnlMainInstruction.Height; // Setup Content pnlContent.Visible = (Content != ""); if (Content != "") { AdjustLabelHeight(lbContent); pnlContent.Height = lbContent.Height + 4; form_height += pnlContent.Height; } bool show_verify_checkbox = (cbVerify.Text != ""); cbVerify.Visible = show_verify_checkbox; // Setup Expanded Info and Buttons panels if (ExpandedInfo == "") { pnlExpandedInfo.Visible = false; lbShowHideDetails.Visible = false; cbVerify.Top = 12; pnlButtons.Height = 40; } else { AdjustLabelHeight(lbExpandedInfo); pnlExpandedInfo.Height = lbExpandedInfo.Height + 4; pnlExpandedInfo.Visible = m_Expanded; lbShowHideDetails.Text = (m_Expanded ? " Hide details" : " Show details"); lbShowHideDetails.ImageIndex = (m_Expanded ? 
0 : 3); if (!show_verify_checkbox) pnlButtons.Height = 40; if (m_Expanded) form_height += pnlExpandedInfo.Height; } // Setup RadioButtons pnlRadioButtons.Visible = (m_radioButtons != ""); if (m_radioButtons != "") { string[] arr = m_radioButtons.Split(new char[] { '|' }); int pnl_height = 12; for (int i = 0; i < arr.Length; i++) { RadioButton rb = new RadioButton(); rb.Parent = pnlRadioButtons; rb.Location = new Point(60, 4 + (i * rb.Height)); rb.Text = arr[i]; rb.Tag = i; rb.Checked = (m_defaultButtonIndex == i); rb.Width = this.Width - rb.Left - 15; pnl_height += rb.Height; m_radioButtonCtrls.Add(rb); } pnlRadioButtons.Height = pnl_height; form_height += pnlRadioButtons.Height; } // Setup CommandButtons pnlCommandButtons.Visible = (m_commandButtons != ""); if (m_commandButtons != "") { string[] arr = m_commandButtons.Split(new char[] { '|' }); int t = 8; int pnl_height = 16; for (int i = 0; i < arr.Length; i++) { CommandButton btn = new CommandButton(); btn.Parent = pnlCommandButtons; btn.Location = new Point(50, t); if (m_isVista) // <- tweak font if vista btn.Font = new Font(btn.Font, FontStyle.Regular); btn.Text = arr[i]; btn.Size = new Size(this.Width - btn.Left - 15, btn.GetBestHeight()); t += btn.Height; pnl_height += btn.Height; btn.Tag = i; btn.Click += new EventHandler(CommandButton_Click); if (i == m_defaultButtonIndex) m_focusControl = btn; } pnlCommandButtons.Height = pnl_height; form_height += pnlCommandButtons.Height; } // Setup Buttons switch (m_Buttons) { case eTaskDialogButtons.YesNo: bt1.Visible = false; bt2.Text = "&Yes"; bt2.DialogResult = DialogResult.Yes; bt3.Text = "&No"; bt3.DialogResult = DialogResult.No; this.AcceptButton = bt2; this.CancelButton = bt3; break; case eTaskDialogButtons.YesNoCancel: bt1.Text = "&Yes"; bt1.DialogResult = DialogResult.Yes; bt2.Text = "&No"; bt2.DialogResult = DialogResult.No; bt3.Text = "&Cancel"; bt3.DialogResult = DialogResult.Cancel; this.AcceptButton = bt1; this.CancelButton = bt3; break; case eTaskDialogButtons.OKCancel: bt1.Visible = false; bt2.Text = "&OK"; bt2.DialogResult = DialogResult.OK; bt3.Text = "&Cancel"; bt3.DialogResult = DialogResult.Cancel; this.AcceptButton = bt2; this.CancelButton = bt3; break; case eTaskDialogButtons.OK: bt1.Visible = false; bt2.Visible = false; bt3.Text = "&OK"; bt3.DialogResult = DialogResult.OK; this.AcceptButton = bt3; this.CancelButton = bt3; break; case eTaskDialogButtons.Close: bt1.Visible = false; bt2.Visible = false; bt3.Text = "&Close"; bt3.DialogResult = DialogResult.Cancel; this.CancelButton = bt3; break; case eTaskDialogButtons.Cancel: bt1.Visible = false; bt2.Visible = false; bt3.Text = "&Cancel"; bt3.DialogResult = DialogResult.Cancel; this.CancelButton = bt3; break; case eTaskDialogButtons.None: bt1.Visible = false; bt2.Visible = false; bt3.Visible = false; break; } this.ControlBox = (Buttons == eTaskDialogButtons.Cancel || Buttons == eTaskDialogButtons.Close || Buttons == eTaskDialogButtons.OKCancel || Buttons == eTaskDialogButtons.YesNoCancel); if (!show_verify_checkbox && ExpandedInfo == "" && m_Buttons == eTaskDialogButtons.None) pnlButtons.Visible = false; else form_height += pnlButtons.Height; pnlFooter.Visible = (Footer != ""); if (Footer != "") { AdjustLabelHeight(lbFooter); pnlFooter.Height = Math.Max(28, lbFooter.Height + 16); switch (m_footerIcon) { case eSysIcons.Information: // SystemIcons.Information.ToBitmap().GetThumbnailImage(16, 16, null, IntPtr.Zero); imgFooter.Image = ResizeBitmap(SystemIcons.Information.ToBitmap(), 16, 16); break; case eSysIcons.Question: // 
SystemIcons.Question.ToBitmap().GetThumbnailImage(16, 16, null, IntPtr.Zero); imgFooter.Image = ResizeBitmap(SystemIcons.Question.ToBitmap(), 16, 16); break; case eSysIcons.Warning: // SystemIcons.Warning.ToBitmap().GetThumbnailImage(16, 16, null, IntPtr.Zero); imgFooter.Image = ResizeBitmap(SystemIcons.Warning.ToBitmap(), 16, 16); break; case eSysIcons.Error: // SystemIcons.Error.ToBitmap().GetThumbnailImage(16, 16, null, IntPtr.Zero); imgFooter.Image = ResizeBitmap(SystemIcons.Error.ToBitmap(), 16, 16); break; } form_height += pnlFooter.Height; } this.ClientSize = new Size(ClientSize.Width, form_height); m_formBuilt = true; } //-------------------------------------------------------------------------------- Image ResizeBitmap(Image SrcImg, int NewWidth, int NewHeight) { float percent_width = (NewWidth / (float)SrcImg.Width); float percent_height = (NewHeight / (float)SrcImg.Height); float resize_percent = (percent_height < percent_width ? percent_height : percent_width); int w = (int)(SrcImg.Width * resize_percent); int h = (int)(SrcImg.Height * resize_percent); Bitmap b = new Bitmap(w, h); using (Graphics g = Graphics.FromImage(b)) { g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic; g.DrawImage(SrcImg, 0, 0, w, h); } return b; } //-------------------------------------------------------------------------------- // utility function for setting a Label's height void AdjustLabelHeight(Label lb) { string text = lb.Text; Font textFont = lb.Font; SizeF layoutSize = new SizeF(lb.ClientSize.Width, 5000.0F); Graphics g = Graphics.FromHwnd(lb.Handle); SizeF stringSize = g.MeasureString(text, textFont, layoutSize); lb.Height = (int)stringSize.Height + 4; g.Dispose(); } #endregion //-------------------------------------------------------------------------------- #region EVENTS //-------------------------------------------------------------------------------- void CommandButton_Click(object sender, EventArgs e) { m_commandButtonClicked = (int)((CommandButton)sender).Tag; this.DialogResult = DialogResult.OK; } //-------------------------------------------------------------------------------- protected override void OnLoad(EventArgs e) { base.OnLoad(e); } //-------------------------------------------------------------------------------- protected override void OnShown(EventArgs e) { if (!m_formBuilt) throw new Exception("frmTaskDialog : Please call .BuildForm() before showing the TaskDialog"); base.OnShown(e); } //-------------------------------------------------------------------------------- private void lbDetails_MouseEnter(object sender, EventArgs e) { lbShowHideDetails.ImageIndex = (m_Expanded ? 1 : 4); } //-------------------------------------------------------------------------------- private void lbDetails_MouseLeave(object sender, EventArgs e) { lbShowHideDetails.ImageIndex = (m_Expanded ? 0 : 3); } //-------------------------------------------------------------------------------- private void lbDetails_MouseUp(object sender, MouseEventArgs e) { lbShowHideDetails.ImageIndex = (m_Expanded ? 1 : 4); } //-------------------------------------------------------------------------------- private void lbDetails_MouseDown(object sender, MouseEventArgs e) { lbShowHideDetails.ImageIndex =(m_Expanded ? 2 : 5); } //-------------------------------------------------------------------------------- private void lbDetails_Click(object sender, EventArgs e) { m_Expanded = !m_Expanded; pnlExpandedInfo.Visible = m_Expanded; lbShowHideDetails.Text = (m_Expanded ? 
" Hide details" : " Show details"); if (m_Expanded) this.Height += pnlExpandedInfo.Height; else this.Height -= pnlExpandedInfo.Height; } //-------------------------------------------------------------------------------- const int MAIN_INSTRUCTION_LEFT_MARGIN = 46; const int MAIN_INSTRUCTION_RIGHT_MARGIN = 8; SizeF GetMainInstructionTextSizeF() { SizeF mzSize = new SizeF(pnlMainInstruction.Width - MAIN_INSTRUCTION_LEFT_MARGIN - MAIN_INSTRUCTION_RIGHT_MARGIN, 5000.0F); Graphics g = Graphics.FromHwnd(this.Handle); SizeF textSize = g.MeasureString(m_mainInstruction, m_mainInstructionFont, mzSize); m_mainInstructionHeight = (int)textSize.Height; return textSize; } private void pnlMainInstruction_Paint(object sender, PaintEventArgs e) { SizeF szL = GetMainInstructionTextSizeF(); e.Graphics.TextRenderingHint = System.Drawing.Text.TextRenderingHint.ClearTypeGridFit; e.Graphics.DrawString(m_mainInstruction, m_mainInstructionFont, new SolidBrush(Color.DarkBlue), new RectangleF(new PointF(MAIN_INSTRUCTION_LEFT_MARGIN, 10), szL)); } //-------------------------------------------------------------------------------- private void frmTaskDialog_Shown(object sender, EventArgs e) { if (cTaskDialog.PlaySystemSounds) { switch (m_mainIcon) { case eSysIcons.Error: System.Media.SystemSounds.Hand.Play(); break; case eSysIcons.Information: System.Media.SystemSounds.Asterisk.Play(); break; case eSysIcons.Question: System.Media.SystemSounds.Asterisk.Play(); break; case eSysIcons.Warning: System.Media.SystemSounds.Exclamation.Play(); break; } } if (m_focusControl != null) m_focusControl.Focus(); } #endregion //-------------------------------------------------------------------------------- } }
using System; using System.Collections.Generic; using System.ComponentModel; using System.Drawing; using System.Drawing.Drawing2D; using System.Linq; using System.Xml; using System.Xml.Serialization; using AgenaTrader.API; using AgenaTrader.Custom; using AgenaTrader.Plugins; using AgenaTrader.Helper; /// <summary> /// Version: 1.2.0 /// ------------------------------------------------------------------------- /// Simon Pucher 2016 /// ------------------------------------------------------------------------- /// Opens web browser by clicking on the chart. /// ------------------------------------------------------------------------- /// ****** Important ****** /// To compile this indicator without any error you also need access to the utility indicator to use these global source code elements. /// You will find this indicator on GitHub: https://raw.githubusercontent.com/simonpucher/AgenaTrader/master/Utilities/GlobalUtilities_Utility.cs /// ------------------------------------------------------------------------- /// Namespace holds all indicators and is required. Do not change it. /// </summary> namespace AgenaTrader.UserCode { [Description("Opens web browser by clicking on a button on the chart.")] public class OpenBrowser_Utility_Tool : UserIndicator { #region Variables private RectangleF _rect; private RectangleF _rect2; //private Pen _pen = Pens.Black; private Brush _brush = Brushes.Gray; private bool _opengooglefinance = true; private bool _openmorningstar = true; private bool _openmorningstar_direct = true; private bool _openyahoofinance = true; private bool _openearningswhispers = true; private bool _openzacks = true; private bool _openzacks_direct = true; #endregion protected override void OnInit() { //Add(new OutputDescriptor(Color.FromKnownColor(KnownColor.Orange), "MyPlot1")); IsOverlay = true; } protected override void OnStart() { // Add event listener if (Chart != null) Chart.ChartPanelMouseDown += OnChartPanelMouseDown; } protected override void OnCalculate() { //if (this.IsProcessingBarIndexLast && this.Instrument.InstrumentType == InstrumentType.Stock) //{ // _brush = Brushes.Green; //} } protected override void OnDispose() { // Remove event listener if (Chart != null) Chart.ChartPanelMouseDown -= OnChartPanelMouseDown; } public override string DisplayName { get { return "Open Browser (T)"; } } public override string ToString() { return "Open Browser (T)"; } #region Events public override void OnPaint(Graphics g, Rectangle r, double min, double max) { if (Bars == null || Chart == null) return; //Only draw button if parameters are available. 
if (this.Instrument != null) { string strtext = "open browser"; //Only stocks are possible to lookup if (this.Instrument.InstrumentType == InstrumentType.Stock) { _brush = Brushes.Green; } else { _brush = Brushes.Gray; strtext = "not supported"; } using (Font font1 = new Font("Arial", 8, FontStyle.Bold, GraphicsUnit.Point)) { StringFormat stringFormat = new StringFormat(); stringFormat.Alignment = StringAlignment.Center; stringFormat.LineAlignment = StringAlignment.Center; this.Core.GetDataDirectory(); Brush tempbrush = new SolidBrush(GlobalUtilities.AdjustOpacity(((SolidBrush)_brush).Color, 0.5F)); _rect = new RectangleF(r.Width - 100, 10, 86, 27); g.FillRectangle(tempbrush, _rect); g.DrawString(strtext, font1, Brushes.White, _rect, stringFormat); _rect2 = new RectangleF(r.Width - 100, 40, 86, 27); } } } private void OnChartPanelMouseDown(object sender, System.Windows.Forms.MouseEventArgs e) { Point cursorPos = new Point(e.X, e.Y); if (_rect.Contains(cursorPos) && this.Instrument.InstrumentType == InstrumentType.Stock) { string symbol = this.Instrument.Symbol; string isin = this.Instrument.ISIN; //if (this.Instrument.InstrumentType == InstrumentType.CFD) //{ //} if (this.OpenGoogleFinance) { GUIHelper.OpenInBrowser("https://www.google.com/finance?q=" + symbol); } if (this.OpenMorningstar) { GUIHelper.OpenInBrowser("http://beta.morningstar.com/search.html?q=" + isin); } if (this.OpenMorningstar_Direct) { //GUIHelper.OpenInBrowser("http://quote.morningstar.com/Quote.html?ticker=" + symbol); GUIHelper.OpenInBrowser("http://quote.morningstar.com/Quote.html?t=" + isin); } if (this.OpenYahooFinance) { GUIHelper.OpenInBrowser("https://finance.yahoo.com/quote/" + symbol); } if (this.OpenEarningswhispers) { GUIHelper.OpenInBrowser("https://earningswhispers.com/stocks/" + symbol); } if (this.OpenZacks) { GUIHelper.OpenInBrowser("https://www.zacks.com/search.php?q=" + symbol); } if (this.OpenZacks_Direct) { GUIHelper.OpenInBrowser("https://www.zacks.com/stock/quote/" + symbol); } } else { //nothing to do } this.OnCalculate(); } #endregion #region Properties #region Output //[Browsable(false)] // [XmlIgnore()] // public DataSeries MyPlot1 // { // get { return Outputs[0]; } // } #endregion #region InSeries [Description("Opens Yahoo Finance with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Yahoo Finance")] public bool OpenYahooFinance { get { return _openyahoofinance; } set { _openyahoofinance = value; } } [Description("Opens Google Finance with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Google Finance")] public bool OpenGoogleFinance { get { return _opengooglefinance; } set { _opengooglefinance = value; } } [Description("Opens Morningstar with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Morningstar")] public bool OpenMorningstar { get { return _openmorningstar; } set { _openmorningstar = value; } } [Description("Opens Morningstar Direct with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Morningstar Direct")] public bool OpenMorningstar_Direct { get { return _openmorningstar_direct; } set { _openmorningstar_direct = value; } } [Description("Opens Earnings whispers with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Earnings whispers")] public bool OpenEarningswhispers { get { return _openearningswhispers; } set { _openearningswhispers = value; } } [Description("Opens Zacks with the current symbol displayed in the chart")] [InputParameter] 
[DisplayName("Zacks")] public bool OpenZacks { get { return _openzacks; } set { _openzacks = value; } } [Description("Opens Zacks Direct with the current symbol displayed in the chart")] [InputParameter] [DisplayName("Zacks Direct")] public bool OpenZacks_Direct { get { return _openzacks_direct; } set { _openzacks_direct = value; } } private Color _col_positive = Color.Green; /// <summary> /// </summary> [Description("Color for in the list")] [Category("Plots")] [DisplayName("Color positive")] public Color Color_Positive { get { return _col_positive; } set { _col_positive = value; } } [Browsable(false)] public string Color_Positive_Serialize { get { return SerializableColor.ToString(_col_positive); } set { _col_positive = SerializableColor.FromString(value); } } private Color _col_negative = Color.Gray; /// <summary> /// </summary> [Description("Color for not in the list")] [Category("Plots")] [DisplayName("Color negative")] public Color Color_Negative { get { return _col_negative; } set { _col_negative = value; } } [Browsable(false)] public string Color_Negative_Serialize { get { return SerializableColor.ToString(_col_negative); } set { _col_negative = SerializableColor.FromString(value); } } #endregion #endregion } }
//#define Trace // ParallelDeflateOutputStream.cs // ------------------------------------------------------------------ // // A DeflateStream that does compression only, it uses a // divide-and-conquer approach with multiple threads to exploit multiple // CPUs for the DEFLATE computation. // // last saved: <2011-July-31 14:49:40> // // ------------------------------------------------------------------ // // Copyright (c) 2009-2011 by Dino Chiesa // All rights reserved! // // This code module is part of DotNetZip, a zipfile class library. // // ------------------------------------------------------------------ // // This code is licensed under the Microsoft Public License. // See the file License.txt for the license details. // More info on: http://dotnetzip.codeplex.com // // ------------------------------------------------------------------ using System; using System.Collections.Generic; using System.Threading; using Ionic.Zlib; using System.IO; namespace Ionic.Zlib { internal class WorkItem { public byte[] buffer; public byte[] compressed; public int crc; public int index; public int ordinal; public int inputBytesAvailable; public int compressedBytesAvailable; public ZlibCodec compressor; public WorkItem(int size, Ionic.Zlib.CompressionLevel compressLevel, CompressionStrategy strategy, int ix) { this.buffer= new byte[size]; // alloc 5 bytes overhead for every block (margin of safety= 2) int n = size + ((size / 32768)+1) * 5 * 2; this.compressed = new byte[n]; this.compressor = new ZlibCodec(); this.compressor.InitializeDeflate(compressLevel, false); this.compressor.OutputBuffer = this.compressed; this.compressor.InputBuffer = this.buffer; this.index = ix; } } /// <summary> /// A class for compressing streams using the /// Deflate algorithm with multiple threads. /// </summary> /// /// <remarks> /// <para> /// This class performs DEFLATE compression through writing. For /// more information on the Deflate algorithm, see IETF RFC 1951, /// "DEFLATE Compressed Data Format Specification version 1.3." /// </para> /// /// <para> /// This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>, except /// that this class is for compression only, and this implementation uses an /// approach that employs multiple worker threads to perform the DEFLATE. On /// a multi-cpu or multi-core computer, the performance of this class can be /// significantly higher than the single-threaded DeflateStream, particularly /// for larger streams. How large? Anything over 10mb is a good candidate /// for parallel compression. /// </para> /// /// <para> /// The tradeoff is that this class uses more memory and more CPU than the /// vanilla DeflateStream, and also is less efficient as a compressor. For /// large files the size of the compressed data stream can be less than 1% /// larger than the size of a compressed data stream from the vanialla /// DeflateStream. For smaller files the difference can be larger. The /// difference will also be larger if you set the BufferSize to be lower than /// the default value. Your mileage may vary. Finally, for small files, the /// ParallelDeflateOutputStream can be much slower than the vanilla /// DeflateStream, because of the overhead associated to using the thread /// pool. 
/// </para> /// /// </remarks> /// <seealso cref="Ionic.Zlib.DeflateStream" /> public class ParallelDeflateOutputStream : System.IO.Stream { private static readonly int IO_BUFFER_SIZE_DEFAULT = 64 * 1024; // 128k private static readonly int BufferPairsPerCore = 4; private System.Collections.Generic.List<WorkItem> _pool; private bool _leaveOpen; private bool emitting; private System.IO.Stream _outStream; private int _maxBufferPairs; private int _bufferSize = IO_BUFFER_SIZE_DEFAULT; private AutoResetEvent _newlyCompressedBlob; //private ManualResetEvent _writingDone; //private ManualResetEvent _sessionReset; private object _outputLock = new object(); private bool _isClosed; private bool _firstWriteDone; private int _currentlyFilling; private int _lastFilled; private int _lastWritten; private int _latestCompressed; private int _Crc32; private Ionic.Crc.CRC32 _runningCrc; private object _latestLock = new object(); private System.Collections.Generic.Queue<int> _toWrite; private System.Collections.Generic.Queue<int> _toFill; private Int64 _totalBytesProcessed; private Ionic.Zlib.CompressionLevel _compressLevel; private volatile Exception _pendingException; private bool _handlingException; private object _eLock = new Object(); // protects _pendingException // This bitfield is used only when Trace is defined. //private TraceBits _DesiredTrace = TraceBits.Write | TraceBits.WriteBegin | //TraceBits.WriteDone | TraceBits.Lifecycle | TraceBits.Fill | TraceBits.Flush | //TraceBits.Session; //private TraceBits _DesiredTrace = TraceBits.WriteBegin | TraceBits.WriteDone | TraceBits.Synch | TraceBits.Lifecycle | TraceBits.Session ; private TraceBits _DesiredTrace = TraceBits.Session | TraceBits.Compress | TraceBits.WriteTake | TraceBits.WriteEnter | TraceBits.EmitEnter | TraceBits.EmitDone | TraceBits.EmitLock | TraceBits.EmitSkip | TraceBits.EmitBegin; /// <summary> /// Create a ParallelDeflateOutputStream. /// </summary> /// <remarks> /// /// <para> /// This stream compresses data written into it via the DEFLATE /// algorithm (see RFC 1951), and writes out the compressed byte stream. /// </para> /// /// <para> /// The instance will use the default compression level, the default /// buffer sizes and the default number of threads and buffers per /// thread. /// </para> /// /// <para> /// This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>, /// except that this implementation uses an approach that employs /// multiple worker threads to perform the DEFLATE. On a multi-cpu or /// multi-core computer, the performance of this class can be /// significantly higher than the single-threaded DeflateStream, /// particularly for larger streams. How large? Anything over 10mb is /// a good candidate for parallel compression. /// </para> /// /// </remarks> /// /// <example> /// /// This example shows how to use a ParallelDeflateOutputStream to compress /// data. It reads a file, compresses it, and writes the compressed data to /// a second, output file. 
/// /// <code> /// byte[] buffer = new byte[WORKING_BUFFER_SIZE]; /// int n= -1; /// String outputFile = fileToCompress + ".compressed"; /// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress)) /// { /// using (var raw = System.IO.File.Create(outputFile)) /// { /// using (Stream compressor = new ParallelDeflateOutputStream(raw)) /// { /// while ((n= input.Read(buffer, 0, buffer.Length)) != 0) /// { /// compressor.Write(buffer, 0, n); /// } /// } /// } /// } /// </code> /// <code lang="VB"> /// Dim buffer As Byte() = New Byte(4096) {} /// Dim n As Integer = -1 /// Dim outputFile As String = (fileToCompress &amp; ".compressed") /// Using input As Stream = File.OpenRead(fileToCompress) /// Using raw As FileStream = File.Create(outputFile) /// Using compressor As Stream = New ParallelDeflateOutputStream(raw) /// Do While (n &lt;&gt; 0) /// If (n &gt; 0) Then /// compressor.Write(buffer, 0, n) /// End If /// n = input.Read(buffer, 0, buffer.Length) /// Loop /// End Using /// End Using /// End Using /// </code> /// </example> /// <param name="stream">The stream to which compressed data will be written.</param> public ParallelDeflateOutputStream(System.IO.Stream stream) : this(stream, CompressionLevel.Default, CompressionStrategy.Default, false) { } /// <summary> /// Create a ParallelDeflateOutputStream using the specified CompressionLevel. /// </summary> /// <remarks> /// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/> /// constructor for example code. /// </remarks> /// <param name="stream">The stream to which compressed data will be written.</param> /// <param name="level">A tuning knob to trade speed for effectiveness.</param> public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level) : this(stream, level, CompressionStrategy.Default, false) { } /// <summary> /// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open /// when the ParallelDeflateOutputStream is closed. /// </summary> /// <remarks> /// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/> /// constructor for example code. /// </remarks> /// <param name="stream">The stream to which compressed data will be written.</param> /// <param name="leaveOpen"> /// true if the application would like the stream to remain open after inflation/deflation. /// </param> public ParallelDeflateOutputStream(System.IO.Stream stream, bool leaveOpen) : this(stream, CompressionLevel.Default, CompressionStrategy.Default, leaveOpen) { } /// <summary> /// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open /// when the ParallelDeflateOutputStream is closed. /// </summary> /// <remarks> /// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/> /// constructor for example code. /// </remarks> /// <param name="stream">The stream to which compressed data will be written.</param> /// <param name="level">A tuning knob to trade speed for effectiveness.</param> /// <param name="leaveOpen"> /// true if the application would like the stream to remain open after inflation/deflation. 
/// </param> public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level, bool leaveOpen) : this(stream, CompressionLevel.Default, CompressionStrategy.Default, leaveOpen) { } /// <summary> /// Create a ParallelDeflateOutputStream using the specified /// CompressionLevel and CompressionStrategy, and specifying whether to /// leave the captive stream open when the ParallelDeflateOutputStream is /// closed. /// </summary> /// <remarks> /// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/> /// constructor for example code. /// </remarks> /// <param name="stream">The stream to which compressed data will be written.</param> /// <param name="level">A tuning knob to trade speed for effectiveness.</param> /// <param name="strategy"> /// By tweaking this parameter, you may be able to optimize the compression for /// data with particular characteristics. /// </param> /// <param name="leaveOpen"> /// true if the application would like the stream to remain open after inflation/deflation. /// </param> public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level, CompressionStrategy strategy, bool leaveOpen) { TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "-------------------------------------------------------"); TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "Create {0:X8}", this.GetHashCode()); _outStream = stream; _compressLevel= level; Strategy = strategy; _leaveOpen = leaveOpen; this.MaxBufferPairs = 16; // default } /// <summary> /// The ZLIB strategy to be used during compression. /// </summary> /// public CompressionStrategy Strategy { get; private set; } /// <summary> /// The maximum number of buffer pairs to use. /// </summary> /// /// <remarks> /// <para> /// This property sets an upper limit on the number of memory buffer /// pairs to create. The implementation of this stream allocates /// multiple buffers to facilitate parallel compression. As each buffer /// fills up, this stream uses <see /// cref="System.Threading.ThreadPool.QueueUserWorkItem(WaitCallback)"> /// ThreadPool.QueueUserWorkItem()</see> /// to compress those buffers in a background threadpool thread. After a /// buffer is compressed, it is re-ordered and written to the output /// stream. /// </para> /// /// <para> /// A higher number of buffer pairs enables a higher degree of /// parallelism, which tends to increase the speed of compression on /// multi-cpu computers. On the other hand, a higher number of buffer /// pairs also implies a larger memory consumption, more active worker /// threads, and a higher cpu utilization for any compression. This /// property enables the application to limit its memory consumption and /// CPU utilization behavior depending on requirements. /// </para> /// /// <para> /// For each compression "task" that occurs in parallel, there are 2 /// buffers allocated: one for input and one for output. This property /// sets a limit for the number of pairs. The total amount of storage /// space allocated for buffering will then be (N*S*2), where N is the /// number of buffer pairs, S is the size of each buffer (<see /// cref="BufferSize"/>). By default, DotNetZip allocates 4 buffer /// pairs per CPU core, so if your machine has 4 cores, and you retain /// the default buffer size of 128k, then the /// ParallelDeflateOutputStream will use 4 * 4 * 2 * 128kb of buffer /// memory in total, or 4mb, in blocks of 128kb. If you then set this /// property to 8, then the number will be 8 * 2 * 128kb of buffer /// memory, or 2mb. 
/// </para> /// /// <para> /// CPU utilization will also go up with additional buffers, because a /// larger number of buffer pairs allows a larger number of background /// threads to compress in parallel. If you find that parallel /// compression is consuming too much memory or CPU, you can adjust this /// value downward. /// </para> /// /// <para> /// The default value is 16. Different values may deliver better or /// worse results, depending on your priorities and the dynamic /// performance characteristics of your storage and compute resources. /// </para> /// /// <para> /// This property is not the number of buffer pairs to use; it is an /// upper limit. An illustration: Suppose you have an application that /// uses the default value of this property (which is 16), and it runs /// on a machine with 2 CPU cores. In that case, DotNetZip will allocate /// 4 buffer pairs per CPU core, for a total of 8 pairs. The upper /// limit specified by this property has no effect. /// </para> /// /// <para> /// The application can set this value at any time, but it is effective /// only before the first call to Write(), which is when the buffers are /// allocated. /// </para> /// </remarks> public int MaxBufferPairs { get { return _maxBufferPairs; } set { if (value < 4) throw new ArgumentException("MaxBufferPairs", "Value must be 4 or greater."); _maxBufferPairs = value; } } /// <summary> /// The size of the buffers used by the compressor threads. /// </summary> /// <remarks> /// /// <para> /// The default buffer size is 128k. The application can set this value /// at any time, but it is effective only before the first Write(). /// </para> /// /// <para> /// Larger buffer sizes implies larger memory consumption but allows /// more efficient compression. Using smaller buffer sizes consumes less /// memory but may result in less effective compression. For example, /// using the default buffer size of 128k, the compression delivered is /// within 1% of the compression delivered by the single-threaded <see /// cref="Ionic.Zlib.DeflateStream"/>. On the other hand, using a /// BufferSize of 8k can result in a compressed data stream that is 5% /// larger than that delivered by the single-threaded /// <c>DeflateStream</c>. Excessively small buffer sizes can also cause /// the speed of the ParallelDeflateOutputStream to drop, because of /// larger thread scheduling overhead dealing with many many small /// buffers. /// </para> /// /// <para> /// The total amount of storage space allocated for buffering will be /// (N*S*2), where N is the number of buffer pairs, and S is the size of /// each buffer (this property). There are 2 buffers used by the /// compressor, one for input and one for output. By default, DotNetZip /// allocates 4 buffer pairs per CPU core, so if your machine has 4 /// cores, then the number of buffer pairs used will be 16. If you /// accept the default value of this property, 128k, then the /// ParallelDeflateOutputStream will use 16 * 2 * 128kb of buffer memory /// in total, or 4mb, in blocks of 128kb. If you set this property to /// 64kb, then the number will be 16 * 2 * 64kb of buffer memory, or /// 2mb. /// </para> /// /// </remarks> public int BufferSize { get { return _bufferSize;} set { if (value < 1024) throw new ArgumentOutOfRangeException("BufferSize", "BufferSize must be greater than 1024 bytes"); _bufferSize = value; } } /// <summary> /// The CRC32 for the data that was written out, prior to compression. 
/// </summary> /// <remarks> /// This value is meaningful only after a call to Close(). /// </remarks> public int Crc32 { get { return _Crc32; } } /// <summary> /// The total number of uncompressed bytes processed by the ParallelDeflateOutputStream. /// </summary> /// <remarks> /// This value is meaningful only after a call to Close(). /// </remarks> public Int64 BytesProcessed { get { return _totalBytesProcessed; } } private void _InitializePoolOfWorkItems() { _toWrite = new Queue<int>(); _toFill = new Queue<int>(); _pool = new System.Collections.Generic.List<WorkItem>(); int nTasks = BufferPairsPerCore * Environment.ProcessorCount; nTasks = Math.Min(nTasks, _maxBufferPairs); for(int i=0; i < nTasks; i++) { _pool.Add(new WorkItem(_bufferSize, _compressLevel, Strategy, i)); _toFill.Enqueue(i); } _newlyCompressedBlob = new AutoResetEvent(false); _runningCrc = new Ionic.Crc.CRC32(); _currentlyFilling = -1; _lastFilled = -1; _lastWritten = -1; _latestCompressed = -1; } /// <summary> /// Write data to the stream. /// </summary> /// /// <remarks> /// /// <para> /// To use the ParallelDeflateOutputStream to compress data, create a /// ParallelDeflateOutputStream with CompressionMode.Compress, passing a /// writable output stream. Then call Write() on that /// ParallelDeflateOutputStream, providing uncompressed data as input. The /// data sent to the output stream will be the compressed form of the data /// written. /// </para> /// /// <para> /// To decompress data, use the <see cref="Ionic.Zlib.DeflateStream"/> class. /// </para> /// /// </remarks> /// <param name="buffer">The buffer holding data to write to the stream.</param> /// <param name="offset">the offset within that data array to find the first byte to write.</param> /// <param name="count">the number of bytes to write.</param> public override void Write(byte[] buffer, int offset, int count) { bool mustWait = false; // This method does this: // 0. handles any pending exceptions // 1. write any buffers that are ready to be written, // 2. fills a work buffer; when full, flip state to 'Filled', // 3. if more data to be written, goto step 1 if (_isClosed) throw new InvalidOperationException(); // dispense any exceptions that occurred on the BG threads if (_pendingException != null) { _handlingException = true; var pe = _pendingException; _pendingException = null; throw pe; } if (count == 0) return; if (!_firstWriteDone) { // Want to do this on first Write, first session, and not in the // constructor. We want to allow MaxBufferPairs to // change after construction, but before first Write. _InitializePoolOfWorkItems(); _firstWriteDone = true; } do { // may need to make buffers available EmitPendingBuffers(false, mustWait); mustWait = false; // use current buffer, or get a new buffer to fill int ix = -1; if (_currentlyFilling >= 0) { ix = _currentlyFilling; TraceOutput(TraceBits.WriteTake, "Write notake wi({0}) lf({1})", ix, _lastFilled); } else { TraceOutput(TraceBits.WriteTake, "Write take?"); if (_toFill.Count == 0) { // no available buffers, so... need to emit // compressed buffers. mustWait = true; continue; } ix = _toFill.Dequeue(); TraceOutput(TraceBits.WriteTake, "Write take wi({0}) lf({1})", ix, _lastFilled); ++_lastFilled; // TODO: consider rollover? } WorkItem workitem = _pool[ix]; int limit = ((workitem.buffer.Length - workitem.inputBytesAvailable) > count) ? 
count : (workitem.buffer.Length - workitem.inputBytesAvailable); workitem.ordinal = _lastFilled; TraceOutput(TraceBits.Write, "Write lock wi({0}) ord({1}) iba({2})", workitem.index, workitem.ordinal, workitem.inputBytesAvailable ); // copy from the provided buffer to our workitem, starting at // the tail end of whatever data we might have in there currently. Buffer.BlockCopy(buffer, offset, workitem.buffer, workitem.inputBytesAvailable, limit); count -= limit; offset += limit; workitem.inputBytesAvailable += limit; if (workitem.inputBytesAvailable == workitem.buffer.Length) { // No need for interlocked.increment: the Write() // method is documented as not multi-thread safe, so // we can assume Write() calls come in from only one // thread. TraceOutput(TraceBits.Write, "Write QUWI wi({0}) ord({1}) iba({2}) nf({3})", workitem.index, workitem.ordinal, workitem.inputBytesAvailable ); if (!ThreadPool.QueueUserWorkItem( _DeflateOne, workitem )) throw new Exception("Cannot enqueue workitem"); _currentlyFilling = -1; // will get a new buffer next time } else _currentlyFilling = ix; if (count > 0) TraceOutput(TraceBits.WriteEnter, "Write more"); } while (count > 0); // until no more to write TraceOutput(TraceBits.WriteEnter, "Write exit"); return; } private void _FlushFinish() { // After writing a series of compressed buffers, each one closed // with Flush.Sync, we now write the final one as Flush.Finish, // and then stop. byte[] buffer = new byte[128]; var compressor = new ZlibCodec(); int rc = compressor.InitializeDeflate(_compressLevel, false); compressor.InputBuffer = null; compressor.NextIn = 0; compressor.AvailableBytesIn = 0; compressor.OutputBuffer = buffer; compressor.NextOut = 0; compressor.AvailableBytesOut = buffer.Length; rc = compressor.Deflate(FlushType.Finish); if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK) throw new Exception("deflating: " + compressor.Message); if (buffer.Length - compressor.AvailableBytesOut > 0) { TraceOutput(TraceBits.EmitBegin, "Emit begin flush bytes({0})", buffer.Length - compressor.AvailableBytesOut); _outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut); TraceOutput(TraceBits.EmitDone, "Emit done flush"); } compressor.EndDeflate(); _Crc32 = _runningCrc.Crc32Result; } private void _Flush(bool lastInput) { if (_isClosed) throw new InvalidOperationException(); if (emitting) return; // compress any partial buffer if (_currentlyFilling >= 0) { WorkItem workitem = _pool[_currentlyFilling]; _DeflateOne(workitem); _currentlyFilling = -1; // get a new buffer next Write() } if (lastInput) { EmitPendingBuffers(true, false); _FlushFinish(); } else { EmitPendingBuffers(false, false); } } /// <summary> /// Flush the stream. /// </summary> public override void Flush() { if (_pendingException != null) { _handlingException = true; var pe = _pendingException; _pendingException = null; throw pe; } if (_handlingException) return; _Flush(false); } /// <summary> /// Close the stream. /// </summary> /// <remarks> /// You must call Close on the stream to guarantee that all of the data written in has /// been compressed, and the compressed data has been written out. 
/// </remarks> public override void Close() { TraceOutput(TraceBits.Session, "Close {0:X8}", this.GetHashCode()); if (_pendingException != null) { _handlingException = true; var pe = _pendingException; _pendingException = null; throw pe; } if (_handlingException) return; if (_isClosed) return; _Flush(true); if (!_leaveOpen) _outStream.Close(); _isClosed= true; } // workitem 10030 - implement a new Dispose method /// <summary>Dispose the object</summary> /// <remarks> /// <para> /// Because ParallelDeflateOutputStream is IDisposable, the /// application must call this method when finished using the instance. /// </para> /// <para> /// This method is generally called implicitly upon exit from /// a <c>using</c> scope in C# (<c>Using</c> in VB). /// </para> /// </remarks> new public void Dispose() { TraceOutput(TraceBits.Lifecycle, "Dispose {0:X8}", this.GetHashCode()); Close(); _pool = null; Dispose(true); } /// <summary>The Dispose method</summary> /// <param name="disposing"> /// indicates whether the Dispose method was invoked by user code. /// </param> protected override void Dispose(bool disposing) { base.Dispose(disposing); } /// <summary> /// Resets the stream for use with another stream. /// </summary> /// <remarks> /// Because the ParallelDeflateOutputStream is expensive to create, it /// has been designed so that it can be recycled and re-used. You have /// to call Close() on the stream first, then you can call Reset() on /// it, to use it again on another stream. /// </remarks> /// /// <param name="stream"> /// The new output stream for this era. /// </param> /// /// <example> /// <code> /// ParallelDeflateOutputStream deflater = null; /// foreach (var inputFile in listOfFiles) /// { /// string outputFile = inputFile + ".compressed"; /// using (System.IO.Stream input = System.IO.File.OpenRead(inputFile)) /// { /// using (var outStream = System.IO.File.Create(outputFile)) /// { /// if (deflater == null) /// deflater = new ParallelDeflateOutputStream(outStream, /// CompressionLevel.Best, /// CompressionStrategy.Default, /// true); /// deflater.Reset(outStream); /// /// while ((n= input.Read(buffer, 0, buffer.Length)) != 0) /// { /// deflater.Write(buffer, 0, n); /// } /// } /// } /// } /// </code> /// </example> public void Reset(Stream stream) { TraceOutput(TraceBits.Session, "-------------------------------------------------------"); TraceOutput(TraceBits.Session, "Reset {0:X8} firstDone({1})", this.GetHashCode(), _firstWriteDone); if (!_firstWriteDone) return; // reset all status _toWrite.Clear(); _toFill.Clear(); foreach (var workitem in _pool) { _toFill.Enqueue(workitem.index); workitem.ordinal = -1; } _firstWriteDone = false; _totalBytesProcessed = 0L; _runningCrc = new Ionic.Crc.CRC32(); _isClosed= false; _currentlyFilling = -1; _lastFilled = -1; _lastWritten = -1; _latestCompressed = -1; _outStream = stream; } private void EmitPendingBuffers(bool doAll, bool mustWait) { // When combining parallel deflation with a ZipSegmentedStream, it's // possible for the ZSS to throw from within this method. In that // case, Close/Dispose will be called on this stream, if this stream // is employed within a using or try/finally pair as required. But // this stream is unaware of the pending exception, so the Close() // method invokes this method AGAIN. This can lead to a deadlock. // Therefore, failfast if re-entering. if (emitting) return; emitting = true; if (doAll || mustWait) _newlyCompressedBlob.WaitOne(); do { int firstSkip = -1; int millisecondsToWait = doAll ? 200 : (mustWait ? 
-1 : 0); int nextToWrite = -1; do { if (Monitor.TryEnter(_toWrite, millisecondsToWait)) { nextToWrite = -1; try { if (_toWrite.Count > 0) nextToWrite = _toWrite.Dequeue(); } finally { Monitor.Exit(_toWrite); } if (nextToWrite >= 0) { WorkItem workitem = _pool[nextToWrite]; if (workitem.ordinal != _lastWritten + 1) { // out of order. requeue and try again. TraceOutput(TraceBits.EmitSkip, "Emit skip wi({0}) ord({1}) lw({2}) fs({3})", workitem.index, workitem.ordinal, _lastWritten, firstSkip); lock(_toWrite) { _toWrite.Enqueue(nextToWrite); } if (firstSkip == nextToWrite) { // We went around the list once. // None of the items in the list is the one we want. // Now wait for a compressor to signal again. _newlyCompressedBlob.WaitOne(); firstSkip = -1; } else if (firstSkip == -1) firstSkip = nextToWrite; continue; } firstSkip = -1; TraceOutput(TraceBits.EmitBegin, "Emit begin wi({0}) ord({1}) cba({2})", workitem.index, workitem.ordinal, workitem.compressedBytesAvailable); _outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable); _runningCrc.Combine(workitem.crc, workitem.inputBytesAvailable); _totalBytesProcessed += workitem.inputBytesAvailable; workitem.inputBytesAvailable = 0; TraceOutput(TraceBits.EmitDone, "Emit done wi({0}) ord({1}) cba({2}) mtw({3})", workitem.index, workitem.ordinal, workitem.compressedBytesAvailable, millisecondsToWait); _lastWritten = workitem.ordinal; _toFill.Enqueue(workitem.index); // don't wait next time through if (millisecondsToWait == -1) millisecondsToWait = 0; } } else nextToWrite = -1; } while (nextToWrite >= 0); } while (doAll && (_lastWritten != _latestCompressed)); emitting = false; } #if OLD private void _PerpetualWriterMethod(object state) { TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod START"); try { do { // wait for the next session TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.WaitOne(begin) PWM"); _sessionReset.WaitOne(); TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.WaitOne(done) PWM"); if (_isDisposed) break; TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.Reset() PWM"); _sessionReset.Reset(); // repeatedly write buffers as they become ready WorkItem workitem = null; Ionic.Zlib.CRC32 c= new Ionic.Zlib.CRC32(); do { workitem = _pool[_nextToWrite % _pc]; lock(workitem) { if (_noMoreInputForThisSegment) TraceOutput(TraceBits.Write, "Write drain wi({0}) stat({1}) canuse({2}) cba({3})", workitem.index, workitem.status, (workitem.status == (int)WorkItem.Status.Compressed), workitem.compressedBytesAvailable); do { if (workitem.status == (int)WorkItem.Status.Compressed) { TraceOutput(TraceBits.WriteBegin, "Write begin wi({0}) stat({1}) cba({2})", workitem.index, workitem.status, workitem.compressedBytesAvailable); workitem.status = (int)WorkItem.Status.Writing; _outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable); c.Combine(workitem.crc, workitem.inputBytesAvailable); _totalBytesProcessed += workitem.inputBytesAvailable; _nextToWrite++; workitem.inputBytesAvailable= 0; workitem.status = (int)WorkItem.Status.Done; TraceOutput(TraceBits.WriteDone, "Write done wi({0}) stat({1}) cba({2})", workitem.index, workitem.status, workitem.compressedBytesAvailable); Monitor.Pulse(workitem); break; } else { int wcycles = 0; // I've locked a workitem I cannot use. // Therefore, wake someone else up, and then release the lock. 
while (workitem.status != (int)WorkItem.Status.Compressed) { TraceOutput(TraceBits.WriteWait, "Write waiting wi({0}) stat({1}) nw({2}) nf({3}) nomore({4})", workitem.index, workitem.status, _nextToWrite, _nextToFill, _noMoreInputForThisSegment ); if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; wcycles++; // wake up someone else Monitor.Pulse(workitem); // release and wait Monitor.Wait(workitem); if (workitem.status == (int)WorkItem.Status.Compressed) TraceOutput(TraceBits.WriteWait, "Write A-OK wi({0}) stat({1}) iba({2}) cba({3}) cyc({4})", workitem.index, workitem.status, workitem.inputBytesAvailable, workitem.compressedBytesAvailable, wcycles); } if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; } } while (true); } if (_noMoreInputForThisSegment) TraceOutput(TraceBits.Write, "Write nomore nw({0}) nf({1}) break({2})", _nextToWrite, _nextToFill, (_nextToWrite == _nextToFill)); if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill) break; } while (true); // Finish: // After writing a series of buffers, closing each one with // Flush.Sync, we now write the final one as Flush.Finish, and // then stop. byte[] buffer = new byte[128]; ZlibCodec compressor = new ZlibCodec(); int rc = compressor.InitializeDeflate(_compressLevel, false); compressor.InputBuffer = null; compressor.NextIn = 0; compressor.AvailableBytesIn = 0; compressor.OutputBuffer = buffer; compressor.NextOut = 0; compressor.AvailableBytesOut = buffer.Length; rc = compressor.Deflate(FlushType.Finish); if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK) throw new Exception("deflating: " + compressor.Message); if (buffer.Length - compressor.AvailableBytesOut > 0) { TraceOutput(TraceBits.WriteBegin, "Write begin flush bytes({0})", buffer.Length - compressor.AvailableBytesOut); _outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut); TraceOutput(TraceBits.WriteBegin, "Write done flush"); } compressor.EndDeflate(); _Crc32 = c.Crc32Result; // signal that writing is complete: TraceOutput(TraceBits.Synch, "Synch _writingDone.Set() PWM"); _writingDone.Set(); } while (true); } catch (System.Exception exc1) { lock(_eLock) { // expose the exception to the main thread if (_pendingException!=null) _pendingException = exc1; } } TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod FINIS"); } #endif private void _DeflateOne(Object wi) { // compress one buffer WorkItem workitem = (WorkItem) wi; try { Ionic.Crc.CRC32 crc = new Ionic.Crc.CRC32(); // calc CRC on the buffer crc.SlurpBlock(workitem.buffer, 0, workitem.inputBytesAvailable); // deflate it DeflateOneSegment(workitem); // update status workitem.crc = crc.Crc32Result; TraceOutput(TraceBits.Compress, "Compress wi({0}) ord({1}) len({2})", workitem.index, workitem.ordinal, workitem.compressedBytesAvailable ); lock(_latestLock) { if (workitem.ordinal > _latestCompressed) _latestCompressed = workitem.ordinal; } lock (_toWrite) { _toWrite.Enqueue(workitem.index); } _newlyCompressedBlob.Set(); } catch (System.Exception exc1) { lock(_eLock) { // expose the exception to the main thread if (_pendingException!=null) _pendingException = exc1; } } } private bool DeflateOneSegment(WorkItem workitem) { ZlibCodec compressor = workitem.compressor; int rc = 0; compressor.ResetDeflate(); compressor.NextIn = 0; compressor.AvailableBytesIn = workitem.inputBytesAvailable; // step 1: deflate the buffer compressor.NextOut = 0; compressor.AvailableBytesOut = workitem.compressed.Length; do { compressor.Deflate(FlushType.None); } 
while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0); // step 2: flush (sync) rc = compressor.Deflate(FlushType.Sync); // check the return code of the sync flush; both Z_OK and Z_STREAM_END indicate success if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END) throw new ZlibException("Deflate: unknown return code"); workitem.compressedBytesAvailable = (int) compressor.TotalBytesOut; return true; }
[System.Diagnostics.ConditionalAttribute("Trace")] private void TraceOutput(TraceBits bits, string format, params object[] varParams) { if ((bits & _DesiredTrace) != 0) { lock(_outputLock) { int tid = Thread.CurrentThread.GetHashCode(); #if !SILVERLIGHT Console.ForegroundColor = (ConsoleColor) (tid % 8 + 8); #endif Console.Write("{0:000} PDOS ", tid); Console.WriteLine(format, varParams); #if !SILVERLIGHT Console.ResetColor(); #endif } } }
// used only when Trace is defined [Flags] enum TraceBits : uint { None = 0, NotUsed1 = 1, EmitLock = 2, EmitEnter = 4, // enter _EmitPending EmitBegin = 8, // begin to write out EmitDone = 16, // done writing out EmitSkip = 32, // writer skipping a workitem EmitAll = 58, // All Emit flags Flush = 64, Lifecycle = 128, // constructor/disposer Session = 256, // Close/Reset Synch = 512, // thread synchronization Instance = 1024, // instance settings Compress = 2048, // compress task Write = 4096, // filling buffers, when caller invokes Write() WriteEnter = 8192, // upon entry to Write() WriteTake = 16384, // on _toFill.Take() All = 0xffffffff, }
/// <summary> /// Indicates whether the stream supports Seek operations. /// </summary> /// <remarks> /// Always returns false. /// </remarks> public override bool CanSeek { get { return false; } }
/// <summary> /// Indicates whether the stream supports Read operations. /// </summary> /// <remarks> /// Always returns false. /// </remarks> public override bool CanRead { get {return false;} }
/// <summary> /// Indicates whether the stream supports Write operations. /// </summary> /// <remarks> /// Returns true if the provided stream is writable. /// </remarks> public override bool CanWrite { get { return _outStream.CanWrite; } }
/// <summary> /// Reading this property always throws a NotSupportedException. /// </summary> public override long Length { get { throw new NotSupportedException(); } }
/// <summary> /// Returns the current position of the output stream. /// </summary> /// <remarks> /// <para> /// Because the output gets written by a background thread, /// the value may change asynchronously. Setting this /// property always throws a NotSupportedException. /// </para> /// </remarks> public override long Position { get { return _outStream.Position; } set { throw new NotSupportedException(); } }
/// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="buffer"> /// The buffer into which data would be read, IF THIS METHOD /// ACTUALLY DID ANYTHING. /// </param> /// <param name="offset"> /// The offset within that data array at which to insert the /// data that is read, IF THIS METHOD ACTUALLY DID /// ANYTHING. /// </param> /// <param name="count"> /// The number of bytes to read, IF THIS METHOD ACTUALLY DID /// ANYTHING. /// </param> /// <returns>nothing.</returns> public override int Read(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
/// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="offset"> /// The offset to seek to.... /// IF THIS METHOD ACTUALLY DID ANYTHING.
/// </param> /// <param name="origin"> /// The reference specifying how to apply the offset.... IF /// THIS METHOD ACTUALLY DID ANYTHING. /// </param> /// <returns>nothing. It always throws.</returns> public override long Seek(long offset, System.IO.SeekOrigin origin) { throw new NotSupportedException(); } /// <summary> /// This method always throws a NotSupportedException. /// </summary> /// <param name="value"> /// The new value for the stream length.... IF /// THIS METHOD ACTUALLY DID ANYTHING. /// </param> public override void SetLength(long value) { throw new NotSupportedException(); } } }
using System; using System.Collections.Generic; /// <summary> /// Dictionary.ValueCollection.CopyTo(Array,Int32) /// </summary> public class DictionaryValueCollectionCopyTo { private const int SIZE = 10; public static int Main() { DictionaryValueCollectionCopyTo valuecollectCopyTo = new DictionaryValueCollectionCopyTo(); TestLibrary.TestFramework.BeginTestCase("DictionaryValueCollectionCopyTo"); if (valuecollectCopyTo.RunTests()) { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("PASS"); return 100; } else { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("FAIL"); return 0; } } public bool RunTests() { bool retVal = true; TestLibrary.TestFramework.LogInformation("[Positive]"); retVal = PosTest1() && retVal; retVal = PosTest2() && retVal; retVal = PosTest3() && retVal; TestLibrary.TestFramework.LogInformation("[Negative]"); retVal = NegTest1() && retVal; retVal = NegTest2() && retVal; retVal = NegTest3() && retVal; retVal = NegTest4() && retVal; return retVal; } #region PositiveTest public bool PosTest1() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("PosTest1:Invoke the method CopyTo in the ValueCollection 1"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); dic.Add("str1", "Test1"); dic.Add("str2", "Test2"); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; values.CopyTo(TVals, 0); string strVals = null; for (int i = 0; i < TVals.Length; i++) { if (TVals[i] != null) { strVals += TVals[i].ToString(); } } if (TVals[0].ToString() != "Test1" || TVals[1].ToString() != "Test2" || strVals != "Test1Test2") { TestLibrary.TestFramework.LogError("001", "the ExpecResult is not the ActualResult"); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("002", "Unexpect exception:" + e); retVal = false; } return retVal; } public bool PosTest2() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("PosTest2:Invoke the method CopyTo in the ValueCollection 2"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); dic.Add("str1", "Test1"); dic.Add("str2", "Test2"); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; values.CopyTo(TVals, 5); string strVals = null; for (int i = 0; i < TVals.Length; i++) { if (TVals[i] != null) { strVals += TVals[i].ToString(); } } if (TVals[5].ToString() != "Test1" || TVals[6].ToString() != "Test2" || strVals != "Test1Test2") { TestLibrary.TestFramework.LogError("003", "the ExpecResult is not the ActualResult"); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("004", "Unexpect exception:" + e); retVal = false; } return retVal; } public bool PosTest3() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("PosTest3:Invoke the method CopyTo in the ValueCollection 3"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; values.CopyTo(TVals, 0); for (int i = 0; i < TVals.Length; i++) { if (TVals[i] != null) { TestLibrary.TestFramework.LogError("005", "the ExpecResult is not the ActualResult"); retVal = false; } } } catch (Exception e) { TestLibrary.TestFramework.LogError("006", "Unexpect exception:" + e); retVal = false; } return retVal; } #endregion 
#region NegativeTest public bool NegTest1() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest1:The argument array is null"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = null; values.CopyTo(TVals, 0); TestLibrary.TestFramework.LogError("N001", "The argument array is null but not throw exception"); retVal = false; } catch (ArgumentNullException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("N002", "Unexpect exception:" + e); retVal = false; } return retVal; } public bool NegTest2() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest2:The argument index is less than zero"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; int index = -1; values.CopyTo(TVals, index); TestLibrary.TestFramework.LogError("N003", "The argument index is less than zero but not throw exception"); retVal = false; } catch (ArgumentOutOfRangeException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("N004", "Unexpect exception:" + e); retVal = false; } return retVal; } public bool NegTest3() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest3:The argument index is larger than array length"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); dic.Add("str1", "Test1"); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; int index = SIZE + 1; values.CopyTo(TVals, index); TestLibrary.TestFramework.LogError("N005", "The argument index is larger than array length but not throw exception"); retVal = false; } catch (ArgumentException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("N006", "Unexpect exception:" + e); retVal = false; } return retVal; } public bool NegTest4() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest4:The number of elements in the source Dictionary.ValueCollection is greater than the available space from index to the end of the destination array"); try { Dictionary<string, string> dic = new Dictionary<string, string>(); dic.Add("str1", "Test1"); dic.Add("str1", "Test1"); Dictionary<string, string>.ValueCollection values = new Dictionary<string, string>.ValueCollection(dic); string[] TVals = new string[SIZE]; int index = SIZE - 1; values.CopyTo(TVals, index); TestLibrary.TestFramework.LogError("N007", "The ExpectResult should throw exception but the ActualResult not throw exception"); retVal = false; } catch (ArgumentException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("N008", "Unexpect exception:" + e); retVal = false; } return retVal; } #endregion }
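// A minimal standalone sketch of the Dictionary<TKey,TValue>.ValueCollection.CopyTo
// behavior exercised by the tests above: copying values into an array at a
// non-zero offset (PosTest2) and the argument checks (NegTest2/NegTest3). Only
// BCL types are used; note that ArgumentOutOfRangeException derives from
// ArgumentException, which is why the NegTest3 catch clause above also passes.
using System;
using System.Collections.Generic;

public static class ValueCollectionCopyToSketch
{
    public static void Run()
    {
        var dic = new Dictionary<string, string> { { "str1", "Test1" }, { "str2", "Test2" } };
        var values = new Dictionary<string, string>.ValueCollection(dic);

        // Copy both values into a larger array starting at index 5.
        string[] target = new string[10];
        values.CopyTo(target, 5);
        Console.WriteLine(target[5] + target[6]); // "Test1Test2" for a freshly populated dictionary

        // A negative index is rejected with ArgumentOutOfRangeException.
        try { values.CopyTo(target, -1); }
        catch (ArgumentOutOfRangeException) { Console.WriteLine("negative index rejected"); }

        // An index past the end of the array, or too little remaining space,
        // surfaces as an ArgumentException (possibly the derived
        // ArgumentOutOfRangeException, depending on the runtime).
        try { values.CopyTo(target, target.Length + 1); }
        catch (ArgumentException) { Console.WriteLine("index beyond the array rejected"); }
    }
}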
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.Network { using Microsoft.Azure; using Microsoft.Azure.Management; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// NetworkInterfaceLoadBalancersOperations operations. /// </summary> internal partial class NetworkInterfaceLoadBalancersOperations : IServiceOperations<NetworkManagementClient>, INetworkInterfaceLoadBalancersOperations { /// <summary> /// Initializes a new instance of the NetworkInterfaceLoadBalancersOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal NetworkInterfaceLoadBalancersOperations(NetworkManagementClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the NetworkManagementClient /// </summary> public NetworkManagementClient Client { get; private set; } /// <summary> /// List all load balancers in a network interface. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkInterfaceName'> /// The name of the network interface. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<AzureOperationResponse<IPage<LoadBalancer>>> ListWithHttpMessagesAsync(string resourceGroupName, string networkInterfaceName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (networkInterfaceName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "networkInterfaceName"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } string apiVersion = "2017-08-01"; // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("networkInterfaceName", networkInterfaceName); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{networkInterfaceName}", System.Uri.EscapeDataString(networkInterfaceName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<LoadBalancer>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<LoadBalancer>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return 
_result; } /// <summary> /// List all load balancers in a network interface. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<LoadBalancer>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<LoadBalancer>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<LoadBalancer>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return 
_result; } } }
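// A consumption sketch for the generated operations above: page through every
// load balancer attached to a network interface by following IPage<T>.NextPageLink.
// It relies only on members visible in this excerpt (ListWithHttpMessagesAsync and
// ListNextWithHttpMessagesAsync, assumed to be declared on
// INetworkInterfaceLoadBalancersOperations with the same optional parameters);
// constructing and authenticating the NetworkManagementClient is out of scope here.
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Azure.Management.Network;
using Microsoft.Azure.Management.Network.Models;

internal static class NetworkInterfaceLoadBalancerListingSketch
{
    public static async Task<IReadOnlyList<LoadBalancer>> ListAllAsync(
        INetworkInterfaceLoadBalancersOperations operations,
        string resourceGroupName,
        string networkInterfaceName)
    {
        var results = new List<LoadBalancer>();

        // First page.
        var response = await operations
            .ListWithHttpMessagesAsync(resourceGroupName, networkInterfaceName)
            .ConfigureAwait(false);
        results.AddRange(response.Body);

        // Follow the continuation link until the service stops returning one.
        string nextLink = response.Body.NextPageLink;
        while (!string.IsNullOrEmpty(nextLink))
        {
            var nextResponse = await operations
                .ListNextWithHttpMessagesAsync(nextLink)
                .ConfigureAwait(false);
            results.AddRange(nextResponse.Body);
            nextLink = nextResponse.Body.NextPageLink;
        }

        return results;
    }
}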
// Copyright (c) DotSpatial Team. All rights reserved. // Licensed under the MIT license. See License.txt file in the project root for full license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Drawing; using System.IO; using System.Runtime.InteropServices; using DotSpatial.Projections; using OSGeo.GDAL; namespace DotSpatial.Data.Rasters.GdalExtension { /// <summary> /// A GDAL raster. /// </summary> /// <typeparam name="T">Type of the contained items.</typeparam> internal class GdalRaster<T> : Raster<T> where T : IEquatable<T>, IComparable<T> { #region Fields private readonly Band _band; private readonly Dataset _dataset; #endregion #region Constructors /// <summary> /// Initializes a new instance of the <see cref="GdalRaster{T}"/> class. /// This can be a raster with multiple bands. /// </summary> /// <param name="fileName">The file name.</param> /// <param name="fromDataset">The dataset.</param> public GdalRaster(string fileName, Dataset fromDataset) : base(fromDataset.RasterYSize, fromDataset.RasterXSize) { _dataset = fromDataset; Filename = fileName; Name = Path.GetFileNameWithoutExtension(fileName); ReadHeader(); int numBands = _dataset.RasterCount; if (numBands == 1) { _band = _dataset.GetRasterBand(1); } else { for (int i = 1; i <= numBands; i++) { Bands.Add(new GdalRaster<T>(fileName, fromDataset, _dataset.GetRasterBand(i))); } } } /// <summary> /// Initializes a new instance of the <see cref="GdalRaster{T}"/> class. /// Creates a new raster from the specified band. /// </summary> /// <param name="fileName">The string path of the file if any.</param> /// <param name="fromDataset">The dataset.</param> /// <param name="fromBand">The band.</param> public GdalRaster(string fileName, Dataset fromDataset, Band fromBand) : base(fromDataset.RasterYSize, fromDataset.RasterXSize) { _dataset = fromDataset; _band = fromBand; Filename = fileName; Name = Path.GetFileNameWithoutExtension(fileName); ReadHeader(); } #endregion #region Properties /// <summary> /// Gets the GDAL data type. /// </summary> public DataType GdalDataType => _band.DataType; /// <summary> /// Gets or sets the maximum. /// </summary> public override double Maximum { get { return base.Maximum; } protected set { base.Maximum = value; if (_band != null) { _band.SetStatistics(Minimum, value, Mean, StdDeviation); _band.SetMetadataItem("STATISTICS_MAXIMUM", Maximum.ToString(), string.Empty); } else { foreach (GdalRaster<T> raster in Bands) { raster.Maximum = value; } } } } /// <summary> /// Gets or sets the mean. /// </summary> public override double Mean { get { return base.Mean; } protected set { base.Mean = value; if (_band != null) { _band.SetStatistics(Minimum, Maximum, value, StdDeviation); _band.SetMetadataItem("STATISTICS_MEAN", Mean.ToString(), string.Empty); } else { foreach (GdalRaster<T> raster in Bands) { raster.Mean = value; } } } } /// <summary> /// Gets or sets the minimum. /// </summary> public override double Minimum { get { return base.Minimum; } protected set { base.Minimum = value; if (_band != null) { _band.SetStatistics(value, Maximum, Mean, StdDeviation); _band.SetMetadataItem("STATISTICS_MINIMUM", Minimum.ToString(), string.Empty); } else { foreach (GdalRaster<T> raster in Bands) { raster.Minimum = value; } } } } /// <summary> /// Gets or sets the NoDataValue. 
/// </summary> public override double NoDataValue { get { return base.NoDataValue; } set { base.NoDataValue = value; if (_band != null) { _band.SetNoDataValue(value); } else { foreach (var raster in Bands) { raster.NoDataValue = value; } } } } /// <summary> /// Gets or sets the standard deviation. /// </summary> public override double StdDeviation { get { return base.StdDeviation; } protected set { base.StdDeviation = value; if (_band != null) { _band.SetStatistics(Minimum, Maximum, Mean, value); _band.SetMetadataItem("STATISTICS_STDDEV", StdDeviation.ToString(), string.Empty); } else { foreach (GdalRaster<T> raster in Bands) { raster.StdDeviation = value; } } } } #endregion #region Methods /// <summary> /// Gets the category colors. /// </summary> /// <returns>The category colors.</returns> public override Color[] CategoryColors() { Color[] colors = null; ColorTable table = GetColorTable(); if (table != null) { int colorCount = table.GetCount(); if (colorCount > 0) { colors = new Color[colorCount]; for (int colorIndex = 0; colorIndex < colorCount; colorIndex += 1) { colors[colorIndex] = Color.DimGray; ColorEntry entry = table.GetColorEntry(colorIndex); switch (table.GetPaletteInterpretation()) { case PaletteInterp.GPI_RGB: colors[colorIndex] = Color.FromArgb(entry.c4, entry.c1, entry.c2, entry.c3); break; case PaletteInterp.GPI_Gray: colors[colorIndex] = Color.FromArgb(255, entry.c1, entry.c1, entry.c1); break; // TODO: do any files use these types? // case PaletteInterp.GPI_HLS // case PaletteInterp.GPI_CMYK } } } } return colors; } /// <summary> /// Gets the category names. /// </summary> /// <returns>The category names.</returns> public override string[] CategoryNames() { if (_band != null) { return _band.GetCategoryNames(); } foreach (GdalRaster<T> raster in Bands) { return raster._band.GetCategoryNames(); } return null; } /// <summary> /// Closes the raster. /// </summary> public override void Close() { base.Close(); if (_band != null) { _band.Dispose(); } else { foreach (IRaster raster in Bands) { raster.Close(); raster.Dispose(); } } if (_dataset != null) { _dataset.FlushCache(); _dataset.Dispose(); } } /// <summary> /// Copies the fileName. 
/// </summary> /// <param name="fileName">The file name.</param> /// <param name="copyValues">Indicates whether the values should be copied.</param> public override void Copy(string fileName, bool copyValues) { using (Driver d = _dataset.GetDriver()) { DataType myType = OSGeo.GDAL.DataType.GDT_Int32; if (_band != null) { myType = _band.DataType; } else { GdalRaster<T> r = Bands[0] as GdalRaster<T>; if (r != null) { myType = r.GdalDataType; } } if (copyValues) { d.CreateCopy(fileName, _dataset, 1, Options, GdalProgressFunc, "Copy Progress"); } else { d.Create(fileName, NumColumnsInFile, NumRowsInFile, NumBands, myType, Options); } } } /// <summary> /// Gets the mean, standard deviation, minimum and maximum /// </summary> public override void GetStatistics() { if (IsInRam && this.IsFullyWindowed()) { base.GetStatistics(); return; } if (_band != null) { double min, max, mean, std; CPLErr err; try { if (Value.Updated) err = _band.ComputeStatistics(false, out min, out max, out mean, out std, null, null); else err = _band.GetStatistics(0, 1, out min, out max, out mean, out std); Value.Updated = false; Minimum = min; Maximum = max; Mean = mean; StdDeviation = std; } catch (Exception ex) { err = CPLErr.CE_Failure; max = min = std = mean = 0; Trace.WriteLine(ex); } Value.Updated = false; // http://dotspatial.codeplex.com/workitem/22221 // GetStatistics didn't return anything, so try use the raster default method. if (err != CPLErr.CE_None || (max == 0 && min == 0 && std == 0 && mean == 0)) base.GetStatistics(); } else { // ?? doesn't this mean the stats get overwritten several times. foreach (IRaster raster in Bands) { raster.GetStatistics(); } } } /// <summary> /// Most reading is optimized to read in a block at a time and process it. This method is designed /// for seeking through the file. It should work faster than the buffered methods in cases where /// an unusually arranged collection of values are required. Sorting the list before calling /// this should significantly improve performance. 
/// </summary> /// <param name="indices">A list or array of long values that are (Row * NumRowsInFile + Column)</param> /// <returns>The values.</returns> public override List<T> GetValuesT(IEnumerable<long> indices) { if (IsInRam) return base.GetValuesT(indices); if (_band == null) { Raster<T> ri = Bands[CurrentBand] as Raster<T>; return ri?.GetValuesT(indices); } #if DEBUG var sw = new Stopwatch(); sw.Start(); #endif List<T> result = new List<T>(); foreach (long index in indices) { int row = (int)(index / NumColumnsInFile); int col = (int)(index % NumColumnsInFile); T[] data = new T[1]; // http://trac.osgeo.org/gdal/wiki/GdalOgrCsharpRaster GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned); try { IntPtr ptr = handle.AddrOfPinnedObject(); _band.ReadRaster(col, row, 1, 1, ptr, 1, 1, GdalDataType, PixelSpace, LineSpace); } finally { if (handle.IsAllocated) { handle.Free(); } } result.Add(data[0]); } #if DEBUG sw.Stop(); Debug.WriteLine("Time to read values from file:" + sw.ElapsedMilliseconds); #endif return result; } /// <summary> /// Reads values from the raster to the jagged array of values /// </summary> /// <param name="xOff">The horizontal offset from the left to start reading from</param> /// <param name="yOff">The vertical offset from the top to start reading from</param> /// <param name="sizeX">The number of cells to read horizontally</param> /// <param name="sizeY">The number of cells ot read vertically</param> /// <returns>A jagged array of values from the raster</returns> public override T[][] ReadRaster(int xOff, int yOff, int sizeX, int sizeY) { T[][] result = new T[sizeY][]; T[] rawData = new T[sizeY * sizeX]; if (_band == null) { Raster<T> ri = Bands[CurrentBand] as Raster<T>; if (ri != null) { return ri.ReadRaster(xOff, yOff, sizeX, sizeY); } } else { GCHandle handle = GCHandle.Alloc(rawData, GCHandleType.Pinned); try { IntPtr ptr = handle.AddrOfPinnedObject(); _band.ReadRaster(xOff, yOff, sizeX, sizeY, ptr, sizeX, sizeY, GdalDataType, PixelSpace, LineSpace); } finally { if (handle.IsAllocated) { handle.Free(); } } for (int row = 0; row < sizeY; row++) { result[row] = new T[sizeX]; Array.Copy(rawData, row * sizeX, result[row], 0, sizeX); } return result; } return null; } /// <summary> /// Writes values from the jagged array to the raster at the specified location /// </summary> /// <param name="buffer">A jagged array of values to write to the raster</param> /// <param name="xOff">The horizontal offset from the left to start reading from</param> /// <param name="yOff">The vertical offset from the top to start reading from</param> /// <param name="xSize">The number of cells to write horizontally</param> /// <param name="ySize">The number of cells ot write vertically</param> public override void WriteRaster(T[][] buffer, int xOff, int yOff, int xSize, int ySize) { if (_band == null) { Raster<T> ri = Bands[CurrentBand] as Raster<T>; if (ri != null) { ri.NoDataValue = NoDataValue; ri.WriteRaster(buffer, xOff, yOff, xSize, ySize); } } else { T[] rawValues = new T[xSize * ySize]; for (int row = 0; row < ySize; row++) { Array.Copy(buffer[row], 0, rawValues, row * xSize, xSize); } GCHandle handle = GCHandle.Alloc(rawValues, GCHandleType.Pinned); try { IntPtr ptr = handle.AddrOfPinnedObject(); // int stride = ((xSize * sizeof(T) + 7) / 8); _band.WriteRaster(xOff, yOff, xSize, ySize, ptr, xSize, ySize, GdalDataType, PixelSpace, 0); _band.FlushCache(); _dataset.FlushCache(); } finally { if (handle.IsAllocated) { handle.Free(); } } } } /// <summary> /// Updates the header 
information about the projection and the affine coefficients /// </summary> protected override void UpdateHeader() { _dataset.SetGeoTransform(Bounds.AffineCoefficients); if (Projection != null) { _dataset.SetProjection(Projection.ToEsriString()); } } /// <summary> /// Handles the callback progress content. /// </summary> /// <param name="complete">Percent of completeness.</param> /// <param name="message">Message is not used.</param> /// <param name="data">Data is not used.</param> /// <returns>0</returns> private int GdalProgressFunc(double complete, IntPtr message, IntPtr data) { ProgressHandler.Progress("Copy Progress", Convert.ToInt32(complete), "Copy Progress"); return 0; } private ColorTable GetColorTable() { if (_band != null) { return _band.GetColorTable(); } foreach (GdalRaster<T> raster in Bands) { return raster._band.GetColorTable(); } return null; } private void ReadHeader() { DataType = typeof(T); NumColumnsInFile = _dataset.RasterXSize; NumColumns = NumColumnsInFile; NumRowsInFile = _dataset.RasterYSize; NumRows = NumRowsInFile; // Todo: look for prj file if GetProjection returns null. // Do we need to read this as an Esri string if we don't get a proj4 string? string projString = _dataset.GetProjection(); Projection = ProjectionInfo.FromProj4String(projString); if (_band != null) { double val; int hasInterval; _band.GetNoDataValue(out val, out hasInterval); base.NoDataValue = val; } double[] affine = new double[6]; _dataset.GetGeoTransform(affine); // in gdal (row,col) coordinates are defined relative to the top-left corner of the top-left cell // shift them by half a cell to give coordinates relative to the center of the top-left cell affine = new AffineTransform(affine).TransfromToCorner(0.5, 0.5); ProjectionString = projString; Bounds = new RasterBounds(NumRows, NumColumns, affine); PixelSpace = Marshal.SizeOf(typeof(T)); } #endregion } }
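// A small access sketch for the raster class above. Judging from the GetValuesT
// implementation (row = index / NumColumnsInFile, col = index % NumColumnsInFile),
// a linear index is row * NumColumnsInFile + column; the XML comment's
// "Row * NumRowsInFile + Column" phrasing appears to be a slip. This sketch is
// written against the public DotSpatial.Data.Raster<T> base type (GdalRaster<T>
// itself is internal) and assumes GetValuesT, ReadRaster and NumColumnsInFile are
// exposed there, as the overrides above imply; treat it as illustrative only.
using System;
using System.Collections.Generic;
using System.Linq;
using DotSpatial.Data;

internal static class GdalRasterAccessSketch
{
    // Fetch individual cell values by (row, column) pairs.
    public static List<T> GetCells<T>(Raster<T> raster, IEnumerable<(int Row, int Column)> cells)
        where T : IEquatable<T>, IComparable<T>
    {
        // Sorting the indices keeps file seeks roughly sequential, which the
        // GetValuesT remarks above recommend for unbuffered (not in-RAM) rasters.
        List<long> indices = cells
            .Select(c => (long)c.Row * raster.NumColumnsInFile + c.Column)
            .OrderBy(i => i)
            .ToList();
        return raster.GetValuesT(indices);
    }

    // Read a rectangular window of cells as a jagged [row][column] array.
    public static T[][] ReadWindow<T>(Raster<T> raster, int xOff, int yOff, int sizeX, int sizeY)
        where T : IEquatable<T>, IComparable<T>
    {
        return raster.ReadRaster(xOff, yOff, sizeX, sizeY);
    }
}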
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Runtime; using System.Diagnostics.Contracts; namespace System.Text { public sealed class EncoderReplacementFallback : EncoderFallback { // Our variables private String _strDefault; // Construction. Default replacement fallback uses no best fit and ? replacement string public EncoderReplacementFallback() : this("?") { } public EncoderReplacementFallback(String replacement) { // Must not be null if (replacement == null) throw new ArgumentNullException("replacement"); Contract.EndContractBlock(); // Make sure it doesn't have bad surrogate pairs bool bFoundHigh = false; for (int i = 0; i < replacement.Length; i++) { // Found a surrogate? if (Char.IsSurrogate(replacement, i)) { // High or Low? if (Char.IsHighSurrogate(replacement, i)) { // if already had a high one, stop if (bFoundHigh) break; // break & throw at the bFoundHIgh below bFoundHigh = true; } else { // Low, did we have a high? if (!bFoundHigh) { // Didn't have one, make if fail when we stop bFoundHigh = true; break; } // Clear flag bFoundHigh = false; } } // If last was high we're in trouble (not surrogate so not low surrogate, so break) else if (bFoundHigh) break; } if (bFoundHigh) throw new ArgumentException(SR.Argument_InvalidCharSequenceNoIndex, "replacement"); _strDefault = replacement; } public String DefaultString { get { return _strDefault; } } public override EncoderFallbackBuffer CreateFallbackBuffer() { return new EncoderReplacementFallbackBuffer(this); } // Maximum number of characters that this instance of this fallback could return public override int MaxCharCount { get { return _strDefault.Length; } } public override bool Equals(Object value) { EncoderReplacementFallback that = value as EncoderReplacementFallback; if (that != null) { return (_strDefault == that._strDefault); } return (false); } public override int GetHashCode() { return _strDefault.GetHashCode(); } } public sealed class EncoderReplacementFallbackBuffer : EncoderFallbackBuffer { // Store our default string private String _strDefault; private int _fallbackCount = -1; private int _fallbackIndex = -1; // Construction public EncoderReplacementFallbackBuffer(EncoderReplacementFallback fallback) { // 2X in case we're a surrogate pair _strDefault = fallback.DefaultString + fallback.DefaultString; } // Fallback Methods public override bool Fallback(char charUnknown, int index) { // If we had a buffer already we're being recursive, throw, it's probably at the suspect // character in our array. 
if (_fallbackCount >= 1) { // If we're recursive we may still have something in our buffer that makes this a surrogate if (char.IsHighSurrogate(charUnknown) && _fallbackCount >= 0 && char.IsLowSurrogate(_strDefault[_fallbackIndex + 1])) ThrowLastCharRecursive(Char.ConvertToUtf32(charUnknown, _strDefault[_fallbackIndex + 1])); // Nope, just one character ThrowLastCharRecursive(unchecked((int)charUnknown)); } // Go ahead and get our fallback // Divide by 2 because we aren't a surrogate pair _fallbackCount = _strDefault.Length / 2; _fallbackIndex = -1; return _fallbackCount != 0; } public override bool Fallback(char charUnknownHigh, char charUnknownLow, int index) { // Double check input surrogate pair if (!Char.IsHighSurrogate(charUnknownHigh)) throw new ArgumentOutOfRangeException("charUnknownHigh", SR.Format(SR.ArgumentOutOfRange_Range, 0xD800, 0xDBFF)); if (!Char.IsLowSurrogate(charUnknownLow)) throw new ArgumentOutOfRangeException("CharUnknownLow", SR.Format(SR.ArgumentOutOfRange_Range, 0xDC00, 0xDFFF)); Contract.EndContractBlock(); // If we had a buffer already we're being recursive, throw, it's probably at the suspect // character in our array. if (_fallbackCount >= 1) ThrowLastCharRecursive(Char.ConvertToUtf32(charUnknownHigh, charUnknownLow)); // Go ahead and get our fallback _fallbackCount = _strDefault.Length; _fallbackIndex = -1; return _fallbackCount != 0; } public override char GetNextChar() { // We want it to get < 0 because == 0 means that the current/last character is a fallback // and we need to detect recursion. We could have a flag but we already have this counter. _fallbackCount--; _fallbackIndex++; // Do we have anything left? 0 is now last fallback char, negative is nothing left if (_fallbackCount < 0) return '\0'; // Need to get it out of the buffer. // Make sure it didn't wrap from the fast count-- path if (_fallbackCount == int.MaxValue) { _fallbackCount = -1; return '\0'; } // Now make sure its in the expected range Contract.Assert(_fallbackIndex < _strDefault.Length && _fallbackIndex >= 0, "Index exceeds buffer range"); return _strDefault[_fallbackIndex]; } public override bool MovePrevious() { // Back up one, only if we just processed the last character (or earlier) if (_fallbackCount >= -1 && _fallbackIndex >= 0) { _fallbackIndex--; _fallbackCount++; return true; } // Return false 'cause we couldn't do it. return false; } // How many characters left to output? public override int Remaining { get { // Our count is 0 for 1 character left. return (_fallbackCount < 0) ? 0 : _fallbackCount; } } // Clear the buffer [System.Security.SecuritySafeCritical] // auto-generated public override unsafe void Reset() { _fallbackCount = -1; _fallbackIndex = 0; charStart = null; bFallingBack = false; } } }
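// A minimal usage sketch for the replacement fallback defined above: attach it to
// an encoding that cannot represent every input character and observe the
// replacement string being substituted. Only public BCL API is used
// (Encoding.GetEncoding(string, EncoderFallback, DecoderFallback)).
using System;
using System.Text;

internal static class EncoderReplacementFallbackSketch
{
    public static void Run()
    {
        // ASCII cannot encode 'é' or '€'; each unknown character (or surrogate pair)
        // is replaced with the fallback string, "?" in this case.
        Encoding ascii = Encoding.GetEncoding(
            "us-ascii",
            new EncoderReplacementFallback("?"),
            new DecoderReplacementFallback("?"));

        byte[] bytes = ascii.GetBytes("café €5");
        Console.WriteLine(ascii.GetString(bytes)); // "caf? ?5"

        // MaxCharCount is simply the length of the replacement string, which feeds
        // into Encoding.GetMaxByteCount's worst-case estimate.
        Console.WriteLine(new EncoderReplacementFallback("??").MaxCharCount); // 2
    }
}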
// Author: Robert Scheller, Melissa Lucash using Landis.Core; using Landis.SpatialModeling; using Landis.Utilities; using Landis.Library.Succession; using Landis.Library.LeafBiomassCohorts; using System.Collections.Generic; using System; namespace Landis.Extension.Succession.NECN { /// <summary> /// The pools of dead biomass for the landscape's sites. /// </summary> public static class SiteVars { // Time of last succession simulation: private static ISiteVar<int> timeOfLast; // Live biomass: private static ISiteVar<Landis.Library.AgeOnlyCohorts.ISiteCohorts> baseCohortsSiteVar; private static ISiteVar<Landis.Library.BiomassCohorts.ISiteCohorts> biomassCohortsSiteVar; // Dead biomass: private static ISiteVar<Layer> surfaceDeadWood; private static ISiteVar<Layer> soilDeadWood; private static ISiteVar<Layer> surfaceStructural; private static ISiteVar<Layer> surfaceMetabolic; private static ISiteVar<Layer> soilStructural; private static ISiteVar<Layer> soilMetabolic; // Soil layers private static ISiteVar<Layer> som1surface; private static ISiteVar<Layer> som1soil; private static ISiteVar<Layer> som2; private static ISiteVar<Layer> som3; private static ISiteVar<int> soilDepth; private static ISiteVar<double> soilDrain; private static ISiteVar<double> soilBaseFlowFraction; private static ISiteVar<double> soilStormFlowFraction; private static ISiteVar<double> soilFieldCapacity; private static ISiteVar<double> soilWiltingPoint; private static ISiteVar<double> soilPercentSand; private static ISiteVar<double> soilPercentClay; // Similar to soil layers with respect to their pools: private static ISiteVar<Layer> stream; private static ISiteVar<Layer> sourceSink; // Other variables: private static ISiteVar<double> mineralN; private static ISiteVar<double> resorbedN; private static ISiteVar<double> waterMovement; private static ISiteVar<double> availableWater; private static ISiteVar<double> soilWaterContent; private static ISiteVar<double> liquidSnowPack; private static ISiteVar<double> decayFactor; private static ISiteVar<double> soilTemperature; private static ISiteVar<double> anaerobicEffect; // Annual accumulators for reporting purposes. private static ISiteVar<double> grossMineralization; private static ISiteVar<double> ag_nppC; private static ISiteVar<double> bg_nppC; private static ISiteVar<double> litterfallC; private static ISiteVar<double> cohortLeafN; private static ISiteVar<double> cohortFRootN; private static ISiteVar<double> cohortLeafC; private static ISiteVar<double> cohortFRootC; private static ISiteVar<double> cohortWoodN; private static ISiteVar<double> cohortCRootN; private static ISiteVar<double> cohortWoodC; private static ISiteVar<double> cohortCRootC; private static ISiteVar<double[]> monthlyAGNPPC; private static ISiteVar<double[]> monthlyBGNPPC; private static ISiteVar<double[]> monthlyNEE; private static ISiteVar<double[]> monthlyStreamN; private static ISiteVar<double> totalNuptake; private static ISiteVar<double[]> monthlymineralN; private static ISiteVar<double> frassC; private static ISiteVar<double> lai; //private static ISiteVar<double> annualPPT_AET; //Annual water budget calculation. 
private static ISiteVar<int> dryDays; public static ISiteVar<double> AnnualNEE; public static ISiteVar<double> FireCEfflux; public static ISiteVar<double> FireNEfflux; public static ISiteVar<double> Nvol; public static ISiteVar<double> TotalWoodBiomass; public static ISiteVar<int> PrevYearMortality; public static ISiteVar<byte> FireSeverity; public static ISiteVar<double> WoodMortality; public static ISiteVar<string> HarvestPrescriptionName; public static ISiteVar<int> HarvestTime; public static ISiteVar<Dictionary<int, Dictionary<int, double>>> CohortResorbedNallocation; public static ISiteVar<double> FineFuels; public static ISiteVar<double> SmolderConsumption; public static ISiteVar<double> FlamingConsumption; public static ISiteVar<double> AnnualClimaticWaterDeficit; //Annual soil moisture calculation, defined as pet - aet public static ISiteVar<double> AnnualPotentialEvapotranspiration; //PET public static ISiteVar<double> AnnualWaterBalance; //Annual soil moisture calculation, defined as pet - aet public static ISiteVar<double[]> MonthlySoilResp; public static ISiteVar<double[]> MonthlyLAI; public static ISiteVar<double[]> MonthlyLAI_Trees; public static ISiteVar<double[]> MonthlyLAI_Grasses; // Chihiro, 2021.03.30: tentative public static ISiteVar<double> MonthlyLAI_GrassesLastMonth; // Chihiro, 2021.03.30: tentative public static ISiteVar<double[]> MonthlyHeteroResp; public static ISiteVar<double[]> MonthlySoilWaterContent; //--------------------------------------------------------------------- /// <summary> /// Initializes the module. /// </summary> public static void Initialize() { cohorts = PlugIn.ModelCore.Landscape.NewSiteVar<Library.LeafBiomassCohorts.SiteCohorts>(); biomassCohortsSiteVar = Landis.Library.Succession.CohortSiteVar<Landis.Library.BiomassCohorts.ISiteCohorts>.Wrap(cohorts); baseCohortsSiteVar = Landis.Library.Succession.CohortSiteVar<Landis.Library.AgeOnlyCohorts.ISiteCohorts>.Wrap(cohorts); FineFuels = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); timeOfLast = PlugIn.ModelCore.Landscape.NewSiteVar<int>(); // Dead biomass: surfaceDeadWood = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); soilDeadWood = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); surfaceStructural = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); surfaceMetabolic = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); soilStructural = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); soilMetabolic = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); // Soil Layers som1surface = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); som1soil = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); som2 = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); som3 = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); soilDepth = PlugIn.ModelCore.Landscape.NewSiteVar<int>(); soilDrain = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilBaseFlowFraction = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilStormFlowFraction = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilFieldCapacity = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilWiltingPoint = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilPercentSand = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilPercentClay = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); // Other Layers stream = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); sourceSink = PlugIn.ModelCore.Landscape.NewSiteVar<Layer>(); // Other variables MonthlyLAI = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); MonthlyLAI_Trees = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); 
MonthlyLAI_Grasses = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); // Chihiro, 2021.03.30: tentative MonthlyLAI_GrassesLastMonth = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); // Chihiro, 2021.03.30: tentative mineralN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); resorbedN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); waterMovement = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); availableWater = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); liquidSnowPack = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilWaterContent = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); decayFactor = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); soilTemperature = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); anaerobicEffect = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); dryDays = PlugIn.ModelCore.Landscape.NewSiteVar<int>(); // Annual accumulators grossMineralization = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); ag_nppC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); bg_nppC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); litterfallC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); monthlyAGNPPC = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); monthlyBGNPPC = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); monthlyNEE = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); monthlyStreamN = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); MonthlyHeteroResp = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); MonthlySoilWaterContent = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); AnnualNEE = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); FireCEfflux = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); FireNEfflux = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortLeafN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortFRootN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortLeafC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortFRootC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortWoodN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortCRootN = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortWoodC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); cohortCRootC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); TotalWoodBiomass = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); WoodMortality = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); Nvol = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); PrevYearMortality = PlugIn.ModelCore.Landscape.NewSiteVar<int>(); totalNuptake = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); monthlymineralN = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); frassC = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); lai = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); AnnualWaterBalance = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); AnnualClimaticWaterDeficit = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); AnnualPotentialEvapotranspiration = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); SmolderConsumption = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); FlamingConsumption = PlugIn.ModelCore.Landscape.NewSiteVar<double>(); HarvestPrescriptionName = PlugIn.ModelCore.GetSiteVar<string>("Harvest.PrescriptionName"); //HarvestTime = PlugIn.ModelCore.GetSiteVar<int>("Harvest.TimeOfLastEvent"); HarvestTime = PlugIn.ModelCore.Landscape.NewSiteVar<int>(); MonthlySoilResp = PlugIn.ModelCore.Landscape.NewSiteVar<double[]>(); CohortResorbedNallocation = PlugIn.ModelCore.Landscape.NewSiteVar<Dictionary<int, Dictionary<int, double>>>(); 
PlugIn.ModelCore.RegisterSiteVar(cohorts, "Succession.LeafBiomassCohorts"); PlugIn.ModelCore.RegisterSiteVar(baseCohortsSiteVar, "Succession.AgeCohorts"); PlugIn.ModelCore.RegisterSiteVar(biomassCohortsSiteVar, "Succession.BiomassCohorts"); PlugIn.ModelCore.RegisterSiteVar(SiteVars.FineFuels, "Succession.FineFuels"); PlugIn.ModelCore.RegisterSiteVar(SiteVars.SmolderConsumption, "Succession.SmolderConsumption"); PlugIn.ModelCore.RegisterSiteVar(SiteVars.FlamingConsumption, "Succession.FlamingConsumption"); PlugIn.ModelCore.RegisterSiteVar(SiteVars.AnnualClimaticWaterDeficit, "Succession.CWD"); PlugIn.ModelCore.RegisterSiteVar(SiteVars.AnnualPotentialEvapotranspiration, "Succession.PET"); foreach (ActiveSite site in PlugIn.ModelCore.Landscape) { surfaceDeadWood[site] = new Layer(LayerName.Wood, LayerType.Surface); soilDeadWood[site] = new Layer(LayerName.CoarseRoot, LayerType.Soil); surfaceStructural[site] = new Layer(LayerName.Structural, LayerType.Surface); surfaceMetabolic[site] = new Layer(LayerName.Metabolic, LayerType.Surface); soilStructural[site] = new Layer(LayerName.Structural, LayerType.Soil); soilMetabolic[site] = new Layer(LayerName.Metabolic, LayerType.Soil); som1surface[site] = new Layer(LayerName.SOM1, LayerType.Surface); som1soil[site] = new Layer(LayerName.SOM1, LayerType.Soil); som2[site] = new Layer(LayerName.SOM2, LayerType.Soil); som3[site] = new Layer(LayerName.SOM3, LayerType.Soil); stream[site] = new Layer(LayerName.Other, LayerType.Other); sourceSink[site] = new Layer(LayerName.Other, LayerType.Other); monthlyAGNPPC[site] = new double[12]; monthlyBGNPPC[site] = new double[12]; monthlyNEE[site] = new double[12]; monthlyStreamN[site] = new double[12]; MonthlyHeteroResp[site] = new double[12]; MonthlySoilResp[site] = new double[12]; MonthlyLAI[site] = new double[12]; MonthlyLAI_Trees[site] = new double[12]; MonthlyLAI_Grasses[site] = new double[12]; MonthlySoilWaterContent[site] = new double[12]; CohortResorbedNallocation[site] = new Dictionary<int, Dictionary<int, double>>(); } } //--------------------------------------------------------------------- /// <summary> /// Initializes for disturbances. /// </summary> public static void InitializeDisturbances() { FireSeverity = PlugIn.ModelCore.GetSiteVar<byte>("Fire.Severity"); HarvestPrescriptionName = PlugIn.ModelCore.GetSiteVar<string>("Harvest.PrescriptionName"); //HarvestTime = PlugIn.ModelCore.GetSiteVar<int>("Harvest.TimeOfLastEvent"); //if(HarvestPrescriptionName == null) // throw new System.ApplicationException("TEST Error: Harvest Prescription Names NOT Initialized."); } //--------------------------------------------------------------------- /// <summary> /// Biomass cohorts at each site. /// </summary> private static ISiteVar<SiteCohorts> cohorts; public static ISiteVar<SiteCohorts> Cohorts { get { return cohorts; } set { cohorts = value; } } //--------------------------------------------------------------------- /// <summary> /// Computes the actual biomass at a site. The biomass is the total /// of all the site's cohorts except young ones. The total is limited /// to being no more than the site's maximum biomass less the previous /// year's mortality at the site. 
/// </summary> public static double ActualSiteBiomass(ActiveSite site) { IEcoregion ecoregion = PlugIn.ModelCore.Ecoregion[site]; ISiteCohorts siteCohorts = SiteVars.Cohorts[site]; if(siteCohorts == null) return 0.0; int youngBiomass; int totalBiomass = Library.LeafBiomassCohorts.Cohorts.ComputeBiomass(siteCohorts, out youngBiomass); double B_ACT = totalBiomass - youngBiomass; //int lastMortality = SiteVars.PrevYearMortality[site]; //B_ACT = System.Math.Min(ClimateRegionData.B_MAX[ecoregion] - lastMortality, B_ACT); return B_ACT; } //--------------------------------------------------------------------- public static void ResetAnnualValues(Site site) { // Reset these accumulators to zero: SiteVars.DryDays[site] = 0; SiteVars.CohortLeafN[site] = 0.0; SiteVars.CohortFRootN[site] = 0.0; SiteVars.CohortLeafC[site] = 0.0; SiteVars.CohortFRootC[site] = 0.0; SiteVars.CohortWoodN[site] = 0.0; SiteVars.CohortCRootN[site] = 0.0; SiteVars.CohortWoodC[site] = 0.0; SiteVars.CohortCRootC[site] = 0.0; SiteVars.GrossMineralization[site] = 0.0; SiteVars.AGNPPcarbon[site] = 0.0; SiteVars.BGNPPcarbon[site] = 0.0; SiteVars.LitterfallC[site] = 0.0; SiteVars.Stream[site] = new Layer(LayerName.Other, LayerType.Other); SiteVars.SourceSink[site] = new Layer(LayerName.Other, LayerType.Other); SiteVars.SurfaceDeadWood[site].NetMineralization = 0.0; SiteVars.SurfaceStructural[site].NetMineralization = 0.0; SiteVars.SurfaceMetabolic[site].NetMineralization = 0.0; SiteVars.SoilDeadWood[site].NetMineralization = 0.0; SiteVars.SoilStructural[site].NetMineralization = 0.0; SiteVars.SoilMetabolic[site].NetMineralization = 0.0; SiteVars.SOM1surface[site].NetMineralization = 0.0; SiteVars.SOM1soil[site].NetMineralization = 0.0; SiteVars.SOM2[site].NetMineralization = 0.0; SiteVars.SOM3[site].NetMineralization = 0.0; SiteVars.AnnualNEE[site] = 0.0; SiteVars.Nvol[site] = 0.0; SiteVars.AnnualNEE[site] = 0.0; SiteVars.TotalNuptake[site] = 0.0; SiteVars.ResorbedN[site] = 0.0; SiteVars.FrassC[site] = 0.0; SiteVars.LAI[site] = 0.0; SiteVars.AnnualWaterBalance[site] = 0.0; SiteVars.AnnualClimaticWaterDeficit[site] = 0.0; SiteVars.AnnualPotentialEvapotranspiration[site] = 0.0; SiteVars.WoodMortality[site] = 0.0; //SiteVars.DryDays[site] = 0; //SiteVars.FireEfflux[site] = 0.0; } //--------------------------------------------------------------------- public static ISiteVar<int> TimeOfLast { get { return timeOfLast; } } //--------------------------------------------------------------------- /// <summary> /// The intact dead woody pools for the landscape's sites. /// </summary> public static ISiteVar<Layer> SurfaceDeadWood { get { return surfaceDeadWood; } } //--------------------------------------------------------------------- /// <summary> /// The DEAD coarse root pool for the landscape's sites. /// </summary> public static ISiteVar<Layer> SoilDeadWood { get { return soilDeadWood; } } //--------------------------------------------------------------------- /// <summary> /// The dead surface pool for the landscape's sites. /// </summary> public static ISiteVar<Layer> SurfaceStructural { get { return surfaceStructural; } } //--------------------------------------------------------------------- /// <summary> /// The dead surface pool for the landscape's sites. /// </summary> public static ISiteVar<Layer> SurfaceMetabolic { get { return surfaceMetabolic; } } //--------------------------------------------------------------------- /// <summary> /// The fine root pool for the landscape's sites. 
/// </summary> public static ISiteVar<Layer> SoilStructural { get { return soilStructural; } } //--------------------------------------------------------------------- /// <summary> /// The fine root pool for the landscape's sites. /// </summary> public static ISiteVar<Layer> SoilMetabolic { get { return soilMetabolic; } } //--------------------------------------------------------------------- /// <summary> /// The soil organic matter (SOM1-Surface) for the landscape's sites. /// </summary> public static ISiteVar<Layer> SOM1surface { get { return som1surface; } } //--------------------------------------------------------------------- /// <summary> /// The soil organic matter (SOM1-Soil) for the landscape's sites. /// </summary> public static ISiteVar<Layer> SOM1soil { get { return som1soil; } } //--------------------------------------------------------------------- /// <summary> /// The soil organic matter (SOM2) for the landscape's sites. /// </summary> public static ISiteVar<Layer> SOM2 { get { return som2; } } //--------------------------------------------------------------------- /// <summary> /// The soil organic matter (SOM3) for the landscape's sites. /// </summary> public static ISiteVar<Layer> SOM3 { get { return som3; } } public static ISiteVar<int> SoilDepth {get{return soilDepth;}} public static ISiteVar<double> SoilDrain { get { return soilDrain; } } public static ISiteVar<double> SoilBaseFlowFraction { get { return soilBaseFlowFraction; } } public static ISiteVar<double> SoilStormFlowFraction { get { return soilStormFlowFraction; } } public static ISiteVar<double> SoilFieldCapacity { get { return soilFieldCapacity; } } public static ISiteVar<double> SoilWiltingPoint { get { return soilWiltingPoint; } } public static ISiteVar<double> SoilPercentSand { get { return soilPercentSand; } } public static ISiteVar<double> SoilPercentClay { get { return soilPercentClay; } } //--------------------------------------------------------------------- /// <summary> /// Leaching to a stream - using the soil layer object is a cheat /// </summary> public static ISiteVar<Layer> Stream { get { return stream; } } //--------------------------------------------------------------------- /// <summary> /// Fine Fuels biomass /// </summary> //public static ISiteVar<double> FineFuels //{ // get // { // return fineFuels; // } // set // { // fineFuels = value; // } //} //--------------------------------------------------------------------- /// <summary> /// Water loss /// </summary> public static ISiteVar<double> WaterMovement { get { return waterMovement; } set { waterMovement = value; } } //--------------------------------------------------------------------- /// <summary> /// Water loss /// </summary> public static ISiteVar<double> AvailableWater { get { return availableWater; } set { availableWater = value; } } //--------------------------------------------------------------------- /// <summary> /// Water loss /// </summary> public static ISiteVar<double> SoilWaterContent { get { return soilWaterContent; } set { soilWaterContent = value; } } /// <summary> /// Liquid Snowpack /// </summary> public static ISiteVar<double> LiquidSnowPack { get { return liquidSnowPack; } set { liquidSnowPack = value; } } //--------------------------------------------------------------------- /// <summary> /// Available mineral Nitrogen /// </summary> public static ISiteVar<double> MineralN { get { return mineralN; } set { mineralN = value; } } //--------------------------------------------------------------------- /// 
<summary> /// The amount of N resorbed before leaf fall /// </summary> public static ISiteVar<double> ResorbedN { get { return resorbedN; } set { resorbedN = value; } } //--------------------------------------------------------------------- /// <summary> /// A generic decay factor determined by soil water and soil temperature. /// </summary> public static ISiteVar<double> DecayFactor { get { return decayFactor; } set { decayFactor = value; } } //--------------------------------------------------------------------- /// <summary> /// Soil temperature (C) /// </summary> public static ISiteVar<double> SoilTemperature { get { return soilTemperature; } set { soilTemperature = value; } } //--------------------------------------------------------------------- /// <summary> /// A generic decay factor determined by soil water and soil temperature. /// </summary> public static ISiteVar<double> AnaerobicEffect { get { return anaerobicEffect; } set { anaerobicEffect = value; } } //--------------------------------------------------------------------- /// <summary> /// Soil moisture at the time of reproduction /// </summary> public static ISiteVar<int> DryDays { get { return dryDays; } set { dryDays = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Leaf Nitrogen in the Cohorts. /// </summary> public static ISiteVar<double> CohortLeafN { get { return cohortLeafN; } set { cohortLeafN = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Fine Root Nitrogen in the Cohorts. /// </summary> public static ISiteVar<double> CohortFRootN { get { return cohortFRootN; } set { cohortFRootN = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Carbon in the Leaves /// </summary> public static ISiteVar<double> CohortLeafC { get { return cohortLeafC; } set { cohortLeafC = value; } } /// <summary> /// A summary of all Carbon in the Fine Roots /// </summary> public static ISiteVar<double> CohortFRootC { get { return cohortFRootC; } set { cohortFRootC = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Aboveground Wood Nitrogen in the Cohorts. /// </summary> public static ISiteVar<double> CohortWoodN { get { return cohortWoodN; } set { cohortWoodN = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Coarse Root Nitrogen in the Cohorts. /// </summary> public static ISiteVar<double> CohortCRootN { get { return cohortCRootN; } set { cohortCRootN = value; } } /// <summary> /// A summary of all Aboveground Wood Carbon in the Cohorts. /// </summary> public static ISiteVar<double> CohortWoodC { get { return cohortWoodC; } set { cohortWoodC = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of all Carbon in the Coarse Roots /// </summary> public static ISiteVar<double> CohortCRootC { get { return cohortCRootC; } set { cohortCRootC = value; } } //------------------------- /// <summary> /// A summary of Gross Mineraliztion. 
/// </summary> public static ISiteVar<double> GrossMineralization { get { return grossMineralization; } set { grossMineralization = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Aboveground Net Primary Productivity (g C/m2) /// </summary> public static ISiteVar<double> AGNPPcarbon { get { return ag_nppC; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Belowground Net Primary Productivity (g C/m2) /// </summary> public static ISiteVar<double> BGNPPcarbon { get { return bg_nppC; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Litter fall (g C/m2). /// </summary> public static ISiteVar<double> LitterfallC { get { return litterfallC; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Aboveground Net Primary Productivity (g C/m2) /// </summary> public static ISiteVar<double[]> MonthlyAGNPPcarbon { get { return monthlyAGNPPC; } set { monthlyAGNPPC = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Belowground Net Primary Productivity (g C/m2) /// </summary> public static ISiteVar<double[]> MonthlyBGNPPcarbon { get { return monthlyBGNPPC; } set { monthlyBGNPPC = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of heterotrophic respiration, i.e. CO2 loss from decomposition (g C/m2) /// </summary> //public static ISiteVar<double[]> MonthlyHeterotrophicResp //{ // get { // return monthlyHeteroResp; // } // set { // monthlyHeteroResp = value; // } //} //--------------------------------------------------------------------- /// <summary> /// A summary of Net Ecosystem Exchange (g C/m2), from a flux tower's perspective, /// whereby positive values indicate terrestrial C loss, negative values indicate C gain. /// Replace SourceSink? 
/// </summary> public static ISiteVar<double[]> MonthlyNEE { get { return monthlyNEE; } set { monthlyNEE = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of N leaching /// </summary> public static ISiteVar<double[]> MonthlyStreamN { get { return monthlyStreamN; } set { monthlyStreamN = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Monthly LAI /// </summary> //public static ISiteVar<double[]> MonthlyLAI //{ // get // { // return MonthlyLAI; // } // set // { // MonthlyLAI = value; // } //} //--------------------------------------------------------------------- /// <summary> /// A summary of Monthly SoilWaterContent /// </summary> //public static ISiteVar<double[]> MonthlySoilWaterContent //{ // get // { // return monthlySoilWaterContent; // } // set // { // monthlySoilWaterContent = value; // } //} //--------------------------------------------------------------------- /// <summary> /// Water loss /// </summary> public static ISiteVar<Layer> SourceSink { get { return sourceSink; } set { sourceSink = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of N uptake (g N/m2) /// </summary> public static ISiteVar<double> TotalNuptake { get { return totalNuptake; } set { totalNuptake = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of frass deposition (g C/m2) /// </summary> public static ISiteVar<double> FrassC { get { return frassC; } set { frassC = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of LAI (m2/m2) /// </summary> public static ISiteVar<double> LAI { get { return lai; } set { lai = value; } } //--------------------------------------------------------------------- /// <summary> /// A summary of Annual Water Budget (PPT - AET) /// </summary> //public static ISiteVar<double> AnnualWaterBalance //{ // get // { // return annualPPT_AET; // } // set // { // annualPPT_AET = value; // } //} /// <summary> /// A summary of Soil Moisture (PET - AET) /// </summary> //public static ISiteVar<double> AnnualClimaticWaterDeficit //{ // get // { // return annualClimaticWaterDeficit; // } // set // { // annualClimaticWaterDeficit = value; // } //} } }
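    //---------------------------------------------------------------------
    // Illustrative usage sketch (editor addition, not part of the original source).
    // A minimal example of how a succession extension's annual update might use the
    // SiteVars API shown above: reset the per-year accumulators for every active site
    // and total the live (non-young) biomass. Only ResetAnnualValues and
    // ActualSiteBiomass come from the class above; the example class name, the idea
    // of summing biomass here, and the namespace/usings of the file above are assumed.
    public static class AnnualUpdateExample
    {
        public static double ResetAndSumBiomass()
        {
            double landscapeBiomass = 0.0;
            foreach (ActiveSite site in PlugIn.ModelCore.Landscape)
            {
                // Zero the per-year accumulators (NPP, mineralization, water deficits, ...).
                SiteVars.ResetAnnualValues(site);

                // Live biomass at the site, excluding young cohorts (see ActualSiteBiomass above).
                landscapeBiomass += SiteVars.ActualSiteBiomass(site);
            }
            return landscapeBiomass;
        }
    }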
//----------------------------------------------------------------------------- // Filename: SIPRequest.cs // // Description: SIP Request. // // History: // 20 Oct 2005 Aaron Clauson Created. // // License: // This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php // // Copyright (c) 2010 Aaron Clauson ([email protected]), SIP Sorcery Ltd, (www.sipsorcery.com) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that // the following conditions are met: // // Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of SIP Sorcery PTY LTD. // nor the names of its contributors may be used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, // BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, // OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, // OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. //----------------------------------------------------------------------------- using System; using System.Collections.Generic; using System.Net; using System.Text; using System.Text.RegularExpressions; using SIPSorcery.Sys; using log4net; #if UNITTEST using Microsoft.VisualStudio.TestTools.UnitTesting; //using NUnit.Framework; #endif namespace SIPSorcery.SIP { /// <bnf> /// Method SP Request-URI SP SIP-Version CRLF /// *message-header /// CRLF /// [ message-body ] /// /// Methods: REGISTER, INVITE, ACK, CANCEL, BYE, OPTIONS /// SIP-Version: SIP/2.0 /// /// SIP-Version = "SIP" "/" 1*DIGIT "." 1*DIGIT /// </bnf> public class SIPRequest { private static ILog logger = AssemblyState.logger; private delegate bool IsLocalSIPSocketDelegate(string socket, SIPProtocolsEnum protocol); private static string m_CRLF = SIPConstants.CRLF; private static string m_sipFullVersion = SIPConstants.SIP_FULLVERSION_STRING; private static string m_sipVersion = SIPConstants.SIP_VERSION_STRING; private static int m_sipMajorVersion = SIPConstants.SIP_MAJOR_VERSION; private static int m_sipMinorVersion = SIPConstants.SIP_MINOR_VERSION; public string SIPVersion = m_sipVersion; public int SIPMajorVersion = m_sipMajorVersion; public int SIPMinorVersion = m_sipMinorVersion; public SIPMethodsEnum Method; public string UnknownMethod = null; public SIPURI URI; public SIPHeader Header; public string Body; public SIPRoute ReceivedRoute; public DateTime Created = DateTime.Now; public SIPEndPoint RemoteSIPEndPoint; // The remote IP socket the request was received from or sent to. 
public SIPEndPoint LocalSIPEndPoint; // The local SIP socket the request was received on or sent from. private SIPRequest() { //Created++; } public SIPRequest(SIPMethodsEnum method, string uri) { try { Method = method; URI = SIPURI.ParseSIPURI(uri); SIPVersion = m_sipFullVersion; } catch (Exception excp) { logger.Error("Exception SIPRequest ctor. " + excp.Message); throw; } } public SIPRequest(SIPMethodsEnum method, SIPURI uri) { //Created++; Method = method; URI = uri; SIPVersion = m_sipFullVersion; } public static SIPRequest ParseSIPRequest(SIPMessage sipMessage) { string uriStr = null; try { SIPRequest sipRequest = new SIPRequest(); sipRequest.LocalSIPEndPoint = sipMessage.LocalSIPEndPoint; sipRequest.RemoteSIPEndPoint = sipMessage.RemoteSIPEndPoint; string statusLine = sipMessage.FirstLine; int firstSpacePosn = statusLine.IndexOf(" "); string method = statusLine.Substring(0, firstSpacePosn).Trim(); sipRequest.Method = SIPMethods.GetMethod(method); if (sipRequest.Method == SIPMethodsEnum.UNKNOWN) { sipRequest.UnknownMethod = method; logger.Warn("Unknown SIP method received " + sipRequest.UnknownMethod + "."); } statusLine = statusLine.Substring(firstSpacePosn).Trim(); int secondSpacePosn = statusLine.IndexOf(" "); if (secondSpacePosn != -1) { uriStr = statusLine.Substring(0, secondSpacePosn); sipRequest.URI = SIPURI.ParseSIPURI(uriStr); sipRequest.SIPVersion = statusLine.Substring(secondSpacePosn, statusLine.Length - secondSpacePosn).Trim(); sipRequest.Header = SIPHeader.ParseSIPHeaders(sipMessage.SIPHeaders); sipRequest.Body = sipMessage.Body; return sipRequest; } else { throw new SIPValidationException(SIPValidationFieldsEnum.Request, "URI was missing on Request."); } } catch (SIPValidationException) { throw; } catch (Exception excp) { logger.Error("Exception parsing SIP Request. " + excp.Message); logger.Error(sipMessage.RawMessage); throw new SIPValidationException(SIPValidationFieldsEnum.Request, "Unknown error parsing SIP Request"); } } public static SIPRequest ParseSIPRequest(string sipMessageStr) { try { SIPMessage sipMessage = SIPMessage.ParseSIPMessage(sipMessageStr, null, null); return SIPRequest.ParseSIPRequest(sipMessage); } catch (SIPValidationException) { throw; } catch (Exception excp) { logger.Error("Exception ParseSIPRequest. " + excp.Message); logger.Error(sipMessageStr); throw new SIPValidationException(SIPValidationFieldsEnum.Request, "Unknown error parsing SIP Request"); } } public new string ToString() { try { string methodStr = (Method != SIPMethodsEnum.UNKNOWN) ? Method.ToString() : UnknownMethod; string message = methodStr + " " + URI.ToString() + " " + SIPVersion + m_CRLF + this.Header.ToString(); if(Body != null) { message += m_CRLF + Body; } else { message += m_CRLF; } return message; } catch(Exception excp) { logger.Error("Exception SIPRequest ToString. " + excp.Message); throw excp; } } /// <summary> /// Creates an identical copy of the SIP Request for the caller. /// </summary> /// <returns>New copy of the SIPRequest.</returns> public SIPRequest Copy() { return ParseSIPRequest(this.ToString()); } public string CreateBranchId() { string routeStr = (Header.Routes != null) ? Header.Routes.ToString() : null; string toTagStr = (Header.To != null) ? Header.To.ToTag : null; string fromTagStr = (Header.From != null) ? Header.From.FromTag : null; string topViaStr = (Header.Vias != null && Header.Vias.TopViaHeader != null) ? 
Header.Vias.TopViaHeader.ToString() : null; return CallProperties.CreateBranchId( SIPConstants.SIP_BRANCH_MAGICCOOKIE, toTagStr, fromTagStr, Header.CallId, URI.ToString(), topViaStr, Header.CSeq, routeStr, Header.ProxyRequire, null); } /// <summary> /// Determines if this SIP header is a looped header. The basis for the decision is the branchid in the Via header. If the branchid for a new /// header computes to the same branchid as a Via header already in the SIP header then it is considered a loop. /// </summary> /// <returns>True if this header is a loop otherwise false.</returns> public bool IsLoop(string ipAddress, int port, string currentBranchId) { foreach(SIPViaHeader viaHeader in Header.Vias.Via) { if(viaHeader.Host == ipAddress && viaHeader.Port == port) { if(viaHeader.Branch == currentBranchId) { return true; } } } return false; } public bool IsValid(out SIPValidationFieldsEnum errorField, out string errorMessage) { errorField = SIPValidationFieldsEnum.Unknown; errorMessage = null; if (Header.Vias.Length == 0) { errorField = SIPValidationFieldsEnum.ViaHeader; errorMessage = "No Via headers"; return false; } return true; } //~SIPRequest() //{ // Destroyed++; //} } }
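//-----------------------------------------------------------------------------
// Illustrative usage sketch (editor addition, not part of the original source).
// A minimal example of parsing a raw request with the static
// SIPRequest.ParseSIPRequest(string) shown above and reading the strongly typed
// fields it populates. The message text below is a hand-written sample; whether
// a particular message passes full header validation depends on
// SIPHeader.ParseSIPHeaders, which is not shown in this file.
//-----------------------------------------------------------------------------
namespace SIPSorcery.SIP.Examples
{
    public static class SIPRequestParseExample
    {
        public static SIPRequest ParseSampleOptions()
        {
            string crlf = SIPConstants.CRLF;
            string raw =
                "OPTIONS sip:alice@example.com SIP/2.0" + crlf +
                "Via: SIP/2.0/UDP 192.168.0.10:5060;branch=z9hG4bKexample" + crlf +
                "Max-Forwards: 70" + crlf +
                "To: <sip:alice@example.com>" + crlf +
                "From: <sip:bob@example.com>;tag=1234" + crlf +
                "Call-ID: example-call-id" + crlf +
                "CSeq: 1 OPTIONS" + crlf +
                "Content-Length: 0" + crlf + crlf;

            SIPRequest request = SIPRequest.ParseSIPRequest(raw);

            // Method, URI, SIPVersion, Header and Body are now populated and can be
            // inspected, copied (Copy) or re-serialised (ToString).
            return request;
        }
    }
}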
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections; using System.Collections.Generic; using Xunit; using SL = SortedList_SortedListUtils; using SortedList_ICollection; using TestSupport.Common_TestSupport; using TestSupport.Collections.SortedList_GenericICollectionTest; using TestSupport.Collections.SortedList_GenericIEnumerableTest; namespace SortedListKeys { public class Driver<KeyType, ValueType> { private Test m_test; public Driver(Test test) { m_test = test; } public void TestVanilla(KeyType[] keys, ValueType[] values) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); for (int i = 0; i < keys.Length - 1; i++) _dic.Add(keys[i], values[i]); ICollection<KeyType> _col = _dic.Keys; m_test.Eval(_col.Count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", _col.Count)); IEnumerator<KeyType> _enum = _col.GetEnumerator(); int count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey(_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_enum.Current))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", count)); KeyType[] _keys = new KeyType[_dic.Count]; _col.CopyTo(_keys, 0); for (int i = 0; i < keys.Length - 1; i++) m_test.Eval(_dic.ContainsKey(_keys[i]), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_keys[i]))); count = 0; foreach (KeyType currKey in _dic.Keys) { m_test.Eval(_dic.ContainsKey(currKey), String.Format("Err_53497gs! Not equal {0}", _dic.ContainsKey(currKey))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", count)); try { //The behavior here is undefined as long as we don't AV were fine KeyType item = _enum.Current; } catch (Exception) { } if (keys.Length > 0) { _dic.Add(keys[keys.Length - 1], values[values.Length - 1]); try { _enum.MoveNext(); m_test.Eval(false, "Expected InvalidOperationException, but got no exception."); } catch (InvalidOperationException) { } catch (Exception E) { m_test.Eval(false, "Expected InvalidOperationException, but got unknown exception: " + E); } } } public void TestModify(KeyType[] keys, ValueType[] values, KeyType[] newKeys) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); for (int i = 0; i < keys.Length; i++) _dic.Add(keys[i], values[i]); ICollection<KeyType> _col = _dic.Keys; for (int i = 0; i < keys.Length; i++) _dic.Remove(keys[i]); m_test.Eval(_col.Count == 0, String.Format("Err_3497gs! Not equal {0}", _col.Count)); IEnumerator<KeyType> _enum = _col.GetEnumerator(); int count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey(_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_enum.Current))); count++; } m_test.Eval(count == 0, String.Format("Err_3497gs! Not equal {0}", count)); for (int i = 0; i < keys.Length; i++) _dic.Add(newKeys[i], values[i]); m_test.Eval(_col.Count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", _col.Count)); _enum = _col.GetEnumerator(); count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey(_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_enum.Current))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! 
Not equal {0}", count)); KeyType[] _keys = new KeyType[_dic.Count]; _col.CopyTo(_keys, 0); for (int i = 0; i < keys.Length; i++) m_test.Eval(_dic.ContainsKey(_keys[i]), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_keys[i]))); } /** public void TestNonExistentKeys(KeyType[] keys, ValueType[] values, KeyType[] nonExistentKeys) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); for(int i=0; i<keys.Length; i++) _dic.Add(keys[i], values[i]); for(int i=0; i<nonExistentKeys.Length; i++) { try{ ValueType v = _dic[nonExistentKeys[i]]; m_test.Eval(false, "Err_23raf! Exception not thrown"); }catch(ArgumentException){ }catch(Exception ex){ m_test.Eval(false, String.Format("Err_387tsg! Wrong exception thrown: {0}", ex)); } } } public void TestParm(KeyType[] keys, ValueType[] values, KeyType value) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); try{ ValueType v = _dic[value]; m_test.Eval(false, "Err_23raf! Exception not thrown"); }catch(ArgumentNullException){ }catch(Exception ex){ m_test.Eval(false, String.Format("Err_387tsg! Wrong exception thrown: {0}", ex)); } } **/ public void NonGenericIDictionaryTestVanilla(KeyType[] keys, ValueType[] values) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); IDictionary _idic = _dic; for (int i = 0; i < keys.Length - 1; i++) _dic.Add(keys[i], values[i]); ICollection _col = _idic.Keys; m_test.Eval(_col.Count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", _col.Count)); IEnumerator _enum = _col.GetEnumerator(); int count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey((KeyType)_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey((KeyType)_enum.Current))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", count)); KeyType[] _keys = new KeyType[_dic.Count]; _col.CopyTo(_keys, 0); for (int i = 0; i < keys.Length - 1; i++) m_test.Eval(_dic.ContainsKey(_keys[i]), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey(_keys[i]))); _enum.Reset(); count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey((KeyType)_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey((KeyType)_enum.Current))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", count)); _keys = new KeyType[_dic.Count]; _col.CopyTo(_keys, 0); for (int i = 0; i < keys.Length - 1; i++) m_test.Eval(_dic.ContainsKey(_keys[i]), String.Format("Err_3497gs! 
Not equal {0}", _dic.ContainsKey(_keys[i]))); try { _dic.ContainsKey((KeyType)_enum.Current); m_test.Eval(false, "Expected InvalidOperationException, but got no exception."); } catch (InvalidOperationException) { } catch (Exception E) { m_test.Eval(false, "Expected InvalidOperationException, but got unknown exception: " + E); } if (keys.Length > 0) { _dic.Add(keys[keys.Length - 1], values[values.Length - 1]); try { _enum.MoveNext(); m_test.Eval(false, "Expected InvalidOperationException, but got no exception."); } catch (InvalidOperationException) { } catch (Exception E) { m_test.Eval(false, "Expected InvalidOperationException, but got unknown exception: " + E); } try { _enum.Reset(); m_test.Eval(false, "Expected InvalidOperationException, but got no exception."); } catch (InvalidOperationException) { } catch (Exception E) { m_test.Eval(false, "Expected InvalidOperationException, but got unknown exception: " + E); } } } public void NonGenericIDictionaryTestModify(KeyType[] keys, ValueType[] values, KeyType[] newKeys) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); IDictionary _idic = _dic; for (int i = 0; i < keys.Length; i++) _dic.Add(keys[i], values[i]); ICollection _col = _idic.Keys; for (int i = 0; i < keys.Length; i++) _dic.Remove(keys[i]); m_test.Eval(_col.Count == 0, String.Format("Err_3497gs! Not equal {0}", _col.Count)); IEnumerator _enum = _col.GetEnumerator(); int count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey((KeyType)_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey((KeyType)_enum.Current))); count++; } m_test.Eval(count == 0, String.Format("Err_3497gs! Not equal {0}", count)); for (int i = 0; i < keys.Length; i++) _dic.Add(newKeys[i], values[i]); m_test.Eval(_col.Count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", _col.Count)); _enum = _col.GetEnumerator(); count = 0; while (_enum.MoveNext()) { m_test.Eval(_dic.ContainsKey((KeyType)_enum.Current), String.Format("Err_3497gs! Not equal {0}", _dic.ContainsKey((KeyType)_enum.Current))); count++; } m_test.Eval(count == _dic.Count, String.Format("Err_3497gs! Not equal {0}", count)); KeyType[] _keys = new KeyType[_dic.Count]; _col.CopyTo(_keys, 0); for (int i = 0; i < keys.Length; i++) m_test.Eval(_dic.ContainsKey(_keys[i]), String.Format("Err_3497gs! 
Not equal {0}", _dic.ContainsKey(_keys[i]))); } public void TestVanillaIListReturned(KeyType[] keys, ValueType[] values, KeyType valueNotInList) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); IList<KeyType> _ilist; for (int i = 0; i < keys.Length; i++) _dic.Add(keys[i], values[i]); _ilist = _dic.Keys; //IsReadOnly m_test.Eval(_ilist.IsReadOnly == true, "Expected IsReadOnly of IList of Values to be true, but found " + _ilist.IsReadOnly); //This get for (int i = 0; i < keys.Length; i++) { m_test.Eval(Array.IndexOf(keys, _ilist[i]) != -1, "Expected This at " + i + " to be found in original array , but it was not"); } try { Console.WriteLine(_ilist[-1]); m_test.Eval(false, "Expected ArgumentOutOfRangeException, but found value of " + _ilist[-1]); } catch (ArgumentOutOfRangeException) { } catch (Exception E) { m_test.Eval(false, "Expected ArgumentOutOfRangeException, but found " + E); } try { Console.WriteLine(_ilist[keys.Length]); m_test.Eval(false, "Expected ArgumentOutOfRangeException, but found value of " + _ilist[keys.Length]); } catch (ArgumentOutOfRangeException) { } catch (Exception E) { m_test.Eval(false, "Expected ArgumentOutOfRangeException, but found " + E); } //Add try { _ilist.Add(keys[keys.Length - 1]); m_test.Eval(false, "Expected NotSupportedException, but was able to Add a value with no key"); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } //Clear try { _ilist.Clear(); m_test.Eval(false, "Expected NotSupportedException, but was able to Clear a value list"); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } //Contains for (int i = 0; i < keys.Length; i++) { m_test.Eval(_ilist.Contains(keys[i]), "Expected Contains of item " + i + " with value " + keys[i] + " to return true, but found false"); } //IndexOf for (int i = 0; i < keys.Length; i++) { m_test.Eval(_ilist.IndexOf(keys[i]) < keys.Length && _ilist.IndexOf(keys[i]) >= 0, "Expected IndexOf of item " + i + " with value " + keys[i] + " to return something within the allowed length but found " + _ilist.IndexOf(keys[i])); } m_test.Eval(_ilist.IndexOf(valueNotInList) == -1, "Expected IndexOf of item not in list, " + valueNotInList + " to return -1, but found " + _ilist.IndexOf(valueNotInList)); //if(!typeof(KeyType).IsSubclassOf(typeof(System.ValueType))) if (!(keys[0] is System.ValueType)) { try { _ilist.IndexOf((KeyType)(Object)null); m_test.Eval(false, "Expected ArgumentNullException when attempting to find IndexOf for null, but did not get one."); } catch (ArgumentNullException) { } catch (Exception E) { m_test.Eval(false, "Expected ArgumentNullException when attempting to find IndexOf for null, but got unknown exception: " + E); } } //Insert try { _ilist.Insert(0, keys[keys.Length - 1]); m_test.Eval(false, "Expected NotSupportedException, but was able to Insert a value with no key"); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } //Remove try { _ilist.Remove(keys[keys.Length - 1]); m_test.Eval(false, "Expected NotSupportedException, but was able to Insert a value with no key"); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } //RemoveAt try { _ilist.RemoveAt(0); m_test.Eval(false, "Expected NotSupportedException, but was able to Insert a value with no key"); } catch 
(NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } //This set try { _ilist[keys.Length - 1] = keys[keys.Length - 1]; m_test.Eval(false, "Expected NotSupportedException, but was able to assign via This a value with no key"); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } try { _ilist[-1] = keys[keys.Length - 1]; m_test.Eval(false, "Expected NotSupportedException, but found value of " + _ilist[-1]); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } try { _ilist[keys.Length] = keys[keys.Length - 1]; m_test.Eval(false, "Expected NotSupportedException, but found value of " + _ilist[keys.Length]); } catch (NotSupportedException) { } catch (Exception E) { m_test.Eval(false, "Expected NotSupportedException, but found " + E); } } public void TestVanillaICollectionReturned(KeyType[] keys, ValueType[] values) { SortedList<KeyType, ValueType> _dic = new SortedList<KeyType, ValueType>(); KeyType[] arrayToCheck = new KeyType[keys.Length]; for (int i = 0; i < keys.Length; i++) { arrayToCheck[i] = keys[i]; _dic.Add(keys[i], values[i]); } Array.Sort(arrayToCheck); var tester = new ICollectionTester<KeyType>(); tester.RunTest(m_test, ((IDictionary)_dic).Keys, keys.Length, false, ((IDictionary)_dic).SyncRoot, arrayToCheck); } public bool VerifyICollection_T(GenerateItem<KeyType> keyGenerator, GenerateItem<ValueType> valueGenerator, int numItems) { Dictionary<KeyType, ValueType> d = new Dictionary<KeyType, ValueType>(); KeyType[] keys = new KeyType[numItems]; ICollection_T_Test<KeyType> iCollectionTest; bool retValue = true; for (int i = 0; i < numItems; ++i) { keys[i] = keyGenerator(); d.Add(keys[i], valueGenerator()); } iCollectionTest = new ICollection_T_Test<KeyType>(m_test, d.Keys, keyGenerator, keys, true); iCollectionTest.ItemsMustBeUnique = true; iCollectionTest.ItemsMustBeNonNull = default(KeyType) == null; iCollectionTest.CollectionOrder = TestSupport.CollectionOrder.Unspecified; retValue &= m_test.Eval(iCollectionTest.RunAllTests(), "Err_98382apeuie System.Collections.Generic.ICollection<KeyType> tests FAILED"); return retValue; } } public class get_Keys { public class IntGenerator { private int _index; public IntGenerator() { _index = 0; } public int NextValue() { return _index++; } public Object NextValueObject() { return (Object)NextValue(); } } public class StringGenerator { private int _index; public StringGenerator() { _index = 0; } public String NextValue() { return (_index++).ToString(); } public Object NextValueObject() { return (Object)NextValue(); } } [Fact] public static void RunTests() { Test test = new Test(); IntGenerator intGenerator = new IntGenerator(); StringGenerator stringGenerator = new StringGenerator(); intGenerator.NextValue(); stringGenerator.NextValue(); //This mostly follows the format established by the original author of these tests //Scenario 1: Vanilla - fill in an SortedList with 10 keys and check this property Driver<int, int> IntDriver = new Driver<int, int>(test); Driver<SL.SimpleRef<String>, SL.SimpleRef<String>> simpleRef = new Driver<SL.SimpleRef<String>, SL.SimpleRef<String>>(test); Driver<SL.SimpleRef<int>, SL.SimpleRef<int>> simpleVal = new Driver<SL.SimpleRef<int>, SL.SimpleRef<int>>(test); SL.SimpleRef<int>[] simpleInts; SL.SimpleRef<String>[] simpleStrings; int[] ints; int count; count = 1000; simpleInts = 
SL.SortedListUtils.GetSimpleInts(count); simpleStrings = SL.SortedListUtils.GetSimpleStrings(count); ints = new int[count]; for (int i = 0; i < count; i++) ints[i] = i; IntDriver.TestVanilla(ints, ints); simpleRef.TestVanilla(simpleStrings, simpleStrings); simpleVal.TestVanilla(simpleInts, simpleInts); IntDriver.NonGenericIDictionaryTestVanilla(ints, ints); simpleRef.NonGenericIDictionaryTestVanilla(simpleStrings, simpleStrings); simpleVal.NonGenericIDictionaryTestVanilla(simpleInts, simpleInts); IntDriver.TestVanillaIListReturned(ints, ints, -1); simpleRef.TestVanillaIListReturned(simpleStrings, simpleStrings, new SL.SimpleRef<string>("bozo")); simpleVal.TestVanillaIListReturned(simpleInts, simpleInts, new SL.SimpleRef<int>(-1)); IntDriver.TestVanillaICollectionReturned(ints, ints); simpleRef.TestVanillaICollectionReturned(simpleStrings, simpleStrings); simpleVal.TestVanillaICollectionReturned(simpleInts, simpleInts); //Scenario 2: Check for an empty SortedList IntDriver.TestVanilla(new int[0], new int[0]); simpleRef.TestVanilla(new SL.SimpleRef<String>[0], new SL.SimpleRef<String>[0]); simpleVal.TestVanilla(new SL.SimpleRef<int>[0], new SL.SimpleRef<int>[0]); IntDriver.NonGenericIDictionaryTestVanilla(new int[0], new int[0]); simpleRef.NonGenericIDictionaryTestVanilla(new SL.SimpleRef<String>[0], new SL.SimpleRef<String>[0]); simpleVal.NonGenericIDictionaryTestVanilla(new SL.SimpleRef<int>[0], new SL.SimpleRef<int>[0]); //Scenario 3: Check the underlying reference. Change the SortedList afterwards and examine ICollection keys and make sure that the //change is reflected SL.SimpleRef<int>[] simpleInts_1; SL.SimpleRef<String>[] simpleStrings_1; int[] ints_1; SL.SimpleRef<int>[] simpleInts_2; SL.SimpleRef<String>[] simpleStrings_2; int[] ints_2; int half = count / 2; simpleInts_1 = new SL.SimpleRef<int>[half]; simpleStrings_1 = new SL.SimpleRef<String>[half]; ints_2 = new int[half]; simpleInts_2 = new SL.SimpleRef<int>[half]; simpleStrings_2 = new SL.SimpleRef<String>[half]; ints_1 = new int[half]; for (int i = 0; i < half; i++) { simpleInts_1[i] = simpleInts[i]; simpleStrings_1[i] = simpleStrings[i]; ints_1[i] = ints[i]; simpleInts_2[i] = simpleInts[i + half]; simpleStrings_2[i] = simpleStrings[i + half]; ints_2[i] = ints[i + half]; } IntDriver.TestModify(ints_1, ints_1, ints_2); simpleRef.TestModify(simpleStrings_1, simpleStrings_1, simpleStrings_2); simpleVal.TestModify(simpleInts_1, simpleInts_1, simpleInts_2); IntDriver.NonGenericIDictionaryTestModify(ints_1, ints_1, ints_2); simpleRef.NonGenericIDictionaryTestModify(simpleStrings_1, simpleStrings_1, simpleStrings_2); simpleVal.NonGenericIDictionaryTestModify(simpleInts_1, simpleInts_1, simpleInts_2); //Scenario 4: Change keys via ICollection (how?) and examine SortedList //How indeed? 
//Verify ICollection<K> through ICollection testing suite Driver<int, string> intStringDriver = new Driver<int, string>(test); Driver<string, int> stringIntDriver = new Driver<string, int>(test); test.Eval(intStringDriver.VerifyICollection_T(new GenerateItem<int>(intGenerator.NextValue), new GenerateItem<string>(stringGenerator.NextValue), 0), "Err_085184aehdke Test Int32, String Empty Dictionary FAILED\n"); test.Eval(intStringDriver.VerifyICollection_T(new GenerateItem<int>(intGenerator.NextValue), new GenerateItem<string>(stringGenerator.NextValue), 1), "Err_05164anhekjd Test Int32, String Dictionary with 1 item FAILED\n"); test.Eval(intStringDriver.VerifyICollection_T(new GenerateItem<int>(intGenerator.NextValue), new GenerateItem<string>(stringGenerator.NextValue), 16), "Err_1088ajeid Test Int32, String Dictionary with 16 items FAILED\n"); test.Eval(stringIntDriver.VerifyICollection_T(new GenerateItem<string>(stringGenerator.NextValue), new GenerateItem<int>(intGenerator.NextValue), 0), "Err_31288ajkekd Test String, Int32 Empty Dictionary FAILED\n"); test.Eval(stringIntDriver.VerifyICollection_T(new GenerateItem<string>(stringGenerator.NextValue), new GenerateItem<int>(intGenerator.NextValue), 1), "Err_0215548aheuid Test String, Int32 Dictionary with 1 item FAILED\n"); test.Eval(stringIntDriver.VerifyICollection_T(new GenerateItem<string>(stringGenerator.NextValue), new GenerateItem<int>(intGenerator.NextValue), 16), "Err_21057ajeipzd Test String, Int32 Dictionary with 16 items FAILED\n"); Assert.True(test.Pass); } } }
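// Illustrative sketch (editor addition, not part of the original source).
// A stand-alone illustration of the two behaviours the driver above exercises:
// SortedList<TKey, TValue>.Keys is a live, read-only, sorted view of the keys,
// and enumerators over that view are invalidated when the list is modified.
// The class and method names are illustrative; the usual System and
// System.Collections.Generic using directives are assumed.
public static class SortedListKeysBehaviourExample
{
    public static void Demonstrate()
    {
        var list = new SortedList<int, string> { { 2, "b" }, { 1, "a" } };
        IList<int> keys = list.Keys;              // sorted view: 1, 2

        list.Add(3, "c");
        // The view reflects the addition without being re-fetched.
        System.Diagnostics.Debug.Assert(keys.Count == 3);

        var enumerator = keys.GetEnumerator();
        enumerator.MoveNext();
        list.Remove(3);
        try
        {
            enumerator.MoveNext();                // mutation invalidates the enumerator
        }
        catch (InvalidOperationException)
        {
            // Expected: mirrors the InvalidOperationException checks in the tests above.
        }
    }
}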
using System; using System.Collections.Generic; using System.Text; namespace Tibia.Addresses { public static class DatItem { public static uint StepItems; public static uint Width; public static uint Height; public static uint MaxSizeInPixels; public static uint Layers; public static uint PatternX; public static uint PatternY; public static uint PatternDepth; public static uint Phase; public static uint Sprite; public static uint Flags; public static uint CanEquip; public static uint CanLookAt; public static uint WalkSpeed; public static uint TextLimit; public static uint LightRadius; public static uint LightColor; public static uint ShiftX; public static uint ShiftY; public static uint WalkHeight; public static uint Automap; // Minimap color public static uint LensHelp; public enum Flag : uint { IsGround, TopOrder1, TopOrder2, TopOrder3, IsContainer, IsStackable, IsCorpse, IsUsable, IsRune, IsWritable, IsReadable, IsFluidContainer, IsSplash, Blocking, IsImmovable, BlocksMissiles, BlocksPath, IsPickupable, IsHangable, IsHangableHorizontal, IsHangableVertical, IsRotatable, IsLightSource, Floorchange, IsShifted, HasHeight, IsLayer, IsIdleAnimation, HasAutoMapColor, HasHelpLens, Unknown, IgnoreStackpos } private static readonly Dictionary<Flag, uint> flagOffsets860 = new Dictionary<Flag, uint>() { { Flag.IsGround, 1 }, { Flag.TopOrder1, 2 }, { Flag.TopOrder2, 4 }, { Flag.TopOrder3, 8 }, { Flag.IsContainer, 16 }, { Flag.IsStackable, 32 }, { Flag.IsCorpse, 64 }, { Flag.IsUsable, 128 }, { Flag.IsWritable, 256 }, { Flag.IsReadable, 512 }, { Flag.IsFluidContainer, 1024 }, { Flag.IsSplash, 2048 }, { Flag.Blocking, 4096 }, { Flag.IsImmovable, 8192 }, { Flag.BlocksMissiles, 16384 }, { Flag.BlocksPath, 32768 }, { Flag.IsPickupable, 65536 }, { Flag.IsHangable, 131072 }, { Flag.IsHangableHorizontal, 262144 }, { Flag.IsHangableVertical, 524288 }, { Flag.IsRotatable, 1048576 }, { Flag.IsLightSource, 2097152 }, { Flag.Floorchange, 4194304 }, { Flag.IsShifted, 8388608 }, { Flag.HasHeight, 16777216 }, { Flag.IsLayer, 33554432 }, { Flag.IsIdleAnimation, 67108864 }, { Flag.HasAutoMapColor, 134217728 }, { Flag.HasHelpLens, 268435456 }, { Flag.Unknown, 536870912 }, { Flag.IgnoreStackpos, 1073741824 } }; private static readonly Dictionary<Flag, uint> flagOffsetsPre860 = new Dictionary<Flag, uint>() { { Flag.IsGround, 1 }, { Flag.TopOrder1, 2 }, { Flag.TopOrder2, 4 }, { Flag.TopOrder3, 8 }, { Flag.IsContainer, 16 }, { Flag.IsStackable, 32 }, { Flag.IsCorpse, 64 }, { Flag.IsUsable, 128 }, { Flag.IsRune, 256 }, { Flag.IsWritable, 512 }, { Flag.IsReadable, 1024 }, { Flag.IsFluidContainer, 2048 }, { Flag.IsSplash, 4096 }, { Flag.Blocking, 8192 }, { Flag.IsImmovable, 16384 }, { Flag.BlocksMissiles, 32768 }, { Flag.BlocksPath, 65536 }, { Flag.IsPickupable, 131072 }, { Flag.IsHangable, 262144 }, { Flag.IsHangableHorizontal, 524288 }, { Flag.IsHangableVertical, 1048576 }, { Flag.IsRotatable, 2097152 }, { Flag.IsLightSource, 4194304 }, { Flag.Floorchange, 8388608 }, { Flag.IsShifted, 16777216 }, { Flag.HasHeight, 33554432 }, { Flag.IsLayer, 67108864 }, { Flag.IsIdleAnimation, 134217728 }, { Flag.HasAutoMapColor, 268435456 }, { Flag.HasHelpLens, 536870912 }, { Flag.Unknown, 1073741824 }, { Flag.IgnoreStackpos, 2147483648 } }; private static readonly Dictionary<Flag, uint> flagOffsetsPre850 = new Dictionary<Flag, uint>() { { Flag.IsGround, 1 }, { Flag.TopOrder1, 2 }, { Flag.TopOrder2, 4 }, { Flag.TopOrder3, 8 }, { Flag.IsContainer, 16 }, { Flag.IsStackable, 32 }, { Flag.IsCorpse, 64 }, { Flag.IsUsable, 128 }, { Flag.IsRune, 
256 }, { Flag.IsWritable, 512 }, { Flag.IsReadable, 1024 }, { Flag.IsFluidContainer, 2048 }, { Flag.IsSplash, 4096 }, { Flag.Blocking, 8192 }, { Flag.IsImmovable, 16384 }, { Flag.BlocksMissiles, 32768 }, { Flag.BlocksPath, 65536 }, { Flag.IsPickupable, 131072 }, { Flag.IsHangable, 262144 }, { Flag.IsHangableHorizontal, 524288 }, { Flag.IsHangableVertical, 1048576 }, { Flag.IsRotatable, 2097152 }, { Flag.IsLightSource, 4194304 }, { Flag.Floorchange, 8388608 }, { Flag.IsShifted, 16777216 }, { Flag.HasHeight, 33554432 }, { Flag.IsLayer, 67108864 }, { Flag.IsIdleAnimation, 134217728 }, { Flag.HasAutoMapColor, 268435456 }, { Flag.HasHelpLens, 536870912 }, { Flag.Unknown, 1073741824 } }; public static uint GetFlagOffset(uint version, Flag flag) { uint offset; if (version >= 860) { // offset is set to zero if flag does not exist flagOffsets860.TryGetValue(flag, out offset); } else if (version <= 857 && version >= 850) { flagOffsetsPre860.TryGetValue(flag, out offset); } else { flagOffsetsPre850.TryGetValue(flag, out offset); } return offset; } public enum Help { IsLadder = 0x44C, IsSewer = 0x44D, IsDoor = 0x450, IsDoorWithLock = 0x451, IsRopeSpot = 0x44E, IsSwitch = 0x44F, IsStairs = 0x452, IsMailbox = 0x453, IsDepot = 0x454, IsTrash = 0x455, IsHole = 0x456, HasSpecialDescription = 0x457, IsReadOnly = 0x458 } } }
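// Illustrative sketch (editor addition, not part of the original source).
// A minimal example of combining GetFlagOffset with a raw flags value read from
// the client: look up the version-specific bit for a flag and test it. The
// "rawFlags" value, the helper and the Examples namespace are assumptions made
// for illustration only.
namespace Tibia.Addresses.Examples
{
    public static class DatItemFlagExample
    {
        // True when the version-specific bit for "flag" is set in rawFlags;
        // false when the flag does not exist for that version (offset of zero).
        public static bool HasFlag(uint rawFlags, uint version, DatItem.Flag flag)
        {
            uint bit = DatItem.GetFlagOffset(version, flag);
            return bit != 0 && (rawFlags & bit) != 0;
        }

        public static void Demonstrate()
        {
            uint rawFlags = 4096 | 32768; // Blocking | BlocksPath in the 8.60+ layout above

            bool blocking = HasFlag(rawFlags, 860, DatItem.Flag.Blocking);       // true
            bool pickupable = HasFlag(rawFlags, 860, DatItem.Flag.IsPickupable); // false

            System.Diagnostics.Debug.Assert(blocking && !pickupable);
        }
    }
}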
// // This code was created by Jeff Molofee '99 // // If you've found this code useful, please let me know. // // Visit me at www.demonews.com/hosted/nehe // //===================================================================== // Converted to C# and MonoMac by Kenneth J. Pouncey // http://www.cocoa-mono.org // // Copyright (c) 2011 Kenneth J. Pouncey // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Drawing; using MonoMac.Foundation; using MonoMac.AppKit; using MonoMac.CoreGraphics; using MonoMac.OpenGL; namespace NeHeLesson13 { public class BitmapFont { NSFont font; string fontName; float fontSize; // Base display list for the font set int baseDL; public BitmapFont (string fontName,float fontSize) { this.fontName = fontName; this.fontSize = fontSize; BuildFont(); } void BuildFont () { // 95 since if we do 96, we get the delete character... 
baseDL = GL.GenLists (95); font = NSFont.FromFontName (fontName, fontSize); if (font == null) Console.WriteLine ("Unable to create font: " + fontName); else MakeGLDisplayListFirst (' ', 95, baseDL); } // Create the set of display lists for the bitmaps bool MakeGLDisplayListFirst (char first, int count, int baseDL) { int curListIndex; NSColor blackColor; NSMutableDictionary attribDict; int dListNum; NSString currentChar; char currentUnichar; SizeF charSize; RectangleF charRect; NSImage theImage; bool retval; // Make sure the list isn't already under construction GL.GetInteger (GetPName.ListIndex, out curListIndex); if (curListIndex != 0) { Console.WriteLine ("Display list already under construction"); return false; } // Save pixel unpacking state GL.PushClientAttrib (ClientAttribMask.ClientPixelStoreBit); GL.PixelStore (PixelStoreParameter.UnpackSwapBytes, 0); GL.PixelStore (PixelStoreParameter.UnpackLsbFirst, 0); GL.PixelStore (PixelStoreParameter.UnpackSkipPixels, 0); GL.PixelStore (PixelStoreParameter.UnpackSkipRows, 0); GL.PixelStore (PixelStoreParameter.UnpackRowLength, 0); GL.PixelStore (PixelStoreParameter.UnpackAlignment, 0); blackColor = NSColor.Black; attribDict = new NSMutableDictionary (); attribDict.SetValueForKey (font, NSAttributedString.FontAttributeName); attribDict.SetValueForKey (NSColor.White, NSAttributedString.ForegroundColorAttributeName); attribDict.SetValueForKey (blackColor, NSAttributedString.BackgroundColorAttributeName); charRect.Location.X = charRect.Location.Y = 0; theImage = new NSImage (new SizeF (0,0)); retval = true; for (dListNum = baseDL, currentUnichar = first; currentUnichar < first + count; dListNum++, currentUnichar++) { currentChar = new NSString (Char.ToString (currentUnichar)); charSize = currentChar.StringSize (attribDict); charRect.Size = charSize; charRect = charRect.Integral (); if (charRect.Size.Width > 0 && charRect.Size.Height > 0) { theImage.Size = charRect.Size; theImage.LockFocus (); NSGraphicsContext.CurrentContext.ShouldAntialias = false; blackColor.Set (); NSBezierPath.FillRect (charRect); currentChar.DrawString (charRect, attribDict); theImage.UnlockFocus (); if (!MakeDisplayList(dListNum, theImage)) { retval = false; break; } } } return retval; } // Create one display list based on the given image. This assumes the image // uses 8-bit chunks to represent a sample bool MakeDisplayList (int listNum, NSImage theImage) { NSBitmapImageRep bitmap; int bytesPerRow, pixelsHigh, pixelsWide, samplesPerPixel; byte currentBit, byteValue; byte[] newBuffer; int rowIndex, colIndex; bitmap = new NSBitmapImageRep ( theImage.AsTiff (NSTiffCompression.None, 0) ); pixelsHigh = bitmap.PixelsHigh; pixelsWide = bitmap.PixelsWide; bytesPerRow = bitmap.BytesPerRow; samplesPerPixel = bitmap.SamplesPerPixel; newBuffer = new byte[(int)Math.Ceiling ((float)bytesPerRow / 8.0) * pixelsHigh]; byte[] bitmapBytesArray = new byte[(pixelsWide * pixelsHigh) * samplesPerPixel]; System.Runtime.InteropServices.Marshal.Copy (bitmap.BitmapData, bitmapBytesArray, 0, (pixelsWide * pixelsHigh) * samplesPerPixel); int curIdx = 0; /* * Convert the color bitmap into a true bitmap, ie, one bit per pixel. 
We * read at last row, write to first row as Cocoa and OpenGL have opposite * y origins */ for (rowIndex = pixelsHigh - 1; rowIndex >= 0; rowIndex--) { currentBit = 0x80; byteValue = 0; for (colIndex = 0; colIndex < pixelsWide; colIndex++) { if (bitmapBytesArray [rowIndex * bytesPerRow + colIndex * samplesPerPixel] > 0) byteValue |= currentBit; currentBit >>= 1; if (currentBit == 0) { newBuffer [curIdx++] = byteValue; currentBit = 0x80; byteValue = 0; } } /* * Fill out the last byte; extra is ignored by OpenGL, but each row * must start on a new byte */ if (currentBit != 0x80) newBuffer[curIdx++] = byteValue; } GL.NewList( listNum, ListMode.Compile); GL.Bitmap(pixelsWide, pixelsHigh, 0, 0, pixelsWide, 0, newBuffer); GL.EndList(); return true; } // Writes a text string out based on this objects font settings. public void RenderText (string text) { // Pushes the display list bits GL.PushAttrib (AttribMask.ListBit); // Sets the base character to space ' ' GL.ListBase (baseDL -' '); // Convert our string into a byte array for CallLists System.Text.UTF8Encoding encoding = new System.Text.UTF8Encoding (); byte[] textBytes = encoding.GetBytes (text); // Draws the display list text GL.CallLists (text.Length, ListNameType.UnsignedByte, textBytes); // Pops the display list bits GL.PopAttrib (); } } }
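// Illustrative usage sketch (editor addition, not part of the original source).
// A minimal example of how the BitmapFont class above might be driven from an
// OpenGL view: build the display lists once after the GL context is current,
// then set a raster position and render a string each frame. The example class,
// font choice and coordinates are assumptions; the same using directives as the
// file above (in particular MonoMac.OpenGL) are assumed.
namespace NeHeLesson13
{
    public static class BitmapFontUsageExample
    {
        static BitmapFont font;

        public static void Setup ()
        {
            // Builds the 95 character display lists (see BuildFont above).
            font = new BitmapFont ("Helvetica", 14.0f);
        }

        public static void DrawFrame ()
        {
            GL.Color3 (1.0f, 1.0f, 1.0f);
            GL.RasterPos2 (-0.9f, 0.0f);   // where the first character bitmap is placed
            font.RenderText ("NeHe Lesson 13 - Bitmap Fonts");
        }
    }
}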
using IrcClientCore; using OpenGraph_Net; using System; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.IO; using System.Linq; using System.Runtime.InteropServices.WindowsRuntime; using System.Security.Cryptography; using System.Text; using System.Threading.Tasks; using Windows.ApplicationModel.DataTransfer; using Windows.Foundation; using Windows.Foundation.Collections; using Windows.UI; using Windows.UI.Core; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Controls.Primitives; using Windows.UI.Xaml.Data; using Windows.UI.Xaml.Input; using Windows.UI.Xaml.Media; using Windows.UI.Xaml.Media.Animation; using Windows.UI.Xaml.Navigation; using WinIRC.Net; using WinIRC.Views; using WinIRC.Views.InlineViewers; // The User Control item template is documented at http://go.microsoft.com/fwlink/?LinkId=234236 namespace WinIRC.Ui { public sealed partial class MessageLineClassic : UserControl, INotifyPropertyChanged { public static readonly DependencyProperty MessageProperty = DependencyProperty.Register( "MessageItem", typeof(Message), typeof(MessageLineClassic), new PropertyMetadata(null)); public static readonly DependencyProperty CompactModeProperty = DependencyProperty.Register( "CompactMode", typeof(bool), typeof(MessageLineClassic), new PropertyMetadata(null)); private HyperlinkManager hyperlinkManager; private Uri lastUri; public event PropertyChangedEventHandler PropertyChanged; private void NotifyPropertyChanged(String propertyName = "") { PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName)); } public string Username { get { if (MessageItem == null) return ""; if (MessageItem.User.Contains("*")) { return "*"; } if (MessageItem.Type == MessageType.Normal) { return String.Format("<{0}>", MessageItem.User); } else if (MessageItem.Type == MessageType.Notice) { return String.Format("->{0}<-", MessageItem.User); } else { return String.Format("* {0}", MessageItem.User); } } } public bool CompactMode { get { return (bool)GetValue(CompactModeProperty); } set { SetValue(CompactModeProperty, value); } } public bool HasLoaded { get; private set; } public Message MessageItem { get { return (Message)GetValue(MessageProperty); } set { SetValue(MessageProperty, value); NotifyPropertyChanged("MessageItem"); NotifyPropertyChanged("Username"); NotifyPropertyChanged("UserColor"); NotifyPropertyChanged("MessageColor"); NotifyPropertyChanged("TextIndent"); UpdateUi(); } } public Color MentionRed => ThemeColor(Colors.Red); public SolidColorBrush UserColor { get { if (MessageItem == null) return null; var color = ThemeColor(ColorUtils.GenerateColor(MessageItem.User)); if (MessageItem.Mention) { return new SolidColorBrush(MentionRed); } return new SolidColorBrush(color); } } public SolidColorBrush MessageColor { get { if (MessageItem == null) return null; if (MessageItem.Mention) { return new SolidColorBrush(MentionRed); } Color defaultColor = Config.GetBoolean(Config.DarkTheme, true) ? 
Colors.White : Colors.Black; return new SolidColorBrush(defaultColor); } } public MessageLineClassic() : this(null) { } private Color ThemeColor(Color color) { if (Config.GetBoolean(Config.DarkTheme, true)) { color = ColorUtils.ChangeColorBrightness(color, 0.2f); } else { color = ColorUtils.ChangeColorBrightness(color, -0.4f); } return color; } public MessageLineClassic(Message line) { this.InitializeComponent(); this.MessageItem = line; Unloaded += MessageLine_Unloaded; Loaded += MessageLine_Loaded; LayoutUpdated += MessageLine_LayoutUpdated; MainPage.instance.UiUpdated += Instance_UiUpdated; } private void Instance_UiUpdated(object sender, EventArgs e) { UpdateUi(); NotifyPropertyChanged("UserColor"); NotifyPropertyChanged("MessageColor"); } private void MessageLine_LayoutUpdated(object sender, object e) { var wantedPadding = UsernameBox.ActualWidth + TimestampBox.ActualWidth; if (MessageParagraph.TextIndent != wantedPadding) { MessageParagraph.TextIndent = wantedPadding; } } private void MessageLine_Loaded(object sender, RoutedEventArgs e) { UpdateUi(); } public void UpdateUi() { this.hyperlinkManager = new HyperlinkManager(); if (double.IsNaN(UsernameBox.ActualWidth) || double.IsNaN(TimestampBox.ActualWidth)) return; if (MessageItem != null) { PreviewFrame.Visibility = Visibility.Collapsed; if (hyperlinkManager.LinkClicked != null) { hyperlinkManager.LinkClicked -= MediaPreview_Clicked; } if (MessageBox.ActualHeight > UsernameBox.ActualHeight) { Thickness margin = new Thickness(0, -1, 0, 0); MessageBox.Margin = margin; } if (MessageItem.Type == MessageType.Info || MessageItem.Type == MessageType.JoinPart) { UsernameBox.Style = (Style)Application.Current.Resources["InfoTextBlockStyle"]; MessageBox.Style = (Style)Application.Current.Resources["InfoTextRichStyle"]; } else if (MessageItem.Type == MessageType.Action) { UsernameBox.FontStyle = Windows.UI.Text.FontStyle.Italic; MessageBox.FontStyle = Windows.UI.Text.FontStyle.Italic; } if (MessageItem.Mention) { UsernameBox.Foreground = new SolidColorBrush(Colors.Red); } if (MessageItem.Type == MessageType.MOTD) { this.FontFamily = new FontFamily("Consolas"); } hyperlinkManager.SetText(MessageParagraph, MessageItem.Text); hyperlinkManager.LinkClicked += MediaPreview_Clicked; } try { if (!hyperlinkManager.InlineLink && hyperlinkManager.FirstLink != null && Config.GetBoolean(Config.ShowMetadata, true)) { Task.Run(async () => { var graph = await OpenGraph.ParseUrlAsync(hyperlinkManager.FirstLink); if (graph.Values.Count > 0 && graph.Title != "" && graph["description"] != "") { await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { PreviewFrame.Visibility = Visibility.Visible; PreviewFrame.Navigate(typeof(LinkView), graph, new SuppressNavigationTransitionInfo()); }); } }); } } catch { } // swallow exceptions this.HasLoaded = true; UpdateLayout(); } private void MessageLine_Unloaded(object sender, RoutedEventArgs e) { PreviewFrame.Navigate(typeof(Page)); hyperlinkManager.SetText(MessageParagraph, ""); hyperlinkManager.LinkClicked -= MediaPreview_Clicked; hyperlinkManager = null; MainPage.instance.UiUpdated -= Instance_UiUpdated; UpdateLayout(); } private void MediaPreview_Clicked(Uri uri) { if (PreviewFrame.Visibility == Visibility.Collapsed) { PreviewFrame.Visibility = Visibility.Visible; if (uri != lastUri) { if (uri.Host.Contains("twitter.com")) PreviewFrame.Navigate(typeof(TwitterView), uri, new SuppressNavigationTransitionInfo()); else if 
(uri.Host.Contains("youtube.com") || uri.Host.Contains("youtu.be")) PreviewFrame.Navigate(typeof(YoutubeView), uri, new SuppressNavigationTransitionInfo()); else if (HyperlinkManager.isImage(uri.ToString())) PreviewFrame.Navigate(typeof(ImageView), uri, new SuppressNavigationTransitionInfo()); } lastUri = uri; } else { PreviewFrame.Visibility = Visibility.Collapsed; } } private void Share_Click(object sender, RoutedEventArgs e) { if (hyperlinkManager.FirstLink == null) return; DataTransferManager.ShowShareUI(); DataTransferManager.GetForCurrentView().DataRequested += MessageLine_DataRequested; } private void MessageLine_DataRequested(Windows.ApplicationModel.DataTransfer.DataTransferManager sender, Windows.ApplicationModel.DataTransfer.DataRequestedEventArgs args) { args.Request.Data.SetWebLink(hyperlinkManager.FirstLink); args.Request.Data.Properties.Title = Windows.ApplicationModel.Package.Current.DisplayName; DataTransferManager.GetForCurrentView().DataRequested -= MessageLine_DataRequested; } private void Copy_Click(object sender, RoutedEventArgs e) { DataPackage dataPackage = new DataPackage { RequestedOperation = DataPackageOperation.Copy }; dataPackage.SetText(hyperlinkManager.FirstLink.ToString()); Clipboard.SetContent(dataPackage); } private void PreviewFrame_RightTapped(object sender, RightTappedRoutedEventArgs e) { ShareFlyout.ShowAt(sender as FrameworkElement); } } }
// Copyright 2009 The Noda Time Authors. All rights reserved. // Use of this source code is governed by the Apache License 2.0, // as found in the LICENSE.txt file. using System; using NodaTime.Annotations; using NodaTime.Text; namespace NodaTime { /// <summary> /// Represents a local date and time without reference to a calendar system. Essentially /// this is a duration since a Unix epoch shifted by an offset (but we don't store what that /// offset is). This class has been slimmed down considerably over time - it's used much less /// than it used to be... almost solely for time zones. /// </summary> internal struct LocalInstant : IEquatable<LocalInstant> { public static readonly LocalInstant BeforeMinValue = new LocalInstant(Instant.BeforeMinValue.DaysSinceEpoch, deliberatelyInvalid: true); public static readonly LocalInstant AfterMaxValue = new LocalInstant(Instant.AfterMaxValue.DaysSinceEpoch, deliberatelyInvalid: true); /// <summary> /// Elapsed time since the local 1970-01-01T00:00:00. /// </summary> [ReadWriteForEfficiency] private Duration duration; /// <summary> /// Constructor which should *only* be used to construct the invalid instances. /// </summary> private LocalInstant([Trusted] int days, bool deliberatelyInvalid) { this.duration = new Duration(days, 0); } /// <summary> /// Initializes a new instance of the <see cref="LocalInstant"/> struct. /// </summary> internal LocalInstant(Duration nanoseconds) { int days = nanoseconds.FloorDays; if (days < Instant.MinDays || days > Instant.MaxDays) { throw new OverflowException("Operation would overflow bounds of local date/time"); } this.duration = nanoseconds; } /// <summary> /// Initializes a new instance of the <see cref="LocalInstant"/> struct. /// </summary> /// <param name="days">Number of days since 1970-01-01, in a time zone neutral fashion.</param> /// <param name="nanoOfDay">Nanosecond of the local day.</param> internal LocalInstant([Trusted] int days, [Trusted] long nanoOfDay) { this.duration = new Duration(days, nanoOfDay); } /// <summary> /// Returns whether or not this is a valid instant. Returns true for all but /// <see cref="BeforeMinValue"/> and <see cref="AfterMaxValue"/>. /// </summary> internal bool IsValid => DaysSinceEpoch >= Instant.MinDays && DaysSinceEpoch <= Instant.MaxDays; /// <summary> /// Number of nanoseconds since the local unix epoch. /// </summary> internal Duration TimeSinceLocalEpoch => duration; /// <summary> /// Number of days since the local unix epoch. /// </summary> internal int DaysSinceEpoch => duration.FloorDays; /// <summary> /// Nanosecond within the day. /// </summary> internal long NanosecondOfDay => duration.NanosecondOfFloorDay; #region Operators /// <summary> /// Returns a new instant based on this local instant, as if we'd applied a zero offset. /// This is just a slight optimization over calling <c>localInstant.Minus(Offset.Zero)</c>. /// </summary> internal Instant MinusZeroOffset() => new Instant(duration); /// <summary> /// Subtracts the given time zone offset from this local instant, to give an <see cref="Instant" />. /// </summary> /// <remarks> /// This would normally be implemented as an operator, but as the corresponding "plus" operation /// on Instant cannot be written (as Instant is a public type and LocalInstant is an internal type) /// it makes sense to keep them both as methods for consistency. 
/// </remarks> /// <param name="offset">The offset between UTC and a time zone for this local instant</param> /// <returns>A new <see cref="Instant"/> representing the difference of the given values.</returns> public Instant Minus(Offset offset) => new Instant(duration.MinusSmallNanoseconds(offset.Nanoseconds)); /// <summary> /// Implements the operator == (equality). /// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if values are equal to each other, otherwise <c>false</c>.</returns> public static bool operator ==(LocalInstant left, LocalInstant right) => left.duration == right.duration; /// <summary> /// Equivalent to <see cref="Instant.SafePlus"/>, but in the opposite direction. /// </summary> internal Instant SafeMinus(Offset offset) { int days = duration.FloorDays; // If we can do the arithmetic safely, do so. if (days > Instant.MinDays && days < Instant.MaxDays) { return Minus(offset); } // Handle BeforeMinValue and BeforeMaxValue simply. if (days < Instant.MinDays) { return Instant.BeforeMinValue; } if (days > Instant.MaxDays) { return Instant.AfterMaxValue; } // Okay, do the arithmetic as a Duration, then check the result for overflow, effectively. var asDuration = duration.PlusSmallNanoseconds(offset.Nanoseconds); if (asDuration.FloorDays < Instant.MinDays) { return Instant.BeforeMinValue; } if (asDuration.FloorDays > Instant.MaxDays) { return Instant.AfterMaxValue; } return new Instant(asDuration); } /// <summary> /// Implements the operator != (inequality). /// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if values are not equal to each other, otherwise <c>false</c>.</returns> public static bool operator !=(LocalInstant left, LocalInstant right) => !(left == right); /// <summary> /// Implements the operator &lt; (less than). /// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if the left value is less than the right value, otherwise <c>false</c>.</returns> public static bool operator <(LocalInstant left, LocalInstant right) => left.duration < right.duration; /// <summary> /// Implements the operator &lt;= (less than or equal). /// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if the left value is less than or equal to the right value, otherwise <c>false</c>.</returns> public static bool operator <=(LocalInstant left, LocalInstant right) => left.duration <= right.duration; /// <summary> /// Implements the operator &gt; (greater than). /// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if the left value is greater than the right value, otherwise <c>false</c>.</returns> public static bool operator >(LocalInstant left, LocalInstant right) => left.duration > right.duration; /// <summary> /// Implements the operator &gt;= (greater than or equal). 
/// </summary> /// <param name="left">The left hand side of the operator.</param> /// <param name="right">The right hand side of the operator.</param> /// <returns><c>true</c> if the left value is greater than or equal to the right value, otherwise <c>false</c>.</returns> public static bool operator >=(LocalInstant left, LocalInstant right) => left.duration >= right.duration; #endregion // Operators #region Object overrides /// <summary> /// Determines whether the specified <see cref="System.Object"/> is equal to this instance. /// </summary> /// <param name="obj">The <see cref="System.Object"/> to compare with this instance.</param> /// <returns> /// <c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; /// otherwise, <c>false</c>. /// </returns> public override bool Equals(object obj) => obj is LocalInstant && Equals((LocalInstant)obj); /// <summary> /// Returns a hash code for this instance. /// </summary> /// <returns> /// A hash code for this instance, suitable for use in hashing algorithms and data /// structures like a hash table. /// </returns> public override int GetHashCode() => duration.GetHashCode(); /// <summary> /// Returns a <see cref="System.String"/> that represents this instance. /// </summary> /// <returns> /// A <see cref="System.String"/> that represents this instance. /// </returns> public override string ToString() { var date = new LocalDate(duration.FloorDays); var pattern = LocalDateTimePattern.CreateWithInvariantCulture("yyyy-MM-ddTHH:mm:ss LOC"); var utc = new LocalDateTime(date, LocalTime.FromNanosecondsSinceMidnight(duration.NanosecondOfFloorDay)); return pattern.Format(utc); } #endregion // Object overrides #region IEquatable<LocalInstant> Members /// <summary> /// Indicates whether the current object is equal to another object of the same type. /// </summary> /// <param name="other">An object to compare with this object.</param> /// <returns> /// true if the current object is equal to the <paramref name="other"/> parameter; /// otherwise, false. /// </returns> public bool Equals(LocalInstant other) => this == other; #endregion } }
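// LocalInstant is internal, but the "local wall time minus offset = instant"
// relationship it implements is visible through NodaTime's public API. A minimal
// sketch using public types (ISO calendar and default Instant formatting assumed):
using NodaTime;

internal static class LocalInstantUsageSketch
{
    internal static void Demo()
    {
        var local = new LocalDateTime(2019, 6, 1, 12, 0, 0);   // local wall-clock time
        var offset = Offset.FromHours(2);                       // UTC+02:00
        OffsetDateTime withOffset = local.WithOffset(offset);
        Instant instant = withOffset.ToInstant();               // the public analogue of LocalInstant.Minus(offset)
        System.Console.WriteLine(instant);                      // 2019-06-01T10:00:00Z
    }
}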
// Copyright (c) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information. using System; using System.Windows; using System.Windows.Controls; using Microsoft.Practices.ServiceLocation; using Microsoft.Practices.Unity; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using Prism.Events; using Prism.Logging; using Prism.Modularity; using Prism.Regions; namespace Prism.Unity.Wpf.Tests { [TestClass] public class UnityBootstrapperRunMethodFixture { [TestMethod] public void CanRunBootstrapper() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); } [TestMethod] public void RunShouldNotFailIfReturnedNullShell() { var bootstrapper = new DefaultUnityBootstrapper { ShellObject = null }; bootstrapper.Run(); } [TestMethod] public void RunConfiguresServiceLocatorProvider() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(Microsoft.Practices.ServiceLocation.ServiceLocator.Current is UnityServiceLocatorAdapter); } [TestMethod] public void RunShouldInitializeContainer() { var bootstrapper = new DefaultUnityBootstrapper(); var container = bootstrapper.BaseContainer; Assert.IsNull(container); bootstrapper.Run(); container = bootstrapper.BaseContainer; Assert.IsNotNull(container); Assert.IsInstanceOfType(container, typeof(UnityContainer)); } [TestMethod] public void RunAddsCompositionContainerToContainer() { var bootstrapper = new DefaultUnityBootstrapper(); var createdContainer = bootstrapper.CallCreateContainer(); var returnedContainer = createdContainer.Resolve<IUnityContainer>(); Assert.IsNotNull(returnedContainer); Assert.AreEqual(typeof(UnityContainer), returnedContainer.GetType()); } [TestMethod] public void RunShouldCallInitializeModules() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.InitializeModulesCalled); } [TestMethod] public void RunShouldCallConfigureDefaultRegionBehaviors() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.ConfigureDefaultRegionBehaviorsCalled); } [TestMethod] public void RunShouldCallConfigureRegionAdapterMappings() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.ConfigureRegionAdapterMappingsCalled); } [TestMethod] public void RunShouldAssignRegionManagerToReturnedShell() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsNotNull(RegionManager.GetRegionManager(bootstrapper.BaseShell)); } [TestMethod] public void RunShouldCallCreateLogger() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.CreateLoggerCalled); } [TestMethod] public void RunShouldCallCreateModuleCatalog() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.CreateModuleCatalogCalled); } [TestMethod] public void RunShouldCallConfigureModuleCatalog() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.ConfigureModuleCatalogCalled); } [TestMethod] public void RunShouldCallCreateContainer() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.CreateContainerCalled); } [TestMethod] public void RunShouldCallCreateShell() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.CreateShellCalled); } [TestMethod] public void RunShouldCallConfigureContainer() { var bootstrapper = new 
DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.IsTrue(bootstrapper.ConfigureContainerCalled); } [TestMethod] public void RunRegistersInstanceOfILoggerFacade() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterInstance(typeof(ILoggerFacade), null, bootstrapper.BaseLogger, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersInstanceOfIModuleCatalog() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterInstance(typeof(IModuleCatalog), null, It.IsAny<object>(), It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIServiceLocator() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IServiceLocator), typeof(UnityServiceLocatorAdapter), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIModuleInitializer() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IModuleInitializer), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIRegionManager() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IRegionManager), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForRegionAdapterMappings() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(RegionAdapterMappings), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIRegionViewRegistry() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IRegionViewRegistry), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIRegionBehaviorFactory() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IRegionBehaviorFactory), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunRegistersTypeForIEventAggregator() { var mockedContainer = new 
Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(); mockedContainer.Verify(c => c.RegisterType(typeof(IEventAggregator), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Once()); } [TestMethod] public void RunFalseShouldNotRegisterDefaultServicesAndTypes() { var mockedContainer = new Mock<IUnityContainer>(); SetupMockedContainerForVerificationTests(mockedContainer); var bootstrapper = new MockedContainerBootstrapper(mockedContainer.Object); bootstrapper.Run(false); mockedContainer.Verify(c => c.RegisterType(typeof(IEventAggregator), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Never()); mockedContainer.Verify(c => c.RegisterType(typeof(IRegionManager), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Never()); mockedContainer.Verify(c => c.RegisterType(typeof(RegionAdapterMappings), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Never()); mockedContainer.Verify(c => c.RegisterType(typeof(IServiceLocator), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Never()); mockedContainer.Verify(c => c.RegisterType(typeof(IModuleInitializer), It.IsAny<Type>(), null, It.IsAny<LifetimeManager>()), Times.Never()); } [TestMethod] public void ModuleManagerRunCalled() { // Have to use a non-mocked container because of IsRegistered<> extension method, Registrations property,and ContainerRegistration var container = new UnityContainer(); var mockedModuleInitializer = new Mock<IModuleInitializer>(); var mockedModuleManager = new Mock<IModuleManager>(); var regionAdapterMappings = new RegionAdapterMappings(); var serviceLocatorAdapter = new UnityServiceLocatorAdapter(container); var regionBehaviorFactory = new RegionBehaviorFactory(serviceLocatorAdapter); container.RegisterInstance<IServiceLocator>(serviceLocatorAdapter); container.RegisterInstance<UnityBootstrapperExtension>(new UnityBootstrapperExtension()); container.RegisterInstance<IModuleCatalog>(new ModuleCatalog()); container.RegisterInstance<IModuleInitializer>(mockedModuleInitializer.Object); container.RegisterInstance<IModuleManager>(mockedModuleManager.Object); container.RegisterInstance<RegionAdapterMappings>(regionAdapterMappings); container.RegisterInstance<SelectorRegionAdapter>(new SelectorRegionAdapter(regionBehaviorFactory)); container.RegisterInstance<ItemsControlRegionAdapter>(new ItemsControlRegionAdapter(regionBehaviorFactory)); container.RegisterInstance<ContentControlRegionAdapter>(new ContentControlRegionAdapter(regionBehaviorFactory)); var bootstrapper = new MockedContainerBootstrapper(container); bootstrapper.Run(); mockedModuleManager.Verify(mm => mm.Run(), Times.Once()); } [TestMethod] public void RunShouldCallTheMethodsInOrder() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); Assert.AreEqual("CreateLogger", bootstrapper.MethodCalls[0]); Assert.AreEqual("CreateModuleCatalog", bootstrapper.MethodCalls[1]); Assert.AreEqual("ConfigureModuleCatalog", bootstrapper.MethodCalls[2]); Assert.AreEqual("CreateContainer", bootstrapper.MethodCalls[3]); Assert.AreEqual("ConfigureContainer", bootstrapper.MethodCalls[4]); Assert.AreEqual("ConfigureServiceLocator", bootstrapper.MethodCalls[5]); Assert.AreEqual("ConfigureRegionAdapterMappings", bootstrapper.MethodCalls[6]); Assert.AreEqual("ConfigureDefaultRegionBehaviors", bootstrapper.MethodCalls[7]); Assert.AreEqual("RegisterFrameworkExceptionTypes", bootstrapper.MethodCalls[8]); 
Assert.AreEqual("CreateShell", bootstrapper.MethodCalls[9]); Assert.AreEqual("InitializeShell", bootstrapper.MethodCalls[10]); Assert.AreEqual("InitializeModules", bootstrapper.MethodCalls[11]); } [TestMethod] public void RunShouldLogBootstrapperSteps() { var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages[0].Contains("Logger was created successfully.")); Assert.IsTrue(messages[1].Contains("Creating module catalog.")); Assert.IsTrue(messages[2].Contains("Configuring module catalog.")); Assert.IsTrue(messages[3].Contains("Creating Unity container.")); Assert.IsTrue(messages[4].Contains("Configuring the Unity container.")); Assert.IsTrue(messages[5].Contains("Adding UnityBootstrapperExtension to container.")); Assert.IsTrue(messages[6].Contains("Configuring ServiceLocator singleton.")); Assert.IsTrue(messages[7].Contains("Configuring the ViewModelLocator to use Unity.")); Assert.IsTrue(messages[8].Contains("Configuring region adapters.")); Assert.IsTrue(messages[9].Contains("Configuring default region behaviors.")); Assert.IsTrue(messages[10].Contains("Registering Framework Exception Types.")); Assert.IsTrue(messages[11].Contains("Creating the shell.")); Assert.IsTrue(messages[12].Contains("Setting the RegionManager.")); Assert.IsTrue(messages[13].Contains("Updating Regions.")); Assert.IsTrue(messages[14].Contains("Initializing the shell.")); Assert.IsTrue(messages[15].Contains("Initializing modules.")); Assert.IsTrue(messages[16].Contains("Bootstrapper sequence completed.")); } [TestMethod] public void RunShouldLogLoggerCreationSuccess() { const string expectedMessageText = "Logger was created successfully."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutModuleCatalogCreation() { const string expectedMessageText = "Creating module catalog."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutConfiguringModuleCatalog() { const string expectedMessageText = "Configuring module catalog."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutCreatingTheContainer() { const string expectedMessageText = "Creating Unity container."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutConfiguringContainer() { const string expectedMessageText = "Configuring the Unity container."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutConfiguringRegionAdapters() { const string expectedMessageText = "Configuring region adapters."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutConfiguringRegionBehaviors() { const string 
expectedMessageText = "Configuring default region behaviors."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutRegisteringFrameworkExceptionTypes() { const string expectedMessageText = "Registering Framework Exception Types."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutCreatingTheShell() { const string expectedMessageText = "Creating the shell."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutInitializingTheShellIfShellCreated() { const string expectedMessageText = "Initializing the shell."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldNotLogAboutInitializingTheShellIfShellIsNotCreated() { const string expectedMessageText = "Initializing shell"; var bootstrapper = new DefaultUnityBootstrapper { ShellObject = null }; bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsFalse(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutInitializingModules() { const string expectedMessageText = "Initializing modules."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } [TestMethod] public void RunShouldLogAboutRunCompleting() { const string expectedMessageText = "Bootstrapper sequence completed."; var bootstrapper = new DefaultUnityBootstrapper(); bootstrapper.Run(); var messages = bootstrapper.BaseLogger.Messages; Assert.IsTrue(messages.Contains(expectedMessageText)); } private static void SetupMockedContainerForVerificationTests(Mock<IUnityContainer> mockedContainer) { var mockedModuleInitializer = new Mock<IModuleInitializer>(); var mockedModuleManager = new Mock<IModuleManager>(); var regionAdapterMappings = new RegionAdapterMappings(); var serviceLocatorAdapter = new UnityServiceLocatorAdapter(mockedContainer.Object); var regionBehaviorFactory = new RegionBehaviorFactory(serviceLocatorAdapter); mockedContainer.Setup(c => c.Resolve(typeof(IServiceLocator), (string)null)).Returns(serviceLocatorAdapter); mockedContainer.Setup(c => c.RegisterInstance(It.IsAny<Type>(), It.IsAny<string>(), It.IsAny<object>(), It.IsAny<LifetimeManager>())); mockedContainer.Setup(c => c.Resolve(typeof(UnityBootstrapperExtension), (string)null)).Returns( new UnityBootstrapperExtension()); mockedContainer.Setup(c => c.Resolve(typeof(IModuleCatalog), (string)null)).Returns( new ModuleCatalog()); mockedContainer.Setup(c => c.Resolve(typeof(IModuleInitializer), (string)null)).Returns( mockedModuleInitializer.Object); mockedContainer.Setup(c => c.Resolve(typeof(IModuleManager), (string)null)).Returns( mockedModuleManager.Object); mockedContainer.Setup(c => c.Resolve(typeof(RegionAdapterMappings), (string)null)).Returns( regionAdapterMappings); mockedContainer.Setup(c => c.Resolve(typeof(SelectorRegionAdapter), (string)null)).Returns( new 
SelectorRegionAdapter(regionBehaviorFactory)); mockedContainer.Setup(c => c.Resolve(typeof(ItemsControlRegionAdapter), (string)null)).Returns( new ItemsControlRegionAdapter(regionBehaviorFactory)); mockedContainer.Setup(c => c.Resolve(typeof(ContentControlRegionAdapter), (string)null)).Returns( new ContentControlRegionAdapter(regionBehaviorFactory)); } private class MockedContainerBootstrapper : UnityBootstrapper { private readonly IUnityContainer container; public ILoggerFacade BaseLogger { get { return base.Logger; } } public void CallConfigureContainer() { base.ConfigureContainer(); } public MockedContainerBootstrapper(IUnityContainer container) { this.container = container; } protected override IUnityContainer CreateContainer() { return container; } protected override DependencyObject CreateShell() { return new UserControl(); } protected override void InitializeShell() { // no op } } } }
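// The fixture above exercises UnityBootstrapper.Run() through test doubles. For
// context, a minimal application bootstrapper of the kind being tested might look
// like the sketch below; the shell type and the startup hook are placeholders, not
// part of the test fixture:
using System.Windows;
using Microsoft.Practices.Unity;
using Prism.Unity;

public class AppBootstrapper : UnityBootstrapper
{
    protected override DependencyObject CreateShell()
    {
        // Resolve the shell through the container so its dependencies are injected.
        // A real application would resolve its own Shell window type here.
        return Container.Resolve<Window>();
    }

    protected override void InitializeShell()
    {
        base.InitializeShell();
        Application.Current.MainWindow = (Window)Shell;
        Application.Current.MainWindow.Show();
    }
}

// Typically invoked from App.OnStartup: new AppBootstrapper().Run();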
// Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Batch.Protocol { using Microsoft.Rest.Azure; using Models; /// <summary> /// Extension methods for JobOperations. /// </summary> public static partial class JobOperationsExtensions { /// <summary> /// Gets lifetime summary statistics for all of the jobs in the specified /// account. /// </summary> /// <remarks> /// Statistics are aggregated across all jobs that have ever existed in the /// account, from account creation to the last update time of the statistics. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobGetAllLifetimeStatisticsOptions'> /// Additional parameters for the operation /// </param> public static JobStatistics GetAllLifetimeStatistics(this IJobOperations operations, JobGetAllLifetimeStatisticsOptions jobGetAllLifetimeStatisticsOptions = default(JobGetAllLifetimeStatisticsOptions)) { return ((IJobOperations)operations).GetAllLifetimeStatisticsAsync(jobGetAllLifetimeStatisticsOptions).GetAwaiter().GetResult(); } /// <summary> /// Gets lifetime summary statistics for all of the jobs in the specified /// account. /// </summary> /// <remarks> /// Statistics are aggregated across all jobs that have ever existed in the /// account, from account creation to the last update time of the statistics. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobGetAllLifetimeStatisticsOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobStatistics> GetAllLifetimeStatisticsAsync(this IJobOperations operations, JobGetAllLifetimeStatisticsOptions jobGetAllLifetimeStatisticsOptions = default(JobGetAllLifetimeStatisticsOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetAllLifetimeStatisticsWithHttpMessagesAsync(jobGetAllLifetimeStatisticsOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Deletes a job. /// </summary> /// <remarks> /// Deleting a job also deletes all tasks that are part of that job, and all /// job statistics. This also overrides the retention period for task data; /// that is, if the job contains tasks which are still retained on compute /// nodes, the Batch services deletes those tasks' working directories and all /// their contents. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to delete. 
/// </param> /// <param name='jobDeleteOptions'> /// Additional parameters for the operation /// </param> public static JobDeleteHeaders Delete(this IJobOperations operations, string jobId, JobDeleteOptions jobDeleteOptions = default(JobDeleteOptions)) { return ((IJobOperations)operations).DeleteAsync(jobId, jobDeleteOptions).GetAwaiter().GetResult(); } /// <summary> /// Deletes a job. /// </summary> /// <remarks> /// Deleting a job also deletes all tasks that are part of that job, and all /// job statistics. This also overrides the retention period for task data; /// that is, if the job contains tasks which are still retained on compute /// nodes, the Batch services deletes those tasks' working directories and all /// their contents. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to delete. /// </param> /// <param name='jobDeleteOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobDeleteHeaders> DeleteAsync(this IJobOperations operations, string jobId, JobDeleteOptions jobDeleteOptions = default(JobDeleteOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.DeleteWithHttpMessagesAsync(jobId, jobDeleteOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Gets information about the specified job. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job. /// </param> /// <param name='jobGetOptions'> /// Additional parameters for the operation /// </param> public static CloudJob Get(this IJobOperations operations, string jobId, JobGetOptions jobGetOptions = default(JobGetOptions)) { return ((IJobOperations)operations).GetAsync(jobId, jobGetOptions).GetAwaiter().GetResult(); } /// <summary> /// Gets information about the specified job. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job. /// </param> /// <param name='jobGetOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<CloudJob> GetAsync(this IJobOperations operations, string jobId, JobGetOptions jobGetOptions = default(JobGetOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetWithHttpMessagesAsync(jobId, jobGetOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Updates the properties of the specified job. /// </summary> /// <remarks> /// This replaces only the job properties specified in the request. For /// example, if the job has constraints, and a request does not specify the /// constraints element, then the job keeps the existing constraints. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job whose properties you want to update. /// </param> /// <param name='jobPatchParameter'> /// The parameters for the request. 
/// </param> /// <param name='jobPatchOptions'> /// Additional parameters for the operation /// </param> public static JobPatchHeaders Patch(this IJobOperations operations, string jobId, JobPatchParameter jobPatchParameter, JobPatchOptions jobPatchOptions = default(JobPatchOptions)) { return ((IJobOperations)operations).PatchAsync(jobId, jobPatchParameter, jobPatchOptions).GetAwaiter().GetResult(); } /// <summary> /// Updates the properties of the specified job. /// </summary> /// <remarks> /// This replaces only the job properties specified in the request. For /// example, if the job has constraints, and a request does not specify the /// constraints element, then the job keeps the existing constraints. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job whose properties you want to update. /// </param> /// <param name='jobPatchParameter'> /// The parameters for the request. /// </param> /// <param name='jobPatchOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobPatchHeaders> PatchAsync(this IJobOperations operations, string jobId, JobPatchParameter jobPatchParameter, JobPatchOptions jobPatchOptions = default(JobPatchOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.PatchWithHttpMessagesAsync(jobId, jobPatchParameter, jobPatchOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Updates the properties of the specified job. /// </summary> /// <remarks> /// This fully replaces all the updateable properties of the job. For example, /// if the job has constraints associated with it and if constraints is not /// specified with this request, then the Batch service will remove the /// existing constraints. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job whose properties you want to update. /// </param> /// <param name='jobUpdateParameter'> /// The parameters for the request. /// </param> /// <param name='jobUpdateOptions'> /// Additional parameters for the operation /// </param> public static JobUpdateHeaders Update(this IJobOperations operations, string jobId, JobUpdateParameter jobUpdateParameter, JobUpdateOptions jobUpdateOptions = default(JobUpdateOptions)) { return ((IJobOperations)operations).UpdateAsync(jobId, jobUpdateParameter, jobUpdateOptions).GetAwaiter().GetResult(); } /// <summary> /// Updates the properties of the specified job. /// </summary> /// <remarks> /// This fully replaces all the updateable properties of the job. For example, /// if the job has constraints associated with it and if constraints is not /// specified with this request, then the Batch service will remove the /// existing constraints. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job whose properties you want to update. /// </param> /// <param name='jobUpdateParameter'> /// The parameters for the request. /// </param> /// <param name='jobUpdateOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async System.Threading.Tasks.Task<JobUpdateHeaders> UpdateAsync(this IJobOperations operations, string jobId, JobUpdateParameter jobUpdateParameter, JobUpdateOptions jobUpdateOptions = default(JobUpdateOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.UpdateWithHttpMessagesAsync(jobId, jobUpdateParameter, jobUpdateOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Disables the specified job, preventing new tasks from running. /// </summary> /// <remarks> /// The Batch Service immediately moves the job to the disabling state. Batch /// then uses the disableTasks parameter to determine what to do with the /// currently running tasks of the job. The job remains in the disabling state /// until the disable operation is completed and all tasks have been dealt with /// according to the disableTasks option; the job then moves to the disabled /// state. No new tasks are started under the job until it moves back to active /// state. If you try to disable a job that is in any state other than active, /// disabling, or disabled, the request fails with status code 409. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to disable. /// </param> /// <param name='disableTasks'> /// What to do with active tasks associated with the job. requeue - Terminate /// running tasks and requeue them. The tasks will run again when the job is /// enabled. terminate - Terminate running tasks. The tasks will not run again. /// wait - Allow currently running tasks to complete. Possible values include: /// 'requeue', 'terminate', 'wait' /// </param> /// <param name='jobDisableOptions'> /// Additional parameters for the operation /// </param> public static JobDisableHeaders Disable(this IJobOperations operations, string jobId, DisableJobOption disableTasks, JobDisableOptions jobDisableOptions = default(JobDisableOptions)) { return ((IJobOperations)operations).DisableAsync(jobId, disableTasks, jobDisableOptions).GetAwaiter().GetResult(); } /// <summary> /// Disables the specified job, preventing new tasks from running. /// </summary> /// <remarks> /// The Batch Service immediately moves the job to the disabling state. Batch /// then uses the disableTasks parameter to determine what to do with the /// currently running tasks of the job. The job remains in the disabling state /// until the disable operation is completed and all tasks have been dealt with /// according to the disableTasks option; the job then moves to the disabled /// state. No new tasks are started under the job until it moves back to active /// state. If you try to disable a job that is in any state other than active, /// disabling, or disabled, the request fails with status code 409. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to disable. /// </param> /// <param name='disableTasks'> /// What to do with active tasks associated with the job. requeue - Terminate /// running tasks and requeue them. The tasks will run again when the job is /// enabled. terminate - Terminate running tasks. The tasks will not run again. /// wait - Allow currently running tasks to complete. 
Possible values include: /// 'requeue', 'terminate', 'wait' /// </param> /// <param name='jobDisableOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobDisableHeaders> DisableAsync(this IJobOperations operations, string jobId, DisableJobOption disableTasks, JobDisableOptions jobDisableOptions = default(JobDisableOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.DisableWithHttpMessagesAsync(jobId, disableTasks, jobDisableOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Enables the specified job, allowing new tasks to run. /// </summary> /// <remarks> /// When you call this API, the Batch service sets a disabled job to the /// enabling state. After the this operation is completed, the job moves to the /// active state, and scheduling of new tasks under the job resumes. The Batch /// service does not allow a task to remain in the active state for more than 7 /// days. Therefore, if you enable a job containing active tasks which were /// added more than 7 days ago, those tasks will not run. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to enable. /// </param> /// <param name='jobEnableOptions'> /// Additional parameters for the operation /// </param> public static JobEnableHeaders Enable(this IJobOperations operations, string jobId, JobEnableOptions jobEnableOptions = default(JobEnableOptions)) { return ((IJobOperations)operations).EnableAsync(jobId, jobEnableOptions).GetAwaiter().GetResult(); } /// <summary> /// Enables the specified job, allowing new tasks to run. /// </summary> /// <remarks> /// When you call this API, the Batch service sets a disabled job to the /// enabling state. After the this operation is completed, the job moves to the /// active state, and scheduling of new tasks under the job resumes. The Batch /// service does not allow a task to remain in the active state for more than 7 /// days. Therefore, if you enable a job containing active tasks which were /// added more than 7 days ago, those tasks will not run. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to enable. /// </param> /// <param name='jobEnableOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobEnableHeaders> EnableAsync(this IJobOperations operations, string jobId, JobEnableOptions jobEnableOptions = default(JobEnableOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.EnableWithHttpMessagesAsync(jobId, jobEnableOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Terminates the specified job, marking it as completed. /// </summary> /// <remarks> /// When a Terminate Job request is received, the Batch service sets the job to /// the terminating state. The Batch service then terminates any active or /// running tasks associated with the job, and runs any required Job Release /// tasks. 
The job then moves into the completed state. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to terminate. /// </param> /// <param name='terminateReason'> /// The text you want to appear as the job's TerminateReason. The default is /// 'UserTerminate'. /// </param> /// <param name='jobTerminateOptions'> /// Additional parameters for the operation /// </param> public static JobTerminateHeaders Terminate(this IJobOperations operations, string jobId, string terminateReason = default(string), JobTerminateOptions jobTerminateOptions = default(JobTerminateOptions)) { return ((IJobOperations)operations).TerminateAsync(jobId, terminateReason, jobTerminateOptions).GetAwaiter().GetResult(); } /// <summary> /// Terminates the specified job, marking it as completed. /// </summary> /// <remarks> /// When a Terminate Job request is received, the Batch service sets the job to /// the terminating state. The Batch service then terminates any active or /// running tasks associated with the job, and runs any required Job Release /// tasks. The job then moves into the completed state. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job to terminate. /// </param> /// <param name='terminateReason'> /// The text you want to appear as the job's TerminateReason. The default is /// 'UserTerminate'. /// </param> /// <param name='jobTerminateOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobTerminateHeaders> TerminateAsync(this IJobOperations operations, string jobId, string terminateReason = default(string), JobTerminateOptions jobTerminateOptions = default(JobTerminateOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.TerminateWithHttpMessagesAsync(jobId, terminateReason, jobTerminateOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Adds a job to the specified account. /// </summary> /// <remarks> /// The Batch service supports two ways to control the work done as part of a /// job. In the first approach, the user specifies a Job Manager task. The /// Batch service launches this task when it is ready to start the job. The Job /// Manager task controls all other tasks that run under this job, by using the /// Task APIs. In the second approach, the user directly controls the execution /// of tasks under an active job, by using the Task APIs. Also note: when /// naming jobs, avoid including sensitive information such as user names or /// secret project names. This information may appear in telemetry logs /// accessible to Microsoft Support engineers. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='job'> /// The job to be added. /// </param> /// <param name='jobAddOptions'> /// Additional parameters for the operation /// </param> public static JobAddHeaders Add(this IJobOperations operations, JobAddParameter job, JobAddOptions jobAddOptions = default(JobAddOptions)) { return ((IJobOperations)operations).AddAsync(job, jobAddOptions).GetAwaiter().GetResult(); } /// <summary> /// Adds a job to the specified account. 
/// </summary> /// <remarks> /// The Batch service supports two ways to control the work done as part of a /// job. In the first approach, the user specifies a Job Manager task. The /// Batch service launches this task when it is ready to start the job. The Job /// Manager task controls all other tasks that run under this job, by using the /// Task APIs. In the second approach, the user directly controls the execution /// of tasks under an active job, by using the Task APIs. Also note: when /// naming jobs, avoid including sensitive information such as user names or /// secret project names. This information may appear in telemetry logs /// accessible to Microsoft Support engineers. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='job'> /// The job to be added. /// </param> /// <param name='jobAddOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<JobAddHeaders> AddAsync(this IJobOperations operations, JobAddParameter job, JobAddOptions jobAddOptions = default(JobAddOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.AddWithHttpMessagesAsync(job, jobAddOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Headers; } } /// <summary> /// Lists all of the jobs in the specified account. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobListOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<CloudJob> List(this IJobOperations operations, JobListOptions jobListOptions = default(JobListOptions)) { return ((IJobOperations)operations).ListAsync(jobListOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists all of the jobs in the specified account. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobListOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<CloudJob>> ListAsync(this IJobOperations operations, JobListOptions jobListOptions = default(JobListOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListWithHttpMessagesAsync(jobListOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Lists the jobs that have been created under the specified job schedule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobScheduleId'> /// The ID of the job schedule from which you want to get a list of jobs. 
/// </param> /// <param name='jobListFromJobScheduleOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<CloudJob> ListFromJobSchedule(this IJobOperations operations, string jobScheduleId, JobListFromJobScheduleOptions jobListFromJobScheduleOptions = default(JobListFromJobScheduleOptions)) { return ((IJobOperations)operations).ListFromJobScheduleAsync(jobScheduleId, jobListFromJobScheduleOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists the jobs that have been created under the specified job schedule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobScheduleId'> /// The ID of the job schedule from which you want to get a list of jobs. /// </param> /// <param name='jobListFromJobScheduleOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<CloudJob>> ListFromJobScheduleAsync(this IJobOperations operations, string jobScheduleId, JobListFromJobScheduleOptions jobListFromJobScheduleOptions = default(JobListFromJobScheduleOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListFromJobScheduleWithHttpMessagesAsync(jobScheduleId, jobListFromJobScheduleOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Lists the execution status of the Job Preparation and Job Release task for /// the specified job across the compute nodes where the job has run. /// </summary> /// <remarks> /// This API returns the Job Preparation and Job Release task status on all /// compute nodes that have run the Job Preparation or Job Release task. This /// includes nodes which have since been removed from the pool. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job. /// </param> /// <param name='jobListPreparationAndReleaseTaskStatusOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<JobPreparationAndReleaseTaskExecutionInformation> ListPreparationAndReleaseTaskStatus(this IJobOperations operations, string jobId, JobListPreparationAndReleaseTaskStatusOptions jobListPreparationAndReleaseTaskStatusOptions = default(JobListPreparationAndReleaseTaskStatusOptions)) { return ((IJobOperations)operations).ListPreparationAndReleaseTaskStatusAsync(jobId, jobListPreparationAndReleaseTaskStatusOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists the execution status of the Job Preparation and Job Release task for /// the specified job across the compute nodes where the job has run. /// </summary> /// <remarks> /// This API returns the Job Preparation and Job Release task status on all /// compute nodes that have run the Job Preparation or Job Release task. This /// includes nodes which have since been removed from the pool. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='jobId'> /// The ID of the job. /// </param> /// <param name='jobListPreparationAndReleaseTaskStatusOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<JobPreparationAndReleaseTaskExecutionInformation>> ListPreparationAndReleaseTaskStatusAsync(this IJobOperations operations, string jobId, JobListPreparationAndReleaseTaskStatusOptions jobListPreparationAndReleaseTaskStatusOptions = default(JobListPreparationAndReleaseTaskStatusOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListPreparationAndReleaseTaskStatusWithHttpMessagesAsync(jobId, jobListPreparationAndReleaseTaskStatusOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Lists all of the jobs in the specified account. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='jobListNextOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<CloudJob> ListNext(this IJobOperations operations, string nextPageLink, JobListNextOptions jobListNextOptions = default(JobListNextOptions)) { return ((IJobOperations)operations).ListNextAsync(nextPageLink, jobListNextOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists all of the jobs in the specified account. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='jobListNextOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<CloudJob>> ListNextAsync(this IJobOperations operations, string nextPageLink, JobListNextOptions jobListNextOptions = default(JobListNextOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, jobListNextOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Lists the jobs that have been created under the specified job schedule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='jobListFromJobScheduleNextOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<CloudJob> ListFromJobScheduleNext(this IJobOperations operations, string nextPageLink, JobListFromJobScheduleNextOptions jobListFromJobScheduleNextOptions = default(JobListFromJobScheduleNextOptions)) { return ((IJobOperations)operations).ListFromJobScheduleNextAsync(nextPageLink, jobListFromJobScheduleNextOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists the jobs that have been created under the specified job schedule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. 
/// </param> /// <param name='jobListFromJobScheduleNextOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<CloudJob>> ListFromJobScheduleNextAsync(this IJobOperations operations, string nextPageLink, JobListFromJobScheduleNextOptions jobListFromJobScheduleNextOptions = default(JobListFromJobScheduleNextOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListFromJobScheduleNextWithHttpMessagesAsync(nextPageLink, jobListFromJobScheduleNextOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Lists the execution status of the Job Preparation and Job Release task for /// the specified job across the compute nodes where the job has run. /// </summary> /// <remarks> /// This API returns the Job Preparation and Job Release task status on all /// compute nodes that have run the Job Preparation or Job Release task. This /// includes nodes which have since been removed from the pool. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='jobListPreparationAndReleaseTaskStatusNextOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<JobPreparationAndReleaseTaskExecutionInformation> ListPreparationAndReleaseTaskStatusNext(this IJobOperations operations, string nextPageLink, JobListPreparationAndReleaseTaskStatusNextOptions jobListPreparationAndReleaseTaskStatusNextOptions = default(JobListPreparationAndReleaseTaskStatusNextOptions)) { return ((IJobOperations)operations).ListPreparationAndReleaseTaskStatusNextAsync(nextPageLink, jobListPreparationAndReleaseTaskStatusNextOptions).GetAwaiter().GetResult(); } /// <summary> /// Lists the execution status of the Job Preparation and Job Release task for /// the specified job across the compute nodes where the job has run. /// </summary> /// <remarks> /// This API returns the Job Preparation and Job Release task status on all /// compute nodes that have run the Job Preparation or Job Release task. This /// includes nodes which have since been removed from the pool. /// </remarks> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='jobListPreparationAndReleaseTaskStatusNextOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async System.Threading.Tasks.Task<Microsoft.Rest.Azure.IPage<JobPreparationAndReleaseTaskExecutionInformation>> ListPreparationAndReleaseTaskStatusNextAsync(this IJobOperations operations, string nextPageLink, JobListPreparationAndReleaseTaskStatusNextOptions jobListPreparationAndReleaseTaskStatusNextOptions = default(JobListPreparationAndReleaseTaskStatusNextOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.ListPreparationAndReleaseTaskStatusNextWithHttpMessagesAsync(nextPageLink, jobListPreparationAndReleaseTaskStatusNextOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } } }
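// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated client): each extension
// method above returns a single Microsoft.Rest.Azure.IPage<T>; callers follow
// IPage<T>.NextPageLink through the corresponding *Next overload to fetch the
// remaining pages. The helper below shows that paging pattern for
// ListFromJobScheduleAsync / ListFromJobScheduleNextAsync. It assumes the
// 'operations' instance comes from an authenticated Batch client and that this
// class sits alongside the generated extensions so the model types resolve.
// ---------------------------------------------------------------------------
public static class JobPagingExample
{
    public static async System.Threading.Tasks.Task<System.Collections.Generic.List<CloudJob>> ListAllJobsFromScheduleAsync(
        IJobOperations operations,
        string jobScheduleId,
        System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        var jobs = new System.Collections.Generic.List<CloudJob>();

        // Fetch the first page of jobs for the schedule.
        var page = await operations.ListFromJobScheduleAsync(jobScheduleId, cancellationToken: cancellationToken).ConfigureAwait(false);
        jobs.AddRange(page);

        // Keep following NextPageLink until the service stops returning one.
        while (!string.IsNullOrEmpty(page.NextPageLink))
        {
            page = await operations.ListFromJobScheduleNextAsync(page.NextPageLink, cancellationToken: cancellationToken).ConfigureAwait(false);
            jobs.AddRange(page);
        }

        return jobs;
    }
}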
// The MIT License (MIT) // Copyright (c) 2013 lailongwei<[email protected]> // // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. using System; using System.IO; using System.Net; using System.Collections.Generic; using System.Runtime.InteropServices; namespace llbc { #region SessionInfo /// <summary> /// The session information class encapsulation. /// </summary> public class SessionInfo { public SessionInfo(bool isListen, int sessionId, int acceptSessionId, int socketHandle, IPEndPoint localEndPoint, IPEndPoint remoteEndPoint) { _isListen = isListen; _sessionId = sessionId; _acceptSessionId = acceptSessionId; _socketHandle = socketHandle; _localEndPoint = localEndPoint; _remoteEndPoint = remoteEndPoint; _BuildStringRepr(); } public bool isListenSession { get { return _isListen; } } public int sessionId { get { return _sessionId; } } public int acceptSessionId { get { return _acceptSessionId; } } public int socketHandle { get { return _socketHandle; } } public IPEndPoint localEndPoint { get { return _localEndPoint; } } public string localHost { get { return _localEndPoint.Address.ToString(); } } public int localPort { get { return _localEndPoint.Port; } } public IPEndPoint remoteEndPoint { get { return _remoteEndPoint; } } public string remoteHost { get { return _remoteEndPoint.Address.ToString(); } } public int remotePort { get { return _remoteEndPoint.Port; } } public override string ToString() { return _repr; } #region Internal implementaions private void _BuildStringRepr() { _repr = string.Format( "SessionInfo: [sessionId: {0}, acceptSessionId: {1}, socketHandle: {2}, isListen: {3}, localEP: {4}, remoteEP: {5}]", _sessionId, _acceptSessionId, _socketHandle, _isListen, _localEndPoint, _remoteEndPoint); } #endregion bool _isListen; int _sessionId; int _acceptSessionId; int _socketHandle; IPEndPoint _localEndPoint; IPEndPoint _remoteEndPoint; string _repr; } #endregion #region SessionDestroyInfo /// <summary> /// The session destroy information class encapsulation. 
/// </summary> public class SessionDestroyInfo { public SessionDestroyInfo(SessionInfo sessionInfo, bool fromSvc, string reason, int errNo, int subErrNo) { _sessionInfo = sessionInfo; _fromSvc = fromSvc; _reason = reason; _errNo = errNo; _subErrNo = subErrNo; _BuildStringRepr(); } public SessionInfo sessionInfo { get { return _sessionInfo; } } public int sessionId { get { return _sessionInfo.sessionId; } } public bool isDestroyedFromService { get { return _fromSvc; } } public string reason { get { return _reason; } } public int errNo { get { return _errNo; } } public int subErrNo { get { return _subErrNo; } } public override string ToString() { return _repr; } private void _BuildStringRepr() { _repr = string.Format( "SessionDestroyInfo: [sessionInfo: {0}, fromSvc: {1}, reason: {2}]", _sessionInfo, _fromSvc, _reason); } private SessionInfo _sessionInfo; private bool _fromSvc; private string _reason; private int _errNo; private int _subErrNo; private string _repr; } #endregion #region AsyncConnResult /// <summary> /// The Service.AsyncConn result information class encapsulation. /// </summary> public class AsyncConnResult { public AsyncConnResult(bool connected, string reason, IPEndPoint remoteEndPoint) { _connected = connected; _reason = reason; _remoteEndPoint = remoteEndPoint; _BuildStringRepr(); } public bool connected { get { return _connected; } } public string reason { get { return _reason; } } public IPEndPoint remoteEndPoint { get { return _remoteEndPoint; } } public string remoteHost { get { return _remoteEndPoint.Address.ToString(); } } public int remotePort { get { return _remoteEndPoint.Port; } } public override string ToString() { return _repr; } private void _BuildStringRepr() { _repr = string.Format( "AsyncConnResult: [connected: {0}, reason: {1}, remoteEndPoint: {2}]", _connected, _reason, _remoteEndPoint); } private bool _connected; private string _reason; private IPEndPoint _remoteEndPoint; private string _repr; } #endregion #region ProtoReport public class ProtoReport { public ProtoReport(int sessionId, ProtoLayer layer, ProtoReportLevel level, string report) { _sessionId = sessionId; _layer = layer; _reportLevel = level; _report = report; _BuildStringRepr(); } public int sessionId { get { return _sessionId; } } public ProtoLayer layer { get { return _layer; } } public ProtoReportLevel reportLevel { get { return _reportLevel; } } public string report { get { return _report; } } public override string ToString() { return _repr; } private void _BuildStringRepr() { _repr = string.Format( "ProtoReport: [sessionId: {0}, layer: {1}, level: {2}, report: {3}]", _sessionId, _layer, _reportLevel, _report); } private int _sessionId; private ProtoLayer _layer; private ProtoReportLevel _reportLevel; private string _report; private string _repr; } #endregion /// <summary> /// Service component base class encapsulation. /// </summary> public class IComponent { /// <summary> /// Service getter/setter. /// </summary> public Service svc { get { return _svc; } set { _svc = value; } } #region OnInit/OnDestroy, OnStart/OnStop /// <summary> /// When component first start, will call this method to initialize component before OnStart method call. /// </summary> public virtual void OnInit() { } /// <summary> /// When service destroy, will call this method to destroy component. /// </summary> public virtual void OnDestroy() { } /// <summary> /// When service startup, will call this method. /// </summary> public virtual void OnStart() { } /// <summary> /// When service stop, will call this method. 
/// </summary> public virtual void OnStop() { } #endregion #region OnUpdate/OnIdle /// <summary> /// Service update handler. /// </summary> public virtual void OnUpdate() { } /// <summary> /// Service per-frame idle handler. /// </summary> /// <param name="idleTime">idle time, in milliseconds</param> public virtual void OnIdle(int idleTime) { } #endregion #region SessionEvents: OnSessionCreate/OnSessionDestroy/OnAsyncConnResult /// <summary> /// When a new session is created, this handler will be called to process it. /// </summary> /// <param name="sessionInfo">new session info</param> public virtual void OnSessionCreate(SessionInfo sessionInfo) { } /// <summary> /// When a session is destroyed, this handler will be called to process it. /// </summary> /// <param name="destroyInfo">destroy info</param> public virtual void OnSessionDestroy(SessionDestroyInfo destroyInfo) { } /// <summary> /// When a Service.AsyncConn result arrives, this handler will be called to process it. /// </summary> /// <param name="asyncConnResult">async-connect result info</param> public virtual void OnAsyncConnResult(AsyncConnResult asyncConnResult) { } #endregion #region OnProtoReport/OnUnHandledPacket /// <summary> /// When the proto-stack has a message to report, this handler will be called to process it. /// </summary> /// <param name="report">report message info</param> public virtual void OnProtoReport(ProtoReport report) { } /// <summary> /// When a packet goes unhandled, this handler will be called to process it. /// </summary> /// <param name="packet">the unhandled packet</param> public virtual void OnUnHandledPacket(Packet packet) { } #endregion private Service _svc; } }
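// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): a minimal component that
// overrides a few of the IComponent callbacks declared above. Only types
// defined in this file (plus System.Console) are used; how the component is
// registered with a Service is intentionally omitted, since that API lives
// elsewhere.
// ---------------------------------------------------------------------------
namespace llbc
{
    using System;

    public class EchoComponent : IComponent
    {
        public override void OnSessionCreate(SessionInfo sessionInfo)
        {
            // SessionInfo.ToString() returns the pre-built string representation.
            Console.WriteLine("Session created: {0}", sessionInfo);
        }

        public override void OnSessionDestroy(SessionDestroyInfo destroyInfo)
        {
            Console.WriteLine("Session destroyed: {0}", destroyInfo);
        }

        public override void OnAsyncConnResult(AsyncConnResult asyncConnResult)
        {
            Console.WriteLine("Async connect result: {0}", asyncConnResult);
        }
    }
}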
using System; using UnityEngine; using System.Collections.Generic; using Flunity.Utils; namespace Flunity { /// <summary> /// Provides events to handle touches on target DisplayObject. /// Events will not be fired if target object or its parent is not enabled. /// </summary> public class TouchListener { /// <summary> /// Rect in local coordinates system which will react on events. /// If not specified, <c>DisplayObject.internalBounds</c> will be used /// </summary> public Rect? hitArea; internal bool isRemoved; private readonly DisplayObject _target; private readonly List<TouchState> _touches = new List<TouchState>(); private FlashStage _stage; private bool _debugDrawEnabled; #region events /// <summary> /// Dispatches when object is touched. /// Will be dispatched several times in case of multitouch. /// </summary> public event Action<TouchListener, TouchState> TouchBegan; /// <summary> /// Dispatches when touch is ended. /// Will be dispatched several times in case of multitouch. /// </summary> public event Action<TouchListener, TouchState> TouchEnded; /// <summary> /// Dispatches when touch is canceled. /// Will be dispatched several times in case of multitouch. /// </summary> public event Action<TouchListener, TouchState> TouchCanceled; /// <summary> /// Dispatches when object is touched first time. /// Will be dispatched once in case of multitouch. /// </summary> public event Action<TouchListener> Pressed; /// <summary> /// Dispatches when all touches are ended. /// Will be dispatched once in case of multitouch. /// </summary> public event Action<TouchListener> Released; /// <summary> /// Dispatches when all touches are canceled. /// Will be dispatched once in case of multitouch. /// </summary> public event Action<TouchListener> Canceled; #endregion #region event helpers public TouchListener OnTouchBegan(Action<TouchListener, TouchState> handler) { TouchBegan += handler; return this; } public TouchListener OnTouchEnded(Action<TouchListener, TouchState> handler) { TouchEnded += handler; return this; } public TouchListener OnTouchCanceled(Action<TouchListener, TouchState> handler) { TouchCanceled += handler; return this; } public TouchListener OnPressed(Action<TouchListener> handler) { Pressed += handler; return this; } public TouchListener OnReleased(Action<TouchListener> handler) { Released += handler; return this; } public TouchListener OnCanceled(Action<TouchListener> handler) { Canceled += handler; return this; } #endregion #region isPressed private bool _isPressed = false; public bool isPressed { get { return _isPressed; } private set { if (_isPressed == value) return; _isPressed = value; if (_isPressed) Pressed.Dispatch(this); else Released.Dispatch(this); } } #endregion public TouchListener(DisplayObject target) { _target = target; _target.AddedToStage += OnTargetAddedToStage; _target.RemovedFromStage += OnTargetRemovedFromStage; if (_target.isOnStage) OnTargetAddedToStage(target); } void OnTargetAddedToStage(DisplayObject obj) { ClearState(); _stage = target.stage; _stage.touchController.AddListener(this); _debugDrawEnabled = Debug.isDebugBuild; if (_debugDrawEnabled) _stage.drawEvent.AddListener(DrawDebugRect); } void OnTargetRemovedFromStage(DisplayObject obj) { ClearState(); if (_debugDrawEnabled) _stage.drawEvent.RemoveListener(DrawDebugRect); _stage.touchController.RemoveListener(this); _stage = null; } internal bool HandleTouchBegin(TouchState touch) { if (isRemoved || !IsTouchEnabled()) return false; if (HasTouch(touch.id)) return false; if 
(!HitTestPoint(touch.position)) return false; _touches.Add(touch); TouchBegan.Dispatch(this, touch); isPressed = _touches.Count > 0; return true; } internal void HandleTouchEnd(TouchState touch) { if (isRemoved || !IsTouchEnabled()) return; if (!HasTouch(touch.id)) return; _touches.RemoveAt(GetTouchIndex(touch.id)); TouchEnded.Dispatch(this, touch); isPressed = _touches.Count > 0; } internal void HandleTouchCancel(TouchState touch) { if (isRemoved || !IsTouchEnabled()) return; if (!HasTouch(touch.id)) return; if (HitTestPoint(touch.position)) return; _touches.RemoveAt(GetTouchIndex(touch.id)); _isPressed = _touches.Count > 0; TouchCanceled.Dispatch(this, touch); if (_touches.Count == 0) Canceled.Dispatch(this); } internal void refreshTouchState() { if (!IsTouchEnabled()) ClearState(); } private bool HasTouch(int touchId) { for (int i = 0; i < _touches.Count; i++) { if (_touches[i].id == touchId) return true; } return false; } private int GetTouchIndex(int touchId) { for (int i = 0; i < _touches.Count; i++) { if (_touches[i].id == touchId) return i; } return -1; } /// <summary> /// Returns true if touch area contains point specified in global coordinates. /// </summary> public bool HitTestPoint(Vector2 globalPoint) { var localBounds = hitArea.HasValue ? hitArea.Value : target.GetInternalBounds(); var localPoint = target.GlobalToLocal(globalPoint); return localBounds.Contains(localPoint); } private bool IsTouchEnabled() { for (var t = target; t != null; t = t.parent) { if (!t.isTouchEnabled || !t.visible) return false; } return true; } private void ClearState() { _isPressed = false; _touches.Clear(); } private void DrawDebugRect() { if (DebugDraw.drawHitAreas && IsTouchEnabled()) { var rect = hitArea.HasValue ? hitArea.Value : target.GetInternalBounds(); DebugDraw.DrawRect(target, rect, DebugDraw.drawHitAreasColor); } } /// <summary> /// Returns all touches are active at the moment. Does not allocate memory. /// </summary> public ICollection<TouchState> touches { get { return _touches; } } public DisplayObject target { get { return _target; } } } }
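// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): attaching a TouchListener to
// an existing DisplayObject and wiring handlers through the chainable On*
// helpers defined above. The 'button' instance is assumed to be created
// elsewhere, and the explicit hitArea is optional - when omitted, the target's
// internal bounds are used, as described on the hitArea field.
// ---------------------------------------------------------------------------
namespace Flunity
{
    using UnityEngine;

    public static class TouchListenerExample
    {
        public static TouchListener AttachButtonHandlers(DisplayObject button)
        {
            var listener = new TouchListener(button)
            {
                // Enlarge the touchable rect beyond the target's internal bounds.
                hitArea = new Rect(-10, -10, 120, 60),
            };

            // Each helper returns the listener, so the calls chain.
            return listener
                .OnPressed(l => Debug.Log("button pressed"))
                .OnReleased(l => Debug.Log("button released"))
                .OnCanceled(l => Debug.Log("button touches canceled"));
        }
    }
}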
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Linq; using System.Text; using ICSharpCode.SharpZipLib.Core; using ICSharpCode.SharpZipLib.GZip; using ICSharpCode.SharpZipLib.Tar; using QuantConnect.Logging; using ZipEntry = ICSharpCode.SharpZipLib.Zip.ZipEntry; using ZipFile = Ionic.Zip.ZipFile; using ZipInputStream = ICSharpCode.SharpZipLib.Zip.ZipInputStream; using ZipOutputStream = ICSharpCode.SharpZipLib.Zip.ZipOutputStream; namespace QuantConnect { /// <summary> /// Compression class manages the opening and extraction of compressed files (zip, tar, tar.gz). /// </summary> /// <remarks>QuantConnect's data library is stored in zip format locally on the hard drive.</remarks> public static class Compression { /// <summary> /// Global Flag :: Operating System /// </summary> private static bool IsLinux { get { var p = (int)Environment.OSVersion.Platform; return (p == 4) || (p == 6) || (p == 128); } } /// <summary> /// Create a zip file of the supplied file names and string data source /// </summary> /// <param name="zipPath">Output location to save the file.</param> /// <param name="filenamesAndData">File names and data in a dictionary format.</param> /// <returns>True on successfully creating the zip file.</returns> public static bool ZipData(string zipPath, Dictionary<string, string> filenamesAndData) { try { //Create our output using (var stream = new ZipOutputStream(File.Create(zipPath))) { stream.SetLevel(0); foreach (var filename in filenamesAndData.Keys) { //Create the space in the zip file: var entry = new ZipEntry(filename); var data = filenamesAndData[filename]; var bytes = Encoding.Default.GetBytes(data); stream.PutNextEntry(entry); stream.Write(bytes, 0, bytes.Length); stream.CloseEntry(); } // End For Each File. 
//Close stream: stream.Finish(); stream.Close(); } // End Using } catch (Exception err) { Log.Error(err); return false; } return true; } /// <summary> /// Create a zip file of the supplied file names and data using a byte array /// </summary> /// <param name="zipPath">Output location to save the file.</param> /// <param name="filenamesAndData">File names and data in a dictionary format.</param> /// <returns>True on successfully saving the file</returns> public static bool ZipData(string zipPath, IEnumerable<KeyValuePair<string, byte[]>> filenamesAndData) { var success = true; var buffer = new byte[4096]; try { //Create our output using (var stream = new ZipOutputStream(File.Create(zipPath))) { foreach (var file in filenamesAndData) { //Create the space in the zip file: var entry = new ZipEntry(file.Key); //Get a Byte[] of the file data: stream.PutNextEntry(entry); using (var ms = new MemoryStream(file.Value)) { int sourceBytes; do { sourceBytes = ms.Read(buffer, 0, buffer.Length); stream.Write(buffer, 0, sourceBytes); } while (sourceBytes > 0); } } // End For Each File. //Close stream: stream.Finish(); stream.Close(); } // End Using } catch (Exception err) { Log.Error(err); success = false; } return success; } /// <summary> /// Zips the specified lines of text into the zipPath /// </summary> /// <param name="zipPath">The destination zip file path</param> /// <param name="zipEntry">The entry name in the zip</param> /// <param name="lines">The lines to be written to the zip</param> /// <returns>True if successful, otherwise false</returns> public static bool ZipData(string zipPath, string zipEntry, IEnumerable<string> lines) { try { using (var stream = new ZipOutputStream(File.Create(zipPath))) using (var writer = new StreamWriter(stream)) { var entry = new ZipEntry(zipEntry); stream.PutNextEntry(entry); foreach (var line in lines) { writer.WriteLine(line); } } return true; } catch (Exception err) { Log.Error(err); return false; } } /// <summary> /// Append the zip data to the file-entry specified. /// </summary> /// <param name="path">The zip file path</param> /// <param name="entry">The entry name</param> /// <param name="data">The entry data</param> /// <param name="overrideEntry">True if should override entry if it already exists</param> /// <returns>True on success</returns> public static bool ZipCreateAppendData(string path, string entry, string data, bool overrideEntry = false) { try { using (var zip = File.Exists(path) ? ZipFile.Read(path) : new ZipFile(path)) { if (zip.ContainsEntry(entry) && overrideEntry) { zip.RemoveEntry(entry); } zip.AddEntry(entry, data); zip.Save(); } } catch (Exception err) { Log.Error(err); return false; } return true; } /// <summary> /// Uncompress zip data byte array into a dictionary string array of filename-contents. /// </summary> /// <param name="zipData">Byte data array of zip compressed information</param> /// <param name="encoding">Specifies the encoding used to read the bytes. 
If not specified, defaults to ASCII</param> /// <returns>Uncompressed dictionary string-sting of files in the zip</returns> public static Dictionary<string, string> UnzipData(byte[] zipData, Encoding encoding = null) { // Initialize: var data = new Dictionary<string, string>(); try { using (var ms = new MemoryStream(zipData)) { //Read out the zipped data into a string, save in array: using (var zipStream = new ZipInputStream(ms)) { while (true) { //Get the next file var entry = zipStream.GetNextEntry(); if (entry != null) { //Read the file into buffer: var buffer = new byte[entry.Size]; zipStream.Read(buffer, 0, (int)entry.Size); //Save into array: var str = (encoding ?? Encoding.ASCII).GetString(buffer); data.Add(entry.Name, str); } else { break; } } } // End Zip Stream. } // End Using Memory Stream } catch (Exception err) { Log.Error(err); } return data; } /// <summary> /// Performs an in memory zip of the specified bytes /// </summary> /// <param name="bytes">The file contents in bytes to be zipped</param> /// <param name="zipEntryName">The zip entry name</param> /// <returns>The zipped file as a byte array</returns> public static byte[] ZipBytes(byte[] bytes, string zipEntryName) { using (var memoryStream = new MemoryStream()) { using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, true)) { var entry = archive.CreateEntry(zipEntryName); using (var entryStream = entry.Open()) { entryStream.Write(bytes, 0, bytes.Length); } } // 'ToArray' after disposing of 'ZipArchive' since it finishes writing all the data return memoryStream.ToArray(); } } /// <summary> /// Extract .gz files to disk /// </summary> /// <param name="gzipFileName"></param> /// <param name="targetDirectory"></param> public static string UnGZip(string gzipFileName, string targetDirectory) { // Use a 4K buffer. Any larger is a waste. var dataBuffer = new byte[4096]; var newFileOutput = Path.Combine(targetDirectory, Path.GetFileNameWithoutExtension(gzipFileName)); using (Stream fileStream = new FileStream(gzipFileName, FileMode.Open, FileAccess.Read)) using (var gzipStream = new GZipInputStream(fileStream)) using (var fileOutput = File.Create(newFileOutput)) { StreamUtils.Copy(gzipStream, fileOutput, dataBuffer); } return newFileOutput; } /// <summary> /// Compress a given file and delete the original file. Automatically rename the file to name.zip. /// </summary> /// <param name="textPath">Path of the original file</param> /// <param name="zipEntryName">The name of the entry inside the zip file</param> /// <param name="deleteOriginal">Boolean flag to delete the original file after completion</param> /// <returns>String path for the new zip file</returns> public static string Zip(string textPath, string zipEntryName, bool deleteOriginal = true) { var zipPath = textPath.Replace(".csv", ".zip").Replace(".txt", ".zip"); Zip(textPath, zipPath, zipEntryName, deleteOriginal); return zipPath; } /// <summary> /// Compresses the specified source file. /// </summary> /// <param name="source">The source file to be compressed</param> /// <param name="destination">The destination zip file path</param> /// <param name="zipEntryName">The zip entry name for the file</param> /// <param name="deleteOriginal">True to delete the source file upon completion</param> public static void Zip(string source, string destination, string zipEntryName, bool deleteOriginal) { try { var buffer = new byte[4096]; using (var stream = new ZipOutputStream(File.Create(destination))) { //Zip the text file. 
var entry = new ZipEntry(zipEntryName); stream.PutNextEntry(entry); using (var fs = File.OpenRead(source)) { int sourceBytes; do { sourceBytes = fs.Read(buffer, 0, buffer.Length); stream.Write(buffer, 0, sourceBytes); } while (sourceBytes > 0); } } //Delete the old text file: if (deleteOriginal) { File.Delete(source); } } catch (Exception err) { Log.Error(err); } } /// <summary> /// Compress a given file and delete the original file. Automatically rename the file to name.zip. /// </summary> /// <param name="textPath">Path of the original file</param> /// <param name="deleteOriginal">Boolean flag to delete the original file after completion</param> /// <returns>String path for the new zip file</returns> public static string Zip(string textPath, bool deleteOriginal = true) { return Zip(textPath, Path.GetFileName(textPath), deleteOriginal); } /// <summary> /// Compress given data to the path given /// </summary> /// <param name="data">Data to write to zip</param> /// <param name="zipPath">Path to write to</param> /// <param name="zipEntry">Entry to save the data as</param> public static void Zip(string data, string zipPath, string zipEntry) { using (var stream = new ZipOutputStream(File.Create(zipPath))) { var entry = new ZipEntry(zipEntry); stream.PutNextEntry(entry); var buffer = new byte[4096]; using (var dataReader = new MemoryStream(Encoding.Default.GetBytes(data))) { int sourceBytes; do { sourceBytes = dataReader.Read(buffer, 0, buffer.Length); stream.Write(buffer, 0, sourceBytes); } while (sourceBytes > 0); } } } /// <summary> /// Zips the specified directory, preserving folder structure /// </summary> /// <param name="directory">The directory to be zipped</param> /// <param name="destination">The output zip file destination</param> /// <param name="includeRootInZip">True to include the root 'directory' in the zip, false otherwise</param> /// <returns>True on a successful zip, false otherwise</returns> public static bool ZipDirectory(string directory, string destination, bool includeRootInZip = true) { try { if (File.Exists(destination)) File.Delete(destination); System.IO.Compression.ZipFile.CreateFromDirectory(directory, destination, CompressionLevel.Fastest, includeRootInZip, new PathEncoder()); return true; } catch (Exception err) { Log.Error(err); return false; } } /// <summary> /// Encode the paths as linux format for cross platform compatibility /// </summary> private class PathEncoder : UTF8Encoding { public override byte[] GetBytes(string s) { s = s.Replace("\\", "/"); return base.GetBytes(s); } } /// <summary> /// Unzips the specified zip file to the specified directory /// </summary> /// <param name="zip">The zip to be unzipped</param> /// <param name="directory">The directory to place the unzipped files</param> /// <param name="overwrite">Flag specifying whether or not to overwrite existing files</param> public static bool Unzip(string zip, string directory, bool overwrite = false) { if (!File.Exists(zip)) return false; try { if (!overwrite) { System.IO.Compression.ZipFile.ExtractToDirectory(zip, directory); } else { using (var archive = new ZipArchive(File.OpenRead(zip))) { foreach (var file in archive.Entries) { // skip directories if (file.Name == "") continue; var filepath = Path.Combine(directory, file.FullName); if (IsLinux) filepath = filepath.Replace(@"\", "/"); var outputFile = new FileInfo(filepath); if (!outputFile.Directory.Exists) { outputFile.Directory.Create(); } file.ExtractToFile(outputFile.FullName, true); } } } return true; } catch (Exception err) { 
Log.Error(err); return false; } } /// <summary> /// Zips all files specified to a new zip at the destination path /// </summary> public static void ZipFiles(string destination, IEnumerable<string> files) { try { using (var zipStream = new ZipOutputStream(File.Create(destination))) { var buffer = new byte[4096]; foreach (var file in files) { if (!File.Exists(file)) { Log.Trace($"ZipFiles(): File does not exist: {file}"); continue; } var entry = new ZipEntry(Path.GetFileName(file)); zipStream.PutNextEntry(entry); using (var fstream = File.OpenRead(file)) { StreamUtils.Copy(fstream, zipStream, buffer); } } } } catch (Exception err) { Log.Error(err); } } /// <summary> /// Streams a local zip file using a streamreader. /// Important: the caller must call Dispose() on the returned ZipFile instance. /// </summary> /// <param name="filename">Location of the original zip file</param> /// <param name="zip">The ZipFile instance to be returned to the caller</param> /// <returns>Stream reader of the first file contents in the zip file</returns> public static StreamReader Unzip(string filename, out ZipFile zip) { return Unzip(filename, null, out zip); } /// <summary> /// Streams a local zip file using a streamreader. /// Important: the caller must call Dispose() on the returned ZipFile instance. /// </summary> /// <param name="filename">Location of the original zip file</param> /// <param name="zipEntryName">The zip entry name to open a reader for. Specify null to access the first entry</param> /// <param name="zip">The ZipFile instance to be returned to the caller</param> /// <returns>Stream reader of the first file contents in the zip file</returns> public static StreamReader Unzip(string filename, string zipEntryName, out ZipFile zip) { StreamReader reader = null; zip = null; try { if (File.Exists(filename)) { try { zip = new ZipFile(filename); var entry = zip.FirstOrDefault(x => zipEntryName == null || string.Compare(x.FileName, zipEntryName, StringComparison.OrdinalIgnoreCase) == 0); if (entry == null) { // Unable to locate zip entry return null; } reader = new StreamReader(entry.OpenReader()); } catch (Exception err) { Log.Error(err, "Inner try/catch"); if (zip != null) zip.Dispose(); if (reader != null) reader.Close(); } } else { Log.Error($"Data.UnZip(2): File doesn\'t exist: {filename}"); } } catch (Exception err) { Log.Error(err, "File: " + filename); } return reader; } /// <summary> /// Streams the unzipped file as key value pairs of file name to file contents. /// NOTE: When the returned enumerable finishes enumerating, the zip stream will be /// closed rendering all key value pair Value properties unaccessible. Ideally this /// would be enumerated depth first. 
/// </summary> /// <remarks> /// This method has the potential for a memory leak if each kvp.Value enumerable is not disposed /// </remarks> /// <param name="filename">The zip file to stream</param> /// <returns>The stream zip contents</returns> public static IEnumerable<KeyValuePair<string, IEnumerable<string>>> Unzip(string filename) { if (!File.Exists(filename)) { Log.Error($"Compression.Unzip(): File does not exist: {filename}"); return Enumerable.Empty<KeyValuePair<string, IEnumerable<string>>>(); } try { return ReadLinesImpl(filename); } catch (Exception err) { Log.Error(err); } return Enumerable.Empty<KeyValuePair<string, IEnumerable<string>>>(); } /// <summary> /// Lazily unzips the specified stream /// </summary> /// <param name="stream">The zipped stream to be read</param> /// <returns>An enumerable whose elements are zip entry key value pairs with /// a key of the zip entry name and the value of the zip entry's file lines</returns> public static IEnumerable<KeyValuePair<string, IEnumerable<string>>> Unzip(Stream stream) { using (var zip = ZipFile.Read(stream)) { foreach (var entry in zip) { yield return new KeyValuePair<string, IEnumerable<string>>(entry.FileName, ReadZipEntry(entry)); } } } /// <summary> /// Streams each line from the first zip entry in the specified zip file /// </summary> /// <param name="filename">The zip file path to stream</param> /// <returns>An enumerable containing each line from the first unzipped entry</returns> public static IEnumerable<string> ReadLines(string filename) { if (!File.Exists(filename)) { Log.Error($"Compression.ReadFirstZipEntry(): File does not exist: {filename}"); return Enumerable.Empty<string>(); } try { return ReadLinesImpl(filename, firstEntryOnly: true).Single().Value; } catch (Exception err) { Log.Error(err); } return Enumerable.Empty<string>(); } private static IEnumerable<KeyValuePair<string, IEnumerable<string>>> ReadLinesImpl(string filename, bool firstEntryOnly = false) { using (var zip = ZipFile.Read(filename)) { if (firstEntryOnly) { var entry = zip[0]; yield return new KeyValuePair<string, IEnumerable<string>>(entry.FileName, ReadZipEntry(entry)); yield break; } foreach (var entry in zip) { yield return new KeyValuePair<string, IEnumerable<string>>(entry.FileName, ReadZipEntry(entry)); } } } private static IEnumerable<string> ReadZipEntry(Ionic.Zip.ZipEntry entry) { using (var entryReader = new StreamReader(entry.OpenReader())) { var line = entryReader.ReadLine(); while (line != null) { yield return line; line = entryReader.ReadLine(); } } } /// <summary> /// Unzip a local file and return its contents via streamreader: /// </summary> public static StreamReader UnzipStreamToStreamReader(Stream zipstream) { StreamReader reader = null; try { //Initialise: MemoryStream file; //If file exists, open a zip stream for it. using (var zipStream = new ZipInputStream(zipstream)) { //Read the file entry into buffer: var entry = zipStream.GetNextEntry(); var buffer = new byte[entry.Size]; zipStream.Read(buffer, 0, (int)entry.Size); //Load the buffer into a memory stream. file = new MemoryStream(buffer); } //Open the memory stream with a stream reader. 
reader = new StreamReader(file); } catch (Exception err) { Log.Error(err); } return reader; } // End UnZip /// <summary> /// Unzip a stream that represents a zip file and return the first entry as a stream /// </summary> public static Stream UnzipStream(Stream zipstream, out ZipFile zipFile) { zipFile = ZipFile.Read(zipstream); try { //Read the file entry into buffer: var entry = zipFile.Entries.FirstOrDefault(); if (entry != null) { return entry.OpenReader(); } } catch (Exception err) { Log.Error(err); } return null; } // End UnZip /// <summary> /// Unzip a local file and return its contents via streamreader to a local the same location as the ZIP. /// </summary> /// <param name="zipFile">Location of the zip on the HD</param> /// <returns>List of unzipped file names</returns> public static List<string> UnzipToFolder(string zipFile) { //1. Initialize: var files = new List<string>(); var outFolder = Path.GetDirectoryName(zipFile); if (string.IsNullOrEmpty(outFolder)) { outFolder = Directory.GetCurrentDirectory(); } ICSharpCode.SharpZipLib.Zip.ZipFile zf = null; try { var fs = File.OpenRead(zipFile); zf = new ICSharpCode.SharpZipLib.Zip.ZipFile(fs); foreach (ZipEntry zipEntry in zf) { //Ignore Directories if (!zipEntry.IsFile) continue; var buffer = new byte[4096]; // 4K is optimum var zipStream = zf.GetInputStream(zipEntry); // Manipulate the output filename here as desired. var fullZipToPath = Path.Combine(outFolder, zipEntry.Name); var targetFile = new FileInfo(fullZipToPath); if (targetFile.Directory != null && !targetFile.Directory.Exists) { targetFile.Directory.Create(); } //Save the file name for later: files.Add(fullZipToPath); //Copy the data in buffer chunks using (var streamWriter = File.Create(fullZipToPath)) { StreamUtils.Copy(zipStream, streamWriter, buffer); } } } catch { // lets catch the exception just to log some information about the zip file Log.Error($"Compression.UnzipToFolder(): Failure: zipFile: {zipFile} - outFolder: {outFolder} - files: {string.Join(",", files)}"); throw; } finally { if (zf != null) { zf.IsStreamOwner = true; // Makes close also shut the underlying stream zf.Close(); // Ensure we release resources } } return files; } // End UnZip /// <summary> /// Extracts all file from a zip archive and copies them to a destination folder. 
/// </summary> /// <param name="source">The source zip file.</param> /// <param name="destination">The destination folder to extract the file to.</param> public static void UnTarFiles(string source, string destination) { var inStream = File.OpenRead(source); var tarArchive = TarArchive.CreateInputTarArchive(inStream); tarArchive.ExtractContents(destination); tarArchive.Close(); inStream.Close(); } /// <summary> /// Extract tar.gz files to disk /// </summary> /// <param name="source">Tar.gz source file</param> /// <param name="destination">Location folder to unzip to</param> public static void UnTarGzFiles(string source, string destination) { var inStream = File.OpenRead(source); var gzipStream = new GZipInputStream(inStream); var tarArchive = TarArchive.CreateInputTarArchive(gzipStream); tarArchive.ExtractContents(destination); tarArchive.Close(); gzipStream.Close(); inStream.Close(); } /// <summary> /// Enumerate through the files of a TAR and get a list of KVP names-byte arrays /// </summary> /// <param name="stream">The input tar stream</param> /// <param name="isTarGz">True if the input stream is a .tar.gz or .tgz</param> /// <returns>An enumerable containing each tar entry and it's contents</returns> public static IEnumerable<KeyValuePair<string, byte[]>> UnTar(Stream stream, bool isTarGz) { using (var tar = new TarInputStream(isTarGz ? (Stream)new GZipInputStream(stream) : stream)) { TarEntry entry; while ((entry = tar.GetNextEntry()) != null) { if (entry.IsDirectory) continue; using (var output = new MemoryStream()) { tar.CopyEntryContents(output); yield return new KeyValuePair<string, byte[]>(entry.Name, output.ToArray()); } } } } /// <summary> /// Enumerate through the files of a TAR and get a list of KVP names-byte arrays. /// </summary> /// <param name="source"></param> /// <returns></returns> public static IEnumerable<KeyValuePair<string, byte[]>> UnTar(string source) { //This is a tar.gz file. 
var gzip = (source.Substring(Math.Max(0, source.Length - 6)) == "tar.gz"); using (var file = File.OpenRead(source)) { var tarIn = new TarInputStream(file); if (gzip) { var gzipStream = new GZipInputStream(file); tarIn = new TarInputStream(gzipStream); } TarEntry tarEntry; while ((tarEntry = tarIn.GetNextEntry()) != null) { if (tarEntry.IsDirectory) continue; using (var stream = new MemoryStream()) { tarIn.CopyEntryContents(stream); yield return new KeyValuePair<string, byte[]>(tarEntry.Name, stream.ToArray()); } } tarIn.Close(); } } /// <summary> /// Validates whether the zip is corrupted or not /// </summary> /// <param name="path">Path to the zip file</param> /// <returns>true if archive tests ok; false otherwise.</returns> public static bool ValidateZip(string path) { using (var zip = new ICSharpCode.SharpZipLib.Zip.ZipFile(path)) { return zip.TestArchive(true); } } /// <summary> /// Returns the entry file names contained in a zip file /// </summary> /// <param name="zipFileName">The zip file name</param> /// <returns>An IEnumerable of entry file names</returns> public static IEnumerable<string> GetZipEntryFileNames(string zipFileName) { using (var zip = ZipFile.Read(zipFileName)) { return zip.EntryFileNames; } } /// <summary> /// Return the entry file names contained in a zip file /// </summary> /// <param name="zipFileStream">Stream to the file</param> /// <returns>IEnumerable of entry file names</returns> public static IEnumerable<string> GetZipEntryFileNames(Stream zipFileStream) { using (var zip = ZipFile.Read(zipFileStream)) { return zip.EntryFileNames; } } /// <summary> /// Extracts a 7-zip archive to disk, using the 7-zip CLI utility /// </summary> /// <param name="inputFile">Path to the 7z file</param> /// <param name="outputDirectory">Directory to output contents of 7z</param> /// <param name="execTimeout">Timeout in seconds for how long we should wait for the extraction to complete</param> /// <exception cref="Exception">The extraction failed because of a timeout or the exit code was not 0</exception> public static void Extract7ZipArchive(string inputFile, string outputDirectory, int execTimeout = 60000) { var zipper = IsLinux ? "7z" : "C:/Program Files/7-Zip/7z.exe"; var psi = new ProcessStartInfo(zipper, " e " + inputFile + " -o" + outputDirectory) { CreateNoWindow = true, WindowStyle = ProcessWindowStyle.Hidden, UseShellExecute = false, RedirectStandardOutput = false }; var process = new Process(); process.StartInfo = psi; process.Start(); if (!process.WaitForExit(execTimeout)) { throw new TimeoutException($"Timed out extracting 7Zip archive: {inputFile} ({execTimeout} seconds)"); } if (process.ExitCode > 0) { throw new Exception($"Compression.Extract7ZipArchive(): 7Zip exited unsuccessfully (code {process.ExitCode})"); } } } }
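// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library): a small round trip through the
// helpers above - ZipData(path, dictionary) writes two text entries into a new
// zip, then ReadLines(path) streams the lines of the first entry back. The temp
// file path and the sample CSV content are placeholders.
// ---------------------------------------------------------------------------
namespace QuantConnect
{
    using System.Collections.Generic;
    using System.IO;
    using QuantConnect.Logging;

    public static class CompressionExample
    {
        public static void RoundTrip()
        {
            var zipPath = Path.Combine(Path.GetTempPath(), "compression-example.zip");

            var filenamesAndData = new Dictionary<string, string>
            {
                { "prices.csv", "20240101,100.5\n20240102,101.25" },
                { "volumes.csv", "20240101,1000\n20240102,1500" }
            };

            // Returns false (and logs the exception) if the zip could not be created.
            if (!Compression.ZipData(zipPath, filenamesAndData))
            {
                return;
            }

            // Streams each line of the first zip entry without loading the whole archive.
            foreach (var line in Compression.ReadLines(zipPath))
            {
                Log.Trace(line);
            }
        }
    }
}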
//----------------------------------------------------------------------- // <copyright file="ModuleInfo.cs" company="Protobuild Project"> // The MIT License (MIT) // // Copyright (c) Various Authors // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // </copyright> //----------------------------------------------------------------------- namespace Protobuild { using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Reflection; using System.Xml.Serialization; /// <summary> /// Represents a Protobuild module. /// </summary> [Serializable] public class ModuleInfo { /// <summary> /// The root path of this module. /// </summary> [NonSerialized] [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute( "Microsoft.StyleCop.CSharp.Maintainability", "SA1401:FieldsMustBePrivate", Justification = "This must be a field to allow usage of the NonSerialized attribute.")] public string Path; /// <summary> /// Initializes a new instance of the <see cref="Protobuild.ModuleInfo"/> class. /// </summary> public ModuleInfo() { this.ModuleAssemblies = new string[0]; this.DefaultAction = "resync"; this.GenerateNuGetRepositories = true; } /// <summary> /// Gets or sets the name of the module. /// </summary> /// <value>The module name.</value> public string Name { get; set; } /// <summary> /// Gets or sets the default action to be taken when Protobuild runs in the module. /// </summary> /// <value>The default action that Protobuild will take.</value> public string DefaultAction { get; set; } /// <summary> /// Gets or sets a comma seperated list of default platforms when the host platform is Windows. /// </summary> /// <value>The comma seperated list of default platforms when the host platform is Windows.</value> public string DefaultWindowsPlatforms { get; set; } /// <summary> /// Gets or sets a comma seperated list of default platforms when the host platform is Mac OS. /// </summary> /// <value>The comma seperated list of default platforms when the host platform is Mac OS.</value> public string DefaultMacOSPlatforms { get; set; } /// <summary> /// Gets or sets a comma seperated list of default platforms when the host platform is Linux. /// </summary> /// <value>The comma seperated list of default platforms when the host platform is Linux.</value> public string DefaultLinuxPlatforms { get; set; } /// <summary> /// Gets or sets a value indicating whether the NuGet repositories.config file is generated. 
/// </summary> /// <value><c>true</c> if the NuGet repositories.config file is generated; otherwise, <c>false</c>.</value> public bool GenerateNuGetRepositories { get; set; } /// <summary> /// Gets or sets a comma seperated list of supported platforms in this module. /// </summary> /// <value>The comma seperated list of supported platforms.</value> public string SupportedPlatforms { get; set; } /// <summary> /// Gets or sets a value indicating whether synchronisation is completely disabled in this module. /// </summary> /// <value><c>true</c> if synchronisation is disabled and will be skipped; otherwise, <c>false</c>.</value> public bool? DisableSynchronisation { get; set; } /// <summary> /// Gets or sets the list of .NET assemblies to load when running Protobuild in this module. /// </summary> /// <remarks> /// This list of assemblies is used to load additional templates in the GUI-based module manager. /// </remarks> /// <value>The list of assemblies to load.</value> public string[] ModuleAssemblies { get; set; } /// <summary> /// Gets or sets the name of the default startup project. /// </summary> /// <value>The name of the default startup project.</value> public string DefaultStartupProject { get; set; } /// <summary> /// Gets or sets a set of package references. /// </summary> /// <value>The registered packages.</value> public List<PackageRef> Packages { get; set; } /// <summary> /// Loads the Protobuild module from the Module.xml file. /// </summary> /// <param name="xmlFile">The path to a Module.xml file.</param> /// <returns>The loaded Protobuild module.</returns> public static ModuleInfo Load(string xmlFile) { var serializer = new XmlSerializer(typeof(ModuleInfo)); var reader = new StreamReader(xmlFile); var module = (ModuleInfo)serializer.Deserialize(reader); module.Path = new FileInfo(xmlFile).Directory.Parent.FullName; reader.Close(); return module; } /// <summary> /// Loads all of the project definitions present in the current module. /// </summary> /// <returns>The loaded project definitions.</returns> public DefinitionInfo[] GetDefinitions() { var result = new List<DefinitionInfo>(); var path = System.IO.Path.Combine(this.Path, "Build", "Projects"); if (!Directory.Exists(path)) { return new DefinitionInfo[0]; } foreach (var file in new DirectoryInfo(path).GetFiles("*.definition")) { result.Add(DefinitionInfo.Load(file.FullName)); } return result.ToArray(); } /// <summary> /// Loads all of the project definitions present in the current module and all submodules. 
/// </summary> /// <returns>The loaded project definitions.</returns> /// <param name="relative">The current directory being scanned.</param> public IEnumerable<DefinitionInfo> GetDefinitionsRecursively(string platform = null, string relative = "") { var definitions = new List<DefinitionInfo>(); foreach (var definition in this.GetDefinitions()) { definition.AbsolutePath = (this.Path + '\\' + definition.RelativePath).Trim('\\'); definition.RelativePath = (relative + '\\' + definition.RelativePath).Trim('\\'); definition.ModulePath = this.Path; definitions.Add(definition); } foreach (var submodule in this.GetSubmodules(platform)) { var from = this.Path.Replace('\\', '/').TrimEnd('/') + "/"; var to = submodule.Path.Replace('\\', '/'); var subRelativePath = (new Uri(from).MakeRelativeUri(new Uri(to))) .ToString().Replace('/', '\\'); foreach (var definition in submodule.GetDefinitionsRecursively(platform, subRelativePath.Trim('\\'))) { definitions.Add(definition); } } return definitions.Distinct(new DefinitionEqualityComparer()); } private class DefinitionEqualityComparer : IEqualityComparer<DefinitionInfo> { public bool Equals(DefinitionInfo a, DefinitionInfo b) { return a.ModulePath == b.ModulePath && a.Name == b.Name; } public int GetHashCode(DefinitionInfo obj) { return obj.ModulePath.GetHashCode() + obj.Name.GetHashCode() * 37; } } /// <summary> /// Loads all of the submodules present in this module. /// </summary> /// <returns>The loaded submodules.</returns> public ModuleInfo[] GetSubmodules(string platform = null) { var modules = new List<ModuleInfo>(); foreach (var directoryInit in new DirectoryInfo(this.Path).GetDirectories()) { var directory = directoryInit; if (File.Exists(System.IO.Path.Combine(directory.FullName, ".redirect"))) { // This is a redirected submodule (due to package resolution). Load the // module from it's actual path. using (var reader = new StreamReader(System.IO.Path.Combine(directory.FullName, ".redirect"))) { var targetPath = reader.ReadToEnd().Trim(); directory = new DirectoryInfo(targetPath); } } var build = directory.GetDirectories().FirstOrDefault(x => x.Name == "Build"); if (build == null) { continue; } var module = build.GetFiles().FirstOrDefault(x => x.Name == "Module.xml"); if (module == null) { continue; } modules.Add(ModuleInfo.Load(module.FullName)); } if (platform != null) { foreach (var directoryInit in new DirectoryInfo(this.Path).GetDirectories()) { var directory = directoryInit; if (File.Exists(System.IO.Path.Combine(directory.FullName, ".redirect"))) { // This is a redirected submodule (due to package resolution). Load the // module from it's actual path. using (var reader = new StreamReader(System.IO.Path.Combine(directory.FullName, ".redirect"))) { var targetPath = reader.ReadToEnd().Trim(); directory = new DirectoryInfo(targetPath); } } var platformDirectory = new DirectoryInfo(System.IO.Path.Combine(directory.FullName, platform)); if (!platformDirectory.Exists) { continue; } var build = platformDirectory.GetDirectories().FirstOrDefault(x => x.Name == "Build"); if (build == null) { continue; } var module = build.GetFiles().FirstOrDefault(x => x.Name == "Module.xml"); if (module == null) { continue; } modules.Add(ModuleInfo.Load(module.FullName)); } } return modules.ToArray(); } /// <summary> /// Saves the current module to a Module.xml file. 
/// </summary> /// <param name="xmlFile">The path to a Module.xml file.</param> public void Save(string xmlFile) { var serializer = new XmlSerializer(typeof(ModuleInfo)); var writer = new StreamWriter(xmlFile); serializer.Serialize(writer, this); writer.Close(); } private string[] featureCache; /// <summary> /// Determines whether the instance of Protobuild in this module /// has a specified feature. /// </summary> public bool HasProtobuildFeature(string feature) { if (featureCache == null) { var result = this.RunProtobuild("--query-features", true); var exitCode = result.Item1; var stdout = result.Item2; if (exitCode != 0 || stdout.Contains("Protobuild.exe [options]")) { featureCache = new string[0]; } featureCache = stdout.Split('\n'); } return featureCache.Contains(feature); } /// <summary> /// Runs the instance of Protobuild.exe present in the module. /// </summary> /// <param name="args">The arguments to pass to Protobuild.</param> public Tuple<int, string, string> RunProtobuild(string args, bool capture = false) { if (ExecEnvironment.RunProtobuildInProcess && !capture) { var old = Environment.CurrentDirectory; try { Environment.CurrentDirectory = this.Path; return new Tuple<int, string, string>( ExecEnvironment.InvokeSelf(args.Split(' ')), string.Empty, string.Empty); } finally { Environment.CurrentDirectory = old; } } var protobuildPath = System.IO.Path.Combine(this.Path, "Protobuild.exe"); try { var chmodStartInfo = new ProcessStartInfo { FileName = "chmod", Arguments = "a+x Protobuild.exe", WorkingDirectory = this.Path, CreateNoWindow = true, UseShellExecute = false }; Process.Start(chmodStartInfo); } catch (ExecEnvironment.SelfInvokeExitException) { throw; } catch { } var stdout = string.Empty; var stderr = string.Empty; for (var attempt = 0; attempt < 3; attempt++) { if (File.Exists(protobuildPath)) { var pi = new ProcessStartInfo { FileName = protobuildPath, Arguments = args, WorkingDirectory = this.Path, CreateNoWindow = true, RedirectStandardError = true, RedirectStandardInput = true, RedirectStandardOutput = true, UseShellExecute = false }; var p = new Process { StartInfo = pi }; p.OutputDataReceived += (sender, eventArgs) => { if (!string.IsNullOrEmpty(eventArgs.Data)) { if (capture) { stdout += eventArgs.Data + "\n"; } else { Console.WriteLine(eventArgs.Data); } } }; p.ErrorDataReceived += (sender, eventArgs) => { if (!string.IsNullOrEmpty(eventArgs.Data)) { if (capture) { stderr += eventArgs.Data + "\n"; } else { Console.Error.WriteLine(eventArgs.Data); } } }; try { p.Start(); } catch (System.ComponentModel.Win32Exception ex) { if (ex.Message.Contains("Cannot find the specified file")) { // Mono sometimes throws this error even though the // file does exist on disk. The best guess is there's // a race condition between performing chmod on the // file and Mono actually seeing it as an executable file. // Show a warning and sleep for a bit before retrying. if (attempt != 2) { Console.WriteLine("WARNING: Unable to execute Protobuild.exe, will retry again..."); System.Threading.Thread.Sleep(2000); continue; } else { Console.WriteLine("ERROR: Still unable to execute Protobuild.exe."); throw; } } } p.BeginOutputReadLine(); p.BeginErrorReadLine(); p.WaitForExit(); return new Tuple<int, string, string>(p.ExitCode, stdout, stderr); } } return new Tuple<int, string, string>(1, string.Empty, string.Empty); } /// <summary> /// Normalizes the platform string from user input, automatically correcting case /// and validating against a list of supported platforms. 
/// </summary> /// <returns>The platform string.</returns> /// <param name="platform">The normalized platform string.</param> public string NormalizePlatform(string platform) { var supportedPlatforms = "Android,iOS,Linux,MacOS,Ouya,PCL,PSMobile,Windows,Windows8,WindowsGL,WindowsPhone,WindowsPhone81"; var defaultPlatforms = true; if (!string.IsNullOrEmpty(this.SupportedPlatforms)) { supportedPlatforms = this.SupportedPlatforms; defaultPlatforms = false; } var supportedPlatformsArray = supportedPlatforms.Split(new[] { ',' }) .Select(x => x.Trim()) .Where(x => !string.IsNullOrWhiteSpace(x)) .ToArray(); // Search the array to find a platform that matches case insensitively // to the specified platform. If we are using the default list, then we allow // other platforms to be specified (in case the developer has modified the XSLT to // support others but is not using <SupportedPlatforms>). If the developer has // explicitly set the supported platforms, then we return null if the user passes // an unknown platform (the caller is expected to exit at this point). foreach (var supportedPlatform in supportedPlatformsArray) { if (string.Compare(supportedPlatform, platform, true) == 0) { return supportedPlatform; } } if (defaultPlatforms) { return platform; } else { return null; } } } }
/* * (c) 2008 MOSA - The Managed Operating System Alliance * * Licensed under the terms of the New BSD License. * * Authors: * Simon Wollwage (rootnode) <[email protected]> */ using System; namespace Pictor { public interface IImageFilterFunction { double Radius { get; } double CalculateWeight(double x); }; //-----------------------------------------------------ImageFilterLookUpTable public class ImageFilterLookUpTable { private double m_radius; private uint m_diameter; private int m_start; private ArrayPOD<short> m_weight_array; public enum EImageFilterScale { Shift = 14, //----Shift Scale = 1 << Shift, //----Scale Mask = Scale - 1 //----Mask }; public enum EImageSubpixelScale { Shift = 8, //----Shift Scale = 1 << Shift, //----Scale Mask = Scale - 1 //----Mask }; public void Calculate(IImageFilterFunction filter) { Calculate(filter, true); } public void Calculate(IImageFilterFunction filter, bool normalization) { double r = filter.Radius; ReallocateLookupTable(r); uint i; uint pivot = diameter() << ((int)EImageSubpixelScale.Shift - 1); for (i = 0; i < pivot; i++) { double x = (double)i / (double)EImageSubpixelScale.Scale; double y = filter.CalculateWeight(x); m_weight_array.Array[pivot + i] = m_weight_array.Array[pivot - i] = (short)Basics.Round(y * (int)EImageFilterScale.Scale); } uint end = (diameter() << (int)EImageSubpixelScale.Shift) - 1; m_weight_array.Array[0] = m_weight_array.Array[end]; if (normalization) { Normalize(); } } public ImageFilterLookUpTable() { m_weight_array = new ArrayPOD<short>(256); m_radius = (0); m_diameter = (0); m_start = (0); } public ImageFilterLookUpTable(IImageFilterFunction filter) : this(filter, true) { } public ImageFilterLookUpTable(IImageFilterFunction filter, bool normalization) { m_weight_array = new ArrayPOD<short>(256); Calculate(filter, normalization); } public double radius() { return m_radius; } public uint diameter() { return m_diameter; } public int start() { return m_start; } public unsafe short[] weight_array() { return m_weight_array.Array; } //-------------------------------------------------------------------- // This function normalizes integer values and corrects the rounding // errors. It doesn't do anything with the source floating point values // (m_weight_array_dbl), it corrects only integers according to the rule // of 1.0 which means that any sum of Pixel weights must be equal to 1.0. // So, the filter function must produce a graph of the proper shape. //-------------------------------------------------------------------- public void Normalize() { uint i; int flip = 1; for (i = 0; i < (int)EImageSubpixelScale.Scale; i++) { for (; ; ) { int sum = 0; uint j; for (j = 0; j < m_diameter; j++) { sum += m_weight_array.Array[j * (int)EImageSubpixelScale.Scale + i]; } if (sum == (int)EImageFilterScale.Scale) break; double k = (double)((int)EImageFilterScale.Scale) / (double)(sum); sum = 0; for (j = 0; j < m_diameter; j++) { sum += m_weight_array.Array[j * (int)EImageSubpixelScale.Scale + i] = (short)Basics.Round(m_weight_array.Array[j * (int)EImageSubpixelScale.Scale + i] * k); } sum -= (int)EImageFilterScale.Scale; int inc = (sum > 0) ? -1 : 1; for (j = 0; j < m_diameter && sum != 0; j++) { flip ^= 1; uint idx = flip != 0 ? 
m_diameter / 2 + j / 2 : m_diameter / 2 - j / 2; int v = m_weight_array.Array[idx * (int)EImageSubpixelScale.Scale + i]; if (v < (int)EImageFilterScale.Scale) { m_weight_array.Array[idx * (int)EImageSubpixelScale.Scale + i] += (short)inc; sum += inc; } } } } uint pivot = m_diameter << ((int)EImageSubpixelScale.Shift - 1); for (i = 0; i < pivot; i++) { m_weight_array.Array[pivot + i] = m_weight_array.Array[pivot - i]; } uint end = (diameter() << (int)EImageSubpixelScale.Shift) - 1; m_weight_array.Array[0] = m_weight_array.Array[end]; } private void ReallocateLookupTable(double radius) { m_radius = radius; m_diameter = Basics.UnsignedCeiling(radius) * 2; m_start = -(int)(m_diameter / 2 - 1); int size = (int)m_diameter << (int)EImageSubpixelScale.Shift; if (size > m_weight_array.Size) { m_weight_array.Resize(size); } } }; /* //--------------------------------------------------------image_filter public class image_filter : ImageFilterLookUpTable { public image_filter() { Calculate(m_filter_function); } private IImageFilter m_filter_function; }; */ //-----------------------------------------------BilinearImageFilter public struct BilinearImageFilter : IImageFilterFunction { public double Radius { get { return 1.0; } } public double CalculateWeight(double x) { return 1.0 - x; } }; //-----------------------------------------------HanningImageFilter public struct HanningImageFilter : IImageFilterFunction { public double Radius { get { return 1.0; } } public double CalculateWeight(double x) { return 0.5 + 0.5 * Math.Cos(Math.PI * x); } }; //-----------------------------------------------HammingImageFilter public struct HammingImageFilter : IImageFilterFunction { public double Radius { get { return 1.0; } } public double CalculateWeight(double x) { return 0.54 + 0.46 * Math.Cos(Math.PI * x); } }; //-----------------------------------------------HermiteImageFilter public struct HermiteImageFilter : IImageFilterFunction { public double Radius { get { return 1.0; } } public double CalculateWeight(double x) { return (2.0 * x - 3.0) * x * x + 1.0; } }; //------------------------------------------------QuadricImageFilter public struct QuadricImageFilter : IImageFilterFunction { public double Radius { get { return 1.5; } } public double CalculateWeight(double x) { double t; if (x < 0.5) return 0.75 - x * x; if (x < 1.5) { t = x - 1.5; return 0.5 * t * t; } return 0.0; } }; //------------------------------------------------BicubicImageFilter public class BicubicImageFilter : IImageFilterFunction { private static double pow3(double x) { return (x <= 0.0) ? 
0.0 : x * x * x; } public double Radius { get { return 2.0; } } public double CalculateWeight(double x) { return (1.0 / 6.0) * (pow3(x + 2) - 4 * pow3(x + 1) + 6 * pow3(x) - 4 * pow3(x - 1)); } }; //-------------------------------------------------KaiserImageFilter public class KaiserImageFilter : IImageFilterFunction { private double a; private double i0a; private double epsilon; public KaiserImageFilter() : this(6.33) { } public KaiserImageFilter(double b) { a = (b); epsilon = (1e-12); i0a = 1.0 / bessel_i0(b); } public double Radius { get { return 1.0; } } public double CalculateWeight(double x) { return bessel_i0(a * Math.Sqrt(1.0 - x * x)) * i0a; } private double bessel_i0(double x) { int i; double sum, y, t; sum = 1.0; y = x * x / 4.0; t = y; for (i = 2; t > epsilon; i++) { sum += t; t *= (double)y / (i * i); } return sum; } }; //----------------------------------------------CatromImageFilter public struct CatromImageFilter : IImageFilterFunction { public double Radius { get { return 2.0; } } public double CalculateWeight(double x) { if (x < 1.0) return 0.5 * (2.0 + x * x * (-5.0 + x * 3.0)); if (x < 2.0) return 0.5 * (4.0 + x * (-8.0 + x * (5.0 - x))); return 0.0; } }; //---------------------------------------------MitchellImageFilter public class MitchellImageFilter : IImageFilterFunction { private double p0, p2, p3; private double q0, q1, q2, q3; public MitchellImageFilter() : this(1.0 / 3.0, 1.0 / 3.0) { } public MitchellImageFilter(double b, double c) { p0 = ((6.0 - 2.0 * b) / 6.0); p2 = ((-18.0 + 12.0 * b + 6.0 * c) / 6.0); p3 = ((12.0 - 9.0 * b - 6.0 * c) / 6.0); q0 = ((8.0 * b + 24.0 * c) / 6.0); q1 = ((-12.0 * b - 48.0 * c) / 6.0); q2 = ((6.0 * b + 30.0 * c) / 6.0); q3 = ((-b - 6.0 * c) / 6.0); } public double Radius { get { return 2.0; } } public double CalculateWeight(double x) { if (x < 1.0) return p0 + x * x * (p2 + x * p3); if (x < 2.0) return q0 + x * (q1 + x * (q2 + x * q3)); return 0.0; } }; //----------------------------------------------Spline16ImageFilter public struct Spline16ImageFilter : IImageFilterFunction { public double Radius { get { return 2.0; } } public double CalculateWeight(double x) { if (x < 1.0) { return ((x - 9.0 / 5.0) * x - 1.0 / 5.0) * x + 1.0; } return ((-1.0 / 3.0 * (x - 1) + 4.0 / 5.0) * (x - 1) - 7.0 / 15.0) * (x - 1); } }; //---------------------------------------------Spline36ImageFilter public struct Spline36ImageFilter : IImageFilterFunction { public double Radius { get { return 3.0; } } public double CalculateWeight(double x) { if (x < 1.0) { return ((13.0 / 11.0 * x - 453.0 / 209.0) * x - 3.0 / 209.0) * x + 1.0; } if (x < 2.0) { return ((-6.0 / 11.0 * (x - 1) + 270.0 / 209.0) * (x - 1) - 156.0 / 209.0) * (x - 1); } return ((1.0 / 11.0 * (x - 2) - 45.0 / 209.0) * (x - 2) + 26.0 / 209.0) * (x - 2); } }; //----------------------------------------------GaussianImageFilter public struct GaussianImageFilter : IImageFilterFunction { public double Radius { get { return 2.0; } } public double CalculateWeight(double x) { return Math.Exp(-2.0 * x * x) * Math.Sqrt(2.0 / Math.PI); } }; //------------------------------------------------BesselImageFilter public struct BesselImageFilter : IImageFilterFunction { public double Radius { get { return 3.2383; } } public double CalculateWeight(double x) { return (x == 0.0) ? 
Math.PI / 4.0 : PictorMath.Bessel(Math.PI * x, 1) / (2.0 * x); } }; //-------------------------------------------------SincImageFilter public class SincImageFilter : IImageFilterFunction { public SincImageFilter(double r) { m_radius = (r < 2.0 ? 2.0 : r); } public double Radius { get { return m_radius; } } public double CalculateWeight(double x) { if (x == 0.0) return 1.0; x *= Math.PI; return Math.Sin(x) / x; } private double m_radius; }; //-----------------------------------------------LanczosImageFilter public class LanczosImageFilter : IImageFilterFunction { public LanczosImageFilter(double r) { m_radius = (r < 2.0 ? 2.0 : r); } public double Radius { get { return m_radius; } } public double CalculateWeight(double x) { if (x == 0.0) return 1.0; if (x > m_radius) return 0.0; x *= Math.PI; double xr = x / m_radius; return (Math.Sin(x) / x) * (Math.Sin(xr) / xr); } private double m_radius; }; //----------------------------------------------BlackmanImageFilter public class BlackmanImageFilter : IImageFilterFunction { public BlackmanImageFilter(double r) { m_radius = (r < 2.0 ? 2.0 : r); } public double Radius { get { return m_radius; } } public double CalculateWeight(double x) { if (x == 0.0) { return 1.0; } if (x > m_radius) { return 0.0; } x *= Math.PI; double xr = x / m_radius; return (Math.Sin(x) / x) * (0.42 + 0.5 * Math.Cos(xr) + 0.08 * Math.Cos(2 * xr)); } private double m_radius; }; }
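// --- Illustrative usage sketch (not part of the Pictor source above). ---
// Shows how a lookup table is typically built from one of the filter functions;
// it assumes compilation against the same assembly that provides ImageFilterLookUpTable.
namespace PictorImageFilterSketch
{
    using Pictor;

    internal static class FilterLutExample
    {
        internal static void Run()
        {
            // Radius-2 bicubic kernel, quantized to EImageFilterScale.Scale and then
            // normalized so the weights of every sub-pixel phase sum to 1.0.
            var lut = new ImageFilterLookUpTable(new BicubicImageFilter());

            System.Console.WriteLine("radius   = " + lut.radius());   // 2
            System.Console.WriteLine("diameter = " + lut.diameter()); // 4 taps
            System.Console.WriteLine("weights  = " + lut.weight_array().Length);
        }
    }
}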
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.AcceptanceTestsHttp { using Microsoft.Rest; using Models; /// <summary> /// HttpServerFailure operations. /// </summary> public partial class HttpServerFailure : Microsoft.Rest.IServiceOperations<AutoRestHttpInfrastructureTestService>, IHttpServerFailure { /// <summary> /// Initializes a new instance of the HttpServerFailure class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public HttpServerFailure(AutoRestHttpInfrastructureTestService client) { if (client == null) { throw new System.ArgumentNullException("client"); } this.Client = client; } /// <summary> /// Gets a reference to the AutoRestHttpInfrastructureTestService /// </summary> public AutoRestHttpInfrastructureTestService Client { get; private set; } /// <summary> /// Return 501 status code - should be represented in the client as an error /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async System.Threading.Tasks.Task<Microsoft.Rest.HttpOperationResponse<Error>> Head501WithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { // Tracing bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString(); System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Head501", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "http/failure/server/501").ToString(); // Create HTTP transport objects System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("HEAD"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if (!_httpResponse.IsSuccessStatusCode) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (Newtonsoft.Json.JsonException) { // Ignore the exception } ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new Microsoft.Rest.HttpOperationResponse<Error>(); _result.Request = _httpRequest; _result.Response = _httpResponse; string _defaultResponseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_defaultResponseContent, this.Client.DeserializationSettings); } catch (Newtonsoft.Json.JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _defaultResponseContent, ex); } if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Return 501 status code - should be represented in the client as an error /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async System.Threading.Tasks.Task<Microsoft.Rest.HttpOperationResponse<Error>> Get501WithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { // Tracing bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString(); System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Get501", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "http/failure/server/501").ToString(); // Create HTTP transport objects System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if (!_httpResponse.IsSuccessStatusCode) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (Newtonsoft.Json.JsonException) { // Ignore the exception } ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new Microsoft.Rest.HttpOperationResponse<Error>(); _result.Request = _httpRequest; _result.Response = _httpResponse; string _defaultResponseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_defaultResponseContent, this.Client.DeserializationSettings); } catch (Newtonsoft.Json.JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { 
_httpResponse.Dispose(); } throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _defaultResponseContent, ex); } if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Return 505 status code - should be represented in the client as an error /// </summary> /// <param name='booleanValue'> /// Simple boolean value true /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async System.Threading.Tasks.Task<Microsoft.Rest.HttpOperationResponse<Error>> Post505WithHttpMessagesAsync(bool? booleanValue = default(bool?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { // Tracing bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString(); System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>(); tracingParameters.Add("booleanValue", booleanValue); tracingParameters.Add("cancellationToken", cancellationToken); Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Post505", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "http/failure/server/505").ToString(); // Create HTTP transport objects System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("POST"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(booleanValue != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(booleanValue, this.Client.SerializationSettings); _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Send Request if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if (!_httpResponse.IsSuccessStatusCode) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (Newtonsoft.Json.JsonException) { // Ignore the exception } ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new Microsoft.Rest.HttpOperationResponse<Error>(); _result.Request = _httpRequest; _result.Response = _httpResponse; string _defaultResponseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_defaultResponseContent, this.Client.DeserializationSettings); } catch (Newtonsoft.Json.JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _defaultResponseContent, ex); } if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Return 505 status code - should be represented in the client as an error /// </summary> /// <param name='booleanValue'> /// Simple boolean value true /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async System.Threading.Tasks.Task<Microsoft.Rest.HttpOperationResponse<Error>> Delete505WithHttpMessagesAsync(bool? booleanValue = default(bool?), System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { // Tracing bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString(); System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>(); tracingParameters.Add("booleanValue", booleanValue); tracingParameters.Add("cancellationToken", cancellationToken); Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Delete505", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "http/failure/server/505").ToString(); // Create HTTP transport objects System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("DELETE"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(booleanValue != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(booleanValue, this.Client.SerializationSettings); _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Send Request if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if (!_httpResponse.IsSuccessStatusCode) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (Newtonsoft.Json.JsonException) { // Ignore the exception } ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, 
_responseContent); if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new Microsoft.Rest.HttpOperationResponse<Error>(); _result.Request = _httpRequest; _result.Response = _httpResponse; string _defaultResponseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_defaultResponseContent, this.Client.DeserializationSettings); } catch (Newtonsoft.Json.JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _defaultResponseContent, ex); } if (_shouldTrace) { Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
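// --- Illustrative usage sketch (not generated code). ---
// Shows how the operations above are typically consumed: each HttpServerFailure endpoint
// always replies with a 5xx status, so the generated client surfaces it as an ErrorException.
// The `HttpServerFailure` property on the service client is assumed from the generated
// IHttpServerFailure wiring.
namespace Fixtures.AcceptanceTestsHttp.UsageSketch
{
    using System;
    using System.Threading.Tasks;
    using Fixtures.AcceptanceTestsHttp.Models;

    internal static class HttpServerFailureExample
    {
        internal static async Task RunAsync(AutoRestHttpInfrastructureTestService client)
        {
            try
            {
                // GET http/failure/server/501 - expected to fail.
                await client.HttpServerFailure.Get501WithHttpMessagesAsync();
            }
            catch (ErrorException ex)
            {
                Console.WriteLine("Expected failure: " + (int)ex.Response.StatusCode);
            }
        }
    }
}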
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. using System; using System.Diagnostics.CodeAnalysis; using System.Security.Claims; using System.Threading.Tasks; using Microsoft.AspNetCore.Components.Authorization; using Microsoft.Extensions.Options; using Microsoft.JSInterop; using static Microsoft.AspNetCore.Internal.LinkerFlags; namespace Microsoft.AspNetCore.Components.WebAssembly.Authentication { /// <summary> /// The default implementation for <see cref="IRemoteAuthenticationService{TRemoteAuthenticationState}"/> that uses JS interop to authenticate the user. /// </summary> /// <typeparam name="TRemoteAuthenticationState">The state to preserve across authentication operations.</typeparam> /// <typeparam name="TAccount">The type of the <see cref="RemoteUserAccount" />.</typeparam> /// <typeparam name="TProviderOptions">The options to be passed down to the underlying JavaScript library handling the authentication operations.</typeparam> public class RemoteAuthenticationService<TRemoteAuthenticationState, [DynamicallyAccessedMembers(JsonSerialized)] TAccount, TProviderOptions> : AuthenticationStateProvider, IRemoteAuthenticationService<TRemoteAuthenticationState>, IAccessTokenProvider where TRemoteAuthenticationState : RemoteAuthenticationState where TProviderOptions : new() where TAccount : RemoteUserAccount { private static readonly TimeSpan _userCacheRefreshInterval = TimeSpan.FromSeconds(60); private bool _initialized; // This defaults to 1/1/1970 private DateTimeOffset _userLastCheck = DateTimeOffset.FromUnixTimeSeconds(0); private ClaimsPrincipal _cachedUser = new ClaimsPrincipal(new ClaimsIdentity()); /// <summary> /// Gets the <see cref="IJSRuntime"/> to use for performing JavaScript interop operations. /// </summary> protected IJSRuntime JsRuntime { get; } /// <summary> /// Gets the <see cref="NavigationManager"/> used to compute absolute urls. /// </summary> protected NavigationManager Navigation { get; } /// <summary> /// Gets the <see cref="AccountClaimsPrincipalFactory{TAccount}"/> to map accounts to <see cref="ClaimsPrincipal"/>. /// </summary> protected AccountClaimsPrincipalFactory<TAccount> AccountClaimsPrincipalFactory { get; } /// <summary> /// Gets the options for the underlying JavaScript library handling the authentication operations. /// </summary> protected RemoteAuthenticationOptions<TProviderOptions> Options { get; } /// <summary> /// Initializes a new instance. 
/// </summary> /// <param name="jsRuntime">The <see cref="IJSRuntime"/> to use for performing JavaScript interop operations.</param> /// <param name="options">The options to be passed down to the underlying JavaScript library handling the authentication operations.</param> /// <param name="navigation">The <see cref="NavigationManager"/> used to generate URLs.</param> /// <param name="accountClaimsPrincipalFactory">The <see cref="AccountClaimsPrincipalFactory{TAccount}"/> used to generate the <see cref="ClaimsPrincipal"/> for the user.</param> public RemoteAuthenticationService( IJSRuntime jsRuntime, IOptionsSnapshot<RemoteAuthenticationOptions<TProviderOptions>> options, NavigationManager navigation, AccountClaimsPrincipalFactory<TAccount> accountClaimsPrincipalFactory) { JsRuntime = jsRuntime; Navigation = navigation; AccountClaimsPrincipalFactory = accountClaimsPrincipalFactory; Options = options.Value; } /// <inheritdoc /> public override async Task<AuthenticationState> GetAuthenticationStateAsync() => new AuthenticationState(await GetUser(useCache: true)); /// <inheritdoc /> public virtual async Task<RemoteAuthenticationResult<TRemoteAuthenticationState>> SignInAsync( RemoteAuthenticationContext<TRemoteAuthenticationState> context) { await EnsureAuthService(); var internalResult = await JsRuntime.InvokeAsync<InternalRemoteAuthenticationResult<TRemoteAuthenticationState>>("AuthenticationService.signIn", context.State); var result = internalResult.Convert(); if (result.Status == RemoteAuthenticationStatus.Success) { var getUserTask = GetUser(); await getUserTask; UpdateUser(getUserTask); } return result; } /// <inheritdoc /> public virtual async Task<RemoteAuthenticationResult<TRemoteAuthenticationState>> CompleteSignInAsync( RemoteAuthenticationContext<TRemoteAuthenticationState> context) { await EnsureAuthService(); var internalResult = await JsRuntime.InvokeAsync<InternalRemoteAuthenticationResult<TRemoteAuthenticationState>>("AuthenticationService.completeSignIn", context.Url); var result = internalResult.Convert(); if (result.Status == RemoteAuthenticationStatus.Success) { var getUserTask = GetUser(); await getUserTask; UpdateUser(getUserTask); } return result; } /// <inheritdoc /> public virtual async Task<RemoteAuthenticationResult<TRemoteAuthenticationState>> SignOutAsync( RemoteAuthenticationContext<TRemoteAuthenticationState> context) { await EnsureAuthService(); var internalResult = await JsRuntime.InvokeAsync<InternalRemoteAuthenticationResult<TRemoteAuthenticationState>>("AuthenticationService.signOut", context.State); var result = internalResult.Convert(); if (result.Status == RemoteAuthenticationStatus.Success) { var getUserTask = GetUser(); await getUserTask; UpdateUser(getUserTask); } return result; } /// <inheritdoc /> public virtual async Task<RemoteAuthenticationResult<TRemoteAuthenticationState>> CompleteSignOutAsync( RemoteAuthenticationContext<TRemoteAuthenticationState> context) { await EnsureAuthService(); var internalResult = await JsRuntime.InvokeAsync<InternalRemoteAuthenticationResult<TRemoteAuthenticationState>>("AuthenticationService.completeSignOut", context.Url); var result = internalResult.Convert(); if (result.Status == RemoteAuthenticationStatus.Success) { var getUserTask = GetUser(); await getUserTask; UpdateUser(getUserTask); } return result; } /// <inheritdoc /> public virtual async ValueTask<AccessTokenResult> RequestAccessToken() { await EnsureAuthService(); var result = await 
JsRuntime.InvokeAsync<InternalAccessTokenResult>("AuthenticationService.getAccessToken"); if (!Enum.TryParse<AccessTokenResultStatus>(result.Status, ignoreCase: true, out var parsedStatus)) { throw new InvalidOperationException($"Invalid access token result status '{result.Status ?? "(null)"}'"); } if (parsedStatus == AccessTokenResultStatus.RequiresRedirect) { var redirectUrl = GetRedirectUrl(null); result.RedirectUrl = redirectUrl.ToString(); } return new AccessTokenResult(parsedStatus, result.Token, result.RedirectUrl); } /// <inheritdoc /> public virtual async ValueTask<AccessTokenResult> RequestAccessToken(AccessTokenRequestOptions options) { if (options is null) { throw new ArgumentNullException(nameof(options)); } await EnsureAuthService(); var result = await JsRuntime.InvokeAsync<InternalAccessTokenResult>("AuthenticationService.getAccessToken", options); if (!Enum.TryParse<AccessTokenResultStatus>(result.Status, ignoreCase: true, out var parsedStatus)) { throw new InvalidOperationException($"Invalid access token result status '{result.Status ?? "(null)"}'"); } if (parsedStatus == AccessTokenResultStatus.RequiresRedirect) { var redirectUrl = GetRedirectUrl(options.ReturnUrl); result.RedirectUrl = redirectUrl.ToString(); } return new AccessTokenResult(parsedStatus, result.Token, result.RedirectUrl); } private Uri GetRedirectUrl(string customReturnUrl) { var returnUrl = customReturnUrl != null ? Navigation.ToAbsoluteUri(customReturnUrl).ToString() : null; var encodedReturnUrl = Uri.EscapeDataString(returnUrl ?? Navigation.Uri); var redirectUrl = Navigation.ToAbsoluteUri($"{Options.AuthenticationPaths.LogInPath}?returnUrl={encodedReturnUrl}"); return redirectUrl; } private async Task<ClaimsPrincipal> GetUser(bool useCache = false) { var now = DateTimeOffset.Now; if (useCache && now < _userLastCheck + _userCacheRefreshInterval) { return _cachedUser; } _cachedUser = await GetAuthenticatedUser(); _userLastCheck = now; return _cachedUser; } /// <summary> /// Gets the current authenticated used using JavaScript interop. 
/// </summary> /// <returns>A <see cref="Task{ClaimsPrincipal}"/>that will return the current authenticated user when completes.</returns> protected internal virtual async ValueTask<ClaimsPrincipal> GetAuthenticatedUser() { await EnsureAuthService(); var account = await JsRuntime.InvokeAsync<TAccount>("AuthenticationService.getUser"); var user = await AccountClaimsPrincipalFactory.CreateUserAsync(account, Options.UserOptions); return user; } private async ValueTask EnsureAuthService() { if (!_initialized) { await JsRuntime.InvokeVoidAsync("AuthenticationService.init", Options.ProviderOptions); _initialized = true; } } private void UpdateUser(Task<ClaimsPrincipal> task) { NotifyAuthenticationStateChanged(UpdateAuthenticationState(task)); static async Task<AuthenticationState> UpdateAuthenticationState(Task<ClaimsPrincipal> futureUser) => new AuthenticationState(await futureUser); } } // Internal for testing purposes internal struct InternalAccessTokenResult { public string Status { get; set; } public AccessToken Token { get; set; } public string RedirectUrl { get; set; } } // Internal for testing purposes internal struct InternalRemoteAuthenticationResult<TRemoteAuthenticationState> where TRemoteAuthenticationState : RemoteAuthenticationState { public string Status { get; set; } public string ErrorMessage { get; set; } public TRemoteAuthenticationState State { get; set; } public RemoteAuthenticationResult<TRemoteAuthenticationState> Convert() { var result = new RemoteAuthenticationResult<TRemoteAuthenticationState>(); result.ErrorMessage = ErrorMessage; result.State = State; if (Status != null && Enum.TryParse<RemoteAuthenticationStatus>(Status, ignoreCase: true, out var status)) { result.Status = status; } else { throw new InvalidOperationException($"Can't convert status '${Status ?? "(null)"}'."); } return result; } } }
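// --- Illustrative usage sketch (not part of the framework source above). ---
// Consumes the service through its IAccessTokenProvider surface, for example from a
// component or a DelegatingHandler that attaches bearer tokens to outgoing requests.
namespace Microsoft.AspNetCore.Components.WebAssembly.Authentication.UsageSketch
{
    using System.Threading.Tasks;

    internal static class AccessTokenExample
    {
        internal static async Task<string> TryGetTokenValueAsync(IAccessTokenProvider tokenProvider)
        {
            // Internally this calls AuthenticationService.getAccessToken through JS interop.
            var result = await tokenProvider.RequestAccessToken();

            if (result.TryGetToken(out var token))
            {
                return token.Value;
            }

            // No token available: the result carries a redirect URL (the configured LogInPath
            // plus a returnUrl query string) that the caller can navigate to interactively.
            return null;
        }
    }
}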
//
//      Copyright (C) 2012-2014 DataStax Inc.
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//
using System;
using System.Collections.Generic;
using System.Linq;

namespace Cassandra
{
    /// <summary>
    /// Represents a prepared statement, a query with bound variables that has been
    /// prepared (pre-parsed) by the database. <p> A prepared statement can be
    /// executed once concrete values have been provided for the bound variables. The
    /// pair of a prepared statement and values for its bound variables is a
    /// BoundStatement and can be executed (by <link>Session#Execute</link>).</p>
    /// </summary>
    public class PreparedStatement
    {
        internal readonly RowSetMetadata Metadata;
        internal readonly RowSetMetadata ResultMetadata;
        private readonly int _protocolVersion;
        private volatile RoutingKey _routingKey;
        private string[] _routingNames;

        /// <summary>
        /// The cql query
        /// </summary>
        internal string Cql { get; private set; }

        /// <summary>
        /// The prepared statement identifier
        /// </summary>
        internal byte[] Id { get; private set; }

        /// <summary>
        /// The keyspace where the prepared statement was first executed
        /// </summary>
        internal string Keyspace { get; private set; }

        /// <summary>
        /// Gets metadata on the bound variables of this prepared statement.
        /// </summary>
        public RowSetMetadata Variables
        {
            get { return Metadata; }
        }

        /// <summary>
        /// Gets the routing key for the prepared statement.
        /// </summary>
        public RoutingKey RoutingKey
        {
            get { return _routingKey; }
        }

        /// <summary>
        /// Gets or sets the parameter indexes that are part of the partition key
        /// </summary>
        public int[] RoutingIndexes { get; internal set; }

        public ConsistencyLevel? ConsistencyLevel { get; private set; }

        internal PreparedStatement(RowSetMetadata metadata, byte[] id, string cql, string keyspace, RowSetMetadata resultMetadata, int protocolVersion)
        {
            Metadata = metadata;
            Id = id;
            Cql = cql;
            ResultMetadata = resultMetadata;
            Keyspace = keyspace;
            _protocolVersion = protocolVersion;
        }

        /// <summary>
        /// Creates a new BoundStatement object and binds its variables to the provided
        /// values.
        /// <para>
        /// Specify the parameter values by the position of the markers in the query or by name,
        /// using a single instance of an anonymous type, with property names as parameter names.
        /// </para>
        /// <para>
        /// Note that while no more <c>values</c> than bound variables can be provided, it is allowed to
        /// provide fewer <c>values</c> than there are variables.
        /// </para>
        /// </summary>
        /// <param name="values"> the values to bind to the variables of the newly
        /// created BoundStatement. </param>
        /// <returns>the newly created <c>BoundStatement</c> with its variables
        /// bound to <c>values</c>.
</returns> public BoundStatement Bind(params object[] values) { var bs = new BoundStatement(this) { ProtocolVersion = _protocolVersion }; bs.SetRoutingKey(_routingKey); if (values == null) { return bs; } var valuesByPosition = values; var useNamedParameters = values.Length == 1 && Utils.IsAnonymousType(values[0]); if (useNamedParameters) { //Using named parameters //Reorder the params according the position in the query valuesByPosition = Utils.GetValues(Metadata.Columns.Select(c => c.Name), values[0]).ToArray(); } bs.SetValues(valuesByPosition); bs.CalculateRoutingKey(useNamedParameters, RoutingIndexes, _routingNames, valuesByPosition, values); return bs; } /// <summary> /// Sets a default consistency level for all <c>BoundStatement</c> created /// from this object. <p> If no consistency level is set through this method, the /// BoundStatement created from this object will use the default consistency /// level (One). </p><p> Changing the default consistency level is not retroactive, /// it only applies to BoundStatement created after the change.</p> /// </summary> /// <param name="consistency"> the default consistency level to set. </param> /// <returns>this <c>PreparedStatement</c> object.</returns> public PreparedStatement SetConsistencyLevel(ConsistencyLevel consistency) { ConsistencyLevel = consistency; return this; } /// <summary> /// Sets the partition keys of the query /// </summary> /// <returns>True if it was possible to set the routing indexes for this query</returns> internal bool SetPartitionKeys(TableColumn[] keys) { var queryParameters = Metadata.Columns; var routingIndexes = new List<int>(); foreach (var key in keys) { //find the position of the key in the parameters for (var i = 0; i < queryParameters.Length; i++) { if (queryParameters[i].Name != key.Name) { continue; } routingIndexes.Add(i); break; } } if (routingIndexes.Count != keys.Length) { //The parameter names don't match the partition keys return false; } RoutingIndexes = routingIndexes.ToArray(); return true; } /// <summary> /// Set the routing key for this query. /// <para> /// The routing key is a hint for token aware load balancing policies but is never mandatory. /// This method allows you to manually provide a routing key for this query. /// </para> /// <para> /// Use this method ONLY if the partition keys are the same for all query executions (hard-coded parameters). /// </para> /// <para> /// If the partition key is composite, you should provide multiple routing key components. /// </para> /// </summary> /// <param name="routingKeyComponents"> the raw (binary) values to compose to /// obtain the routing key. </param> /// <returns>this <c>PreparedStatement</c> object.</returns> public PreparedStatement SetRoutingKey(params RoutingKey[] routingKeyComponents) { _routingKey = RoutingKey.Compose(routingKeyComponents); return this; } /// <summary> /// For named query markers, it sets the parameter names that are part of the routing key. /// <para> /// Use this method ONLY if the parameter names are different from the partition key names. /// </para> /// </summary> /// <returns>this <c>PreparedStatement</c> object.</returns> public PreparedStatement SetRoutingNames(params string[] names) { if (names == null) { return this; } _routingNames = names; return this; } } }
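// --- Illustrative usage sketch (not part of the driver source above). ---
// Typical prepare/bind/execute flow for the class above; the session object and the
// table schema in the CQL text are assumptions made for the example.
namespace Cassandra.UsageSketch
{
    internal static class PreparedStatementExample
    {
        internal static void Run(ISession session)
        {
            // Prepare once and reuse: the server-side statement id and variable
            // metadata are kept on the PreparedStatement instance.
            PreparedStatement prepared = session
                .Prepare("SELECT name FROM users WHERE id = ?")
                .SetConsistencyLevel(ConsistencyLevel.Quorum);

            // Bind by position; a single anonymous object can be used instead for named markers.
            BoundStatement bound = prepared.Bind(42);

            session.Execute(bound);
        }
    }
}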
/* * DocuSign REST API * * The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. * * OpenAPI spec version: v2.1 * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using System.ComponentModel.DataAnnotations; using SwaggerDateConverter = DocuSign.eSign.Client.SwaggerDateConverter; namespace DocuSign.eSign.Model { /// <summary> /// Contains information about a billing plan. /// </summary> [DataContract] public partial class BillingPlan : IEquatable<BillingPlan>, IValidatableObject { public BillingPlan() { // Empty Constructor } /// <summary> /// Initializes a new instance of the <see cref="BillingPlan" /> class. /// </summary> /// <param name="AppStoreProducts">Reserved: TBD.</param> /// <param name="CurrencyPlanPrices">Contains the currencyCode and currencySymbol for the alternate currency values for envelopeFee, fixedFee, and seatFee that are configured for this plan feature set..</param> /// <param name="EnableSupport">When set to **true**, then customer support is provided as part of the account plan..</param> /// <param name="IncludedSeats">The number of seats (users) included..</param> /// <param name="OtherDiscountPercent">OtherDiscountPercent.</param> /// <param name="PaymentCycle"> The payment cycle associated with the plan. The possible values are: Monthly or Annually. .</param> /// <param name="PaymentMethod">PaymentMethod.</param> /// <param name="PerSeatPrice">The per seat price for the plan..</param> /// <param name="PlanClassification">Identifies the type of plan. 
Examples include Business, Corporate, Enterprise, Free..</param> /// <param name="PlanFeatureSets">PlanFeatureSets.</param> /// <param name="PlanId">PlanId.</param> /// <param name="PlanName">The name of the Billing Plan..</param> /// <param name="SeatDiscounts">SeatDiscounts.</param> /// <param name="SupportIncidentFee">The support incident fee charged for each support incident..</param> /// <param name="SupportPlanFee">The support plan fee charged for this plan..</param> public BillingPlan(List<AppStoreProduct> AppStoreProducts = default(List<AppStoreProduct>), List<CurrencyPlanPrice> CurrencyPlanPrices = default(List<CurrencyPlanPrice>), string EnableSupport = default(string), string IncludedSeats = default(string), string OtherDiscountPercent = default(string), string PaymentCycle = default(string), string PaymentMethod = default(string), string PerSeatPrice = default(string), string PlanClassification = default(string), List<FeatureSet> PlanFeatureSets = default(List<FeatureSet>), string PlanId = default(string), string PlanName = default(string), List<SeatDiscount> SeatDiscounts = default(List<SeatDiscount>), string SupportIncidentFee = default(string), string SupportPlanFee = default(string)) { this.AppStoreProducts = AppStoreProducts; this.CurrencyPlanPrices = CurrencyPlanPrices; this.EnableSupport = EnableSupport; this.IncludedSeats = IncludedSeats; this.OtherDiscountPercent = OtherDiscountPercent; this.PaymentCycle = PaymentCycle; this.PaymentMethod = PaymentMethod; this.PerSeatPrice = PerSeatPrice; this.PlanClassification = PlanClassification; this.PlanFeatureSets = PlanFeatureSets; this.PlanId = PlanId; this.PlanName = PlanName; this.SeatDiscounts = SeatDiscounts; this.SupportIncidentFee = SupportIncidentFee; this.SupportPlanFee = SupportPlanFee; } /// <summary> /// Reserved: TBD /// </summary> /// <value>Reserved: TBD</value> [DataMember(Name="appStoreProducts", EmitDefaultValue=false)] public List<AppStoreProduct> AppStoreProducts { get; set; } /// <summary> /// Contains the currencyCode and currencySymbol for the alternate currency values for envelopeFee, fixedFee, and seatFee that are configured for this plan feature set. /// </summary> /// <value>Contains the currencyCode and currencySymbol for the alternate currency values for envelopeFee, fixedFee, and seatFee that are configured for this plan feature set.</value> [DataMember(Name="currencyPlanPrices", EmitDefaultValue=false)] public List<CurrencyPlanPrice> CurrencyPlanPrices { get; set; } /// <summary> /// When set to **true**, then customer support is provided as part of the account plan. /// </summary> /// <value>When set to **true**, then customer support is provided as part of the account plan.</value> [DataMember(Name="enableSupport", EmitDefaultValue=false)] public string EnableSupport { get; set; } /// <summary> /// The number of seats (users) included. /// </summary> /// <value>The number of seats (users) included.</value> [DataMember(Name="includedSeats", EmitDefaultValue=false)] public string IncludedSeats { get; set; } /// <summary> /// Gets or Sets OtherDiscountPercent /// </summary> [DataMember(Name="otherDiscountPercent", EmitDefaultValue=false)] public string OtherDiscountPercent { get; set; } /// <summary> /// The payment cycle associated with the plan. The possible values are: Monthly or Annually. /// </summary> /// <value> The payment cycle associated with the plan. The possible values are: Monthly or Annually. 
</value> [DataMember(Name="paymentCycle", EmitDefaultValue=false)] public string PaymentCycle { get; set; } /// <summary> /// Gets or Sets PaymentMethod /// </summary> [DataMember(Name="paymentMethod", EmitDefaultValue=false)] public string PaymentMethod { get; set; } /// <summary> /// The per seat price for the plan. /// </summary> /// <value>The per seat price for the plan.</value> [DataMember(Name="perSeatPrice", EmitDefaultValue=false)] public string PerSeatPrice { get; set; } /// <summary> /// Identifies the type of plan. Examples include Business, Corporate, Enterprise, Free. /// </summary> /// <value>Identifies the type of plan. Examples include Business, Corporate, Enterprise, Free.</value> [DataMember(Name="planClassification", EmitDefaultValue=false)] public string PlanClassification { get; set; } /// <summary> /// Gets or Sets PlanFeatureSets /// </summary> [DataMember(Name="planFeatureSets", EmitDefaultValue=false)] public List<FeatureSet> PlanFeatureSets { get; set; } /// <summary> /// Gets or Sets PlanId /// </summary> [DataMember(Name="planId", EmitDefaultValue=false)] public string PlanId { get; set; } /// <summary> /// The name of the Billing Plan. /// </summary> /// <value>The name of the Billing Plan.</value> [DataMember(Name="planName", EmitDefaultValue=false)] public string PlanName { get; set; } /// <summary> /// Gets or Sets SeatDiscounts /// </summary> [DataMember(Name="seatDiscounts", EmitDefaultValue=false)] public List<SeatDiscount> SeatDiscounts { get; set; } /// <summary> /// The support incident fee charged for each support incident. /// </summary> /// <value>The support incident fee charged for each support incident.</value> [DataMember(Name="supportIncidentFee", EmitDefaultValue=false)] public string SupportIncidentFee { get; set; } /// <summary> /// The support plan fee charged for this plan. 
/// </summary> /// <value>The support plan fee charged for this plan.</value> [DataMember(Name="supportPlanFee", EmitDefaultValue=false)] public string SupportPlanFee { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class BillingPlan {\n"); sb.Append(" AppStoreProducts: ").Append(AppStoreProducts).Append("\n"); sb.Append(" CurrencyPlanPrices: ").Append(CurrencyPlanPrices).Append("\n"); sb.Append(" EnableSupport: ").Append(EnableSupport).Append("\n"); sb.Append(" IncludedSeats: ").Append(IncludedSeats).Append("\n"); sb.Append(" OtherDiscountPercent: ").Append(OtherDiscountPercent).Append("\n"); sb.Append(" PaymentCycle: ").Append(PaymentCycle).Append("\n"); sb.Append(" PaymentMethod: ").Append(PaymentMethod).Append("\n"); sb.Append(" PerSeatPrice: ").Append(PerSeatPrice).Append("\n"); sb.Append(" PlanClassification: ").Append(PlanClassification).Append("\n"); sb.Append(" PlanFeatureSets: ").Append(PlanFeatureSets).Append("\n"); sb.Append(" PlanId: ").Append(PlanId).Append("\n"); sb.Append(" PlanName: ").Append(PlanName).Append("\n"); sb.Append(" SeatDiscounts: ").Append(SeatDiscounts).Append("\n"); sb.Append(" SupportIncidentFee: ").Append(SupportIncidentFee).Append("\n"); sb.Append(" SupportPlanFee: ").Append(SupportPlanFee).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="obj">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object obj) { // credit: http://stackoverflow.com/a/10454552/677735 return this.Equals(obj as BillingPlan); } /// <summary> /// Returns true if BillingPlan instances are equal /// </summary> /// <param name="other">Instance of BillingPlan to be compared</param> /// <returns>Boolean</returns> public bool Equals(BillingPlan other) { // credit: http://stackoverflow.com/a/10454552/677735 if (other == null) return false; return ( this.AppStoreProducts == other.AppStoreProducts || this.AppStoreProducts != null && this.AppStoreProducts.SequenceEqual(other.AppStoreProducts) ) && ( this.CurrencyPlanPrices == other.CurrencyPlanPrices || this.CurrencyPlanPrices != null && this.CurrencyPlanPrices.SequenceEqual(other.CurrencyPlanPrices) ) && ( this.EnableSupport == other.EnableSupport || this.EnableSupport != null && this.EnableSupport.Equals(other.EnableSupport) ) && ( this.IncludedSeats == other.IncludedSeats || this.IncludedSeats != null && this.IncludedSeats.Equals(other.IncludedSeats) ) && ( this.OtherDiscountPercent == other.OtherDiscountPercent || this.OtherDiscountPercent != null && this.OtherDiscountPercent.Equals(other.OtherDiscountPercent) ) && ( this.PaymentCycle == other.PaymentCycle || this.PaymentCycle != null && this.PaymentCycle.Equals(other.PaymentCycle) ) && ( this.PaymentMethod == other.PaymentMethod || this.PaymentMethod != null && this.PaymentMethod.Equals(other.PaymentMethod) ) && ( this.PerSeatPrice == other.PerSeatPrice || this.PerSeatPrice != null && this.PerSeatPrice.Equals(other.PerSeatPrice) ) && ( this.PlanClassification == other.PlanClassification || this.PlanClassification != null && 
this.PlanClassification.Equals(other.PlanClassification) ) && ( this.PlanFeatureSets == other.PlanFeatureSets || this.PlanFeatureSets != null && this.PlanFeatureSets.SequenceEqual(other.PlanFeatureSets) ) && ( this.PlanId == other.PlanId || this.PlanId != null && this.PlanId.Equals(other.PlanId) ) && ( this.PlanName == other.PlanName || this.PlanName != null && this.PlanName.Equals(other.PlanName) ) && ( this.SeatDiscounts == other.SeatDiscounts || this.SeatDiscounts != null && this.SeatDiscounts.SequenceEqual(other.SeatDiscounts) ) && ( this.SupportIncidentFee == other.SupportIncidentFee || this.SupportIncidentFee != null && this.SupportIncidentFee.Equals(other.SupportIncidentFee) ) && ( this.SupportPlanFee == other.SupportPlanFee || this.SupportPlanFee != null && this.SupportPlanFee.Equals(other.SupportPlanFee) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { // credit: http://stackoverflow.com/a/263416/677735 unchecked // Overflow is fine, just wrap { int hash = 41; // Suitable nullity checks etc, of course :) if (this.AppStoreProducts != null) hash = hash * 59 + this.AppStoreProducts.GetHashCode(); if (this.CurrencyPlanPrices != null) hash = hash * 59 + this.CurrencyPlanPrices.GetHashCode(); if (this.EnableSupport != null) hash = hash * 59 + this.EnableSupport.GetHashCode(); if (this.IncludedSeats != null) hash = hash * 59 + this.IncludedSeats.GetHashCode(); if (this.OtherDiscountPercent != null) hash = hash * 59 + this.OtherDiscountPercent.GetHashCode(); if (this.PaymentCycle != null) hash = hash * 59 + this.PaymentCycle.GetHashCode(); if (this.PaymentMethod != null) hash = hash * 59 + this.PaymentMethod.GetHashCode(); if (this.PerSeatPrice != null) hash = hash * 59 + this.PerSeatPrice.GetHashCode(); if (this.PlanClassification != null) hash = hash * 59 + this.PlanClassification.GetHashCode(); if (this.PlanFeatureSets != null) hash = hash * 59 + this.PlanFeatureSets.GetHashCode(); if (this.PlanId != null) hash = hash * 59 + this.PlanId.GetHashCode(); if (this.PlanName != null) hash = hash * 59 + this.PlanName.GetHashCode(); if (this.SeatDiscounts != null) hash = hash * 59 + this.SeatDiscounts.GetHashCode(); if (this.SupportIncidentFee != null) hash = hash * 59 + this.SupportIncidentFee.GetHashCode(); if (this.SupportPlanFee != null) hash = hash * 59 + this.SupportPlanFee.GetHashCode(); return hash; } } public IEnumerable<ValidationResult> Validate(ValidationContext validationContext) { yield break; } } }
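// --- Illustrative usage sketch (not part of the generated model above). It shows how the
// BillingPlan class is typically populated, compared and serialized. Assumptions: the sketch
// is compiled in (or imports) the same namespace as BillingPlan, the class exposes a
// parameterless or all-optional constructor as swagger-generated models usually do, and
// Newtonsoft.Json is referenced (ToJson uses JsonConvert internally). ---
using System;

internal static class BillingPlanUsageSketch
{
    static void Main()
    {
        var a = new BillingPlan
        {
            PlanId = "plan-001",
            PlanName = "Corporate",
            PlanClassification = "Corporate",
            PaymentCycle = "annually",
            PerSeatPrice = "20.00",
        };

        // Equals is value-based: it compares every member, so a second instance with the
        // same values is reported as equal even though it is a different object reference.
        var b = new BillingPlan
        {
            PlanId = "plan-001",
            PlanName = "Corporate",
            PlanClassification = "Corporate",
            PaymentCycle = "annually",
            PerSeatPrice = "20.00",
        };

        Console.WriteLine(a.Equals(b));   // True
        Console.WriteLine(a.ToString());  // Human-readable member dump from the StringBuilder above
        Console.WriteLine(a.ToJson());    // Indented JSON via Newtonsoft.Json
    }
}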
// // Mono.System.Xml.Serialization.SoapReflectionImporter // // Author: // Tim Coleman ([email protected]) // // Copyright (C) Tim Coleman, 2002 // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections; using System.Globalization; using System.Reflection; using Mono.System.Xml; using Mono.System.Xml.Schema; namespace Mono.System.Xml.Serialization { public class SoapReflectionImporter { SoapAttributeOverrides attributeOverrides; string initialDefaultNamespace; ArrayList includedTypes; ArrayList relatedMaps = new ArrayList (); ReflectionHelper helper = new ReflectionHelper(); #region Constructors public SoapReflectionImporter (): this (null, null) { } public SoapReflectionImporter (SoapAttributeOverrides attributeOverrides): this (attributeOverrides, null) { } public SoapReflectionImporter (string defaultNamespace): this (null, defaultNamespace) { } public SoapReflectionImporter (SoapAttributeOverrides attributeOverrides, string defaultNamespace) { if (defaultNamespace == null) initialDefaultNamespace = String.Empty; else initialDefaultNamespace = defaultNamespace; if (attributeOverrides == null) this.attributeOverrides = new SoapAttributeOverrides(); else this.attributeOverrides = attributeOverrides; } #endregion // Constructors #region Methods public XmlMembersMapping ImportMembersMapping (string elementName, string ns, XmlReflectionMember[] members) { return ImportMembersMapping (elementName, ns, members, true, true, false); } public XmlMembersMapping ImportMembersMapping (string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors) { return ImportMembersMapping (elementName, ns, members, hasWrapperElement, writeAccessors, false); } public XmlMembersMapping ImportMembersMapping (string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate) { return ImportMembersMapping (elementName, ns, members, hasWrapperElement, writeAccessors, validate, XmlMappingAccess.Read | XmlMappingAccess.Write); } #if NET_2_0 [MonoTODO] public #endif XmlMembersMapping ImportMembersMapping (string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate, XmlMappingAccess access) { elementName = XmlConvert.EncodeLocalName (elementName); XmlMemberMapping[] mapping = new XmlMemberMapping[members.Length]; for (int n=0; n<members.Length; n++) { XmlTypeMapMember mapMem = CreateMapMember (members[n], ns); 
mapping[n] = new XmlMemberMapping (XmlConvert.EncodeLocalName (members[n].MemberName), ns, mapMem, true); } XmlMembersMapping mps = new XmlMembersMapping (elementName, ns, hasWrapperElement, writeAccessors, mapping); mps.RelatedMaps = relatedMaps; mps.Format = SerializationFormat.Encoded; Type[] extraTypes = includedTypes != null ? (Type[])includedTypes.ToArray(typeof(Type)) : null; mps.Source = new MembersSerializationSource (elementName, hasWrapperElement, members, writeAccessors, false, null, extraTypes); return mps; } public XmlTypeMapping ImportTypeMapping (Type type) { return ImportTypeMapping (type, null); } public XmlTypeMapping ImportTypeMapping (Type type, string defaultNamespace) { if (type == null) throw new ArgumentNullException ("type"); if (type == typeof (void)) throw new InvalidOperationException ("Type " + type.Name + " may not be serialized."); return ImportTypeMapping (TypeTranslator.GetTypeData (type), defaultNamespace); } internal XmlTypeMapping ImportTypeMapping (TypeData typeData, string defaultNamespace) { if (typeData == null) throw new ArgumentNullException ("typeData"); if (typeData.Type == null) throw new ArgumentException ("Specified TypeData instance does not have Type set."); string oldNs = initialDefaultNamespace; if (defaultNamespace == null) defaultNamespace = initialDefaultNamespace; if (defaultNamespace == null) defaultNamespace = string.Empty; initialDefaultNamespace = defaultNamespace; XmlTypeMapping map; switch (typeData.SchemaType) { case SchemaTypes.Class: map = ImportClassMapping (typeData, defaultNamespace); break; case SchemaTypes.Array: map = ImportListMapping (typeData, defaultNamespace); break; case SchemaTypes.XmlNode: throw CreateTypeException (typeData.Type); case SchemaTypes.Primitive: map = ImportPrimitiveMapping (typeData, defaultNamespace); break; case SchemaTypes.Enum: map = ImportEnumMapping (typeData, defaultNamespace); break; case SchemaTypes.XmlSerializable: default: throw new NotSupportedException ("Type " + typeData.Type.FullName + " not supported for XML serialization"); } map.RelatedMaps = relatedMaps; map.Format = SerializationFormat.Encoded; Type[] extraTypes = includedTypes != null ? 
(Type[])includedTypes.ToArray(typeof(Type)) : null; map.Source = new SoapTypeSerializationSource (typeData.Type, attributeOverrides, defaultNamespace, extraTypes); initialDefaultNamespace = oldNs; return map; } XmlTypeMapping CreateTypeMapping (TypeData typeData, string defaultXmlType, string defaultNamespace) { string membersNamespace = defaultNamespace; bool includeInSchema = true; SoapAttributes atts = null; if (defaultXmlType == null) defaultXmlType = typeData.XmlType; if (!typeData.IsListType) { if (attributeOverrides != null) atts = attributeOverrides[typeData.Type]; if (atts != null && typeData.SchemaType == SchemaTypes.Primitive) throw new InvalidOperationException ("SoapType attribute may not be specified for the type " + typeData.FullTypeName); } if (atts == null) atts = new SoapAttributes (typeData.Type); if (atts.SoapType != null) { if (atts.SoapType.Namespace != null && atts.SoapType.Namespace != string.Empty) membersNamespace = atts.SoapType.Namespace; if (atts.SoapType.TypeName != null && atts.SoapType.TypeName != string.Empty) defaultXmlType = XmlConvert.EncodeLocalName (atts.SoapType.TypeName); includeInSchema = atts.SoapType.IncludeInSchema; } if (membersNamespace == null) membersNamespace = ""; XmlTypeMapping map = new XmlTypeMapping (defaultXmlType, membersNamespace, typeData, defaultXmlType, membersNamespace); map.IncludeInSchema = includeInSchema; relatedMaps.Add (map); return map; } XmlTypeMapping ImportClassMapping (Type type, string defaultNamespace) { TypeData typeData = TypeTranslator.GetTypeData (type); return ImportClassMapping (typeData, defaultNamespace); } XmlTypeMapping ImportClassMapping (TypeData typeData, string defaultNamespace) { Type type = typeData.Type; if (type.IsValueType) throw CreateStructException (type); if (type == typeof (object)) defaultNamespace = XmlSchema.Namespace; ReflectionHelper.CheckSerializableType (type, false); XmlTypeMapping map = helper.GetRegisteredClrType (type, GetTypeNamespace (typeData, defaultNamespace)); if (map != null) return map; map = CreateTypeMapping (typeData, null, defaultNamespace); helper.RegisterClrType (map, type, map.Namespace); map.MultiReferenceType = true; ClassMap classMap = new ClassMap (); map.ObjectMap = classMap; // Import members ICollection members = GetReflectionMembers (type); foreach (XmlReflectionMember rmember in members) { if (rmember.SoapAttributes.SoapIgnore) continue; classMap.AddMember (CreateMapMember (rmember, defaultNamespace)); } // Import included classes SoapIncludeAttribute[] includes = (SoapIncludeAttribute[])type.GetCustomAttributes (typeof (SoapIncludeAttribute), false); for (int n=0; n<includes.Length; n++) { Type includedType = includes[n].Type; ImportTypeMapping (includedType); } if (type == typeof (object) && includedTypes != null) { foreach (Type intype in includedTypes) map.DerivedTypes.Add (ImportTypeMapping (intype)); } // Register inheritance relations if (type.BaseType != null) { XmlTypeMapping bmap = ImportClassMapping (type.BaseType, defaultNamespace); if (type.BaseType != typeof (object)) map.BaseMap = bmap; // At this point, derived classes of this map must be already registered RegisterDerivedMap (bmap, map); } return map; } void RegisterDerivedMap (XmlTypeMapping map, XmlTypeMapping derivedMap) { map.DerivedTypes.Add (derivedMap); map.DerivedTypes.AddRange (derivedMap.DerivedTypes); if (map.BaseMap != null) RegisterDerivedMap (map.BaseMap, derivedMap); else { XmlTypeMapping obmap = ImportTypeMapping (typeof(object)); if (obmap != map) obmap.DerivedTypes.Add 
(derivedMap); } } string GetTypeNamespace (TypeData typeData, string defaultNamespace) { string membersNamespace = defaultNamespace; SoapAttributes atts = null; if (!typeData.IsListType) { if (attributeOverrides != null) atts = attributeOverrides[typeData.Type]; } if (atts == null) atts = new SoapAttributes (typeData.Type); if (atts.SoapType != null) { if (atts.SoapType.Namespace != null && atts.SoapType.Namespace != string.Empty) membersNamespace = atts.SoapType.Namespace; } if (membersNamespace == null) return ""; else return membersNamespace; } XmlTypeMapping ImportListMapping (TypeData typeData, string defaultNamespace) { Type type = typeData.Type; XmlTypeMapping map = helper.GetRegisteredClrType (type, XmlSerializer.EncodingNamespace); if (map != null) return map; ListMap obmap = new ListMap (); TypeData itemTypeData = typeData.ListItemTypeData; map = CreateTypeMapping (typeData, "Array", XmlSerializer.EncodingNamespace); helper.RegisterClrType (map, type, XmlSerializer.EncodingNamespace); map.MultiReferenceType = true; map.ObjectMap = obmap; XmlTypeMapElementInfo elem = new XmlTypeMapElementInfo (null, itemTypeData); if (elem.TypeData.IsComplexType) { elem.MappedType = ImportTypeMapping (typeData.ListItemType, defaultNamespace); elem.TypeData = elem.MappedType.TypeData; } elem.ElementName = "Item"; elem.Namespace = string.Empty; elem.IsNullable = true; // By default, items are nullable XmlTypeMapElementInfoList list = new XmlTypeMapElementInfoList(); list.Add (elem); obmap.ItemInfo = list; XmlTypeMapping objMap = ImportTypeMapping (typeof(object), defaultNamespace); objMap.DerivedTypes.Add (map); // Register any of the including types as a derived class of object SoapIncludeAttribute[] includes = (SoapIncludeAttribute[])type.GetCustomAttributes (typeof (SoapIncludeAttribute), false); for (int i = 0; i < includes.Length; i++) { Type includedType = includes[i].Type; objMap.DerivedTypes.Add(ImportTypeMapping (includedType, defaultNamespace)); } return map; } XmlTypeMapping ImportPrimitiveMapping (TypeData typeData, string defaultNamespace) { if (typeData.SchemaType == SchemaTypes.Primitive) defaultNamespace = typeData.IsXsdType ? 
XmlSchema.Namespace : XmlSerializer.WsdlTypesNamespace; Type type = typeData.Type; XmlTypeMapping map = helper.GetRegisteredClrType (type, GetTypeNamespace (typeData, defaultNamespace)); if (map != null) return map; map = CreateTypeMapping (typeData, null, defaultNamespace); helper.RegisterClrType (map, type, map.Namespace); return map; } XmlTypeMapping ImportEnumMapping (TypeData typeData, string defaultNamespace) { Type type = typeData.Type; XmlTypeMapping map = helper.GetRegisteredClrType (type, GetTypeNamespace (typeData, defaultNamespace)); if (map != null) return map; ReflectionHelper.CheckSerializableType (type, false); map = CreateTypeMapping (typeData, null, defaultNamespace); helper.RegisterClrType (map, type, map.Namespace); map.MultiReferenceType = true; string [] names = Enum.GetNames (type); EnumMap.EnumMapMember[] members = new EnumMap.EnumMapMember[names.Length]; for (int n=0; n<names.Length; n++) { FieldInfo field = type.GetField (names[n]); string xmlName = names[n]; object[] atts = field.GetCustomAttributes (typeof(SoapEnumAttribute), false); if (atts.Length > 0) xmlName = ((SoapEnumAttribute)atts[0]).Name; long value = ((IConvertible) field.GetValue (null)).ToInt64 (CultureInfo.InvariantCulture); members[n] = new EnumMap.EnumMapMember (XmlConvert.EncodeLocalName (xmlName), names[n], value); } bool isFlags = type.IsDefined (typeof (FlagsAttribute), false); map.ObjectMap = new EnumMap (members, isFlags); ImportTypeMapping (typeof(object), defaultNamespace).DerivedTypes.Add (map); return map; } ICollection GetReflectionMembers (Type type) { ArrayList members = new ArrayList(); PropertyInfo[] properties = type.GetProperties (BindingFlags.Instance | BindingFlags.Public); foreach (PropertyInfo prop in properties) { if (!prop.CanRead) continue; if (!prop.CanWrite && (TypeTranslator.GetTypeData (prop.PropertyType).SchemaType != SchemaTypes.Array || prop.PropertyType.IsArray)) continue; SoapAttributes atts = attributeOverrides[type, prop.Name]; if (atts == null) atts = new SoapAttributes (prop); if (atts.SoapIgnore) continue; XmlReflectionMember member = new XmlReflectionMember(prop.Name, prop.PropertyType, atts); members.Add (member); } FieldInfo[] fields = type.GetFields (BindingFlags.Instance | BindingFlags.Public); foreach (FieldInfo field in fields) { SoapAttributes atts = attributeOverrides[type, field.Name]; if (atts == null) atts = new SoapAttributes (field); if (atts.SoapIgnore) continue; XmlReflectionMember member = new XmlReflectionMember(field.Name, field.FieldType, atts); members.Add (member); } return members; } private XmlTypeMapMember CreateMapMember (XmlReflectionMember rmember, string defaultNamespace) { XmlTypeMapMember mapMember; SoapAttributes atts = rmember.SoapAttributes; TypeData typeData = TypeTranslator.GetTypeData (rmember.MemberType); if (atts.SoapAttribute != null) { // An attribute if (typeData.SchemaType != SchemaTypes.Enum && typeData.SchemaType != SchemaTypes.Primitive) { throw new InvalidOperationException (string.Format (CultureInfo.InvariantCulture, "Cannot serialize member '{0}' of type {1}. 
" + "SoapAttribute cannot be used to encode complex types.", rmember.MemberName, typeData.FullTypeName)); } if (atts.SoapElement != null) throw new Exception ("SoapAttributeAttribute and SoapElementAttribute cannot be applied to the same member"); XmlTypeMapMemberAttribute mapAttribute = new XmlTypeMapMemberAttribute (); if (atts.SoapAttribute.AttributeName.Length == 0) mapAttribute.AttributeName = XmlConvert.EncodeLocalName (rmember.MemberName); else mapAttribute.AttributeName = XmlConvert.EncodeLocalName (atts.SoapAttribute.AttributeName); mapAttribute.Namespace = (atts.SoapAttribute.Namespace != null) ? atts.SoapAttribute.Namespace : ""; if (typeData.IsComplexType) mapAttribute.MappedType = ImportTypeMapping (typeData.Type, defaultNamespace); typeData = TypeTranslator.GetTypeData (rmember.MemberType, atts.SoapAttribute.DataType); mapMember = mapAttribute; mapMember.DefaultValue = GetDefaultValue (typeData, atts.SoapDefaultValue); } else { if (typeData.SchemaType == SchemaTypes.Array) mapMember = new XmlTypeMapMemberList (); else mapMember = new XmlTypeMapMemberElement (); if (atts.SoapElement != null && atts.SoapElement.DataType.Length != 0) typeData = TypeTranslator.GetTypeData (rmember.MemberType, atts.SoapElement.DataType); // Creates an ElementInfo that identifies the element XmlTypeMapElementInfoList infoList = new XmlTypeMapElementInfoList(); XmlTypeMapElementInfo elem = new XmlTypeMapElementInfo (mapMember, typeData); elem.ElementName = XmlConvert.EncodeLocalName ((atts.SoapElement != null && atts.SoapElement.ElementName.Length != 0) ? atts.SoapElement.ElementName : rmember.MemberName); elem.Namespace = string.Empty; elem.IsNullable = (atts.SoapElement != null) ? atts.SoapElement.IsNullable : false; if (typeData.IsComplexType) elem.MappedType = ImportTypeMapping (typeData.Type, defaultNamespace); infoList.Add (elem); ((XmlTypeMapMemberElement)mapMember).ElementInfo = infoList; } mapMember.TypeData = typeData; mapMember.Name = rmember.MemberName; mapMember.IsReturnValue = rmember.IsReturnValue; return mapMember; } public void IncludeType (Type type) { if (type == null) throw new ArgumentNullException ("type"); if (includedTypes == null) includedTypes = new ArrayList (); if (!includedTypes.Contains (type)) includedTypes.Add (type); } public void IncludeTypes (ICustomAttributeProvider provider) { object[] ats = provider.GetCustomAttributes (typeof(SoapIncludeAttribute), true); foreach (SoapIncludeAttribute at in ats) IncludeType (at.Type); } Exception CreateTypeException (Type type) { return new NotSupportedException ("The type " + type.FullName + " may not be serialized with SOAP-encoded messages. Set the Use for your message to Literal"); } Exception CreateStructException (Type type) { return new NotSupportedException ("Cannot serialize " + type.FullName + ". 
Nested structs are not supported with encoded SOAP"); } private object GetDefaultValue (TypeData typeData, object defaultValue) { if (defaultValue == DBNull.Value || typeData.SchemaType != SchemaTypes.Enum) return defaultValue; if (typeData.Type != defaultValue.GetType ()) { string msg = string.Format (CultureInfo.InvariantCulture, "Enum {0} cannot be converted to {1}.", defaultValue.GetType ().FullName, typeData.FullTypeName); throw new InvalidOperationException (msg); } // get string representation of enum value string namedValue = Enum.Format (typeData.Type, defaultValue, "g"); // get decimal representation of enum value string decimalValue = Enum.Format (typeData.Type, defaultValue, "d"); // if decimal representation matches string representation, then // the value is not defined in the enum type (as the "g" format // will return the decimal equivalent of the value if the value // is not equal to a combination of named enumerated constants if (namedValue == decimalValue) { string msg = string.Format (CultureInfo.InvariantCulture, "Value '{0}' cannot be converted to {1}.", defaultValue, defaultValue.GetType ().FullName); throw new InvalidOperationException (msg); } // XmlSerializer expects integral enum value //return namedValue.Replace (',', ' '); return defaultValue; } #endregion // Methods } }
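// --- Illustrative usage sketch. The importer above is Mono's internal mirror of
// System.Xml.Serialization.SoapReflectionImporter; the sketch below uses the public framework
// types, which follow the same pattern: import an XmlTypeMapping via reflection, then build a
// serializer from that mapping for SOAP-encoded output. The Group type is a made-up example. ---
using System;
using System.Xml;
using System.Xml.Serialization;

public class Group
{
    public string GroupName;
    public int MemberCount;
}

internal static class SoapReflectionImporterSketch
{
    static void Main()
    {
        // SOAP-encoded serialization goes through an XmlTypeMapping rather than the
        // plain XmlSerializer(Type) constructor.
        XmlTypeMapping mapping = new SoapReflectionImporter().ImportTypeMapping(typeof(Group));
        var serializer = new XmlSerializer(mapping);

        var writer = new XmlTextWriter(Console.Out) { Formatting = Formatting.Indented };

        // SOAP-encoded fragments are not full documents, so wrap them in a root element.
        writer.WriteStartElement("wrapper");
        serializer.Serialize(writer, new Group { GroupName = "Staff", MemberCount = 3 });
        writer.WriteEndElement();
        writer.Flush();
    }
}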
using System; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.Serialization; using Hammock.Model; using Hammock.Tasks; using Newtonsoft.Json; namespace TweetSharp { #if !SILVERLIGHT && !WINRT [Serializable] #endif #if !Smartphone && !NET20 [DataContract] #endif [JsonObject(MemberSerialization.OptIn)] public class TwitterRateLimitStatusSummary { #if !Smartphone && !NET20 [DataMember] #endif public virtual string AccessToken { get; set; } #if !Smartphone && !NET20 [DataMember] #endif public virtual List<TwitterRateLimitResource> Resources { get; set; } #if !Smartphone && !NET20 [DataMember] #endif public virtual string RawSource { get; set; } } #if !SILVERLIGHT && !WINRT [Serializable] #endif #if !Smartphone && !NET20 [DataContract] #endif [JsonObject(MemberSerialization.OptIn)] public class TwitterRateLimitResource { #if !Smartphone && !NET20 [DataMember] #endif public virtual string Name { get; set; } #if !Smartphone && !NET20 [DataMember] #endif public virtual Dictionary<string, TwitterRateLimitStatus> Limits { get; set; } } #if !SILVERLIGHT && !WINRT [Serializable] #endif #if !Smartphone && !NET20 [DataContract] [DebuggerDisplay("{RemainingHits} / {HourlyLimit} remaining.")] #endif [JsonObject(MemberSerialization.OptIn)] public class TwitterRateLimitStatus : PropertyChangedBase, IComparable<TwitterRateLimitStatus>, IEquatable<TwitterRateLimitStatus>, IRateLimitStatus, ITwitterModel { private int _remainingHits; private int _hourlyLimit; private long _resetTimeInSeconds; private DateTime _resetTime; /// <summary> /// Gets or sets the remaining API hits allowed. /// </summary> /// <value>The remaining API hits allowed.</value> [JsonProperty("remaining_hits")] #if !Smartphone && !NET20 [DataMember] #endif public virtual int RemainingHits { get { return _remainingHits; } set { if (_remainingHits == value) { return; } _remainingHits = value; OnPropertyChanged("RemainingHits"); } } /// <summary> /// Gets or sets the API hits hourly limit. /// You can compare this to <see cref="RemainingHits" /> to get a /// percentage of usage remaining. /// </summary> /// <value>The hourly limit.</value> [JsonProperty("hourly_limit")] #if !Smartphone && !NET20 [DataMember] #endif public virtual int HourlyLimit { get { return _hourlyLimit; } set { if (_hourlyLimit == value) { return; } _hourlyLimit = value; OnPropertyChanged("HourlyLimit"); } } /// <summary> /// Gets or sets the UNIX time representing the time /// this rate limit will reset. /// This is not the number of seconds until the rate limit /// resets. /// </summary> /// <value>The reset time in seconds.</value> [JsonProperty("reset_time_in_seconds")] #if !Smartphone && !NET20 [DataMember] #endif public virtual long ResetTimeInSeconds { get { return _resetTimeInSeconds; } set { if (_resetTimeInSeconds == value) { return; } _resetTimeInSeconds = value; OnPropertyChanged("ResetTimeInSeconds"); } } /// <summary> /// Gets or sets the reset time for this rate limit constraint. /// </summary> /// <value>The reset time.</value> [JsonProperty("reset_time")] #if !Smartphone && !NET20 [DataMember] #endif public virtual DateTime ResetTime { get { return _resetTime; } set { if (_resetTime == value) { return; } _resetTime = value; OnPropertyChanged("ResetTime"); } } #region Implementation of IComparable<TwitterRateLimitStatus> /// <summary> /// Compares the current object with another object of the same type. 
/// </summary> /// <param name="other">An object to compare with this object.</param> /// <returns> /// A value that indicates the relative order of the objects being compared. The return value has the following meanings: Value Meaning Less than zero This object is less than the <paramref name="other"/> parameter.Zero This object is equal to <paramref name="other"/>. Greater than zero This object is greater than <paramref name="other"/>. /// </returns> public int CompareTo(TwitterRateLimitStatus other) { return other.HourlyLimit.CompareTo(HourlyLimit) == 0 && other.ResetTime.CompareTo(ResetTime) == 0 && other.RemainingHits.CompareTo(RemainingHits) == 0 ? 0 : 1; } /// <summary> /// Determines whether the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>. /// </summary> /// <param name="obj">The <see cref="T:System.Object"/> to compare with the current <see cref="T:System.Object"/>.</param> /// <returns> /// true if the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>; otherwise, false. /// </returns> public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; return obj.GetType() == typeof (TwitterRateLimitStatus) && Equals((TwitterRateLimitStatus) obj); } /// <summary> /// Serves as a hash function for a particular type. /// </summary> /// <returns> /// A hash code for the current <see cref="T:System.Object"/>. /// </returns> public override int GetHashCode() { unchecked { var result = _remainingHits; result = (result*397) ^ _hourlyLimit; result = (result*397) ^ _resetTimeInSeconds.GetHashCode(); result = (result*397) ^ _resetTime.GetHashCode(); return result; } } #endregion #region Implementation of IEquatable<TwitterRateLimitStatus> /// <summary> /// Indicates whether the current object is equal to another object of the same type. /// </summary> /// <param name="other">An object to compare with this object.</param> /// <returns> /// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false. /// </returns> public bool Equals(TwitterRateLimitStatus other) { if (ReferenceEquals(null, other)) { return false; } if (ReferenceEquals(this, other)) { return true; } return other._remainingHits == _remainingHits && other._hourlyLimit == _hourlyLimit && other._resetTimeInSeconds == _resetTimeInSeconds && other._resetTime.Equals(_resetTime); } #endregion #region IRateLimitStatus Members /// <summary> /// Gets the next reset time. /// </summary> /// <value>The next reset time.</value> DateTime IRateLimitStatus.NextReset { get { return ResetTime; } } /// <summary> /// Gets the remaining API uses. /// </summary> /// <value>The remaining API uses.</value> int IRateLimitStatus.RemainingUses { get { return RemainingHits; } } #endregion #if !Smartphone && !NET20 [DataMember] #endif public virtual string RawSource { get; set; } } }
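// --- Illustrative usage sketch (assumptions: a TwitterRateLimitStatus is constructed or
// deserialized elsewhere, e.g. from a rate_limit_status response; the sample values below are
// made up). It shows the explicit IRateLimitStatus view that throttling code consumes. ---
using System;
using Hammock.Tasks;
using TweetSharp;

internal static class RateLimitStatusSketch
{
    static void Main()
    {
        var status = new TwitterRateLimitStatus
        {
            HourlyLimit = 350,
            RemainingHits = 70,
            ResetTime = DateTime.UtcNow.AddMinutes(30),
        };

        // The interface members map onto the properties above: RemainingUses -> RemainingHits,
        // NextReset -> ResetTime.
        IRateLimitStatus limit = status;
        double usedFraction = 1.0 - (double)limit.RemainingUses / status.HourlyLimit;
        Console.WriteLine($"{usedFraction:P0} of the hourly quota used; resets at {limit.NextReset:T}");
    }
}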
// <copyright file="SslStreamConnectionAdapter.cs" company="Fubar Development Junker"> // Copyright (c) Fubar Development Junker. All rights reserved. // </copyright> using System; using System.IO; using System.IO.Pipelines; using System.Security.Cryptography.X509Certificates; using System.Threading; using System.Threading.Tasks; using FubarDev.FtpServer.Authentication; using FubarDev.FtpServer.Networking; using Microsoft.Extensions.Logging; namespace FubarDev.FtpServer.ConnectionHandlers { /// <summary> /// A connection adapter that injects an SSL stream between the socket and the connection pipe. /// </summary> internal class SslStreamConnectionAdapter : IFtpConnectionAdapter { private readonly IDuplexPipe _socketPipe; private readonly IDuplexPipe _connectionPipe; private readonly ISslStreamWrapperFactory _sslStreamWrapperFactory; private readonly X509Certificate _certificate; private readonly CancellationToken _connectionClosed; private readonly ILoggerFactory? _loggerFactory; private SslCommunicationInfo? _info; public SslStreamConnectionAdapter( IDuplexPipe socketPipe, IDuplexPipe connectionPipe, ISslStreamWrapperFactory sslStreamWrapperFactory, X509Certificate certificate, CancellationToken connectionClosed, ILoggerFactory? loggerFactory = null) { _socketPipe = socketPipe; _connectionPipe = connectionPipe; _sslStreamWrapperFactory = sslStreamWrapperFactory; _certificate = certificate; _connectionClosed = connectionClosed; _loggerFactory = loggerFactory; } /// <inheritdoc /> public IFtpService Sender => _info?.TransmitterService ?? throw new InvalidOperationException("Sender can only be accessed when the connection service was started."); /// <inheritdoc /> public IPausableFtpService Receiver => _info?.ReceiverService ?? throw new InvalidOperationException("Receiver can only be accessed when the connection service was started."); /// <inheritdoc /> public async Task StartAsync(CancellationToken cancellationToken) { var rawStream = new SimplePipeStream( _socketPipe.Input, _socketPipe.Output); var sslStream = await _sslStreamWrapperFactory.WrapStreamAsync(rawStream, false, _certificate, cancellationToken) .ConfigureAwait(false); var receiverLogger = _loggerFactory ?.CreateLogger(typeof(SslStreamConnectionAdapter).FullName + ".Receiver"); var receiverService = new NonClosingNetworkStreamReader( sslStream, _connectionPipe.Output, _socketPipe.Input, _connectionClosed, receiverLogger); var transmitterLogger = _loggerFactory ?.CreateLogger(typeof(SslStreamConnectionAdapter).FullName + ".Transmitter"); var transmitterService = new NonClosingNetworkStreamWriter( sslStream, _connectionPipe.Input, _connectionClosed, transmitterLogger); var info = new SslCommunicationInfo(transmitterService, receiverService, sslStream); _info = info; await info.TransmitterService.StartAsync(cancellationToken) .ConfigureAwait(false); await info.ReceiverService.StartAsync(cancellationToken) .ConfigureAwait(false); } /// <inheritdoc /> public async Task StopAsync(CancellationToken cancellationToken) { if (_info == null) { // Service wasn't started yet! 
return; } var info = _info; var receiverStopTask = info.ReceiverService.StopAsync(cancellationToken); var transmitterStopTask = info.TransmitterService.StopAsync(cancellationToken); await Task.WhenAll(receiverStopTask, transmitterStopTask) .ConfigureAwait(false); await _sslStreamWrapperFactory.CloseStreamAsync(info.SslStream, cancellationToken) .ConfigureAwait(false); _info = null; } private class SslCommunicationInfo { public SslCommunicationInfo( IPausableFtpService transmitterService, IPausableFtpService receiverService, Stream sslStream) { TransmitterService = transmitterService; ReceiverService = receiverService; SslStream = sslStream; } public IPausableFtpService TransmitterService { get; } public IPausableFtpService ReceiverService { get; } public Stream SslStream { get; } } private class NonClosingNetworkStreamReader : StreamPipeReaderService { private readonly PipeReader _socketPipeReader; public NonClosingNetworkStreamReader( Stream stream, PipeWriter pipeWriter, PipeReader socketPipeReader, CancellationToken connectionClosed, ILogger? logger = null) : base(stream, pipeWriter, connectionClosed, logger) { _socketPipeReader = socketPipeReader; } /// <inheritdoc /> protected override Task OnPauseRequestedAsync(CancellationToken cancellationToken) { _socketPipeReader.CancelPendingRead(); return base.OnPauseRequestedAsync(cancellationToken); } /// <inheritdoc /> protected override Task OnCloseAsync(Exception? exception, CancellationToken cancellationToken) { // Do nothing return Task.CompletedTask; } } private class NonClosingNetworkStreamWriter : StreamPipeWriterService { public NonClosingNetworkStreamWriter( Stream stream, PipeReader pipeReader, CancellationToken connectionClosed, ILogger? logger = null) : base(stream, pipeReader, connectionClosed, logger) { } /// <inheritdoc /> protected override Task OnCloseAsync(Exception? exception, CancellationToken cancellationToken) { // Do nothing return Task.CompletedTask; } #if USE_SYNC_SSL_STREAM /// <inheritdoc /> protected override Task WriteToStreamAsync( byte[] buffer, int offset, int length, CancellationToken cancellationToken) { // We have to use Write instead of WriteAsync, because // otherwise we might run into a deadlock. // // It **might** be related to the following issues: // https://github.com/dotnet/corefx/issues/5077 // https://github.com/dotnet/corefx/issues/14698 Stream.Write(buffer, offset, length); return Task.CompletedTask; } #endif } } }
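// --- Illustrative wiring sketch for the adapter above (assumptions: the code runs inside the
// same assembly, since SslStreamConnectionAdapter is internal, and the pipe pair, wrapper
// factory and certificate are supplied by the hosting FTP server, as they would be in
// FubarDev.FtpServer itself; this is not the library's own upgrade path, just a sketch of the
// constructor/StartAsync/StopAsync sequence). ---
using System.IO.Pipelines;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
using FubarDev.FtpServer.Authentication;

namespace FubarDev.FtpServer.ConnectionHandlers
{
    internal static class SslStreamConnectionAdapterSketch
    {
        public static async Task UpgradeToTlsAsync(
            IDuplexPipe socketPipe,
            IDuplexPipe connectionPipe,
            ISslStreamWrapperFactory sslStreamWrapperFactory,
            X509Certificate certificate,
            CancellationToken connectionClosed)
        {
            var adapter = new SslStreamConnectionAdapter(
                socketPipe, connectionPipe, sslStreamWrapperFactory, certificate, connectionClosed);

            // StartAsync performs the TLS handshake via the wrapper factory and starts the
            // reader/writer services that shuttle data between the pipes and the SslStream.
            await adapter.StartAsync(CancellationToken.None).ConfigureAwait(false);

            // ... FTP traffic now flows through connectionPipe, encrypted on the wire ...

            // StopAsync tears down both pump services and closes the SslStream via the factory.
            await adapter.StopAsync(CancellationToken.None).ConfigureAwait(false);
        }
    }
}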
// // Copyright (c) 2004-2020 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // using System; using System.Collections.Generic; using System.Text; namespace NLog.Internal { /// <summary> /// Split a string /// </summary> internal static class StringSplitter { /// <summary> /// Split a string, optional quoted value /// </summary> /// <param name="text">Text to split</param> /// <param name="splitChar">Character to split the <paramref name="text" /></param> /// <param name="quoteChar">Quote character</param> /// <param name="escapeChar"> /// Escape for the <paramref name="quoteChar" />, not escape for the <paramref name="splitChar" /> /// , use quotes for that. /// </param> public static IEnumerable<string> SplitQuoted(this string text, char splitChar, char quoteChar, char escapeChar) { if (!string.IsNullOrEmpty(text)) { if (splitChar == quoteChar) { throw new NotSupportedException("Quote character should different from split character"); } if (splitChar == escapeChar) { throw new NotSupportedException("Escape character should different from split character"); } return SplitQuoted2(text, splitChar, quoteChar, escapeChar); } return ArrayHelper.Empty<string>(); } /// <summary> /// Split a string, optional quoted value /// </summary> /// <param name="text">Text to split</param> /// <param name="splitChar">Character to split the <paramref name="text" /></param> /// <param name="quoteChar">Quote character</param> /// <param name="escapeChar"> /// Escape for the <paramref name="quoteChar" />, not escape for the <paramref name="splitChar" /> /// , use quotes for that. 
/// </param> private static IEnumerable<string> SplitQuoted2(string text, char splitChar, char quoteChar, char escapeChar) { bool inQuotedMode = false; bool prevEscape = false; bool prevQuote = false; bool doubleQuotesEscapes = escapeChar == quoteChar; // Special mode var item = new StringBuilder(); foreach (var c in text) { if (c == quoteChar) { if (inQuotedMode) { if (prevEscape && !doubleQuotesEscapes) { item.Append(c); // Escaped quote-char in quoted-mode prevEscape = false; prevQuote = false; } else if (prevQuote && doubleQuotesEscapes) { // Double quote, means escaped quote, quoted-mode not real item.Append(c); inQuotedMode = false; prevEscape = false; prevQuote = false; } else if (item.Length > 0) { // quoted-mode ended with something to yield inQuotedMode = false; yield return item.ToString(); item.Length = 0; // Start new item prevEscape = false; prevQuote = true; // signal that item is empty, because it has just been yielded after quoted-mode } else { // quoted-mode ended without anything to yield inQuotedMode = false; prevEscape = false; prevQuote = false; } } else { if (item.Length != 0 || prevEscape) { // Quoted-mode can only be activated initially item.Append(c); prevEscape = false; prevQuote = false; } else { // Quoted-mode is now activated prevEscape = c == escapeChar; prevQuote = true; inQuotedMode = true; } } } else if (c == escapeChar) { if (prevEscape) item.Append(escapeChar); // Escape-chars are only stripped in quoted-mode when placed before quote prevEscape = true; prevQuote = false; } else if (inQuotedMode) { item.Append(c); prevEscape = false; prevQuote = false; } else if (c == splitChar) { if (prevEscape) item.Append(escapeChar); // Escape-chars are only stripped in quoted-mode when placed before quote if (item.Length > 0 || !prevQuote) { yield return item.ToString(); item.Length = 0; // Start new item } prevEscape = false; prevQuote = false; } else { if (prevEscape) item.Append(escapeChar); // Escape-chars are only stripped in quoted-mode when placed before quote item.Append(c); prevEscape = false; prevQuote = false; } } if (prevEscape && !doubleQuotesEscapes) item.Append(escapeChar); // incomplete escape-sequence, means escape should be included if (inQuotedMode) { // incomplete quoted-mode, means quotes should be included if (prevQuote) { item.Append(quoteChar); } else { #if SILVERLIGHT item.Insert(0, new [] { quoteChar }); #else item.Insert(0, quoteChar); #endif } } if (item.Length > 0 || !prevQuote) yield return item.ToString(); } } }
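// --- Illustrative usage sketch for SplitQuoted (assumption: the extension method is visible,
// i.e. the sketch lives in the NLog assembly or the class has been copied into your own
// project, since StringSplitter is internal). ---
using System;
using NLog.Internal;

internal static class StringSplitterSketch
{
    static void Main()
    {
        // Quote characters protect the split character; traced against the implementation
        // above this yields "a", "b,c" and "d" (no whitespace trimming is performed).
        foreach (var part in "a,'b,c',d".SplitQuoted(',', '\'', '\\'))
        {
            Console.WriteLine(part);
        }
    }
}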
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Diagnostics.Contracts; using System.Runtime.InteropServices; using System.Security; using System.Threading; using System.Threading.Tasks; namespace System.IO.Pipes { public abstract partial class PipeStream : Stream { private static readonly Task<int> s_zeroTask = Task.FromResult(0); private SafePipeHandle _handle; private bool _canRead; private bool _canWrite; private bool _isAsync; private bool _isMessageComplete; private bool _isFromExistingHandle; private bool _isHandleExposed; private PipeTransmissionMode _readMode; private PipeTransmissionMode _transmissionMode; private PipeDirection _pipeDirection; private int _outBufferSize; private PipeState _state; protected PipeStream(PipeDirection direction, int bufferSize) { if (direction < PipeDirection.In || direction > PipeDirection.InOut) { throw new ArgumentOutOfRangeException("direction", SR.ArgumentOutOfRange_DirectionModeInOutOrInOut); } if (bufferSize < 0) { throw new ArgumentOutOfRangeException("bufferSize", SR.ArgumentOutOfRange_NeedNonNegNum); } Init(direction, PipeTransmissionMode.Byte, bufferSize); } protected PipeStream(PipeDirection direction, PipeTransmissionMode transmissionMode, int outBufferSize) { if (direction < PipeDirection.In || direction > PipeDirection.InOut) { throw new ArgumentOutOfRangeException("direction", SR.ArgumentOutOfRange_DirectionModeInOutOrInOut); } if (transmissionMode < PipeTransmissionMode.Byte || transmissionMode > PipeTransmissionMode.Message) { throw new ArgumentOutOfRangeException("transmissionMode", SR.ArgumentOutOfRange_TransmissionModeByteOrMsg); } if (outBufferSize < 0) { throw new ArgumentOutOfRangeException("outBufferSize", SR.ArgumentOutOfRange_NeedNonNegNum); } Init(direction, transmissionMode, outBufferSize); } private void Init(PipeDirection direction, PipeTransmissionMode transmissionMode, int outBufferSize) { Debug.Assert(direction >= PipeDirection.In && direction <= PipeDirection.InOut, "invalid pipe direction"); Debug.Assert(transmissionMode >= PipeTransmissionMode.Byte && transmissionMode <= PipeTransmissionMode.Message, "transmissionMode is out of range"); Debug.Assert(outBufferSize >= 0, "outBufferSize is negative"); // always defaults to this until overridden _readMode = transmissionMode; _transmissionMode = transmissionMode; _pipeDirection = direction; if ((_pipeDirection & PipeDirection.In) != 0) { _canRead = true; } if ((_pipeDirection & PipeDirection.Out) != 0) { _canWrite = true; } _outBufferSize = outBufferSize; // This should always default to true _isMessageComplete = true; _state = PipeState.WaitingToConnect; } // Once a PipeStream has a handle ready, it should call this method to set up the PipeStream. If // the pipe is in a connected state already, it should also set the IsConnected (protected) property. // This method may also be called to uninitialize a handle, setting it to null. 
[SecuritySafeCritical] internal void InitializeHandle(SafePipeHandle handle, bool isExposed, bool isAsync) { if (isAsync && handle != null) { InitializeAsyncHandle(handle); } _handle = handle; _isAsync = isAsync; // track these separately; _isHandleExposed will get updated if accessed though the property _isHandleExposed = isExposed; _isFromExistingHandle = isExposed; } [SecurityCritical] public override int Read([In, Out] byte[] buffer, int offset, int count) { if (_isAsync) { return ReadAsync(buffer, offset, count, CancellationToken.None).GetAwaiter().GetResult(); } CheckReadWriteArgs(buffer, offset, count); if (!CanRead) { throw Error.GetReadNotSupported(); } CheckReadOperations(); return ReadCore(buffer, offset, count); } [SecuritySafeCritical] public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { CheckReadWriteArgs(buffer, offset, count); if (!CanRead) { throw Error.GetReadNotSupported(); } if (cancellationToken.IsCancellationRequested) { return Task.FromCanceled<int>(cancellationToken); } CheckReadOperations(); if (!_isAsync) { return base.ReadAsync(buffer, offset, count, cancellationToken); } if (count == 0) { UpdateMessageCompletion(false); return s_zeroTask; } return ReadAsyncCore(buffer, offset, count, cancellationToken); } [SecurityCritical] public override void Write(byte[] buffer, int offset, int count) { if (_isAsync) { WriteAsync(buffer, offset, count, CancellationToken.None).GetAwaiter().GetResult(); return; } CheckReadWriteArgs(buffer, offset, count); if (!CanWrite) { throw Error.GetWriteNotSupported(); } CheckWriteOperations(); WriteCore(buffer, offset, count); } [SecuritySafeCritical] public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { CheckReadWriteArgs(buffer, offset, count); if (!CanWrite) { throw Error.GetWriteNotSupported(); } if (cancellationToken.IsCancellationRequested) { return Task.FromCanceled<int>(cancellationToken); } CheckWriteOperations(); if (!_isAsync) { return base.WriteAsync(buffer, offset, count, cancellationToken); } if (count == 0) { return Task.CompletedTask; } return WriteAsyncCore(buffer, offset, count, cancellationToken); } private void CheckReadWriteArgs(byte[] buffer, int offset, int count) { if (buffer == null) throw new ArgumentNullException("buffer", SR.ArgumentNull_Buffer); if (offset < 0) throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum); if (count < 0) throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum); if (buffer.Length - offset < count) throw new ArgumentException(SR.Argument_InvalidOffLen); } [Conditional("DEBUG")] private static void DebugAssertReadWriteArgs(byte[] buffer, int offset, int count, SafePipeHandle handle) { Debug.Assert(buffer != null, "buffer is null"); Debug.Assert(offset >= 0, "offset is negative"); Debug.Assert(count >= 0, "count is negative"); Debug.Assert(offset <= buffer.Length - count, "offset + count is too big"); Debug.Assert(handle != null, "handle is null"); Debug.Assert(!handle.IsClosed, "handle is closed"); } [ThreadStatic] private static byte[] t_singleByteArray; private static byte[] SingleByteArray { get { return t_singleByteArray ?? (t_singleByteArray = new byte[1]); } } // Reads a byte from the pipe stream. Returns the byte cast to an int // or -1 if the connection has been broken. [SecurityCritical] public override int ReadByte() { byte[] buffer = SingleByteArray; return Read(buffer, 0, 1) > 0 ? 
buffer[0] : -1; } [SecurityCritical] public override void WriteByte(byte value) { byte[] buffer = SingleByteArray; buffer[0] = value; Write(buffer, 0, 1); } // Does nothing on PipeStreams. We cannot call Interop.FlushFileBuffers here because we can deadlock // if the other end of the pipe is no longer interested in reading from the pipe. [SecurityCritical] public override void Flush() { CheckWriteOperations(); if (!CanWrite) { throw Error.GetWriteNotSupported(); } } [SecurityCritical] protected override void Dispose(bool disposing) { try { // Nothing will be done differently based on whether we are // disposing vs. finalizing. if (_handle != null && !_handle.IsClosed) { _handle.Dispose(); } UninitializeAsyncHandle(); } finally { base.Dispose(disposing); } _state = PipeState.Closed; } // ********************** Public Properties *********************** // // APIs use coarser definition of connected, but these map to internal // Connected/Disconnected states. Note that setter is protected; only // intended to be called by custom PipeStream concrete children public bool IsConnected { get { return State == PipeState.Connected; } protected set { _state = (value) ? PipeState.Connected : PipeState.Disconnected; } } public bool IsAsync { get { return _isAsync; } } // Set by the most recent call to Read or EndRead. Will be false if there are more buffer in the // message, otherwise it is set to true. public bool IsMessageComplete { [SecurityCritical] [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")] get { // omitting pipe broken exception to allow reader to finish getting message if (_state == PipeState.WaitingToConnect) { throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected); } if (_state == PipeState.Disconnected) { throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected); } if (CheckOperationsRequiresSetHandle && _handle == null) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed)) { throw Error.GetPipeNotOpen(); } // don't need to check transmission mode; just care about read mode. Always use // cached mode; otherwise could throw for valid message when other side is shutting down if (_readMode != PipeTransmissionMode.Message) { throw new InvalidOperationException(SR.InvalidOperation_PipeReadModeNotMessage); } return _isMessageComplete; } } internal void UpdateMessageCompletion(bool completion) { // Set message complete to true because the pipe is broken as well. // Need this to signal to readers to stop reading. 
_isMessageComplete = (completion || _state == PipeState.Broken); } public SafePipeHandle SafePipeHandle { [SecurityCritical] [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")] get { if (_handle == null) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } if (_handle.IsClosed) { throw Error.GetPipeNotOpen(); } _isHandleExposed = true; return _handle; } } internal SafePipeHandle InternalHandle { [SecurityCritical] get { return _handle; } } internal bool IsHandleExposed { get { return _isHandleExposed; } } public override bool CanRead { [Pure] get { return _canRead; } } public override bool CanWrite { [Pure] get { return _canWrite; } } public override bool CanSeek { [Pure] get { return false; } } public override long Length { get { throw Error.GetSeekNotSupported(); } } public override long Position { get { throw Error.GetSeekNotSupported(); } set { throw Error.GetSeekNotSupported(); } } public override void SetLength(long value) { throw Error.GetSeekNotSupported(); } public override long Seek(long offset, SeekOrigin origin) { throw Error.GetSeekNotSupported(); } // anonymous pipe ends and named pipe server can get/set properties when broken // or connected. Named client overrides [SecurityCritical] internal virtual void CheckPipePropertyOperations() { if (CheckOperationsRequiresSetHandle && _handle == null) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } // these throw object disposed if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed)) { throw Error.GetPipeNotOpen(); } } // Reads can be done in Connected and Broken. In the latter, // read returns 0 bytes [SecurityCritical] [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")] internal void CheckReadOperations() { // Invalid operation if (_state == PipeState.WaitingToConnect) { throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected); } if (_state == PipeState.Disconnected) { throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected); } if (CheckOperationsRequiresSetHandle && _handle == null) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } // these throw object disposed if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed)) { throw Error.GetPipeNotOpen(); } } // Writes can only be done in connected state [SecurityCritical] [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")] internal void CheckWriteOperations() { // Invalid operation if (_state == PipeState.WaitingToConnect) { throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected); } if (_state == PipeState.Disconnected) { throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected); } if (CheckOperationsRequiresSetHandle && _handle == null) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } // IOException if (_state == PipeState.Broken) { throw new IOException(SR.IO_PipeBroken); } // these throw object disposed if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed)) { throw Error.GetPipeNotOpen(); } } internal PipeState State { get { return _state; } set { _state = value; } } } }
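// --- Illustrative usage sketch. PipeStream itself is abstract; the anonymous pipe classes are
// concrete implementations shipped in System.IO.Pipes, and the single-byte round trip below
// exercises the WriteByte/ReadByte paths shown above. ---
using System;
using System.IO.Pipes;

internal static class PipeStreamUsageSketch
{
    static void Main()
    {
        using (var server = new AnonymousPipeServerStream(PipeDirection.Out))
        using (var client = new AnonymousPipeClientStream(PipeDirection.In, server.ClientSafePipeHandle))
        {
            // WriteByte/ReadByte go through the shared single-byte buffer helpers above.
            server.WriteByte(42);
            Console.WriteLine(client.ReadByte()); // 42
        }
    }
}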
/* Copyright 2019 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using System; using ESRI.ArcGIS.Carto; using ESRI.ArcGIS.Geometry; using ESRI.ArcGIS.Controls; using ESRI.ArcGIS.SystemUI; using ESRI.ArcGIS.ADF.CATIDs; using System.Runtime.InteropServices; namespace PanZoom { [ClassInterface(ClassInterfaceType.None)] [Guid("1BA75CB7-7C7F-45e7-824B-DDB3C662A734")] public class PanUp : ICommand { #region COM Registration Function(s) [ComRegisterFunction()] [ComVisible(false)] static void RegisterFunction(Type registerType) { // Required for ArcGIS Component Category Registrar support ArcGISCategoryRegistration(registerType); // // TODO: Add any COM registration code here // } [ComUnregisterFunction()] [ComVisible(false)] static void UnregisterFunction(Type registerType) { // Required for ArcGIS Component Category Registrar support ArcGISCategoryUnregistration(registerType); // // TODO: Add any COM unregistration code here // } #region ArcGIS Component Category Registrar generated code /// <summary> /// Required method for ArcGIS Component Category registration - /// Do not modify the contents of this method with the code editor. /// </summary> private static void ArcGISCategoryRegistration(Type registerType) { string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID); ControlsCommands.Register(regKey); } /// <summary> /// Required method for ArcGIS Component Category unregistration - /// Do not modify the contents of this method with the code editor. 
/// </summary> private static void ArcGISCategoryUnregistration(Type registerType) { string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID); ControlsCommands.Unregister(regKey); } #endregion #endregion [DllImport("gdi32.dll")] static extern bool DeleteObject(IntPtr hObject); private System.Drawing.Bitmap m_bitmap; private IntPtr m_hBitmap; private IHookHelper m_pHookHelper; public PanUp() { string[] res = GetType().Assembly.GetManifestResourceNames(); if(res.GetLength(0) > 0) { m_bitmap = new System.Drawing.Bitmap(GetType().Assembly.GetManifestResourceStream(GetType(), "PanUp.bmp")); if(m_bitmap != null) { m_bitmap.MakeTransparent(m_bitmap.GetPixel(1,1)); m_hBitmap = m_bitmap.GetHbitmap(); } } m_pHookHelper = new HookHelperClass (); } #region ICommand Members public void OnClick() { if(m_pHookHelper == null) return; //Get the active view IActiveView pActiveView = (IActiveView) m_pHookHelper.FocusMap; //Get the extent IEnvelope pEnvelope = (IEnvelope) pActiveView.Extent; //Create a point to pan to IPoint pPoint; pPoint = new PointClass(); pPoint.X = (pEnvelope.XMin + pEnvelope.XMax) / 2; pPoint.Y = ((pEnvelope.YMin + pEnvelope.YMax) / 2) + (pEnvelope.Height / (100 / GetPanFactor())); //Center the envelope on the point pEnvelope.CenterAt(pPoint); //Set the new extent pActiveView.Extent = pEnvelope; //Refresh the active view pActiveView.Refresh(); } private long GetPanFactor() { return 50; } public string Message { get { return "Pan display up by the pan factor percentage"; } } public int Bitmap { get { return m_hBitmap.ToInt32(); } } public void OnCreate(object hook) { m_pHookHelper.Hook = hook; } public string Caption { get { return "Pan Up"; } } public string Tooltip { get { return "Pan Up"; } } public int HelpContextID { get { // TODO: Add PanUp.HelpContextID getter implementation return 0; } } public string Name { get { return "Sample_Pan/Zoom_Pan Up"; } } public bool Checked { get { return false; } } public bool Enabled { get { if(m_pHookHelper.FocusMap == null) return false; return true; } } public string HelpFile { get { // TODO: Add PanUp.HelpFile getter implementation return null; } } public string Category { get { return "Sample_Pan/Zoom"; } } #endregion } }
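// --- Illustrative sketch of the pan arithmetic used in PanUp.OnClick above, using plain
// doubles instead of the ArcGIS envelope/point COM types (the envelope bounds below are
// made-up sample values). ---
using System;

internal static class PanUpMathSketch
{
    static void Main()
    {
        double xMin = 0, xMax = 100, yMin = 0, yMax = 50;
        double height = yMax - yMin;
        long panFactor = 50; // same value GetPanFactor() returns

        // Matches OnClick: the new center is the old center shifted up by
        // height / (100 / panFactor), i.e. half the visible height for a 50% pan factor.
        double centerX = (xMin + xMax) / 2;
        double centerY = (yMin + yMax) / 2 + height / (100 / panFactor);

        Console.WriteLine($"New center: ({centerX}, {centerY})"); // (50, 50)
    }
}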
/****************************************************************************** * The MIT License * Copyright (c) 2003 Novell Inc. www.novell.com * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the Software), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. *******************************************************************************/ // // Novell.Directory.Ldap.Asn1.LBERDecoder.cs // // Author: // Sunil Kumar ([email protected]) // // (C) 2003 Novell, Inc (http://www.novell.com) // using System; namespace Novell.Directory.LDAP.VQ.Asn1 { /// <summary> This class provides LBER decoding routines for ASN.1 Types. LBER is a /// subset of BER as described in the following taken from 5.1 of RFC 2251: /// /// 5.1. Mapping Onto BER-based Transport Services /// /// The protocol elements of Ldap are encoded for exchange using the /// Basic Encoding Rules (BER) [11] of ASN.1 [3]. However, due to the /// high overhead involved in using certain elements of the BER, the /// following additional restrictions are placed on BER-encodings of Ldap /// protocol elements: /// /// <li>(1) Only the definite form of length encoding will be used.</li> /// /// <li>(2) OCTET STRING values will be encoded in the primitive form only.</li> /// /// <li>(3) If the value of a BOOLEAN type is true, the encoding MUST have /// its contents octets set to hex "FF".</li> /// /// <li>(4) If a value of a type is its default value, it MUST be absent. /// Only some BOOLEAN and INTEGER types have default values in this /// protocol definition. /// /// These restrictions do not apply to ASN.1 types encapsulated inside of /// OCTET STRING values, such as attribute values, unless otherwise /// noted.</li> /// /// [3] ITU-T Rec. X.680, "Abstract Syntax Notation One (ASN.1) - /// Specification of Basic Notation", 1994. /// /// [11] ITU-T Rec. X.690, "Specification of ASN.1 encoding rules: Basic, /// Canonical, and Distinguished Encoding Rules", 1994. 
/// /// </summary> [CLSCompliant(true)] public class LBERDecoder : Asn1Decoder { public LBERDecoder() { InitBlock(); } private void InitBlock() { asn1ID = new Asn1Identifier(); asn1Len = new Asn1Length(); } //used to speed up decode, so it doesn't need to recreate an identifier every time //instead just reset is called CANNOT be static for multiple connections private Asn1Identifier asn1ID; private Asn1Length asn1Len; /* Generic decode routines */ /// <summary> Decode an LBER encoded value into an Asn1Type from a byte array.</summary> [CLSCompliant(false)] public virtual Asn1Object decode(sbyte[] value_Renamed) { Asn1Object asn1 = null; System.IO.MemoryStream in_Renamed = new System.IO.MemoryStream(SupportClass.ToByteArray(value_Renamed)); try { asn1 = decode(in_Renamed); } catch (System.IO.IOException ioe) { } return asn1; } /// <summary> Decode an LBER encoded value into an Asn1Type from an InputStream.</summary> public virtual Asn1Object decode(System.IO.Stream in_Renamed) { int[] len = new int[1]; return decode(in_Renamed, len); } /// <summary> Decode an LBER encoded value into an Asn1Object from an InputStream. /// /// This method also returns the total length of this encoded /// Asn1Object (length of type + length of length + length of content) /// in the parameter len. This information is helpful when decoding /// structured types. /// </summary> public virtual Asn1Object decode(System.IO.Stream in_Renamed, int[] len) { asn1ID.reset(in_Renamed); asn1Len.reset(in_Renamed); int length = asn1Len.Length; len[0] = asn1ID.EncodedLength + asn1Len.EncodedLength + length; if (asn1ID.Universal) { switch (asn1ID.Tag) { case Asn1Sequence.TAG: return new Asn1Sequence(this, in_Renamed, length); case Asn1Set.TAG: return new Asn1Set(this, in_Renamed, length); case Asn1Boolean.TAG: return new Asn1Boolean(this, in_Renamed, length); case Asn1Integer.TAG: return new Asn1Integer(this, in_Renamed, length); case Asn1OctetString.TAG: return new Asn1OctetString(this, in_Renamed, length); case Asn1Enumerated.TAG: return new Asn1Enumerated(this, in_Renamed, length); case Asn1Null.TAG: return new Asn1Null(); // has no content to decode. /* Asn1 TYPE NOT YET SUPPORTED case Asn1BitString.TAG: return new Asn1BitString(this, in, length); case Asn1ObjectIdentifier.TAG: return new Asn1ObjectIdentifier(this, in, length); case Asn1Real.TAG: return new Asn1Real(this, in, length); case Asn1NumericString.TAG: return new Asn1NumericString(this, in, length); case Asn1PrintableString.TAG: return new Asn1PrintableString(this, in, length); case Asn1TeletexString.TAG: return new Asn1TeletexString(this, in, length); case Asn1VideotexString.TAG: return new Asn1VideotexString(this, in, length); case Asn1IA5String.TAG: return new Asn1IA5String(this, in, length); case Asn1GraphicString.TAG: return new Asn1GraphicString(this, in, length); case Asn1VisibleString.TAG: return new Asn1VisibleString(this, in, length); case Asn1GeneralString.TAG: return new Asn1GeneralString(this, in, length); */ default: throw new System.IO.EndOfStreamException("Unknown tag"); // !!! 
need a better exception } } else { // APPLICATION or CONTEXT-SPECIFIC tag return new Asn1Tagged(this, in_Renamed, length, (Asn1Identifier) asn1ID.Clone()); } } /* Decoders for ASN.1 simple type Contents */ /// <summary> Decode a boolean directly from a stream.</summary> public object decodeBoolean(System.IO.Stream in_Renamed, int len) { sbyte[] lber = new sbyte[len]; int i = SupportClass.ReadInput(in_Renamed, ref lber, 0, lber.Length); if (i != len) throw new System.IO.EndOfStreamException("LBER: BOOLEAN: decode error: EOF"); return (lber[0] == 0x00)?false:true; } /// <summary> Decode a Numeric type directly from a stream. Decodes INTEGER /// and ENUMERATED types. /// </summary> public object decodeNumeric(System.IO.Stream in_Renamed, int len) { long l = 0; int r = in_Renamed.ReadByte(); if (r < 0) throw new System.IO.EndOfStreamException("LBER: NUMERIC: decode error: EOF"); if ((r & 0x80) != 0) { // check for negative number l = - 1; } l = (l << 8) | r; for (int i = 1; i < len; i++) { r = in_Renamed.ReadByte(); if (r < 0) throw new System.IO.EndOfStreamException("LBER: NUMERIC: decode error: EOF"); l = (l << 8) | r; } return (Int64) l; } /// <summary> Decode an OctetString directly from a stream.</summary> public object decodeOctetString(System.IO.Stream in_Renamed, int len) { sbyte[] octets = new sbyte[len]; int totalLen = 0; while (totalLen < len) { // Make sure we have read all the data int inLen = SupportClass.ReadInput(in_Renamed, ref octets, totalLen, len - totalLen); totalLen += inLen; } return octets; } /// <summary> Decode a CharacterString directly from a stream.</summary> public object decodeCharacterString(System.IO.Stream in_Renamed, int len) { sbyte[] octets = new sbyte[len]; for (int i = 0; i < len; i++) { int ret = in_Renamed.ReadByte(); // blocks if (ret == - 1) throw new System.IO.EndOfStreamException("LBER: CHARACTER STRING: decode error: EOF"); octets[i] = (sbyte) ret; } System.Text.Encoding encoder = System.Text.Encoding.GetEncoding("utf-8"); char[] dchar = encoder.GetChars(SupportClass.ToByteArray(octets)); string rval = new String(dchar); return rval;//new String( "UTF8"); } } }
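// --------------------------------------------------------------------------
// Hedged usage sketch for the LBERDecoder above (not from the original source):
// decodes the LBER encoding of BOOLEAN TRUE (tag 0x01, definite length 0x01,
// contents 0xFF, which is -1 as an sbyte). Only decode(sbyte[]) is exercised;
// how the resulting Asn1Object exposes its value is left to ToString(), since
// accessor names vary between ports of this library.
// --------------------------------------------------------------------------
using System;
using Novell.Directory.LDAP.VQ.Asn1;

static class LberDecodeExample
{
    static void Main()
    {
        // 0x01 = BOOLEAN tag, 0x01 = content length, -1 (0xFF) = TRUE
        sbyte[] encoded = { 0x01, 0x01, -1 };

        var decoder = new LBERDecoder();
        Asn1Object value = decoder.decode(encoded);

        Console.WriteLine(value);   // prints the decoded Asn1Boolean
    }
}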
// **************************************************************** // Copyright 2007, Charlie Poole // This is free software licensed under the NUnit license. You may // obtain a copy of the license at http://nunit.org // **************************************************************** // TODO: Figure out how to make test work in SILVERLIGHT, since they support SetUpFixture #if !SILVERLIGHT && !PORTABLE using System.Collections; using NUnit.Common; using NUnit.Framework.Api; using NUnit.Framework.Interfaces; namespace NUnit.Framework.Internal { [TestFixture] public class SetUpFixtureTests { private static readonly string testAssembly = AssemblyHelper.GetAssemblyPath(typeof(NUnit.TestData.SetupFixture.Namespace1.SomeFixture)); ITestAssemblyBuilder builder; ITestAssemblyRunner runner; #region SetUp [SetUp] public void SetUp() { TestUtilities.SimpleEventRecorder.Clear(); builder = new DefaultTestAssemblyBuilder(); runner = new NUnitTestAssemblyRunner(builder); } #endregion SetUp private ITestResult runTests(string nameSpace) { return runTests(nameSpace, TestFilter.Empty); } private ITestResult runTests(string nameSpace, TestFilter filter) { IDictionary options = new Hashtable(); if (nameSpace != null) options["LOAD"] = new string[] { nameSpace }; // No need for the overhead of parallel execution here options["NumberOfTestWorkers"] = 0; if (runner.Load(testAssembly, options) != null) return runner.Run(TestListener.NULL, filter); return null; } #region Builder Tests /// <summary> /// Tests that the TestSuiteBuilder correctly interperets a SetupFixture class as a 'virtual namespace' into which /// all it's sibling classes are inserted. /// </summary> [NUnit.Framework.Test] public void NamespaceSetUpFixtureReplacesNamespaceNodeInTree() { string nameSpace = "NUnit.TestData.SetupFixture.Namespace1"; IDictionary options = new Hashtable(); options["LOAD"] = new string[] { nameSpace }; ITest suite = builder.Build(testAssembly, options); Assert.IsNotNull(suite); Assert.AreEqual(testAssembly, suite.FullName); Assert.AreEqual(1, suite.Tests.Count, "Error in top level test count"); string[] nameSpaceBits = nameSpace.Split('.'); for (int i = 0; i < nameSpaceBits.Length; i++) { suite = suite.Tests[0] as TestSuite; Assert.AreEqual(nameSpaceBits[i], suite.Name); Assert.AreEqual(1, suite.Tests.Count); Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable)); } Assert.That(suite, Is.InstanceOf<SetUpFixture>()); suite = suite.Tests[0] as TestSuite; Assert.AreEqual("SomeFixture", suite.Name); Assert.AreEqual(1, suite.Tests.Count); Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable)); Assert.That(suite.Tests[0].RunState, Is.EqualTo(RunState.Runnable)); } /// <summary> /// Tests that the TestSuiteBuilder correctly interperets a SetupFixture class with no parent namespace /// as a 'virtual assembly' into which all it's sibling fixtures are inserted. 
/// </summary> [NUnit.Framework.Test] public void AssemblySetUpFixtureReplacesAssemblyNodeInTree() { IDictionary options = new Hashtable(); ITest suite = builder.Build(testAssembly, options); Assert.IsNotNull(suite); Assert.That(suite, Is.InstanceOf<SetUpFixture>()); suite = suite.Tests[1] as TestSuite; Assert.AreEqual("SomeFixture", suite.Name); Assert.AreEqual(1, suite.Tests.Count); } [Test] public void InvalidAssemblySetUpFixtureIsLoadedCorrectly() { string nameSpace = "NUnit.TestData.SetupFixture.Namespace6"; IDictionary options = new Hashtable(); options["LOAD"] = new string[] { nameSpace }; ITest suite = builder.Build(testAssembly, options); Assert.IsNotNull(suite); Assert.AreEqual(testAssembly, suite.FullName); Assert.AreEqual(1, suite.Tests.Count, "Error in top level test count"); Assert.AreEqual(RunState.Runnable, suite.RunState); string[] nameSpaceBits = nameSpace.Split('.'); for (int i = 0; i < nameSpaceBits.Length; i++) { suite = suite.Tests[0] as TestSuite; Assert.AreEqual(nameSpaceBits[i], suite.Name); Assert.AreEqual(1, suite.Tests.Count); Assert.That(suite.RunState, Is.EqualTo(i < nameSpaceBits.Length - 1 ? RunState.Runnable : RunState.NotRunnable)); } suite = suite.Tests[0] as TestSuite; Assert.AreEqual("SomeFixture", suite.Name); Assert.AreEqual(1, suite.Tests.Count); Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable)); Assert.That(suite.Tests[0].RunState, Is.EqualTo(RunState.Runnable)); } #endregion #region Simple [NUnit.Framework.Test] public void NamespaceSetUpFixtureWrapsExecutionOfSingleTest() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace1").ResultState.Status, Is.EqualTo(TestStatus.Passed)); TestUtilities.SimpleEventRecorder.Verify("NS1.OneTimeSetup", "NS1.Fixture.SetUp", "NS1.Test.SetUp", "NS1.Test", "NS1.Test.TearDown", "NS1.Fixture.TearDown", "NS1.OneTimeTearDown"); } #endregion Simple #region Static [Test] public void NamespaceSetUpMethodsMayBeStatic() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace5").ResultState.Status, Is.EqualTo(TestStatus.Passed)); TestUtilities.SimpleEventRecorder.Verify("NS5.OneTimeSetUp", "NS5.Fixture.SetUp", "NS5.Test.SetUp", "NS5.Test", "NS5.Test.TearDown", "NS5.Fixture.TearDown", "NS5.OneTimeTearDown"); } #endregion #region TwoTestFixtures [NUnit.Framework.Test] public void NamespaceSetUpFixtureWrapsExecutionOfTwoTests() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace2").ResultState.Status, Is.EqualTo(TestStatus.Passed)); // There are two fixtures but we can't be sure of the order of execution so they use the same events TestUtilities.SimpleEventRecorder.Verify("NS2.OneTimeSetUp", "NS2.Fixture.SetUp", "NS2.Test.SetUp", "NS2.Test", "NS2.Test.TearDown", "NS2.Fixture.TearDown", "NS2.Fixture.SetUp", "NS2.Test.SetUp", "NS2.Test", "NS2.Test.TearDown", "NS2.Fixture.TearDown", "NS2.OneTimeTearDown"); } #endregion TwoTestFixtures #region SubNamespace [NUnit.Framework.Test] public void NamespaceSetUpFixtureWrapsNestedNamespaceSetUpFixture() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace3").ResultState.Status, Is.EqualTo(TestStatus.Passed)); TestUtilities.SimpleEventRecorder.Verify("NS3.OneTimeSetUp", "NS3.Fixture.SetUp", "NS3.Test.SetUp", "NS3.Test", "NS3.Test.TearDown", "NS3.Fixture.TearDown", "NS3.SubNamespace.OneTimeSetUp", "NS3.SubNamespace.Fixture.SetUp", "NS3.SubNamespace.Test.SetUp", "NS3.SubNamespace.Test", "NS3.SubNamespace.Test.TearDown", "NS3.SubNamespace.Fixture.TearDown", "NS3.SubNamespace.OneTimeTearDown", "NS3.OneTimeTearDown"); } #endregion SubNamespace #region 
TwoSetUpFixtures [NUnit.Framework.Test] public void WithTwoSetUpFixturesBothAreUsed() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace4").ResultState.Status, Is.EqualTo(TestStatus.Passed)); TestUtilities.SimpleEventRecorder.ExpectEvents("NS4.OneTimeSetUp1", "NS4.OneTimeSetUp2") .AndThen("NS4.Fixture.SetUp") .AndThen("NS4.Test.SetUp") .AndThen("NS4.Test") .AndThen("NS4.Test.TearDown") .AndThen("NS4.Fixture.TearDown") .AndThen("NS4.OneTimeTearDown1", "NS4.OneTimeTearDown2") .Verify(); } #endregion TwoSetUpFixtures #region InvalidSetUpFixture [Test] public void InvalidSetUpFixtureTest() { Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace6").ResultState.Status, Is.EqualTo(TestStatus.Failed)); TestUtilities.SimpleEventRecorder.Verify(new string[0]); } #endregion #region NoNamespaceSetupFixture [NUnit.Framework.Test] public void AssemblySetupFixtureWrapsExecutionOfTest() { ITestResult result = runTests(null, new Filters.FullNameFilter("SomeFixture")); Assert.AreEqual(1, result.PassCount); Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Passed)); TestUtilities.SimpleEventRecorder.Verify("Assembly.OneTimeSetUp", "NoNamespaceTest", "Assembly.OneTimeTearDown"); } #endregion NoNamespaceSetupFixture } } #endif
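// --------------------------------------------------------------------------
// Hedged sketch of the kind of fixture the tests above exercise (the real test
// data lives in NUnit.TestData.SetupFixture and is not reproduced here): a
// [SetUpFixture] placed in a namespace runs its [OneTimeSetUp] before any test
// fixture in that namespace and its [OneTimeTearDown] after the last one, which
// is the "virtual namespace" behavior the builder tests verify. Names below are
// hypothetical.
// --------------------------------------------------------------------------
using NUnit.Framework;

namespace Example.Namespace1
{
    [SetUpFixture]
    public class NamespaceSetup
    {
        [OneTimeSetUp]
        public void RunBeforeAnyTestsInNamespace() { /* e.g. start a shared resource */ }

        [OneTimeTearDown]
        public void RunAfterAllTestsInNamespace() { /* e.g. dispose the shared resource */ }
    }

    [TestFixture]
    public class SomeFixture
    {
        [Test]
        public void Test() => Assert.Pass();
    }
}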
//----------------------------------------------------------------------- // <copyright file="FanOut.cs" company="Akka.NET Project"> // Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com> // Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net> // </copyright> //----------------------------------------------------------------------- using System; using System.Collections.Immutable; using System.Linq; using Akka.Actor; using Akka.Event; using Akka.Pattern; using Reactive.Streams; namespace Akka.Streams.Implementation { public class OutputBunch<T> { #region internal classes private sealed class FanoutOutputs : SimpleOutputs { private readonly int _id; public FanoutOutputs(int id, IActorRef actor, IPump pump) : base(actor, pump) { _id = id; } public new ISubscription CreateSubscription() => new FanOut.SubstreamSubscription(Actor, _id); } #endregion private readonly int _outputCount; private bool _bunchCancelled; private readonly FanoutOutputs[] _outputs; private readonly bool[] _marked; private int _markedCount; private readonly bool[] _pending; private int _markedPending; private readonly bool[] _cancelled; private int _markedCanceled; private readonly bool[] _completed; private readonly bool[] _errored; private bool _unmarkCancelled = true; private int _preferredId; public OutputBunch(int outputCount, IActorRef impl, IPump pump) { _outputCount = outputCount; _outputs = new FanoutOutputs[outputCount]; for (var i = 0; i < outputCount; i++) _outputs[i] = new FanoutOutputs(i, impl, pump); _marked = new bool[outputCount]; _pending = new bool[outputCount]; _cancelled = new bool[outputCount]; _completed = new bool[outputCount]; _errored = new bool[outputCount]; AllOfMarkedOutputs = new LambdaTransferState( isCompleted: () => _markedCanceled > 0 || _markedCount == 0, isReady: () => _markedPending == _markedCount); AnyOfMarkedOutputs = new LambdaTransferState( isCompleted: () => _markedCanceled == _markedCount, isReady: () => _markedPending > 0); // FIXME: Eliminate re-wraps SubReceive = new SubReceive(message => message.Match() .With<FanOut.ExposedPublishers<T>>(exposed => { var publishers = exposed.Publishers.GetEnumerator(); var outputs = _outputs.AsEnumerable().GetEnumerator(); while (publishers.MoveNext() && outputs.MoveNext()) outputs.Current.SubReceive.CurrentReceive(new ExposedPublisher(publishers.Current)); }) .With<FanOut.SubstreamRequestMore>(more => { if (more.Demand < 1) // According to Reactive Streams Spec 3.9, with non-positive demand must yield onError Error(more.Id, ReactiveStreamsCompliance.NumberOfElementsInRequestMustBePositiveException); else { if (_marked[more.Id] && !_pending[more.Id]) _markedPending += 1; _pending[more.Id] = true; _outputs[more.Id].SubReceive.CurrentReceive(new RequestMore(null, more.Demand)); } }) .With<FanOut.SubstreamCancel>(cancel => { if (_unmarkCancelled) UnmarkOutput(cancel.Id); if (_marked[cancel.Id] && !_cancelled[cancel.Id]) _markedCanceled += 1; _cancelled[cancel.Id] = true; OnCancel(cancel.Id); _outputs[cancel.Id].SubReceive.CurrentReceive(new Cancel(null)); }) .With<FanOut.SubstreamSubscribePending>(pending => _outputs[pending.Id].SubReceive.CurrentReceive(SubscribePending.Instance)) .WasHandled); } /// <summary> /// Will only transfer an element when all marked outputs /// have demand, and will complete as soon as any of the marked /// outputs have canceled. 
/// </summary> public readonly TransferState AllOfMarkedOutputs; /// <summary> /// Will transfer an element when any of the marked outputs /// have demand, and will complete when all of the marked /// outputs have canceled. /// </summary> public readonly TransferState AnyOfMarkedOutputs; public readonly SubReceive SubReceive; public bool IsPending(int output) => _pending[output]; public bool IsCompleted(int output) => _completed[output]; public bool IsCancelled(int output) => _cancelled[output]; public bool IsErrored(int output) => _errored[output]; public void Complete() { if (!_bunchCancelled) { _bunchCancelled = true; for (var i = 0; i < _outputs.Length; i++) Complete(i); } } public void Complete(int output) { if (!_completed[output] && !_errored[output] && !_cancelled[output]) { _outputs[output].Complete(); _completed[output] = true; UnmarkOutput(output); } } public void Cancel(Exception e) { if (!_bunchCancelled) { _bunchCancelled = true; for (var i = 0; i < _outputs.Length; i++) Error(i, e); } } public void Error(int output, Exception e) { if (!_errored[output] && !_cancelled[output] && !_completed[output]) { _outputs[output].Error(e); _errored[output] = true; UnmarkOutput(output); } } public void MarkOutput(int output) { if (!_marked[output]) { if (_cancelled[output]) _markedCanceled += 1; if (_pending[output]) _markedPending += 1; _marked[output] = true; _markedCount += 1; } } public void UnmarkOutput(int output) { if (_marked[output]) { if (_cancelled[output]) _markedCanceled -= 1; if (_pending[output]) _markedPending -= 1; _marked[output] = false; _markedCount -= 1; } } public void MarkAllOutputs() { for (var i = 0; i < _outputCount; i++) MarkOutput(i); } public void UnmarkAllOutputs() { for (var i = 0; i < _outputCount; i++) UnmarkOutput(i); } public void UnmarkCancelledOutputs(bool enabled) => _unmarkCancelled = enabled; public int IdToEnqueue() { var id = _preferredId; while (!(_marked[id] && _pending[id])) { id += 1; if (id == _outputCount) id = 0; if (id != _preferredId) throw new ArgumentException("Tried to equeue without waiting for any demand"); } return id; } public void Enqueue(int id, T element) { var output = _outputs[id]; output.EnqueueOutputElement(element); if (!output.IsDemandAvailable) { if (_marked[id]) _markedPending -= 1; _pending[id] = false; } } public void EnqueueMarked(T element) { for (var id = 0; id < _outputCount; id++) if (_marked[id]) Enqueue(id, element); } public int IdToEnqueueAndYield() { var id = IdToEnqueue(); _preferredId = id + 1; if (_preferredId == _outputCount) _preferredId = 0; return id; } public void EnqueueAndYield(T element) => Enqueue(IdToEnqueueAndYield(), element); public void EnqueueAndPrefer(T element, int preferred) { var id = IdToEnqueue(); _preferredId = preferred; Enqueue(id, element); } public void OnCancel(int output) { } public TransferState DemandAvailableFor(int id) => new LambdaTransferState(isReady: () => _pending[id], isCompleted: () => _cancelled[id] || _completed[id] || _errored[id]); public TransferState DemandOrCancelAvailableFor(int id) => new LambdaTransferState(isReady: () => _pending[id] || _cancelled[id], isCompleted: () => false); } /// <summary> /// INTERNAL API /// </summary> public static class FanOut { [Serializable] public struct SubstreamRequestMore : INoSerializationVerificationNeeded, IDeadLetterSuppression { public readonly int Id; public readonly long Demand; public SubstreamRequestMore(int id, long demand) { Id = id; Demand = demand; } } [Serializable] public struct SubstreamCancel : 
INoSerializationVerificationNeeded, IDeadLetterSuppression { public readonly int Id; public SubstreamCancel(int id) { Id = id; } } [Serializable] public struct SubstreamSubscribePending : INoSerializationVerificationNeeded, IDeadLetterSuppression { public readonly int Id; public SubstreamSubscribePending(int id) { Id = id; } } public class SubstreamSubscription : ISubscription { private readonly IActorRef _parent; private readonly int _id; public SubstreamSubscription(IActorRef parent, int id) { _parent = parent; _id = id; } public void Request(long elements) => _parent.Tell(new SubstreamRequestMore(_id, elements)); public void Cancel() => _parent.Tell(new SubstreamCancel(_id)); public override string ToString() => "SubstreamSubscription" + GetHashCode(); } [Serializable] public struct ExposedPublishers<T> : INoSerializationVerificationNeeded, IDeadLetterSuppression { public readonly ImmutableList<ActorPublisher<T>> Publishers; public ExposedPublishers(ImmutableList<ActorPublisher<T>> publishers) { Publishers = publishers; } } } /// <summary> /// INTERNAL API /// </summary> public abstract class FanOut<T> : ActorBase, IPump { #region internal classes private sealed class AnonymousBatchingInputBuffer : BatchingInputBuffer { private readonly FanOut<T> _pump; public AnonymousBatchingInputBuffer(int count, FanOut<T> pump) : base(count, pump) { _pump = pump; } protected override void OnError(Exception e) => _pump.Fail(e); } #endregion private readonly ActorMaterializerSettings _settings; protected readonly OutputBunch<T> OutputBunch; protected readonly BatchingInputBuffer PrimaryInputs; protected FanOut(ActorMaterializerSettings settings, int outputCount) { _log = Context.GetLogger(); _settings = settings; OutputBunch = new OutputBunch<T>(outputCount, Self, this); PrimaryInputs = new AnonymousBatchingInputBuffer(settings.MaxInputBufferSize, this); this.Init(); } #region Actor implementation private ILoggingAdapter _log; protected ILoggingAdapter Log => _log ?? 
(_log = Context.GetLogger()); protected override void PostStop() { PrimaryInputs.Cancel(); OutputBunch.Cancel(new AbruptTerminationException(Self)); } protected override void PostRestart(Exception reason) { base.PostRestart(reason); throw new IllegalStateException("This actor cannot be restarted"); } protected void Fail(Exception e) { if (_settings.IsDebugLogging) Log.Debug($"fail due to: {e.Message}"); PrimaryInputs.Cancel(); OutputBunch.Cancel(e); Pump(); } protected override bool Receive(object message) { return PrimaryInputs.SubReceive.CurrentReceive(message) || OutputBunch.SubReceive.CurrentReceive(message); } #endregion #region Pump implementation public TransferState TransferState { get; set; } public Action CurrentAction { get; set; } public bool IsPumpFinished => this.IsPumpFinished(); public void InitialPhase(int waitForUpstream, TransferPhase andThen) => Pumps.InitialPhase(this, waitForUpstream, andThen); public void WaitForUpstream(int waitForUpstream) => Pumps.WaitForUpstream(this, waitForUpstream); public void GotUpstreamSubscription() => Pumps.GotUpstreamSubscription(this); public void NextPhase(TransferPhase phase) => Pumps.NextPhase(this, phase); public void Pump() => Pumps.Pump(this); public void PumpFailed(Exception e) => Fail(e); public void PumpFinished() { PrimaryInputs.Cancel(); OutputBunch.Complete(); Context.Stop(Self); } #endregion } /// <summary> /// INTERNAL API /// </summary> internal static class Unzip { public static Props Props<T>(ActorMaterializerSettings settings) => Actor.Props.Create(() => new Unzip<T>(settings, 2)).WithDeploy(Deploy.Local); } /// <summary> /// INTERNAL API /// TODO Find out where this class will be used and check if the type parameter fit /// since we need to cast messages into a tuple and therefore maybe need aditional type parameters /// </summary> internal sealed class Unzip<T> : FanOut<T> { public Unzip(ActorMaterializerSettings settings, int outputCount = 2) : base(settings, outputCount) { OutputBunch.MarkAllOutputs(); InitialPhase(1, new TransferPhase(PrimaryInputs.NeedsInput.And(OutputBunch.AllOfMarkedOutputs), () => { var message = PrimaryInputs.DequeueInputElement(); var tuple = message as Tuple<T, T>; if (tuple == null) throw new ArgumentException($"Unable to unzip elements of type {message.GetType().Name}"); OutputBunch.Enqueue(0, tuple.Item1); OutputBunch.Enqueue(1, tuple.Item2); })); } } }
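// --------------------------------------------------------------------------
// Standalone sketch (an assumption for illustration, not Akka.NET API): the
// round-robin selection performed by OutputBunch.IdToEnqueue/IdToEnqueueAndYield
// above, reduced to plain arrays. Starting from a preferred index, it picks the
// next output that is both marked and has pending demand, and throws if a full
// cycle finds none.
// --------------------------------------------------------------------------
using System;

static class RoundRobinSketch
{
    static int IdToEnqueue(bool[] marked, bool[] pending, ref int preferredId)
    {
        int count = marked.Length;
        int id = preferredId;
        while (!(marked[id] && pending[id]))
        {
            id = (id + 1) % count;
            if (id == preferredId)
                throw new InvalidOperationException("Tried to enqueue without waiting for any demand");
        }

        // Yield: the next call starts one slot further on, like IdToEnqueueAndYield.
        preferredId = (id + 1) % count;
        return id;
    }

    static void Main()
    {
        bool[] marked = { true, true, true };
        bool[] pending = { false, true, true };
        int preferred = 0;

        Console.WriteLine(IdToEnqueue(marked, pending, ref preferred)); // 1
        Console.WriteLine(IdToEnqueue(marked, pending, ref preferred)); // 2
    }
}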
using NUnit.Framework; using Rhino.Mocks; using Spextensions.Specifications.RhinoMocks.Dummies; namespace Spextensions.Specifications.RhinoMocks.Signaling { [TestFixture] public class SignalingSpecs { private ISomeInterface _mock1; private ISomeOtherInterface _mock2; private Signal _signal1; private Signal _signal2; private ISomeInterface _stub1; [SetUp] public void SetUp() { _mock1 = MockRepository.GenerateMock<ISomeInterface>(); _stub1 = MockRepository.GenerateStub<ISomeInterface>(); _mock2 = MockRepository.GenerateMock<ISomeOtherInterface>(); _signal1 = new Signal("signal description 1"); _signal2 = new Signal("signal description 2"); } [Fact] public void No_exception_when_signal_is_given_before_matched_expectation() { _mock1.Expect(x => x.Method()) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock1.Method(); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_no_signal_is_given_before_matched_expectation() { _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_only_some_other_signal_is_given_before_matched_expectation() { _mock1.Expect(x => x.Method()) .Signal(_signal2); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock1.Method(); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_expected_signal_is_setup_but_signaling_call_is_never_executed() { _mock1.Expect(x => x.Method()) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_expected_signal_is_setup_but_signaling_call_is_made_with_wrong_argument() { _mock1.Expect(x => x.MethodWithParameter("expected argument")) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock1.MethodWithParameter("wrong argument"); _mock2.OtherMethod(); } [Fact] public void No_exception_is_thrown_when_stub_signals_expected_signal_before_matched_call() { _stub1.Stub(x => x.Method()) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _stub1.Method(); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_stub_signal_is_set_up_but_signaling_call_is_never_made() { _stub1.Stub(x => x.Method()) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_stub_signal_is_set_up_but_signaling_call_is_made_after_matching_call() { _stub1.Stub(x => x.Method()) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock2.OtherMethod(); _stub1.Method(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_stub_signal_is_set_up_but_signaling_call_is_made_with_wrong_argument() { _stub1.Stub(x => x.MethodWithParameter("expected argument")) .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _stub1.MethodWithParameter("wrong argument"); _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void 
Throws_AssertionException_when_two_signals_are_expected_but_only_one_is_signaled() { _stub1.Stub(x => x.Method()) .Signal(_signal1); _mock1.Stub(x => x.Method()) .Signal(_signal2); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1) .AssertSignal(_signal2); _mock1.Method(); _mock2.OtherMethod(); } [Fact] public void No_exception_when_two_signals_are_expected_and_both_are_signaled_before_matching_call() { _stub1.Stub(x => x.Method()) .Signal(_signal1); _mock1.Stub(x => x.Method()) .Signal(_signal2); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1) .AssertSignal(_signal2); _stub1.Method(); _mock1.Method(); _mock2.OtherMethod(); } [Fact] public void No_expectation_when_signal_set_for_property_setter_is_signaled_before_matching_call() { _mock1.Expect(x => x.Property) .SetPropertyWithArgument("expected value") .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock1.Property = "expected value"; _mock2.OtherMethod(); } [Fact] [ExpectedException(typeof(SignalAssertionException))] public void Throws_AssertionException_when_signal_is_set_for_property_setter_but_is_called_with_wrong_argument() { _mock1.Expect(x => x.Property) .SetPropertyWithArgument("expected value") .Signal(_signal1); _mock2.Expect(x => x.OtherMethod()) .AssertSignal(_signal1); _mock1.Property = "wrong value"; _mock2.OtherMethod(); } } }
namespace NegroniGame { using Microsoft.Xna.Framework; using Microsoft.Xna.Framework.Graphics; using Microsoft.Xna.Framework.Input; using Microsoft.Xna.Framework.Media; using Microsoft.Xna.Framework.Audio; using NegroniGame.Handlers; using System; using System.Runtime.InteropServices; using System.Collections.Generic; using log4net; using log4net.Config; using log4net.Appender; using log4net.Core; /// <summary> /// This is the main type for your game /// </summary> public sealed class GameScreen : Microsoft.Xna.Framework.Game { // Singleton ! private static GameScreen instance; #region Fields Declaration [DllImport("user32.dll", CharSet = CharSet.Auto)] public static extern uint MessageBox(IntPtr hWnd, String text, String caption, uint type); private static readonly ILog log = LogManager.GetLogger(typeof(GameScreen)); private readonly GraphicsDeviceManager graphics; private Vector2 cursorPos = new Vector2(); private List<Texture2D> monster1Textures, monster2Textures, monster3Textures, monster4Textures, monster5Textures, monster6Textures; private Video video; private VideoPlayer videoPlayer; private Texture2D videoTexture; private Color videoColor; private Song inGameMusic, gameOverMusic; #endregion private GameScreen() { // Windows settings graphics = new GraphicsDeviceManager(this); graphics.IsFullScreen = false; graphics.PreferredBackBufferHeight = 500; graphics.PreferredBackBufferWidth = 700; graphics.ApplyChanges(); //Changes the settings that you just applied Content.RootDirectory = "Content"; GameState = 0; } public static GameScreen Instance { get { if (instance == null) { instance = new GameScreen(); } return instance; } } #region Properties Declarations public MouseState MouseState { get; set; } public MouseState MouseStatePrevious { get; set; } public KeyboardState KeyboardState { get; set; } public SpriteBatch SpriteBatch { get; set; } public int GameState { get; set; } public static int ScreenWidth { get; private set; } public static int ScreenHeight { get; private set; } public SpriteFont FontMessages { get; private set; } public SpriteFont FontInfoBox { get; private set; } public KeyboardState KeyboardStatePrevious { get; private set; } public List<List<Texture2D>> MonstersTextures { get; private set; } public List<Texture2D> AllSceneryTextures { get; private set; } public List<Texture2D> PlayerTextures { get; private set; } public List<Texture2D> SlotsTextures { get; private set; } public List<Texture2D> MajesticSetTextures { get; private set; } public List<Texture2D> NegroniHPTextures { get; private set; } public List<Texture2D> ShotsTextures { get; private set; } public List<Texture2D> DropTextures { get; private set; } public List<Texture2D> HealthBars { get; private set; } public Texture2D NewbieStaffTexture { get; private set; } public Texture2D MysticStaffTexture { get; private set; } public Texture2D CoinsTexture { get; private set; } public Texture2D ElixirsTexture { get; private set; } public Texture2D CursorTexture { get; private set; } public Texture2D InfoBoxTexture { get; private set; } public Texture2D InfoBox1Texture { get; private set; } public Texture2D FireballsTexture { get; private set; } public Texture2D MarketDialog { get; private set; } public Texture2D BuyButton { get; private set; } public Texture2D GameOverTex { get; private set; } public Texture2D NpcSorcererTexture { get; private set; } public Texture2D NpcHelperTexture { get; private set; } public SoundEffect PickUpSound { get; private set; } public SoundEffect FireAttackSound { get; private set; 
} public SoundEffect DrinkElixir { get; private set; } public SoundEffect DrinkWell { get; private set; } public SoundEffect WeaponBought { get; private set; } public SoundEffect ArmorBought { get; private set; } public SoundEffect ElixirBought { get; private set; } public List<SoundEffect> HitSounds { get; private set; } #endregion protected override void Initialize() { FileAppender fileAppender = new FileAppender(); fileAppender.File = "GameStatusLogs.txt"; fileAppender.AppendToFile = true; fileAppender.Layout = new log4net.Layout.SimpleLayout(); fileAppender.Threshold = Level.Info; fileAppender.ActivateOptions(); BasicConfigurator.Configure(fileAppender); // Create a new SpriteBatch, which can be used to draw textures. SpriteBatch = new SpriteBatch(GraphicsDevice); // device = graphics.GraphicsDevice; ScreenWidth = graphics.PreferredBackBufferWidth; ScreenHeight = graphics.PreferredBackBufferHeight; video = Content.Load<Video>("media/IntroVideo"); videoPlayer = new VideoPlayer(); videoPlayer.Play(video); videoColor = new Color(255, 255, 255); base.Initialize(); log.Info("XNA Game Initialization completed"); } protected override void LoadContent() { NpcSorcererTexture = Content.Load<Texture2D>("media/sprites/sorcerer"); NpcHelperTexture = Content.Load<Texture2D>("media/sprites/player2"); FontMessages = Content.Load<SpriteFont>("Segoe UI Mono"); FontInfoBox = Content.Load<SpriteFont>("Segoe UI Mono Smaller"); CursorTexture = Content.Load<Texture2D>("media/cursor1"); // cursor CoinsTexture = Content.Load<Texture2D>("media/drop/coins"); ElixirsTexture = Content.Load<Texture2D>("media/drop/elixirs"); NewbieStaffTexture = Content.Load<Texture2D>("media/drop/newbieStaff"); MysticStaffTexture = Content.Load<Texture2D>("media/drop/mysticStaff"); InfoBoxTexture = Content.Load<Texture2D>("media/infoBox"); InfoBox1Texture = Content.Load<Texture2D>("media/infoBox1"); MarketDialog = Content.Load<Texture2D>("media/marketDialog"); BuyButton = Content.Load<Texture2D>("media/buy"); GameOverTex = Content.Load<Texture2D>("media/GameOver"); // background, toolbar, well, playerPic, equipmentShop AllSceneryTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/background"), Content.Load<Texture2D>("media/toolbar"), Content.Load<Texture2D>("media/well"), Content.Load<Texture2D>("media/Elvina"), Content.Load<Texture2D>("media/market") }; PlayerTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/Elvina-right"), Content.Load<Texture2D>("media/sprites/Elvina-left"), Content.Load<Texture2D>("media/sprites/Elvina-up"), Content.Load<Texture2D>("media/sprites/Elvina-down"), Content.Load<Texture2D>("media/sprites/Elvina-dead"), }; MajesticSetTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/drop/majesticBoots"), Content.Load<Texture2D>("media/drop/majesticGloves"), Content.Load<Texture2D>("media/drop/majesticHelmet"), Content.Load<Texture2D>("media/drop/majesticRobe"), Content.Load<Texture2D>("media/drop/majesticShield") }; monster1Textures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster1-down"), Content.Load<Texture2D>("media/sprites/monster1-left"), Content.Load<Texture2D>("media/sprites/monster1-right"), Content.Load<Texture2D>("media/sprites/monster1-up"), }; monster2Textures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster2-down"), Content.Load<Texture2D>("media/sprites/monster2-left"), Content.Load<Texture2D>("media/sprites/monster2-right"), Content.Load<Texture2D>("media/sprites/monster2-up"), }; monster3Textures = 
new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster3-down"), Content.Load<Texture2D>("media/sprites/monster3-left"), Content.Load<Texture2D>("media/sprites/monster3-right"), Content.Load<Texture2D>("media/sprites/monster3-up"), }; monster4Textures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster4-down"), Content.Load<Texture2D>("media/sprites/monster4-left"), Content.Load<Texture2D>("media/sprites/monster4-right"), Content.Load<Texture2D>("media/sprites/monster4-up"), }; monster5Textures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster5-down"), Content.Load<Texture2D>("media/sprites/monster5-left"), Content.Load<Texture2D>("media/sprites/monster5-right"), Content.Load<Texture2D>("media/sprites/monster5-up"), }; monster6Textures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/monster6-down"), Content.Load<Texture2D>("media/sprites/monster6-left"), Content.Load<Texture2D>("media/sprites/monster6-right"), Content.Load<Texture2D>("media/sprites/monster6-up"), }; MonstersTextures = new List<List<Texture2D>>() { monster1Textures, monster2Textures, monster3Textures, monster4Textures, monster5Textures, monster6Textures }; SlotsTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/slots/defaultSlot1"), Content.Load<Texture2D>("media/slots/defaultSlot2"), Content.Load<Texture2D>("media/slots/defaultSlot3"), Content.Load<Texture2D>("media/slots/defaultSlot4"), Content.Load<Texture2D>("media/slots/defaultSlot5"), Content.Load<Texture2D>("media/slots/defaultSlot6"), Content.Load<Texture2D>("media/slots/defaultSlot7"), Content.Load<Texture2D>("media/slots/defaultSlot8"), }; NegroniHPTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/negroniHPfull"), Content.Load<Texture2D>("media/negroniHP2of3"), Content.Load<Texture2D>("media/negroniHP1of3"), Content.Load<Texture2D>("media/negroniHPempty") }; ShotsTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/sprites/fireballs") }; FireballsTexture = Content.Load<Texture2D>("media/sprites/fireballs"); DropTextures = new List<Texture2D>() { Content.Load<Texture2D>("media/drop/coins2"), Content.Load<Texture2D>("media/drop/elixirs") }; HealthBars = new List<Texture2D>() { Content.Load<Texture2D>("media/health-full"), Content.Load<Texture2D>("media/health-low") }; ElixirBought = Content.Load<SoundEffect>("media/sounds/potionBought"); WeaponBought = Content.Load<SoundEffect>("media/sounds/weaponBought"); ArmorBought = Content.Load<SoundEffect>("media/sounds/armorBought"); HitSounds = new List<SoundEffect>() { Content.Load<SoundEffect>("media/sounds/hit1"), Content.Load<SoundEffect>("media/sounds/hit2"), Content.Load<SoundEffect>("media/sounds/hit3"), }; DrinkWell = Content.Load<SoundEffect>("media/sounds/drinkWell"); DrinkElixir = Content.Load<SoundEffect>("media/sounds/drinkPotion"); FireAttackSound = Content.Load<SoundEffect>("media/sounds/firehit"); PickUpSound = Content.Load<SoundEffect>("media/sounds/pickup"); gameOverMusic = Content.Load<Song>("media/sounds/DST-GameOver"); inGameMusic = Content.Load<Song>("media/sounds/DST-Exanos"); Player.Instance.Initialize(); NpcSorcerer.Instance.Initialize(); log.Info("XNA Game load completed"); } protected override void Update(GameTime gameTime) { MouseState = Mouse.GetState(); cursorPos = new Vector2(MouseState.X, MouseState.Y); // cursor update KeyboardState = Keyboard.GetState(); // Allows the game to exit if (KeyboardState.IsKeyDown(Keys.Escape)) { this.OnExiting(this, new EventArgs()); 
//this.Exit(); } switch (GameState) { case 0: // Video Intro if (videoPlayer.State != MediaState.Stopped) { videoTexture = videoPlayer.GetTexture(); } else { MediaPlayer.Play(inGameMusic); MediaPlayer.IsRepeating = true; GameState = 1; } break; case 1: // Game Started if (videoColor.A > 1) { videoColor.A -= 2; } // Checks for Pause if (KeyboardState.IsKeyDown(Keys.P) && KeyboardStatePrevious.IsKeyUp(Keys.P)) { MediaPlayer.Pause(); GameState = 2; } Player.Instance.Update(gameTime); MonstersHandler.Instance.Update(gameTime); DropHandler.Instance.Update(gameTime); ShotsHandler.Instance.UpdateShots(gameTime, KeyboardState); Toolbar.InventorySlots.Instance.Update(gameTime, MouseState); Toolbar.SystemMsg.Instance.GetLastMessages(); Toolbar.HP.Instance.Update(gameTime); InfoBoxes.Instance.Update(gameTime, MouseState); ElixirsHandler.Instance.Update(gameTime); // updates elixir reuse time Well.Instance.Update(gameTime); // updates well reuse time MarketDialogHandler.Instance.Update(MouseState, MouseStatePrevious); GameOverHandler.Instance.Update(gameTime); NpcHelperHandler.Update(gameTime); break; case 2: // Paused // Checks for Resume if (KeyboardState.IsKeyDown(Keys.P) && KeyboardStatePrevious.IsKeyUp(Keys.P)) { MediaPlayer.Resume(); GameState = 1; } Toolbar.HP.Instance.Update(gameTime); InfoBoxes.Instance.Update(gameTime, MouseState); MarketDialogHandler.Instance.Update(MouseState, MouseStatePrevious); break; case 3: // Game Over if (MediaPlayer.Queue.ActiveSong == inGameMusic) { MediaPlayer.Play(gameOverMusic); } InfoBoxes.Instance.Update(gameTime, MouseState); break; } this.KeyboardStatePrevious = KeyboardState; this.MouseStatePrevious = MouseState; base.Update(gameTime); } protected override void Draw(GameTime gameTime) { GraphicsDevice.Clear(Color.CornflowerBlue); SpriteBatch.Begin(); if (GameState != 0 && videoColor.A != 255) { SceneryHandler.Instance.Draw(); // Scenery DropHandler.Instance.Draw(); // Drop ShotsHandler.Instance.Draw(); // Shots Player.Instance.Draw(); // Player MonstersHandler.Instance.Draw(gameTime); // Monsters MarketDialogHandler.Instance.Draw(); // Market dialog Toolbar.InventorySlots.Instance.Draw(); // Inventory Toolbar.SystemMsg.Instance.DrawText(); // System messages Toolbar.HP.Instance.Draw(); // HP bar NpcHelperHandler.Draw(); InfoBoxes.Instance.Draw(); // Pop-up info boxes GameOverHandler.Instance.Draw(); SpriteBatch.Draw(CursorTexture, cursorPos, Color.White); // draws cursor } if (videoColor.A > 1) { SpriteBatch.Draw(videoTexture, new Vector2(0, 0), videoColor); // Intro Video } SpriteBatch.End(); base.Draw(gameTime); } protected override void OnExiting(object sender, EventArgs args) { uint exitEV = MessageBox(new IntPtr(0), "Would you like to exit?", "Game Over", 1); if (exitEV == 1) //ok { this.Exit(); } } } }
#region License /* * HttpConnection.cs * * This code is derived from System.Net.HttpConnection.cs of Mono * (http://www.mono-project.com). * * The MIT License * * Copyright (c) 2005 Novell, Inc. (http://www.novell.com) * Copyright (c) 2012-2014 sta.blockhead * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ #endregion #region Authors /* * Authors: * - Gonzalo Paniagua Javier <[email protected]> */ #endregion using System; using System.IO; using System.Net; using System.Net.Sockets; using System.Text; using System.Threading; using WebSocketSharp.Net.Security; namespace WebSocketSharp.Net { internal sealed class HttpConnection { #region Private Const Fields private const int _bufferSize = 8192; #endregion #region Private Fields private byte [] _buffer; private bool _chunked; private HttpListenerContext _context; private bool _contextWasBound; private StringBuilder _currentLine; private InputState _inputState; private RequestStream _inputStream; private HttpListener _lastListener; private LineState _lineState; private EndPointListener _listener; private ResponseStream _outputStream; private int _position; private ListenerPrefix _prefix; private MemoryStream _requestBuffer; private int _reuses; private bool _secure; private Socket _socket; private Stream _stream; private object _sync; private int _timeout; private Timer _timer; private WebSocketStream _websocketStream; #endregion #region Public Constructors public HttpConnection (Socket socket, EndPointListener listener) { _socket = socket; _listener = listener; _secure = listener.IsSecure; var netStream = new NetworkStream (socket, false); if (_secure) { var sslStream = new SslStream (netStream, false); sslStream.AuthenticateAsServer (listener.Certificate); _stream = sslStream; } else { _stream = netStream; } _sync = new object (); _timeout = 90000; // 90k ms for first request, 15k ms from then on. 
_timer = new Timer (onTimeout, this, Timeout.Infinite, Timeout.Infinite); init (); } #endregion #region Public Properties public bool IsClosed { get { return _socket == null; } } public bool IsSecure { get { return _secure; } } public IPEndPoint LocalEndPoint { get { return (IPEndPoint) _socket.LocalEndPoint; } } public ListenerPrefix Prefix { get { return _prefix; } set { _prefix = value; } } public IPEndPoint RemoteEndPoint { get { return (IPEndPoint) _socket.RemoteEndPoint; } } public int Reuses { get { return _reuses; } } public Stream Stream { get { return _stream; } } #endregion #region Private Methods private void close () { lock (_sync) { if (_socket == null) return; disposeTimer (); disposeRequestBuffer (); disposeStream (); closeSocket (); } unbind (); removeConnection (); } private void closeSocket () { try { _socket.Shutdown (SocketShutdown.Both); } catch { } _socket.Close (); _socket = null; } private void disposeRequestBuffer () { if (_requestBuffer == null) return; _requestBuffer.Dispose (); _requestBuffer = null; } private void disposeStream () { if (_stream == null) return; _inputStream = null; _outputStream = null; _websocketStream = null; _stream.Dispose (); _stream = null; } private void disposeTimer () { if (_timer == null) return; try { _timer.Change (Timeout.Infinite, Timeout.Infinite); } catch { } _timer.Dispose (); _timer = null; } private void init () { _chunked = false; _context = new HttpListenerContext (this); _inputState = InputState.RequestLine; _inputStream = null; _lineState = LineState.None; _outputStream = null; _position = 0; _prefix = null; _requestBuffer = new MemoryStream (); } private static void onRead (IAsyncResult asyncResult) { var conn = (HttpConnection) asyncResult.AsyncState; if (conn._socket == null) return; lock (conn._sync) { if (conn._socket == null) return; var nread = -1; try { conn._timer.Change (Timeout.Infinite, Timeout.Infinite); nread = conn._stream.EndRead (asyncResult); conn._requestBuffer.Write (conn._buffer, 0, nread); if (conn._requestBuffer.Length > 32768) { conn.SendError ("Bad request", 400); conn.Close (true); return; } } catch { if (conn._requestBuffer != null && conn._requestBuffer.Length > 0) conn.SendError (); conn.close (); return; } if (nread <= 0) { conn.close (); return; } if (conn.processInput (conn._requestBuffer.GetBuffer ())) { if (!conn._context.HasError) conn._context.Request.FinishInitialization (); if (conn._context.HasError) { conn.SendError (); conn.Close (true); return; } if (!conn._listener.BindContext (conn._context)) { conn.SendError ("Invalid host", 400); conn.Close (true); return; } var listener = conn._context.Listener; if (conn._lastListener != listener) { conn.removeConnection (); listener.AddConnection (conn); conn._lastListener = listener; } conn._contextWasBound = true; listener.RegisterContext (conn._context); return; } conn._stream.BeginRead (conn._buffer, 0, _bufferSize, onRead, conn); } } private static void onTimeout (object state) { var conn = (HttpConnection) state; conn.close (); } // true -> Done processing. // false -> Need more input. 
private bool processInput (byte [] data) { var len = data.Length; var used = 0; string line; try { while ((line = readLine (data, _position, len - _position, ref used)) != null) { _position += used; if (line.Length == 0) { if (_inputState == InputState.RequestLine) continue; _currentLine = null; return true; } if (_inputState == InputState.RequestLine) { _context.Request.SetRequestLine (line); _inputState = InputState.Headers; } else { _context.Request.AddHeader (line); } if (_context.HasError) return true; } } catch (Exception ex) { _context.ErrorMessage = ex.Message; return true; } _position += used; if (used == len) { _requestBuffer.SetLength (0); _position = 0; } return false; } private string readLine (byte [] buffer, int offset, int length, ref int used) { if (_currentLine == null) _currentLine = new StringBuilder (); var last = offset + length; used = 0; for (int i = offset; i < last && _lineState != LineState.LF; i++) { used++; var b = buffer [i]; if (b == 13) _lineState = LineState.CR; else if (b == 10) _lineState = LineState.LF; else _currentLine.Append ((char) b); } string res = null; if (_lineState == LineState.LF) { _lineState = LineState.None; res = _currentLine.ToString (); _currentLine.Length = 0; } return res; } private void removeConnection () { if (_lastListener == null) _listener.RemoveConnection (this); else _lastListener.RemoveConnection (this); } private void unbind () { if (_contextWasBound) { _listener.UnbindContext (_context); _contextWasBound = false; } } #endregion #region Internal Methods internal void Close (bool force) { if (_socket == null) return; lock (_sync) { if (_socket == null) return; if (!force) { GetResponseStream ().Close (); var req = _context.Request; var res = _context.Response; if (req.KeepAlive && !res.CloseConnection && req.FlushInput () && (!_chunked || (_chunked && !res.ForceCloseChunked))) { // Don't close. Keep working. _reuses++; disposeRequestBuffer (); unbind (); init (); BeginReadRequest (); return; } } close (); } } #endregion #region Public Methods public void BeginReadRequest () { if (_buffer == null) _buffer = new byte [_bufferSize]; if (_reuses == 1) _timeout = 15000; try { _timer.Change (_timeout, Timeout.Infinite); _stream.BeginRead (_buffer, 0, _bufferSize, onRead, this); } catch { close (); } } public void Close () { Close (false); } public RequestStream GetRequestStream (bool chunked, long contentlength) { if (_inputStream != null || _socket == null) return _inputStream; lock (_sync) { if (_socket == null) return _inputStream; var buff = _requestBuffer.GetBuffer (); var len = buff.Length; disposeRequestBuffer (); if (chunked) { _chunked = true; _context.Response.SendChunked = true; _inputStream = new ChunkedRequestStream ( _context, _stream, buff, _position, len - _position); } else { _inputStream = new RequestStream ( _stream, buff, _position, len - _position, contentlength); } return _inputStream; } } public ResponseStream GetResponseStream () { // TODO: Can we get this stream before reading the input? if (_outputStream != null || _socket == null) return _outputStream; lock (_sync) { if (_socket == null) return _outputStream; var listener = _context.Listener; var ignore = listener == null ? 
true : listener.IgnoreWriteExceptions; _outputStream = new ResponseStream (_stream, _context.Response, ignore); return _outputStream; } } public WebSocketStream GetWebSocketStream () { if (_websocketStream != null || _socket == null) return _websocketStream; lock (_sync) { if (_socket == null) return _websocketStream; _websocketStream = new WebSocketStream (_stream, _secure); return _websocketStream; } } public void SendError () { SendError (_context.ErrorMessage, _context.ErrorStatus); } public void SendError (string message, int status) { if (_socket == null) return; lock (_sync) { if (_socket == null) return; try { var res = _context.Response; res.StatusCode = status; res.ContentType = "text/html"; var desc = status.GetStatusDescription (); var msg = message != null && message.Length > 0 ? String.Format ("<h1>{0} ({1})</h1>", desc, message) : String.Format ("<h1>{0}</h1>", desc); var entity = res.ContentEncoding.GetBytes (msg); res.Close (entity, false); } catch { // Response was already closed. } } } #endregion } }
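// --------------------------------------------------------------------------
// Simplified sketch (an assumption for illustration, not part of WebSocketSharp):
// the CR/LF line scanner used by HttpConnection.readLine above, reduced to a
// free-standing helper. It appends bytes until it sees LF, reports how many
// bytes were consumed, and returns null when the buffer ends mid-line so the
// caller can wait for more input, mirroring processInput's parsing loop.
// --------------------------------------------------------------------------
using System;
using System.Text;

static class LineScannerSketch
{
    enum LineState { None, Cr, Lf }

    static string ReadLine(byte[] buffer, int offset, int length, StringBuilder current,
                           ref LineState state, out int used)
    {
        used = 0;
        int last = offset + length;
        for (int i = offset; i < last && state != LineState.Lf; i++)
        {
            used++;
            byte b = buffer[i];
            if (b == 13) state = LineState.Cr;        // '\r'
            else if (b == 10) state = LineState.Lf;   // '\n'
            else current.Append((char)b);
        }

        if (state != LineState.Lf)
            return null;                              // need more input

        state = LineState.None;
        string line = current.ToString();
        current.Length = 0;
        return line;
    }

    static void Main()
    {
        var data = Encoding.ASCII.GetBytes("GET / HTTP/1.1\r\nHost: example\r\n");
        var sb = new StringBuilder();
        var state = LineState.None;
        int pos = 0;

        while (pos < data.Length)
        {
            string line = ReadLine(data, pos, data.Length - pos, sb, ref state, out int used);
            pos += used;
            if (line == null) break;
            Console.WriteLine(line);
        }
    }
}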
using HaloSharp.Converter; using Newtonsoft.Json; using System; using System.Collections.Generic; using System.Linq; namespace HaloSharp.Model.HaloWars2.Stats.Common { [Serializable] public class Stats : IEquatable<Stats> { [JsonProperty(PropertyName = "PlaylistId")] public Guid? PlaylistId { get; set; } [JsonProperty(PropertyName = "PlaylistClassification")] public Enumeration.HaloWars2.PlaylistClassification? PlaylistClassification { get; set; } [JsonProperty(PropertyName = "HighestCsr")] public CompetitiveSkillRanking HighestCsr { get; set; } [JsonProperty(PropertyName = "TotalTimePlayed")] [JsonConverter(typeof(TimeSpanConverter))] public TimeSpan? TotalTimePlayed { get; set; } [JsonProperty(PropertyName = "TotalMatchesStarted")] public int TotalMatchesStarted { get; set; } [JsonProperty(PropertyName = "TotalMatchesCompleted")] public int TotalMatchesCompleted { get; set; } [JsonProperty(PropertyName = "TotalMatchesWon")] public int TotalMatchesWon { get; set; } [JsonProperty(PropertyName = "TotalMatchesLost")] public int TotalMatchesLost { get; set; } [JsonProperty(PropertyName = "TotalPointCaptures")] public int TotalPointCaptures { get; set; } [JsonProperty(PropertyName = "TotalUnitsBuilt")] public int TotalUnitsBuilt { get; set; } [JsonProperty(PropertyName = "TotalUnitsLost")] public int TotalUnitsLost { get; set; } [JsonProperty(PropertyName = "TotalUnitsDestroyed")] public int TotalUnitsDestroyed { get; set; } [JsonProperty(PropertyName = "TotalCardPlays")] public int TotalCardPlays { get; set; } [JsonProperty(PropertyName = "HighestWaveCompleted")] public int HighestWaveCompleted { get; set; } [JsonProperty(PropertyName = "LeaderStats")] public Dictionary<string, LeaderStats> LeaderStats { get; set; } public bool Equals(Stats other) { if (ReferenceEquals(null, other)) { return false; } if (ReferenceEquals(this, other)) { return true; } return Equals(HighestCsr, other.HighestCsr) && HighestWaveCompleted == other.HighestWaveCompleted && LeaderStats.OrderBy(ls => ls.Key).SequenceEqual(other.LeaderStats.OrderBy(ls => ls.Key)) && PlaylistClassification == other.PlaylistClassification && PlaylistId.Equals(other.PlaylistId) && TotalCardPlays == other.TotalCardPlays && TotalMatchesCompleted == other.TotalMatchesCompleted && TotalMatchesLost == other.TotalMatchesLost && TotalMatchesStarted == other.TotalMatchesStarted && TotalMatchesWon == other.TotalMatchesWon && TotalPointCaptures == other.TotalPointCaptures && TotalTimePlayed.Equals(other.TotalTimePlayed) && TotalUnitsBuilt == other.TotalUnitsBuilt && TotalUnitsDestroyed == other.TotalUnitsDestroyed && TotalUnitsLost == other.TotalUnitsLost; } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) { return false; } if (ReferenceEquals(this, obj)) { return true; } if (obj.GetType() != typeof(Stats)) { return false; } return Equals((Stats) obj); } public override int GetHashCode() { unchecked { var hashCode = HighestCsr?.GetHashCode() ?? 0; hashCode = (hashCode*397) ^ HighestWaveCompleted; hashCode = (hashCode*397) ^ (LeaderStats?.GetHashCode() ?? 
0); hashCode = (hashCode*397) ^ PlaylistClassification.GetHashCode(); hashCode = (hashCode*397) ^ PlaylistId.GetHashCode(); hashCode = (hashCode*397) ^ TotalCardPlays; hashCode = (hashCode*397) ^ TotalMatchesCompleted; hashCode = (hashCode*397) ^ TotalMatchesLost; hashCode = (hashCode*397) ^ TotalMatchesStarted; hashCode = (hashCode*397) ^ TotalMatchesWon; hashCode = (hashCode*397) ^ TotalPointCaptures; hashCode = (hashCode*397) ^ TotalTimePlayed.GetHashCode(); hashCode = (hashCode*397) ^ TotalUnitsBuilt; hashCode = (hashCode*397) ^ TotalUnitsDestroyed; hashCode = (hashCode*397) ^ TotalUnitsLost; return hashCode; } } public static bool operator ==(Stats left, Stats right) { return Equals(left, right); } public static bool operator !=(Stats left, Stats right) { return !Equals(left, right); } } [Serializable] public class LeaderStats : IEquatable<LeaderStats> { [JsonProperty(PropertyName = "TotalTimePlayed")] [JsonConverter(typeof(TimeSpanConverter))] public TimeSpan TotalTimePlayed { get; set; } [JsonProperty(PropertyName = "TotalMatchesStarted")] public int TotalMatchesStarted { get; set; } [JsonProperty(PropertyName = "TotalMatchesCompleted")] public int TotalMatchesCompleted { get; set; } [JsonProperty(PropertyName = "TotalMatchesWon")] public int TotalMatchesWon { get; set; } [JsonProperty(PropertyName = "TotalMatchesLost")] public int TotalMatchesLost { get; set; } [JsonProperty(PropertyName = "TotalLeaderPowersCast")] public int TotalLeaderPowersCast { get; set; } public bool Equals(LeaderStats other) { if (ReferenceEquals(null, other)) { return false; } if (ReferenceEquals(this, other)) { return true; } return TotalLeaderPowersCast == other.TotalLeaderPowersCast && TotalMatchesCompleted == other.TotalMatchesCompleted && TotalMatchesLost == other.TotalMatchesLost && TotalMatchesStarted == other.TotalMatchesStarted && TotalMatchesWon == other.TotalMatchesWon && TotalTimePlayed.Equals(other.TotalTimePlayed); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) { return false; } if (ReferenceEquals(this, obj)) { return true; } if (obj.GetType() != typeof(LeaderStats)) { return false; } return Equals((LeaderStats)obj); } public override int GetHashCode() { unchecked { var hashCode = TotalLeaderPowersCast; hashCode = (hashCode * 397) ^ TotalMatchesCompleted; hashCode = (hashCode * 397) ^ TotalMatchesLost; hashCode = (hashCode * 397) ^ TotalMatchesStarted; hashCode = (hashCode * 397) ^ TotalMatchesWon; hashCode = (hashCode * 397) ^ TotalTimePlayed.GetHashCode(); return hashCode; } } public static bool operator ==(LeaderStats left, LeaderStats right) { return Equals(left, right); } public static bool operator !=(LeaderStats left, LeaderStats right) { return !Equals(left, right); } } }
using YAF.Lucene.Net.Index; using YAF.Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics; using System.Text; using JCG = J2N.Collections.Generic; namespace YAF.Lucene.Net.Search { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using ArrayUtil = YAF.Lucene.Net.Util.ArrayUtil; using AtomicReader = YAF.Lucene.Net.Index.AtomicReader; using AtomicReaderContext = YAF.Lucene.Net.Index.AtomicReaderContext; using IBits = YAF.Lucene.Net.Util.IBits; using BytesRef = YAF.Lucene.Net.Util.BytesRef; using DocsAndPositionsEnum = YAF.Lucene.Net.Index.DocsAndPositionsEnum; using DocsEnum = YAF.Lucene.Net.Index.DocsEnum; using IndexReader = YAF.Lucene.Net.Index.IndexReader; using IndexReaderContext = YAF.Lucene.Net.Index.IndexReaderContext; using Similarity = YAF.Lucene.Net.Search.Similarities.Similarity; using SimScorer = YAF.Lucene.Net.Search.Similarities.Similarity.SimScorer; using Term = YAF.Lucene.Net.Index.Term; using TermContext = YAF.Lucene.Net.Index.TermContext; using Terms = YAF.Lucene.Net.Index.Terms; using TermsEnum = YAF.Lucene.Net.Index.TermsEnum; using TermState = YAF.Lucene.Net.Index.TermState; using ToStringUtils = YAF.Lucene.Net.Util.ToStringUtils; using System.Collections; using J2N.Collections.Generic.Extensions; /// <summary> /// <see cref="MultiPhraseQuery"/> is a generalized version of <see cref="PhraseQuery"/>, with an added /// method <see cref="Add(Term[])"/>. /// <para/> /// To use this class, to search for the phrase "Microsoft app*" first use /// <see cref="Add(Term)"/> on the term "Microsoft", then find all terms that have "app" as /// prefix using <c>MultiFields.GetFields(IndexReader).GetTerms(string)</c>, and use <see cref="MultiPhraseQuery.Add(Term[])"/> /// to add them to the query. /// <para/> /// Collection initializer note: To create and populate a <see cref="MultiPhraseQuery"/> /// in a single statement, you can use the following example as a guide: /// /// <code> /// var multiPhraseQuery = new MultiPhraseQuery() { /// new Term("field", "microsoft"), /// new Term("field", "office") /// }; /// </code> /// Note that as long as you specify all of the parameters, you can use either /// <see cref="Add(Term)"/>, <see cref="Add(Term[])"/>, or <see cref="Add(Term[], int)"/> /// as the method to use to initialize. If there are multiple parameters, each parameter set /// must be surrounded by curly braces. /// </summary> public class MultiPhraseQuery : Query, IEnumerable<Term[]> // LUCENENET specific - implemented IEnumerable<Term[]>, which allows for use of collection initializer. 
See: https://stackoverflow.com/a/9195144 { private string field; private IList<Term[]> termArrays = new JCG.List<Term[]>(); private readonly IList<int> positions = new JCG.List<int>(); private int slop = 0; /// <summary> /// Sets the phrase slop for this query. </summary> /// <seealso cref="PhraseQuery.Slop"/> public virtual int Slop { set { if (value < 0) { throw new System.ArgumentException("slop value cannot be negative"); } slop = value; } get { return slop; } } /// <summary> /// Add a single term at the next position in the phrase. </summary> /// <seealso cref="PhraseQuery.Add(Term)"/> public virtual void Add(Term term) { Add(new Term[] { term }); } /// <summary> /// Add multiple terms at the next position in the phrase. Any of the terms /// may match. /// </summary> /// <seealso cref="PhraseQuery.Add(Term)"/> public virtual void Add(Term[] terms) { int position = 0; if (positions.Count > 0) { position = (int)positions[positions.Count - 1] + 1; } Add(terms, position); } /// <summary> /// Allows to specify the relative position of terms within the phrase. /// </summary> /// <seealso cref="PhraseQuery.Add(Term, int)"/> public virtual void Add(Term[] terms, int position) { if (termArrays.Count == 0) { field = terms[0].Field; } for (var i = 0; i < terms.Length; i++) { if (!terms[i].Field.Equals(field, StringComparison.Ordinal)) { throw new System.ArgumentException("All phrase terms must be in the same field (" + field + "): " + terms[i]); } } termArrays.Add(terms); positions.Add(position); } /// <summary> /// Returns a List of the terms in the multiphrase. /// Do not modify the List or its contents. /// </summary> public virtual IList<Term[]> GetTermArrays() // LUCENENET TODO: API - make into a property { return termArrays.AsReadOnly(); } /// <summary> /// Returns the relative positions of terms in this phrase. /// </summary> public virtual int[] GetPositions() { var result = new int[positions.Count]; for (int i = 0; i < positions.Count; i++) { result[i] = (int)positions[i]; } return result; } /// <summary> /// Expert: adds all terms occurring in this query to the terms set. Only /// works if this query is in its rewritten (<see cref="Rewrite(IndexReader)"/>) form. 
/// </summary> /// <exception cref="InvalidOperationException"> If this query is not yet rewritten </exception> public override void ExtractTerms(ISet<Term> terms) { foreach (Term[] arr in termArrays) { foreach (Term term in arr) { terms.Add(term); } } } private class MultiPhraseWeight : Weight { private readonly MultiPhraseQuery outerInstance; private readonly Similarity similarity; private readonly Similarity.SimWeight stats; private readonly IDictionary<Term, TermContext> termContexts = new Dictionary<Term, TermContext>(); public MultiPhraseWeight(MultiPhraseQuery outerInstance, IndexSearcher searcher) { this.outerInstance = outerInstance; this.similarity = searcher.Similarity; IndexReaderContext context = searcher.TopReaderContext; // compute idf var allTermStats = new List<TermStatistics>(); foreach (Term[] terms in outerInstance.termArrays) { foreach (Term term in terms) { TermContext termContext; termContexts.TryGetValue(term, out termContext); if (termContext == null) { termContext = TermContext.Build(context, term); termContexts[term] = termContext; } allTermStats.Add(searcher.TermStatistics(term, termContext)); } } stats = similarity.ComputeWeight(outerInstance.Boost, searcher.CollectionStatistics(outerInstance.field), allTermStats.ToArray()); } public override Query Query { get { return outerInstance; } } public override float GetValueForNormalization() { return stats.GetValueForNormalization(); } public override void Normalize(float queryNorm, float topLevelBoost) { stats.Normalize(queryNorm, topLevelBoost); } public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { Debug.Assert(outerInstance.termArrays.Count > 0); AtomicReader reader = (context.AtomicReader); IBits liveDocs = acceptDocs; PhraseQuery.PostingsAndFreq[] postingsFreqs = new PhraseQuery.PostingsAndFreq[outerInstance.termArrays.Count]; Terms fieldTerms = reader.GetTerms(outerInstance.field); if (fieldTerms == null) { return null; } // Reuse single TermsEnum below: TermsEnum termsEnum = fieldTerms.GetIterator(null); for (int pos = 0; pos < postingsFreqs.Length; pos++) { Term[] terms = outerInstance.termArrays[pos]; DocsAndPositionsEnum postingsEnum; int docFreq; if (terms.Length > 1) { postingsEnum = new UnionDocsAndPositionsEnum(liveDocs, context, terms, termContexts, termsEnum); // coarse -- this overcounts since a given doc can // have more than one term: docFreq = 0; for (int termIdx = 0; termIdx < terms.Length; termIdx++) { Term term = terms[termIdx]; TermState termState = termContexts[term].Get(context.Ord); if (termState == null) { // Term not in reader continue; } termsEnum.SeekExact(term.Bytes, termState); docFreq += termsEnum.DocFreq; } if (docFreq == 0) { // None of the terms are in this reader return null; } } else { Term term = terms[0]; TermState termState = termContexts[term].Get(context.Ord); if (termState == null) { // Term not in reader return null; } termsEnum.SeekExact(term.Bytes, termState); postingsEnum = termsEnum.DocsAndPositions(liveDocs, null, DocsAndPositionsFlags.NONE); if (postingsEnum == null) { // term does exist, but has no positions Debug.Assert(termsEnum.Docs(liveDocs, null, DocsFlags.NONE) != null, "termstate found but no term exists in reader"); throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")"); } docFreq = termsEnum.DocFreq; } postingsFreqs[pos] = new PhraseQuery.PostingsAndFreq(postingsEnum, docFreq, (int)outerInstance.positions[pos], terms); } // sort 
by increasing docFreq order if (outerInstance.slop == 0) { ArrayUtil.TimSort(postingsFreqs); } if (outerInstance.slop == 0) { ExactPhraseScorer s = new ExactPhraseScorer(this, postingsFreqs, similarity.GetSimScorer(stats, context)); if (s.noDocs) { return null; } else { return s; } } else { return new SloppyPhraseScorer(this, postingsFreqs, outerInstance.slop, similarity.GetSimScorer(stats, context)); } } public override Explanation Explain(AtomicReaderContext context, int doc) { Scorer scorer = GetScorer(context, (context.AtomicReader).LiveDocs); if (scorer != null) { int newDoc = scorer.Advance(doc); if (newDoc == doc) { float freq = outerInstance.slop == 0 ? scorer.Freq : ((SloppyPhraseScorer)scorer).SloppyFreq; SimScorer docScorer = similarity.GetSimScorer(stats, context); ComplexExplanation result = new ComplexExplanation(); result.Description = "weight(" + Query + " in " + doc + ") [" + similarity.GetType().Name + "], result of:"; Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq)); result.AddDetail(scoreExplanation); result.Value = scoreExplanation.Value; result.Match = true; return result; } } return new ComplexExplanation(false, 0.0f, "no matching term"); } } public override Query Rewrite(IndexReader reader) { if (termArrays.Count == 0) { BooleanQuery bq = new BooleanQuery(); bq.Boost = Boost; return bq; } // optimize one-term case else if (termArrays.Count == 1) { Term[] terms = termArrays[0]; BooleanQuery boq = new BooleanQuery(true); for (int i = 0; i < terms.Length; i++) { boq.Add(new TermQuery(terms[i]), Occur.SHOULD); } boq.Boost = Boost; return boq; } else { return this; } } public override Weight CreateWeight(IndexSearcher searcher) { return new MultiPhraseWeight(this, searcher); } /// <summary> /// Prints a user-readable version of this query. </summary> public override sealed string ToString(string f) { StringBuilder buffer = new StringBuilder(); if (field == null || !field.Equals(f, StringComparison.Ordinal)) { buffer.Append(field); buffer.Append(":"); } buffer.Append("\""); int k = 0; int? lastPos = -1; bool first = true; foreach (Term[] terms in termArrays) { int? position = positions[k]; if (first) { first = false; } else { buffer.Append(" "); for (int j = 1; j < (position - lastPos); j++) { buffer.Append("? "); } } if (terms.Length > 1) { buffer.Append("("); for (int j = 0; j < terms.Length; j++) { buffer.Append(terms[j].Text()); if (j < terms.Length - 1) { buffer.Append(" "); } } buffer.Append(")"); } else { buffer.Append(terms[0].Text()); } lastPos = position; ++k; } buffer.Append("\""); if (slop != 0) { buffer.Append("~"); buffer.Append(slop); } buffer.Append(ToStringUtils.Boost(Boost)); return buffer.ToString(); } /// <summary> /// Returns <c>true</c> if <paramref name="o"/> is equal to this. </summary> public override bool Equals(object o) { if (!(o is MultiPhraseQuery)) { return false; } MultiPhraseQuery other = (MultiPhraseQuery)o; return this.Boost == other.Boost && this.slop == other.slop && TermArraysEquals(this.termArrays, other.termArrays) && this.positions.Equals(other.positions); } /// <summary> /// Returns a hash code value for this object. </summary> public override int GetHashCode() { //If this doesn't work hash all elements of positions. This was used to reduce time overhead return J2N.BitConversion.SingleToInt32Bits(Boost) ^ slop ^ TermArraysHashCode() ^ ((positions.Count == 0) ? 
0 : positions.GetHashCode() ^ 0x4AC65113); } // Breakout calculation of the termArrays hashcode private int TermArraysHashCode() { int hashCode = 1; foreach (Term[] termArray in termArrays) { hashCode = 31 * hashCode + (termArray == null ? 0 : Arrays.GetHashCode(termArray)); } return hashCode; } // Breakout calculation of the termArrays equals private bool TermArraysEquals(IList<Term[]> termArrays1, IList<Term[]> termArrays2) { if (termArrays1.Count != termArrays2.Count) { return false; } using (IEnumerator<Term[]> iterator1 = termArrays1.GetEnumerator()) { using (IEnumerator<Term[]> iterator2 = termArrays2.GetEnumerator()) { while (iterator1.MoveNext()) { Term[] termArray1 = iterator1.Current; iterator2.MoveNext(); Term[] termArray2 = iterator2.Current; if (!(termArray1 == null ? termArray2 == null : Arrays.Equals(termArray1, termArray2))) { return false; } } } } return true; } /// <summary> /// Returns an enumerator that iterates through the <see cref="termArrays"/> collection. /// </summary> /// <returns>An enumerator that can be used to iterate through the <see cref="termArrays"/> collection.</returns> // LUCENENET specific public IEnumerator<Term[]> GetEnumerator() { return termArrays.GetEnumerator(); } /// <summary> /// Returns an enumerator that iterates through the <see cref="termArrays"/>. /// </summary> /// <returns>An enumerator that can be used to iterate through the <see cref="termArrays"/> collection.</returns> // LUCENENET specific IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } } /// <summary> /// Takes the logical union of multiple <see cref="DocsEnum"/> iterators. /// </summary> // TODO: if ever we allow subclassing of the *PhraseScorer internal class UnionDocsAndPositionsEnum : DocsAndPositionsEnum { private sealed class DocsQueue : Util.PriorityQueue<DocsAndPositionsEnum> { internal DocsQueue(ICollection<DocsAndPositionsEnum> docsEnums) : base(docsEnums.Count) { foreach (DocsAndPositionsEnum postings in docsEnums) { if (postings.NextDoc() != DocIdSetIterator.NO_MORE_DOCS) { Add(postings); } } } protected internal override bool LessThan(DocsAndPositionsEnum a, DocsAndPositionsEnum b) { return a.DocID < b.DocID; } } /// <summary> /// NOTE: This was IntQueue in Lucene /// </summary> private sealed class Int32Queue { public Int32Queue() { InitializeInstanceFields(); } internal void InitializeInstanceFields() { _array = new int[_arraySize]; } private int _arraySize = 16; private int _index = 0; private int _lastIndex = 0; private int[] _array; internal void Add(int i) { if (_lastIndex == _arraySize) { GrowArray(); } _array[_lastIndex++] = i; } internal int Next() { return _array[_index++]; } internal void Sort() { Array.Sort(_array, _index, _lastIndex); } internal void Clear() { _index = 0; _lastIndex = 0; } internal int Count // LUCENENET NOTE: This was size() in Lucene. 
{ get { return (_lastIndex - _index); } } private void GrowArray() { var newArray = new int[_arraySize * 2]; Array.Copy(_array, 0, newArray, 0, _arraySize); _array = newArray; _arraySize *= 2; } } private int _doc; private int _freq; private readonly DocsQueue _queue; private readonly Int32Queue _posList; private readonly long _cost; public UnionDocsAndPositionsEnum(IBits liveDocs, AtomicReaderContext context, Term[] terms, IDictionary<Term, TermContext> termContexts, TermsEnum termsEnum) { ICollection<DocsAndPositionsEnum> docsEnums = new LinkedList<DocsAndPositionsEnum>(); for (int i = 0; i < terms.Length; i++) { Term term = terms[i]; TermState termState = termContexts[term].Get(context.Ord); if (termState == null) { // Term doesn't exist in reader continue; } termsEnum.SeekExact(term.Bytes, termState); DocsAndPositionsEnum postings = termsEnum.DocsAndPositions(liveDocs, null, DocsAndPositionsFlags.NONE); if (postings == null) { // term does exist, but has no positions throw new InvalidOperationException("field \"" + term.Field + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.Text() + ")"); } _cost += postings.GetCost(); docsEnums.Add(postings); } _queue = new DocsQueue(docsEnums); _posList = new Int32Queue(); } public override sealed int NextDoc() { if (_queue.Count == 0) { return NO_MORE_DOCS; } // TODO: move this init into positions(): if the search // doesn't need the positions for this doc then don't // waste CPU merging them: _posList.Clear(); _doc = _queue.Top.DocID; // merge sort all positions together DocsAndPositionsEnum postings; do { postings = _queue.Top; int freq = postings.Freq; for (int i = 0; i < freq; i++) { _posList.Add(postings.NextPosition()); } if (postings.NextDoc() != NO_MORE_DOCS) { _queue.UpdateTop(); } else { _queue.Pop(); } } while (_queue.Count > 0 && _queue.Top.DocID == _doc); _posList.Sort(); _freq = _posList.Count; return _doc; } public override int NextPosition() { return _posList.Next(); } public override int StartOffset { get { return -1; } } public override int EndOffset { get { return -1; } } public override BytesRef GetPayload() { return null; } public override sealed int Advance(int target) { while (_queue.Top != null && target > _queue.Top.DocID) { DocsAndPositionsEnum postings = _queue.Pop(); if (postings.Advance(target) != NO_MORE_DOCS) { _queue.Add(postings); } } return NextDoc(); } public override sealed int Freq { get { return _freq; } } public override sealed int DocID { get { return _doc; } } public override long GetCost() { return _cost; } } }
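// Illustrative sketch (not part of Lucene.NET): builds the "Microsoft app*" query described in
// the MultiPhraseQuery documentation above. The field name "body" is an assumption; the caller
// is assumed to have already expanded the "app" prefix into appPrefixedTerms (all in the same
// field), for example via MultiFields.GetFields(reader).GetTerms("body") as the class comment suggests.
public static class MultiPhraseQueryExample
{
    public static YAF.Lucene.Net.Search.MultiPhraseQuery BuildMicrosoftAppQuery(
        YAF.Lucene.Net.Index.Term[] appPrefixedTerms)
    {
        var query = new YAF.Lucene.Net.Search.MultiPhraseQuery();

        // Position 0: the literal term "microsoft".
        query.Add(new YAF.Lucene.Net.Index.Term("body", "microsoft"));

        // Position 1: any of the terms expanding the "app" prefix (e.g. "app", "apps", "application").
        query.Add(appPrefixedTerms);

        // Optional: allow one position of slack between the two phrase positions.
        query.Slop = 1;
        return query;
    }
}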
#region License, Terms and Author(s) // // ELMAH - Error Logging Modules and Handlers for ASP.NET // Copyright (c) 2004-9 Atif Aziz. All rights reserved. // // Author(s): // // Atif Aziz, http://www.raboof.com // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion namespace Elmah { #region Imports using RidoElmah; using System.Collections; using System.Web; using CultureInfo = System.Globalization.CultureInfo; using Encoding = System.Text.Encoding; #endregion /// <summary> /// HTTP handler factory that dispenses handlers for rendering views and /// resources needed to display the error log. /// </summary> public class ErrorLogPageFactory : IHttpHandlerFactory { private static readonly object _authorizationHandlersKey = new object(); private static readonly IRequestAuthorizationHandler[] _zeroAuthorizationHandlers = new IRequestAuthorizationHandler[0]; /// <summary> /// Returns an object that implements the <see cref="IHttpHandler"/> /// interface and which is responsible for serving the request. /// </summary> /// <returns> /// A new <see cref="IHttpHandler"/> object that processes the request. /// </returns> public virtual IHttpHandler GetHandler(HttpContext context, string requestType, string url, string pathTranslated) { // // The request resource is determined by the looking up the // value of the PATH_INFO server variable. // string resource = context.Request.PathInfo.Length == 0 ? string.Empty : context.Request.PathInfo.Substring(1).ToLower(CultureInfo.InvariantCulture); IHttpHandler handler = FindHandler(resource); if (handler == null) throw new HttpException(404, "Resource not found."); // // Check if authorized then grant or deny request. // int authorized = IsAuthorized(context); if (authorized == 0 || (authorized < 0 // Compatibility case... && !HttpRequestSecurity.IsLocal(context.Request) && !SecurityConfiguration.Default.AllowRemoteAccess)) { (new ManifestResourceHandler("RemoteAccessError.htm", "text/html")).ProcessRequest(context); HttpResponse response = context.Response; response.Status = "403 Forbidden"; response.End(); // // HttpResponse.End docs say that it throws // ThreadAbortException and so should never end up here but // that's not been the observation in the debugger. So as a // precautionary measure, bail out anyway. // return null; } return handler; } private static IHttpHandler FindHandler(string name) { Debug.Assert(name != null); switch (name) { case "detail": return new ErrorDetailPage(); case "html": return new ErrorHtmlPage(); case "xml": return new ErrorXmlHandler(); case "json": return new ErrorJsonHandler(); //case "rss": // return new ErrorRssHandler(); //case "digestrss": // return new ErrorDigestRssHandler(); case "download": return new ErrorLogDownloadHandler(); case "stylesheet": return new ManifestResourceHandler("ErrorLog.css", "text/css", Encoding.GetEncoding("Windows-1252")); //case "test": // throw new TestException(); //case "about": // return new AboutPage(); default: return name.Length == 0 ? 
new ErrorLogPage() : null; } } /// <summary> /// Enables the factory to reuse an existing handler instance. /// </summary> public virtual void ReleaseHandler(IHttpHandler handler) { } /// <summary> /// Determines if the request is authorized by objects implementing /// <see cref="IRequestAuthorizationHandler" />. /// </summary> /// <returns> /// Returns zero if unauthorized, a value greater than zero if /// authorized otherwise a value less than zero if no handlers /// were available to answer. /// </returns> private static int IsAuthorized(HttpContext context) { Debug.Assert(context != null); int authorized = /* uninitialized */ -1; IEnumerator authorizationHandlers = GetAuthorizationHandlers(context).GetEnumerator(); while (authorized != 0 && authorizationHandlers.MoveNext()) { IRequestAuthorizationHandler authorizationHandler = (IRequestAuthorizationHandler)authorizationHandlers.Current; authorized = authorizationHandler.Authorize(context) ? 1 : 0; } return authorized; } private static IList GetAuthorizationHandlers(HttpContext context) { Debug.Assert(context != null); object key = _authorizationHandlersKey; IList handlers = (IList)context.Items[key]; if (handlers == null) { const int capacity = 4; ArrayList list = null; HttpApplication application = context.ApplicationInstance; if (application is IRequestAuthorizationHandler) { list = new ArrayList(capacity); list.Add(application); } foreach (IHttpModule module in HttpModuleRegistry.GetModules(application)) { if (module is IRequestAuthorizationHandler) { if (list == null) list = new ArrayList(capacity); list.Add(module); } } context.Items[key] = handlers = ArrayList.ReadOnly( list != null ? list.ToArray(typeof(IRequestAuthorizationHandler)) : _zeroAuthorizationHandlers); } return handlers; } } public interface IRequestAuthorizationHandler { bool Authorize(HttpContext context); } }
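// Illustrative sketch (not part of ELMAH): GetHandler above asks the HttpApplication instance
// and any registered IHttpModules whether they implement IRequestAuthorizationHandler before
// serving a page. A Global.asax class like the one below is one way to grant remote access to
// selected users only; the class name and the "ElmahViewer" role are assumptions for the example.
public class ExampleGlobal : System.Web.HttpApplication, Elmah.IRequestAuthorizationHandler
{
    public bool Authorize(System.Web.HttpContext context)
    {
        // Local requests are always allowed; remote callers must belong to a trusted role.
        return context.Request.IsLocal
            || (context.User != null && context.User.IsInRole("ElmahViewer"));
    }
}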
namespace SunLine.Community.Repositories.Migrations { using System; using System.Data.Entity.Migrations; public partial class AddInitial : DbMigration { public override void Up() { CreateTable( "dbo.Categories", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.Messages", c => new { Id = c.Guid(nullable: false, identity: true), Mind = c.String(nullable: false, maxLength: 200), Speech = c.String(), AmountOfFavourites = c.Int(nullable: false), AmountOfTransmitted = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), Comment_Id = c.Guid(), Language_Id = c.Guid(nullable: false), MessageState_Id = c.Guid(nullable: false), User_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Messages", t => t.Comment_Id) .ForeignKey("dbo.Languages", t => t.Language_Id) .ForeignKey("dbo.MessageStates", t => t.MessageState_Id) .ForeignKey("dbo.Users", t => t.User_Id) .Index(t => t.Comment_Id) .Index(t => t.Language_Id) .Index(t => t.MessageState_Id) .Index(t => t.User_Id); CreateTable( "dbo.Files", c => new { Id = c.Guid(nullable: false, identity: true), Path = c.String(nullable: false, maxLength: 500), Identifier = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), FileType_Id = c.Guid(nullable: false), User_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.FileTypes", t => t.FileType_Id) .ForeignKey("dbo.Users", t => t.User_Id) .Index(t => t.FileType_Id) .Index(t => t.User_Id); CreateTable( "dbo.FileTypes", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), FileTypeEnum = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.Users", c => new { Id = c.Guid(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), FirstName = c.String(nullable: false, maxLength: 100), LastName = c.String(nullable: false, maxLength: 100), GravatarHash = c.String(nullable: false, maxLength: 100), Email = c.String(maxLength: 100), EmailConfirmed = c.Boolean(nullable: false), PasswordHash = c.String(), SecurityStamp = c.String(), PhoneNumber = c.String(), PhoneNumberConfirmed = c.Boolean(nullable: false), TwoFactorEnabled = c.Boolean(nullable: false), LockoutEndDateUtc = c.DateTime(), LockoutEnabled = c.Boolean(nullable: false), AccessFailedCount = c.Int(nullable: false), UserName = c.String(nullable: false, maxLength: 50), Language_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Languages", t => t.Language_Id) .Index(t => t.UserName, unique: true, name: "UserNameIndex") .Index(t => t.Language_Id); CreateTable( "dbo.AspNetUserClaims", c => new { Id = c.Int(nullable: false, identity: true), UserId = c.Guid(nullable: false), ClaimType = c.String(), ClaimValue = c.String(), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Users", t => t.UserId) .Index(t => t.UserId); CreateTable( "dbo.Languages", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), Code = c.String(nullable: false, maxLength: 5), 
Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.AspNetUserLogins", c => new { LoginProvider = c.String(nullable: false, maxLength: 128), ProviderKey = c.String(nullable: false, maxLength: 128), UserId = c.Guid(nullable: false), }) .PrimaryKey(t => new { t.LoginProvider, t.ProviderKey, t.UserId }) .ForeignKey("dbo.Users", t => t.UserId) .Index(t => t.UserId); CreateTable( "dbo.AspNetUserRoles", c => new { UserId = c.Guid(nullable: false), RoleId = c.Guid(nullable: false), }) .PrimaryKey(t => new { t.UserId, t.RoleId }) .ForeignKey("dbo.Users", t => t.UserId) .ForeignKey("dbo.AspNetRoles", t => t.RoleId) .Index(t => t.UserId) .Index(t => t.RoleId); CreateTable( "dbo.UserCategories", c => new { Id = c.Guid(nullable: false, identity: true), AmountOfFavouritesToShow = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), Category_Id = c.Guid(nullable: false), User_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Categories", t => t.Category_Id) .ForeignKey("dbo.Users", t => t.User_Id) .Index(t => t.Category_Id) .Index(t => t.User_Id); CreateTable( "dbo.UserMessages", c => new { Id = c.Guid(nullable: false, identity: true), IsUserFavouriteMessage = c.Boolean(nullable: false), IsMarkerSet = c.Boolean(nullable: false), HaveMention = c.Boolean(nullable: false), WasTransmitted = c.Boolean(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), Message_Id = c.Guid(nullable: false), TransmittedUserMessage_Id = c.Guid(), User_Id = c.Guid(nullable: false), UserMessageCreationMode_Id = c.Guid(nullable: false), UserMessageState_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Messages", t => t.Message_Id) .ForeignKey("dbo.UserMessages", t => t.TransmittedUserMessage_Id) .ForeignKey("dbo.Users", t => t.User_Id) .ForeignKey("dbo.UserMessageCreationModes", t => t.UserMessageCreationMode_Id) .ForeignKey("dbo.UserMessageStates", t => t.UserMessageState_Id) .Index(t => t.Message_Id) .Index(t => t.TransmittedUserMessage_Id) .Index(t => t.User_Id) .Index(t => t.UserMessageCreationMode_Id) .Index(t => t.UserMessageState_Id); CreateTable( "dbo.UserMessageCreationModes", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), UserMessageCreationModeEnum = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.UserMessageStates", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), UserMessageStateEnum = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.MessageHashtags", c => new { Id = c.Guid(nullable: false, identity: true), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), Hashtag_Id = c.Guid(nullable: false), Message_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Hashtags", t => t.Hashtag_Id) .ForeignKey("dbo.Messages", t => t.Message_Id) .Index(t => t.Hashtag_Id) .Index(t => t.Message_Id); CreateTable( "dbo.Hashtags", c => new { Id = c.Guid(nullable: false, identity: 
true), Name = c.String(nullable: false, maxLength: 50), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id) .Index(t => t.Name, unique: true); CreateTable( "dbo.MessageStates", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 100), MessageStateEnum = c.Int(nullable: false), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.Errors", c => new { Id = c.Guid(nullable: false, identity: true), ErrorMessage = c.String(), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), User_Id = c.Guid(), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Users", t => t.User_Id) .Index(t => t.User_Id); CreateTable( "dbo.AspNetRoles", c => new { Id = c.Guid(nullable: false), Name = c.String(nullable: false, maxLength: 256), }) .PrimaryKey(t => t.Id) .Index(t => t.Name, unique: true, name: "RoleNameIndex"); CreateTable( "dbo.Settings", c => new { Id = c.Guid(nullable: false, identity: true), Name = c.String(nullable: false), Key = c.String(nullable: false), Value = c.String(), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), }) .PrimaryKey(t => t.Id); CreateTable( "dbo.UserConnections", c => new { Id = c.Guid(nullable: false, identity: true), Version = c.Int(nullable: false), CreationDate = c.DateTime(nullable: false), ModificationDate = c.DateTime(), FromUser_Id = c.Guid(nullable: false), ToUser_Id = c.Guid(nullable: false), }) .PrimaryKey(t => t.Id) .ForeignKey("dbo.Users", t => t.FromUser_Id) .ForeignKey("dbo.Users", t => t.ToUser_Id) .Index(t => t.FromUser_Id) .Index(t => t.ToUser_Id); CreateTable( "dbo.MessageCategories", c => new { Message_Id = c.Guid(nullable: false), Category_Id = c.Guid(nullable: false), }) .PrimaryKey(t => new { t.Message_Id, t.Category_Id }) .ForeignKey("dbo.Messages", t => t.Message_Id) .ForeignKey("dbo.Categories", t => t.Category_Id) .Index(t => t.Message_Id) .Index(t => t.Category_Id); CreateTable( "dbo.FileMessages", c => new { File_Id = c.Guid(nullable: false), Message_Id = c.Guid(nullable: false), }) .PrimaryKey(t => new { t.File_Id, t.Message_Id }) .ForeignKey("dbo.Files", t => t.File_Id) .ForeignKey("dbo.Messages", t => t.Message_Id) .Index(t => t.File_Id) .Index(t => t.Message_Id); CreateTable( "dbo.UserLanguages", c => new { User_Id = c.Guid(nullable: false), Language_Id = c.Guid(nullable: false), }) .PrimaryKey(t => new { t.User_Id, t.Language_Id }) .ForeignKey("dbo.Users", t => t.User_Id) .ForeignKey("dbo.Languages", t => t.Language_Id) .Index(t => t.User_Id) .Index(t => t.Language_Id); } public override void Down() { DropForeignKey("dbo.UserConnections", "ToUser_Id", "dbo.Users"); DropForeignKey("dbo.UserConnections", "FromUser_Id", "dbo.Users"); DropForeignKey("dbo.AspNetUserRoles", "RoleId", "dbo.AspNetRoles"); DropForeignKey("dbo.Errors", "User_Id", "dbo.Users"); DropForeignKey("dbo.Messages", "User_Id", "dbo.Users"); DropForeignKey("dbo.Messages", "MessageState_Id", "dbo.MessageStates"); DropForeignKey("dbo.MessageHashtags", "Message_Id", "dbo.Messages"); DropForeignKey("dbo.MessageHashtags", "Hashtag_Id", "dbo.Hashtags"); DropForeignKey("dbo.Messages", "Language_Id", "dbo.Languages"); DropForeignKey("dbo.Files", "User_Id", "dbo.Users"); DropForeignKey("dbo.UserMessages", "UserMessageState_Id", 
"dbo.UserMessageStates"); DropForeignKey("dbo.UserMessages", "UserMessageCreationMode_Id", "dbo.UserMessageCreationModes"); DropForeignKey("dbo.UserMessages", "User_Id", "dbo.Users"); DropForeignKey("dbo.UserMessages", "TransmittedUserMessage_Id", "dbo.UserMessages"); DropForeignKey("dbo.UserMessages", "Message_Id", "dbo.Messages"); DropForeignKey("dbo.UserLanguages", "Language_Id", "dbo.Languages"); DropForeignKey("dbo.UserLanguages", "User_Id", "dbo.Users"); DropForeignKey("dbo.UserCategories", "User_Id", "dbo.Users"); DropForeignKey("dbo.UserCategories", "Category_Id", "dbo.Categories"); DropForeignKey("dbo.AspNetUserRoles", "UserId", "dbo.Users"); DropForeignKey("dbo.AspNetUserLogins", "UserId", "dbo.Users"); DropForeignKey("dbo.Users", "Language_Id", "dbo.Languages"); DropForeignKey("dbo.AspNetUserClaims", "UserId", "dbo.Users"); DropForeignKey("dbo.FileMessages", "Message_Id", "dbo.Messages"); DropForeignKey("dbo.FileMessages", "File_Id", "dbo.Files"); DropForeignKey("dbo.Files", "FileType_Id", "dbo.FileTypes"); DropForeignKey("dbo.Messages", "Comment_Id", "dbo.Messages"); DropForeignKey("dbo.MessageCategories", "Category_Id", "dbo.Categories"); DropForeignKey("dbo.MessageCategories", "Message_Id", "dbo.Messages"); DropIndex("dbo.UserLanguages", new[] { "Language_Id" }); DropIndex("dbo.UserLanguages", new[] { "User_Id" }); DropIndex("dbo.FileMessages", new[] { "Message_Id" }); DropIndex("dbo.FileMessages", new[] { "File_Id" }); DropIndex("dbo.MessageCategories", new[] { "Category_Id" }); DropIndex("dbo.MessageCategories", new[] { "Message_Id" }); DropIndex("dbo.UserConnections", new[] { "ToUser_Id" }); DropIndex("dbo.UserConnections", new[] { "FromUser_Id" }); DropIndex("dbo.AspNetRoles", "RoleNameIndex"); DropIndex("dbo.Errors", new[] { "User_Id" }); DropIndex("dbo.Hashtags", new[] { "Name" }); DropIndex("dbo.MessageHashtags", new[] { "Message_Id" }); DropIndex("dbo.MessageHashtags", new[] { "Hashtag_Id" }); DropIndex("dbo.UserMessages", new[] { "UserMessageState_Id" }); DropIndex("dbo.UserMessages", new[] { "UserMessageCreationMode_Id" }); DropIndex("dbo.UserMessages", new[] { "User_Id" }); DropIndex("dbo.UserMessages", new[] { "TransmittedUserMessage_Id" }); DropIndex("dbo.UserMessages", new[] { "Message_Id" }); DropIndex("dbo.UserCategories", new[] { "User_Id" }); DropIndex("dbo.UserCategories", new[] { "Category_Id" }); DropIndex("dbo.AspNetUserRoles", new[] { "RoleId" }); DropIndex("dbo.AspNetUserRoles", new[] { "UserId" }); DropIndex("dbo.AspNetUserLogins", new[] { "UserId" }); DropIndex("dbo.AspNetUserClaims", new[] { "UserId" }); DropIndex("dbo.Users", new[] { "Language_Id" }); DropIndex("dbo.Users", "UserNameIndex"); DropIndex("dbo.Files", new[] { "User_Id" }); DropIndex("dbo.Files", new[] { "FileType_Id" }); DropIndex("dbo.Messages", new[] { "User_Id" }); DropIndex("dbo.Messages", new[] { "MessageState_Id" }); DropIndex("dbo.Messages", new[] { "Language_Id" }); DropIndex("dbo.Messages", new[] { "Comment_Id" }); DropTable("dbo.UserLanguages"); DropTable("dbo.FileMessages"); DropTable("dbo.MessageCategories"); DropTable("dbo.UserConnections"); DropTable("dbo.Settings"); DropTable("dbo.AspNetRoles"); DropTable("dbo.Errors"); DropTable("dbo.MessageStates"); DropTable("dbo.Hashtags"); DropTable("dbo.MessageHashtags"); DropTable("dbo.UserMessageStates"); DropTable("dbo.UserMessageCreationModes"); DropTable("dbo.UserMessages"); DropTable("dbo.UserCategories"); DropTable("dbo.AspNetUserRoles"); DropTable("dbo.AspNetUserLogins"); DropTable("dbo.Languages"); 
DropTable("dbo.AspNetUserClaims"); DropTable("dbo.Users"); DropTable("dbo.FileTypes"); DropTable("dbo.Files"); DropTable("dbo.Messages"); DropTable("dbo.Categories"); } } }
using Microsoft.Xna.Framework; using Microsoft.Xna.Framework.Input; namespace HadoukInput { /// <summary> /// Helper for reading input from keyboard and gamepad. This class tracks both /// the current and previous state of both input devices, and implements query /// methods for high level input actions such as "move up through the menu" /// or "pause the game". /// </summary> public class InputState : IInputState { #region Properties /// <summary> /// The trigger dead zone. /// </summary> private const float TriggerDeadZone = 0.25f; public readonly GamePadState[] _currentGamePadStates; public readonly GamePadState[] _lastGamePadStates; public readonly bool[] _gamePadWasConnected; /// <summary> /// the radius of the controller thumbstick dead zone /// </summary> private float _deadZone; public KeyboardState CurrentKeyboardState { get; private set; } public KeyboardState LastKeyboardState { get; private set; } /// <summary> /// Gets or sets the size of the thumbstick dead zone. /// To square off the dead zone, set the DeadZoneType to Axial and set this to 0.5f /// </summary> /// <value>The size of the dead zone.</value> public float DeadZone { get { return _deadZone; } set { _deadZone = value; DeadZoneSquared = _deadZone * _deadZone; } } /// <summary> /// the square of the thumbstick dead zone /// </summary> public float DeadZoneSquared { get; private set; } public bool CheckControllers { get; set; } #endregion //Properties #region Initialization /// <summary> /// Constructs a new input state. /// </summary> public InputState() { CheckControllers = true; CurrentKeyboardState = new KeyboardState(); LastKeyboardState = new KeyboardState(); _currentGamePadStates = new GamePadState[GamePad.MaximumGamePadCount]; _lastGamePadStates = new GamePadState[GamePad.MaximumGamePadCount]; _gamePadWasConnected = new bool[GamePad.MaximumGamePadCount]; for (var i = 0; i < GamePad.MaximumGamePadCount; i++) { _gamePadWasConnected[i] = false; } DeadZone = 0.27f; } #endregion //Initialization #region Public Methods public bool IsConnected(int controllerIndex) { return _currentGamePadStates[controllerIndex].IsConnected; } public virtual Vector2 LeftThumbstick(int controllerIndex) { return _currentGamePadStates[controllerIndex].ThumbSticks.Left; } public virtual Vector2 RightThumbstick(int controllerIndex) { return _currentGamePadStates[controllerIndex].ThumbSticks.Right; } public bool DPadUp(int controllerIndex) { return ButtonState.Pressed == _currentGamePadStates[controllerIndex].DPad.Up; } public bool DPadDown(int controllerIndex) { return ButtonState.Pressed == _currentGamePadStates[controllerIndex].DPad.Down; } public bool DPadLeft(int controllerIndex) { return ButtonState.Pressed == _currentGamePadStates[controllerIndex].DPad.Left; } public bool DPadRight(int controllerIndex) { return ButtonState.Pressed == _currentGamePadStates[controllerIndex].DPad.Right; } /// <summary> /// Reads the latest state of the keyboard and gamepad. /// </summary> public virtual void Update() { LastKeyboardState = CurrentKeyboardState; CurrentKeyboardState = Keyboard.GetState(); if (CheckControllers) { for (var i = 0; i < GamePad.MaximumGamePadCount; i++) { _lastGamePadStates[i] = _currentGamePadStates[i]; _currentGamePadStates[i] = GamePad.GetState(i, GamePadDeadZone.None); // Keep track of whether a gamepad has ever been connected, so we can detect if it is unplugged. 
if (_currentGamePadStates[i].IsConnected) { _gamePadWasConnected[i] = true; } } } } /// <summary> /// Helper for checking if a key was newly pressed during this update, /// i.e. it is down in the current keyboard state but was up in the previous one. /// </summary> public bool IsNewKeyPress(Keys key) { return (CurrentKeyboardState.IsKeyDown(key) && LastKeyboardState.IsKeyUp(key)); } /// <summary> /// Helper for checking if a button was newly pressed during this update. /// The controllingPlayer parameter specifies which player to read input for. /// If this is null, it will accept input from any player. When a button press /// is detected, the output playerIndex reports which player pressed it. /// </summary> public bool IsNewButtonPress(Buttons button, int? controllingPlayer, out int playerIndex) { if (controllingPlayer.HasValue) { // Read input from the specified player. playerIndex = controllingPlayer.Value; return (ButtonDown(playerIndex, button) && !PrevButtonDown(playerIndex, button)); } else { // Accept input from any player. for (int i = 0; i < GamePad.MaximumGamePadCount; i++) { if (IsNewButtonPress(button, i, out playerIndex)) { return true; } } playerIndex = 0; return false; } } /// <summary> /// Checks for a "pause the game" input action. /// The controllingPlayer parameter specifies which player to read /// input for. If this is null, it will accept input from any player. /// </summary> public bool IsPauseGame(int? controllingPlayer) { // the playerIndex out value is not needed here return IsNewKeyPress(Keys.Escape) || IsNewButtonPress(Buttons.Back, controllingPlayer, out int playerIndex) || IsNewButtonPress(Buttons.Start, controllingPlayer, out playerIndex); } #region Button Press Methods /// <summary> /// Check if the given button is currently down on the given player's gamepad.
/// </summary> /// <returns><c>true</c>, if down was buttoned, <c>false</c> otherwise.</returns> /// <param name="playerIndex">I player index.</param> /// <param name="button">Button.</param> public virtual bool ButtonDown(int playerIndex, Buttons button) { //check that button on that gamepad return CheckButton(_currentGamePadStates[playerIndex], button); } /// <summary> /// Check if the wass button down last time /// </summary> /// <returns><c>true</c>, if button down was previoused, <c>false</c> otherwise.</returns> /// <param name="playerIndex">I player index.</param> /// <param name="button">Button.</param> public virtual bool PrevButtonDown(int playerIndex, Buttons button) { //check that button on that gamepad return CheckButton(_lastGamePadStates[playerIndex], button); } /// <summary> /// Given a game pad state and a button, check if the button is down on that gamepadstate /// </summary> /// <returns><c>true</c>, if button was checked, <c>false</c> otherwise.</returns> /// <param name="gamePad">My game pad.</param> /// <param name="button">Button.</param> private bool CheckButton(GamePadState gamePad, Buttons button) { switch (button) { case Buttons.DPadUp: { return ButtonState.Pressed == gamePad.DPad.Up; } case Buttons.DPadDown: { //don't do down if a horizontal direction is held return (!CheckButton(gamePad, Buttons.DPadLeft) && !CheckButton(gamePad, Buttons.DPadRight) && (ButtonState.Pressed == gamePad.DPad.Down)); } case Buttons.DPadLeft: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.DPadUp) && (ButtonState.Pressed == gamePad.DPad.Left)); } case Buttons.DPadRight: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.DPadUp) && (ButtonState.Pressed == gamePad.DPad.Right)); } case Buttons.Start: { return ButtonState.Pressed == gamePad.Buttons.Start; } case Buttons.Back: { return ButtonState.Pressed == gamePad.Buttons.Back; } case Buttons.LeftStick: { return ButtonState.Pressed == gamePad.Buttons.LeftStick; } case Buttons.RightStick: { return ButtonState.Pressed == gamePad.Buttons.RightStick; } case Buttons.LeftShoulder: { return ButtonState.Pressed == gamePad.Buttons.LeftShoulder; } case Buttons.RightShoulder: { return ButtonState.Pressed == gamePad.Buttons.RightShoulder; } case Buttons.BigButton: { return ButtonState.Pressed == gamePad.Buttons.BigButton; } case Buttons.A: { return ButtonState.Pressed == gamePad.Buttons.A; } case Buttons.B: { return ButtonState.Pressed == gamePad.Buttons.B; } case Buttons.X: { return ButtonState.Pressed == gamePad.Buttons.X; } case Buttons.Y: { return ButtonState.Pressed == gamePad.Buttons.Y; } case Buttons.RightTrigger: { return gamePad.Triggers.Right > TriggerDeadZone; } case Buttons.LeftTrigger: { return gamePad.Triggers.Left > TriggerDeadZone; } case Buttons.LeftThumbstickUp: { return gamePad.ThumbSticks.Left.Y > DeadZone; } case Buttons.LeftThumbstickDown: { //don't do down if a horizontal direction is held return (!CheckButton(gamePad, Buttons.LeftThumbstickLeft) && !CheckButton(gamePad, Buttons.LeftThumbstickRight) && (gamePad.ThumbSticks.Left.Y < -DeadZone)); } case Buttons.LeftThumbstickLeft: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.LeftThumbstickUp) && (gamePad.ThumbSticks.Left.X < -DeadZone)); } case Buttons.LeftThumbstickRight: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.LeftThumbstickUp) && (gamePad.ThumbSticks.Left.X > DeadZone)); } case Buttons.RightThumbstickUp: { return 
gamePad.ThumbSticks.Right.Y > DeadZone; } case Buttons.RightThumbstickDown: { //don't do down if a horizontal direction is held return (!CheckButton(gamePad, Buttons.RightThumbstickLeft) && !CheckButton(gamePad, Buttons.RightThumbstickRight) && (gamePad.ThumbSticks.Right.Y <- DeadZone)); } case Buttons.RightThumbstickLeft: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.RightThumbstickUp) && (gamePad.ThumbSticks.Right.X < -DeadZone)); } case Buttons.RightThumbstickRight: { //don't do horizontal if up direction is held return (!CheckButton(gamePad, Buttons.RightThumbstickUp) && (gamePad.ThumbSticks.Right.X > DeadZone)); } default: { return false; } } } #endregion //Button Press Methods #region Menu Methods /// <summary> /// Checks for a "menu select" input action. /// The controllingPlayer parameter specifies which player to read input for. /// If this is null, it will accept input from any player. When the action /// is detected, the output playerIndex reports which player pressed it. /// </summary> public bool IsMenuSelect(int? controllingPlayer, out int playerIndex) { //default to player 1 in case of keyboard playerIndex = 0; return IsNewKeyPress(Keys.Space) || IsNewKeyPress(Keys.Enter) || IsNewKeyPress(Keys.Z) || IsNewButtonPress(Buttons.A, controllingPlayer, out playerIndex) || IsNewButtonPress(Buttons.Start, controllingPlayer, out playerIndex); } /// <summary> /// Checks for a "menu cancel" input action. /// The controllingPlayer parameter specifies which player to read input for. /// If this is null, it will accept input from any player. When the action /// is detected, the output playerIndex reports which player pressed it. /// </summary> public bool IsMenuCancel(int? controllingPlayer, out int playerIndex) { //default to player 1 in case of keyboard playerIndex = 0; return IsNewKeyPress(Keys.Escape) || IsNewKeyPress(Keys.X) || IsNewButtonPress(Buttons.B, controllingPlayer, out playerIndex) || IsNewButtonPress(Buttons.Back, controllingPlayer, out playerIndex); } /// <summary> /// Checks for a "menu up" input action. /// The controllingPlayer parameter specifies which player to read /// input for. If this is null, it will accept input from any player. /// </summary> public bool IsMenuUp(int? controllingPlayer) { return IsNewKeyPress(Keys.Up) || IsNewButtonPress(Buttons.DPadUp, controllingPlayer, out int playerIndex) || IsNewButtonPress(Buttons.LeftThumbstickUp, controllingPlayer, out playerIndex); } /// <summary> /// Checks for a "menu down" input action. /// The controllingPlayer parameter specifies which player to read /// input for. If this is null, it will accept input from any player. /// </summary> public bool IsMenuDown(int? controllingPlayer) { return IsNewKeyPress(Keys.Down) || IsNewButtonPress(Buttons.DPadDown, controllingPlayer, out int playerIndex) || IsNewButtonPress(Buttons.LeftThumbstickDown, controllingPlayer, out playerIndex); } /// <summary> /// Checks for a "menu left" input action. /// The controllingPlayer parameter specifies which player to read /// input for. If this is null, it will accept input from any player. /// </summary> public bool IsMenuLeft(int? controllingPlayer) { return IsNewKeyPress(Keys.Left) || IsNewButtonPress(Buttons.DPadLeft, controllingPlayer, out int playerIndex) || IsNewButtonPress(Buttons.LeftThumbstickLeft, controllingPlayer, out playerIndex); } /// <summary> /// Checks for a "menu Right" input action. /// The controllingPlayer parameter specifies which player to read /// input for. 
If this is null, it will accept input from any player. /// </summary> public bool IsMenuRight(int? controllingPlayer) { return IsNewKeyPress(Keys.Right) || IsNewButtonPress(Buttons.DPadRight, controllingPlayer, out int playerIndex) || IsNewButtonPress(Buttons.LeftThumbstickRight, controllingPlayer, out playerIndex); } #endregion //Menu Methods #endregion //Public Methods } }
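// Illustrative sketch (not part of HadoukInput): a typical per-frame use of InputState inside a
// MonoGame Game.Update override. The ExampleGame class and the empty if-bodies are assumptions
// for the example; Update, IsPauseGame and IsMenuSelect are the public API defined above.
public class ExampleGame : Microsoft.Xna.Framework.Game
{
    private readonly HadoukInput.InputState _input = new HadoukInput.InputState();

    protected override void Update(Microsoft.Xna.Framework.GameTime gameTime)
    {
        // Capture the current keyboard/gamepad snapshot and keep the previous one for edge detection.
        _input.Update();

        if (_input.IsPauseGame(null))
        {
            // null means "accept pause input from any connected player".
        }

        if (_input.IsMenuSelect(null, out int playerIndex))
        {
            // playerIndex reports which pad confirmed the selection (0 for keyboard input).
        }

        base.Update(gameTime);
    }
}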
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Immutable; using System.Composition; using System.Linq; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CodeFixes; using Analyzer.Utilities; using System.Threading; using Analyzer.Utilities.Extensions; using Microsoft.CodeAnalysis.Editing; namespace Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines { /// <summary> /// CA1066: Implement IEquatable when overriding Object.Equals /// CA1067: Override Object.Equals(object) when implementing IEquatable{T} /// </summary> [ExportCodeFixProvider(LanguageNames.CSharp, LanguageNames.VisualBasic), Shared] public sealed class EquatableFixer : CodeFixProvider { public override ImmutableArray<string> FixableDiagnosticIds => ImmutableArray.Create(EquatableAnalyzer.ImplementIEquatableRuleId, EquatableAnalyzer.OverrideObjectEqualsRuleId); public override FixAllProvider GetFixAllProvider() { // See https://github.com/dotnet/roslyn/blob/master/docs/analyzers/FixAllProvider.md for more information on Fix All Providers return WellKnownFixAllProviders.BatchFixer; } public override async Task RegisterCodeFixesAsync(CodeFixContext context) { SyntaxGenerator generator = SyntaxGenerator.GetGenerator(context.Document); SyntaxNode root = await context.Document.GetSyntaxRootAsync(context.CancellationToken).ConfigureAwait(false); SyntaxNode declaration = root.FindNode(context.Span); declaration = generator.GetDeclaration(declaration); if (declaration == null) { return; } SemanticModel model = await context.Document.GetSemanticModelAsync(context.CancellationToken).ConfigureAwait(false); if (model.GetDeclaredSymbol(declaration, context.CancellationToken) is not INamedTypeSymbol type || type.TypeKind != TypeKind.Class && type.TypeKind != TypeKind.Struct) { return; } INamedTypeSymbol? 
equatableType = model.Compilation.GetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemIEquatable1); if (equatableType == null) { return; } if (type.TypeKind == TypeKind.Struct && !TypeImplementsEquatable(type, equatableType)) { string title = MicrosoftCodeQualityAnalyzersResources.ImplementEquatable; context.RegisterCodeFix(new MyCodeAction( title, async ct => await ImplementEquatableInStructAsync(context.Document, declaration, type, model.Compilation, equatableType, ct).ConfigureAwait(false), equivalenceKey: title), context.Diagnostics); } if (!type.OverridesEquals()) { string title = MicrosoftCodeQualityAnalyzersResources.OverrideEqualsOnImplementingIEquatableCodeActionTitle; context.RegisterCodeFix(new MyCodeAction( title, async ct => await OverrideObjectEqualsAsync(context.Document, declaration, type, equatableType, ct).ConfigureAwait(false), equivalenceKey: title), context.Diagnostics); } } private static bool TypeImplementsEquatable(INamedTypeSymbol type, INamedTypeSymbol equatableType) { INamedTypeSymbol constructedEquatable = equatableType.Construct(type); INamedTypeSymbol implementation = type .Interfaces .FirstOrDefault(x => x.Equals(constructedEquatable)); return implementation != null; } private static async Task<Document> ImplementEquatableInStructAsync(Document document, SyntaxNode declaration, INamedTypeSymbol typeSymbol, Compilation compilation, INamedTypeSymbol equatableType, CancellationToken cancellationToken) { var editor = await DocumentEditor.CreateAsync(document, cancellationToken).ConfigureAwait(false); var generator = editor.Generator; var equalsMethod = generator.MethodDeclaration( WellKnownMemberNames.ObjectEquals, new[] { generator.ParameterDeclaration("other", generator.TypeExpression(typeSymbol)) }, returnType: generator.TypeExpression(SpecialType.System_Boolean), accessibility: Accessibility.Public, statements: generator.DefaultMethodBody(compilation)); editor.AddMember(declaration, equalsMethod); INamedTypeSymbol constructedType = equatableType.Construct(typeSymbol); editor.AddInterfaceType(declaration, generator.TypeExpression(constructedType)); return editor.GetChangedDocument(); } private static async Task<Document> OverrideObjectEqualsAsync(Document document, SyntaxNode declaration, INamedTypeSymbol typeSymbol, INamedTypeSymbol equatableType, CancellationToken cancellationToken) { var editor = await DocumentEditor.CreateAsync(document, cancellationToken).ConfigureAwait(false); var generator = editor.Generator; var argumentName = generator.IdentifierName("obj"); SyntaxNode returnStatement; if (HasExplicitEqualsImplementation(typeSymbol, equatableType)) { returnStatement = typeSymbol.TypeKind == TypeKind.Class ? GetReturnStatementForExplicitClass(generator, typeSymbol, argumentName, equatableType) : GetReturnStatementForExplicitStruct(generator, typeSymbol, argumentName, equatableType); } else { returnStatement = typeSymbol.TypeKind == TypeKind.Class ? 
GetReturnStatementForImplicitClass(generator, typeSymbol, argumentName) : GetReturnStatementForImplicitStruct(generator, typeSymbol, argumentName); } var equalsMethod = generator.MethodDeclaration( WellKnownMemberNames.ObjectEquals, new[] { generator.ParameterDeclaration(argumentName.ToString(), generator.TypeExpression(SpecialType.System_Object)) }, returnType: generator.TypeExpression(SpecialType.System_Boolean), accessibility: Accessibility.Public, modifiers: DeclarationModifiers.Override, statements: new[] { returnStatement }); editor.AddMember(declaration, equalsMethod); return editor.GetChangedDocument(); } private static bool HasExplicitEqualsImplementation(INamedTypeSymbol typeSymbol, INamedTypeSymbol equatableType) { INamedTypeSymbol constructedType = equatableType.Construct(typeSymbol); IMethodSymbol constructedEqualsMethod = constructedType.GetMembers().OfType<IMethodSymbol>().FirstOrDefault(); foreach (IMethodSymbol method in typeSymbol.GetMembers().OfType<IMethodSymbol>()) { foreach (IMethodSymbol explicitImplementation in method.ExplicitInterfaceImplementations) { if (explicitImplementation.Equals(constructedEqualsMethod)) { return true; } } } return false; } private static SyntaxNode GetReturnStatementForExplicitClass(SyntaxGenerator generator, INamedTypeSymbol typeSymbol, SyntaxNode argumentName, INamedTypeSymbol equatableType) { return generator.ReturnStatement( generator.InvocationExpression( generator.MemberAccessExpression( generator.CastExpression( equatableType.Construct(typeSymbol), generator.ThisExpression()), WellKnownMemberNames.ObjectEquals), generator.TryCastExpression( argumentName, typeSymbol))); } private static SyntaxNode GetReturnStatementForExplicitStruct(SyntaxGenerator generator, INamedTypeSymbol typeSymbol, SyntaxNode argumentName, INamedTypeSymbol equatableType) { return generator.ReturnStatement( generator.LogicalAndExpression( generator.IsTypeExpression( argumentName, typeSymbol), generator.InvocationExpression( generator.MemberAccessExpression( generator.CastExpression( equatableType.Construct(typeSymbol), generator.ThisExpression()), WellKnownMemberNames.ObjectEquals), generator.CastExpression( typeSymbol, argumentName)))); } private static SyntaxNode GetReturnStatementForImplicitClass(SyntaxGenerator generator, INamedTypeSymbol typeSymbol, SyntaxNode argumentName) { return generator.ReturnStatement( generator.InvocationExpression( generator.IdentifierName(WellKnownMemberNames.ObjectEquals), generator.Argument( generator.TryCastExpression( argumentName, typeSymbol)))); } private static SyntaxNode GetReturnStatementForImplicitStruct(SyntaxGenerator generator, INamedTypeSymbol typeSymbol, SyntaxNode argumentName) { return generator.ReturnStatement( generator.LogicalAndExpression( generator.IsTypeExpression( argumentName, typeSymbol), generator.InvocationExpression( generator.IdentifierName(WellKnownMemberNames.ObjectEquals), generator.CastExpression( typeSymbol, argumentName)))); } // Needed for Telemetry (https://github.com/dotnet/roslyn-analyzers/issues/192) private class MyCodeAction : DocumentChangeAction { public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument, string equivalenceKey) : base(title, createChangedDocument, equivalenceKey) { } } } }
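// Illustrative sketch (not part of the analyzer): the shape of code the two fixes above aim for.
// CA1066 adds IEquatable<T> with a strongly-typed Equals to a type that already overrides
// object.Equals; CA1067 adds an object.Equals override that forwards to the IEquatable<T>
// implementation, mirroring GetReturnStatementForImplicitStruct. The Point struct is an assumption.
public struct Point : System.IEquatable<Point>
{
    public int X;
    public int Y;

    // Strongly-typed comparison satisfying IEquatable<Point>.
    public bool Equals(Point other) => X == other.X && Y == other.Y;

    // object.Equals forwards to the typed overload after a type test,
    // like the "is T && Equals((T)obj)" return statement the fixer builds.
    public override bool Equals(object obj) => obj is Point && Equals((Point)obj);

    public override int GetHashCode() => unchecked((X * 397) ^ Y);
}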
#region MIT license // // MIT license // // Copyright (c) 2007-2008 Jiri Moudry, Pascal Craponne // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // #endregion using System; using System.Collections; using System.Data; using System.Data.Common; using System.Data.Linq; using System.Data.Linq.Mapping; using System.Linq.Expressions; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; using System.Reflection.Emit; #if MONO_STRICT using AttributeMappingSource = System.Data.Linq.Mapping.AttributeMappingSource; #else using AttributeMappingSource = DbLinq.Data.Linq.Mapping.AttributeMappingSource; #endif using DbLinq; using DbLinq.Data.Linq; using DbLinq.Data.Linq.Database; using DbLinq.Data.Linq.Database.Implementation; using DbLinq.Data.Linq.Identity; using DbLinq.Data.Linq.Implementation; using DbLinq.Data.Linq.Mapping; using DbLinq.Data.Linq.Sugar; using DbLinq.Factory; using DbLinq.Util; using DbLinq.Vendor; #if MONO_STRICT namespace System.Data.Linq #else namespace DbLinq.Data.Linq #endif { public partial class DataContext : IDisposable { //private readonly Dictionary<string, ITable> _tableMap = new Dictionary<string, ITable>(); private readonly Dictionary<Type, ITable> _tableMap = new Dictionary<Type, ITable>(); public MetaModel Mapping { get; private set; } // PC question: at ctor, we get a IDbConnection and the Connection property exposes a DbConnection // WTF? public DbConnection Connection { get { return DatabaseContext.Connection as DbConnection; } } // all properties below are set public to optionally be injected internal IVendor Vendor { get; set; } internal IQueryBuilder QueryBuilder { get; set; } internal IQueryRunner QueryRunner { get; set; } internal IMemberModificationHandler MemberModificationHandler { get; set; } internal IDatabaseContext DatabaseContext { get; private set; } // /all properties... private bool objectTrackingEnabled = true; private bool deferredLoadingEnabled = true; private bool queryCacheEnabled = false; /// <summary> /// Disable the QueryCache: this is surely good for rarely used Select, since preparing /// the SelectQuery to be cached could require more time than build the sql from scratch. 
/// </summary> [DBLinqExtended] public bool QueryCacheEnabled { get { return queryCacheEnabled; } set { queryCacheEnabled = value; } } private IEntityTracker currentTransactionEntities; private IEntityTracker CurrentTransactionEntities { get { if (this.currentTransactionEntities == null) { if (this.ObjectTrackingEnabled) this.currentTransactionEntities = new EntityTracker(); else this.currentTransactionEntities = new DisabledEntityTracker(); } return this.currentTransactionEntities; } } private IEntityTracker allTrackedEntities; private IEntityTracker AllTrackedEntities { get { if (this.allTrackedEntities == null) { allTrackedEntities = ObjectTrackingEnabled ? (IEntityTracker) new EntityTracker() : (IEntityTracker) new DisabledEntityTracker(); } return this.allTrackedEntities; } } private IIdentityReaderFactory identityReaderFactory; private readonly IDictionary<Type, IIdentityReader> identityReaders = new Dictionary<Type, IIdentityReader>(); /// <summary> /// The default behavior creates one MappingContext. /// </summary> [DBLinqExtended] internal virtual MappingContext _MappingContext { get; set; } [DBLinqExtended] internal IVendorProvider _VendorProvider { get; set; } public DataContext(IDbConnection connection, MappingSource mapping) { Profiler.At("START DataContext(IDbConnection, MappingSource)"); Init(new DatabaseContext(connection), mapping, null); Profiler.At("END DataContext(IDbConnection, MappingSource)"); } public DataContext(IDbConnection connection) { Profiler.At("START DataContext(IDbConnection)"); if (connection == null) throw new ArgumentNullException("connection"); Init(new DatabaseContext(connection), null, null); Profiler.At("END DataContext(IDbConnection)"); } [DbLinqToDo] public DataContext(string fileOrServerOrConnection, MappingSource mapping) { Profiler.At("START DataContext(string, MappingSource)"); if (fileOrServerOrConnection == null) throw new ArgumentNullException("fileOrServerOrConnection"); if (mapping == null) throw new ArgumentNullException("mapping"); if (File.Exists(fileOrServerOrConnection)) throw new NotImplementedException("File names not supported."); // Is this a decent server name check? // It assumes that the connection string will have at least 2 // parameters (separated by ';') if (!fileOrServerOrConnection.Contains(";")) throw new NotImplementedException("Server name not supported."); // Assume it's a connection string... IVendor ivendor = GetVendor(ref fileOrServerOrConnection); IDbConnection dbConnection = ivendor.CreateDbConnection(fileOrServerOrConnection); Init(new DatabaseContext(dbConnection), mapping, ivendor); Profiler.At("END DataContext(string, MappingSource)"); } /// <summary> /// Construct DataContext, given a connectionString. /// To determine which DB type to go against, we look for 'DbLinqProvider=xxx' substring. /// If not found, we assume that we are dealing with MS Sql Server. /// /// Valid values are names of provider DLLs (or any other DLL containing an IVendor implementation) /// DbLinqProvider=Mysql /// DbLinqProvider=Oracle etc. 
/// </summary> /// <param name="connectionString">specifies file or server connection</param> [DbLinqToDo] public DataContext(string connectionString) { Profiler.At("START DataContext(string)"); IVendor ivendor = GetVendor(ref connectionString); IDbConnection dbConnection = ivendor.CreateDbConnection(connectionString); Init(new DatabaseContext(dbConnection), null, ivendor); Profiler.At("END DataContext(string)"); } private IVendor GetVendor(ref string connectionString) { if (connectionString == null) throw new ArgumentNullException("connectionString"); Assembly assy; string vendorClassToLoad; GetVendorInfo(ref connectionString, out assy, out vendorClassToLoad); var types = from type in assy.GetTypes() where type.Name.ToLowerInvariant() == vendorClassToLoad.ToLowerInvariant() && type.GetInterfaces().Contains(typeof(IVendor)) && type.GetConstructor(Type.EmptyTypes) != null select type; if (!types.Any()) { throw new ArgumentException(string.Format("Found no IVendor class in assembly `{0}' named `{1}' having a default constructor.", assy.GetName().Name, vendorClassToLoad)); } else if (types.Count() > 1) { throw new ArgumentException(string.Format("Found too many IVendor classes in assembly `{0}' named `{1}' having a default constructor.", assy.GetName().Name, vendorClassToLoad)); } return (IVendor) Activator.CreateInstance(types.First()); } private void GetVendorInfo(ref string connectionString, out Assembly assembly, out string typeName) { System.Text.RegularExpressions.Regex reProvider = new System.Text.RegularExpressions.Regex(@"DbLinqProvider=([\w\.]+);?"); string assemblyName = null; string vendor; if (!reProvider.IsMatch(connectionString)) { vendor = "SqlServer"; assemblyName = "DbLinq.SqlServer"; } else { var match = reProvider.Match(connectionString); vendor = match.Groups[1].Value; assemblyName = "DbLinq." + vendor; //plain DbLinq - non MONO: //IVendor classes are in DLLs such as "DbLinq.MySql.dll" if (vendor.Contains(".")) { //already fully qualified DLL name? throw new ArgumentException("Please provide a short name, such as 'MySql', not '" + vendor + "'"); } //shorten: "DbLinqProvider=X;Server=Y" -> ";Server=Y" connectionString = reProvider.Replace(connectionString, ""); } typeName = vendor + "Vendor"; try { #if MONO_STRICT assembly = typeof (DataContext).Assembly; // System.Data.Linq.dll #else assembly = Assembly.Load(assemblyName); #endif } catch (Exception e) { throw new ArgumentException( string.Format( "Unable to load the `{0}' DbLinq vendor within assembly '{1}.dll'.", assemblyName, vendor), "connectionString", e); } } private void Init(IDatabaseContext databaseContext, MappingSource mappingSource, IVendor vendor) { if (databaseContext == null) throw new ArgumentNullException("databaseContext"); // Yes, .NET throws an NRE for this. Why it's not ArgumentNullException, I couldn't tell you. if (databaseContext.Connection.ConnectionString == null) throw new NullReferenceException(); string connectionString = databaseContext.Connection.ConnectionString; _VendorProvider = ObjectFactory.Get<IVendorProvider>(); Vendor = vendor ?? (connectionString != null ? GetVendor(ref connectionString) : null) ?? 
_VendorProvider.FindVendorByProviderType(typeof(SqlClient.Sql2005Provider)); DatabaseContext = databaseContext; MemberModificationHandler = ObjectFactory.Create<IMemberModificationHandler>(); // not a singleton: object is stateful QueryBuilder = ObjectFactory.Get<IQueryBuilder>(); QueryRunner = ObjectFactory.Get<IQueryRunner>(); //EntityMap = ObjectFactory.Create<IEntityMap>(); identityReaderFactory = ObjectFactory.Get<IIdentityReaderFactory>(); _MappingContext = new MappingContext(); // initialize the mapping information if (mappingSource == null) mappingSource = new AttributeMappingSource(); Mapping = mappingSource.GetModel(GetType()); } /// <summary> /// Checks if the table is allready mapped or maps it if not. /// </summary> /// <param name="tableType">Type of the table.</param> /// <exception cref="InvalidOperationException">Thrown if the table is not mappable.</exception> private void CheckTableMapping(Type tableType) { //This will throw an exception if the table is not found if(Mapping.GetTable(tableType) == null) { throw new InvalidOperationException("The type '" + tableType.Name + "' is not mapped as a Table."); } } /// <summary> /// Returns a Table for the type TEntity. /// </summary> /// <exception cref="InvalidOperationException">If the type TEntity is not mappable as a Table.</exception> /// <typeparam name="TEntity">The table type.</typeparam> public Table<TEntity> GetTable<TEntity>() where TEntity : class { return (Table<TEntity>)GetTable(typeof(TEntity)); } /// <summary> /// Returns a Table for the given type. /// </summary> /// <param name="type">The table type.</param> /// <exception cref="InvalidOperationException">If the type is not mappable as a Table.</exception> public ITable GetTable(Type type) { Profiler.At("DataContext.GetTable(typeof({0}))", type != null ? type.Name : null); ITable tableExisting; if (_tableMap.TryGetValue(type, out tableExisting)) return tableExisting; //Check for table mapping CheckTableMapping(type); var tableNew = Activator.CreateInstance( typeof(Table<>).MakeGenericType(type) , BindingFlags.NonPublic | BindingFlags.Instance , null , new object[] { this } , System.Globalization.CultureInfo.CurrentCulture) as ITable; _tableMap[type] = tableNew; return tableNew; } public void SubmitChanges() { SubmitChanges(ConflictMode.FailOnFirstConflict); } /// <summary> /// Pings database /// </summary> /// <returns></returns> public bool DatabaseExists() { try { return Vendor.Ping(this); } catch (Exception) { return false; } } /// <summary> /// Commits all pending changes to database /// </summary> /// <param name="failureMode"></param> public virtual void SubmitChanges(ConflictMode failureMode) { if (this.objectTrackingEnabled == false) throw new InvalidOperationException("Object tracking is not enabled for the current data context instance."); using (DatabaseContext.OpenConnection()) //ConnMgr will close connection for us { if (Transaction != null) SubmitChangesImpl(failureMode); else { using (IDbTransaction transaction = DatabaseContext.CreateTransaction()) { try { Transaction = (DbTransaction) transaction; SubmitChangesImpl(failureMode); // TODO: handle conflicts (which can only occur when concurrency mode is implemented) transaction.Commit(); } finally { Transaction = null; } } } } } void SubmitChangesImpl(ConflictMode failureMode) { var queryContext = new QueryContext(this); // There's no sense in updating an entity when it's going to // be deleted in the current transaction, so do deletes first. 
foreach (var entityTrack in CurrentTransactionEntities.EnumerateAll().ToList()) { switch (entityTrack.EntityState) { case EntityState.ToDelete: var deleteQuery = QueryBuilder.GetDeleteQuery(entityTrack.Entity, queryContext); QueryRunner.Delete(entityTrack.Entity, deleteQuery); UnregisterDelete(entityTrack.Entity); AllTrackedEntities.RegisterToDelete(entityTrack.Entity); AllTrackedEntities.RegisterDeleted(entityTrack.Entity); break; default: // ignore. break; } } foreach (var entityTrack in CurrentTransactionEntities.EnumerateAll() .Concat(AllTrackedEntities.EnumerateAll()) .ToList()) { switch (entityTrack.EntityState) { case EntityState.ToInsert: foreach (var toInsert in GetReferencedObjects(entityTrack.Entity)) { InsertEntity(toInsert, queryContext); } break; case EntityState.ToWatch: foreach (var toUpdate in GetReferencedObjects(entityTrack.Entity)) { UpdateEntity(toUpdate, queryContext); } break; default: throw new ArgumentOutOfRangeException(); } } } private IEnumerable<object> GetReferencedObjects(object value) { var values = new EntitySet<object>(); FillReferencedObjects(value, values); return values; } // Breadth-first traversal of an object graph private void FillReferencedObjects(object parent, EntitySet<object> values) { if (parent == null) return; var children = new Queue<object>(); children.Enqueue(parent); while (children.Count > 0) { object value = children.Dequeue(); values.Add(value); IEnumerable<MetaAssociation> associationList = Mapping.GetMetaType(value.GetType()).Associations.Where(a => !a.IsForeignKey); if (associationList.Any()) { foreach (MetaAssociation association in associationList) { var memberData = association.ThisMember; var entitySetValue = memberData.Member.GetMemberValue(value); if (entitySetValue != null) { var hasLoadedOrAssignedValues = entitySetValue.GetType().GetProperty("HasLoadedOrAssignedValues"); if (!((bool)hasLoadedOrAssignedValues.GetValue(entitySetValue, null))) continue; // execution deferred; ignore. 
foreach (var o in ((IEnumerable)entitySetValue)) children.Enqueue(o); } } } } } private void InsertEntity(object entity, QueryContext queryContext) { var insertQuery = QueryBuilder.GetInsertQuery(entity, queryContext); QueryRunner.Insert(entity, insertQuery); Register(entity); UpdateReferencedObjects(entity); MoveToAllTrackedEntities(entity, true); } private void UpdateEntity(object entity, QueryContext queryContext) { if (!AllTrackedEntities.ContainsReference(entity)) InsertEntity(entity, queryContext); else if (MemberModificationHandler.IsModified(entity, Mapping)) { var modifiedMembers = MemberModificationHandler.GetModifiedProperties(entity, Mapping); var updateQuery = QueryBuilder.GetUpdateQuery(entity, modifiedMembers, queryContext); QueryRunner.Update(entity, updateQuery, modifiedMembers); RegisterUpdateAgain(entity); UpdateReferencedObjects(entity); MoveToAllTrackedEntities(entity, false); } } private void UpdateReferencedObjects(object root) { var metaType = Mapping.GetMetaType(root.GetType()); foreach (var assoc in metaType.Associations) { var memberData = assoc.ThisMember; //This is not correct - AutoSyncing applies to auto-updating columns, such as a TimeStamp, not to foreign key associations, which is always automatically synched //Confirmed against default .NET l2sql - association columns are always set, even if AutoSync==AutoSync.Never //if (memberData.Association.ThisKey.Any(m => (m.AutoSync != AutoSync.Always) && (m.AutoSync != sync))) // continue; var oks = memberData.Association.OtherKey.Select(m => m.StorageMember).ToList(); if (oks.Count == 0) continue; var pks = memberData.Association.ThisKey .Select(m => m.StorageMember.GetMemberValue(root)) .ToList(); if (pks.Count != oks.Count) throw new InvalidOperationException( string.Format("Count of primary keys ({0}) doesn't match count of other keys ({1}).", pks.Count, oks.Count)); var members = memberData.Member.GetMemberValue(root) as IEnumerable; if (members == null) continue; foreach (var member in members) { for (int i = 0; i < pks.Count; ++i) { oks[i].SetMemberValue(member, pks[i]); } } } } private void MoveToAllTrackedEntities(object entity, bool insert) { if (!ObjectTrackingEnabled) return; if (CurrentTransactionEntities.ContainsReference(entity)) { CurrentTransactionEntities.RegisterToDelete(entity); if (!insert) CurrentTransactionEntities.RegisterDeleted(entity); } if (!AllTrackedEntities.ContainsReference(entity)) { var identityReader = _GetIdentityReader(entity.GetType()); AllTrackedEntities.RegisterToWatch(entity, identityReader.GetIdentityKey(entity)); } } /// <summary> /// TODO - allow generated methods to call into stored procedures /// </summary> [DBLinqExtended] internal IExecuteResult _ExecuteMethodCall(DataContext context, System.Reflection.MethodInfo method, params object[] sqlParams) { using (DatabaseContext.OpenConnection()) { System.Data.Linq.IExecuteResult result = Vendor.ExecuteMethodCall(context, method, sqlParams); return result; } } [DbLinqToDo] protected IExecuteResult ExecuteMethodCall(object instance, System.Reflection.MethodInfo methodInfo, params object[] parameters) { throw new NotImplementedException(); } #region Identity management [DBLinqExtended] internal IIdentityReader _GetIdentityReader(Type t) { IIdentityReader identityReader; if (!identityReaders.TryGetValue(t, out identityReader)) { identityReader = identityReaderFactory.GetReader(t, this); identityReaders[t] = identityReader; } return identityReader; } [DBLinqExtended] internal object _GetRegisteredEntity(object entity) { // 
TODO: check what is faster: by identity or by ref var identityReader = _GetIdentityReader(entity.GetType()); var identityKey = identityReader.GetIdentityKey(entity); if (identityKey == null) // if we don't have an entitykey here, it means that the entity has no PK return entity; // even var registeredEntityTrack = CurrentTransactionEntities.FindByIdentity(identityKey) ?? AllTrackedEntities.FindByIdentity(identityKey); if (registeredEntityTrack != null) return registeredEntityTrack.Entity; return null; } //internal object GetRegisteredEntityByKey(IdentityKey identityKey) //{ // return EntityMap[identityKey]; //} /// <summary> /// Registers an entity in a watch state /// </summary> /// <param name="entity"></param> /// <returns></returns> [DBLinqExtended] internal object _GetOrRegisterEntity(object entity) { var identityReader = _GetIdentityReader(entity.GetType()); var identityKey = identityReader.GetIdentityKey(entity); SetEntitySetsQueries(entity); SetEntityRefQueries(entity); // if we have no identity, we can't track it if (identityKey == null) return entity; // try to find an already registered entity and return it var registeredEntityTrack = CurrentTransactionEntities.FindByIdentity(identityKey) ?? AllTrackedEntities.FindByIdentity(identityKey); if (registeredEntityTrack != null) return registeredEntityTrack.Entity; // otherwise, register and return AllTrackedEntities.RegisterToWatch(entity, identityKey); return entity; } readonly IDataMapper DataMapper = ObjectFactory.Get<IDataMapper>(); private void SetEntityRefQueries(object entity) { if (!this.deferredLoadingEnabled) return; // BUG: This is ignoring External Mappings from XmlMappingSource. Type thisType = entity.GetType(); IEnumerable<MetaAssociation> associationList = Mapping.GetMetaType(entity.GetType()).Associations.Where(a => a.IsForeignKey); foreach (MetaAssociation association in associationList) { //example of entityRef:Order.Employee var memberData = association.ThisMember; Type otherTableType = association.OtherType.Type; ParameterExpression p = Expression.Parameter(otherTableType, "other"); var otherTable = GetTable(otherTableType); //ie:EmployeeTerritories.EmployeeID var foreignKeys = memberData.Association.ThisKey; BinaryExpression predicate = null; var otherPKs = memberData.Association.OtherKey; IEnumerator<MetaDataMember> otherPKEnumerator = otherPKs.GetEnumerator(); if (otherPKs.Count != foreignKeys.Count) throw new InvalidOperationException("Foreign keys don't match ThisKey"); foreach (MetaDataMember key in foreignKeys) { otherPKEnumerator.MoveNext(); var thisForeignKeyProperty = (PropertyInfo)key.Member; object thisForeignKeyValue = thisForeignKeyProperty.GetValue(entity, null); if (thisForeignKeyValue != null) { BinaryExpression keyPredicate; if (!(thisForeignKeyProperty.PropertyType.IsNullable())) { keyPredicate = Expression.Equal(Expression.MakeMemberAccess(p, otherPKEnumerator.Current.Member), Expression.Constant(thisForeignKeyValue)); } else { var ValueProperty = thisForeignKeyProperty.PropertyType.GetProperty("Value"); keyPredicate = Expression.Equal(Expression.MakeMemberAccess(p, otherPKEnumerator.Current.Member), Expression.Constant(ValueProperty.GetValue(thisForeignKeyValue, null))); } if (predicate == null) predicate = keyPredicate; else predicate = Expression.And(predicate, keyPredicate); } } IEnumerable query = null; if (predicate != null) { query = GetOtherTableQuery(predicate, p, otherTableType, otherTable) as IEnumerable; //it would be interesting surround the above query with a .Take(1) expression 
for performance. } // If no separate Storage is specified, use the member directly MemberInfo storage = memberData.StorageMember; if (storage == null) storage = memberData.Member; // Check that the storage is a field or a writable property if (!(storage is FieldInfo) && !(storage is PropertyInfo && ((PropertyInfo)storage).CanWrite)) { throw new InvalidOperationException(String.Format( "Member {0}.{1} is not a field nor a writable property", storage.DeclaringType, storage.Name)); } Type storageType = storage.GetMemberType(); object entityRefValue = null; if (query != null) entityRefValue = Activator.CreateInstance(storageType, query); else entityRefValue = Activator.CreateInstance(storageType); storage.SetMemberValue(entity, entityRefValue); } } /// <summary> /// This method is executed when the entity is being registered. Each EntitySet property has a internal query that can be set using the EntitySet.SetSource method. /// Here we set the query source of each EntitySetProperty /// </summary> /// <param name="entity"></param> private void SetEntitySetsQueries(object entity) { if (!this.deferredLoadingEnabled) return; // BUG: This is ignoring External Mappings from XmlMappingSource. IEnumerable<MetaAssociation> associationList = Mapping.GetMetaType(entity.GetType()).Associations.Where(a => !a.IsForeignKey); if (associationList.Any()) { foreach (MetaAssociation association in associationList) { //example of entitySet: Employee.EmployeeTerritories var memberData = association.ThisMember; Type otherTableType = association.OtherType.Type; ParameterExpression p = Expression.Parameter(otherTableType, "other"); //other table:EmployeeTerritories var otherTable = GetTable(otherTableType); var otherKeys = memberData.Association.OtherKey; var thisKeys = memberData.Association.ThisKey; if (otherKeys.Count != thisKeys.Count) throw new InvalidOperationException("This keys don't match OtherKey"); BinaryExpression predicate = null; IEnumerator<MetaDataMember> thisKeyEnumerator = thisKeys.GetEnumerator(); foreach (MetaDataMember otherKey in otherKeys) { thisKeyEnumerator.MoveNext(); //other table member:EmployeeTerritories.EmployeeID var otherTableMember = (PropertyInfo)otherKey.Member; BinaryExpression keyPredicate; if (!(otherTableMember.PropertyType.IsNullable())) { keyPredicate = Expression.Equal(Expression.MakeMemberAccess(p, otherTableMember), Expression.Constant(thisKeyEnumerator.Current.Member.GetMemberValue(entity))); } else { var ValueProperty = otherTableMember.PropertyType.GetProperty("Value"); keyPredicate = Expression.Equal(Expression.MakeMemberAccess( Expression.MakeMemberAccess(p, otherTableMember), ValueProperty), Expression.Constant(thisKeyEnumerator.Current.Member.GetMemberValue(entity))); } if (predicate == null) predicate = keyPredicate; else predicate = Expression.And(predicate, keyPredicate); } var query = GetOtherTableQuery(predicate, p, otherTableType, otherTable); var entitySetValue = memberData.Member.GetMemberValue(entity); if (entitySetValue == null) { entitySetValue = Activator.CreateInstance(memberData.Member.GetMemberType()); memberData.Member.SetMemberValue(entity, entitySetValue); } var hasLoadedOrAssignedValues = entitySetValue.GetType().GetProperty("HasLoadedOrAssignedValues"); if ((bool)hasLoadedOrAssignedValues.GetValue(entitySetValue, null)) continue; var setSourceMethod = entitySetValue.GetType().GetMethod("SetSource"); setSourceMethod.Invoke(entitySetValue, new[] { query }); 
//employee.EmployeeTerritories.SetSource(Table[EmployeesTerritories].Where(other=>other.employeeID="WARTH")) } } } private static MethodInfo _WhereMethod = typeof(Queryable).GetMethods().First(m => m.Name == "Where"); internal object GetOtherTableQuery(Expression predicate, ParameterExpression parameter, Type otherTableType, IQueryable otherTable) { //predicate: other.EmployeeID== "WARTH" Expression lambdaPredicate = Expression.Lambda(predicate, parameter); //lambdaPredicate: other=>other.EmployeeID== "WARTH" Expression call = Expression.Call(_WhereMethod.MakeGenericMethod(otherTableType), otherTable.Expression, lambdaPredicate); //Table[EmployeesTerritories].Where(other=>other.employeeID="WARTH") return otherTable.Provider.CreateQuery(call); } #endregion #region Insert/Update/Delete management /// <summary> /// Registers an entity for insert /// </summary> /// <param name="entity"></param> internal void RegisterInsert(object entity) { CurrentTransactionEntities.RegisterToInsert(entity); } private void DoRegisterUpdate(object entity) { if (entity == null) throw new ArgumentNullException("entity"); if (!this.objectTrackingEnabled) return; var identityReader = _GetIdentityReader(entity.GetType()); var identityKey = identityReader.GetIdentityKey(entity); // if we have no key, we can not watch if (identityKey == null || identityKey.Keys.Count == 0) return; // register entity AllTrackedEntities.RegisterToWatch(entity, identityKey); } /// <summary> /// Registers an entity for update /// The entity will be updated only if some of its members have changed after the registration /// </summary> /// <param name="entity"></param> internal void RegisterUpdate(object entity) { DoRegisterUpdate(entity); MemberModificationHandler.Register(entity, Mapping); } /// <summary> /// Registers or re-registers an entity and clears its state /// </summary> /// <param name="entity"></param> /// <returns></returns> internal object Register(object entity) { if (! this.objectTrackingEnabled) return entity; var registeredEntity = _GetOrRegisterEntity(entity); // the fact of registering again clears the modified state, so we're... 
clear with that MemberModificationHandler.Register(registeredEntity, Mapping); return registeredEntity; } /// <summary> /// Registers an entity for update /// The entity will be updated only if some of its members have changed after the registration /// </summary> /// <param name="entity"></param> /// <param name="entityOriginalState"></param> internal void RegisterUpdate(object entity, object entityOriginalState) { if (!this.objectTrackingEnabled) return; DoRegisterUpdate(entity); MemberModificationHandler.Register(entity, entityOriginalState, Mapping); } /// <summary> /// Clears the current state, and marks the object as clean /// </summary> /// <param name="entity"></param> internal void RegisterUpdateAgain(object entity) { if (!this.objectTrackingEnabled) return; MemberModificationHandler.ClearModified(entity, Mapping); } /// <summary> /// Registers an entity for delete /// </summary> /// <param name="entity"></param> internal void RegisterDelete(object entity) { if (!this.objectTrackingEnabled) return; CurrentTransactionEntities.RegisterToDelete(entity); } /// <summary> /// Unregisters entity after deletion /// </summary> /// <param name="entity"></param> internal void UnregisterDelete(object entity) { if (!this.objectTrackingEnabled) return; CurrentTransactionEntities.RegisterDeleted(entity); } #endregion /// <summary> /// Changed object determine /// </summary> /// <returns>Lists of inserted, updated, deleted objects</returns> public ChangeSet GetChangeSet() { var inserts = new List<object>(); var updates = new List<object>(); var deletes = new List<object>(); foreach (var entityTrack in CurrentTransactionEntities.EnumerateAll() .Concat(AllTrackedEntities.EnumerateAll())) { switch (entityTrack.EntityState) { case EntityState.ToInsert: inserts.Add(entityTrack.Entity); break; case EntityState.ToWatch: if (MemberModificationHandler.IsModified(entityTrack.Entity, Mapping)) updates.Add(entityTrack.Entity); break; case EntityState.ToDelete: deletes.Add(entityTrack.Entity); break; default: throw new ArgumentOutOfRangeException(); } } return new ChangeSet(inserts, updates, deletes); } /// <summary> /// use ExecuteCommand to call raw SQL /// </summary> public int ExecuteCommand(string command, params object[] parameters) { var directQuery = QueryBuilder.GetDirectQuery(command, new QueryContext(this)); return QueryRunner.Execute(directQuery, parameters); } /// <summary> /// Execute raw SQL query and return object /// </summary> public IEnumerable<TResult> ExecuteQuery<TResult>(string query, params object[] parameters) where TResult : class, new() { if (query == null) throw new ArgumentNullException("query"); return CreateExecuteQueryEnumerable<TResult>(query, parameters); } private IEnumerable<TResult> CreateExecuteQueryEnumerable<TResult>(string query, object[] parameters) where TResult : class, new() { foreach (TResult result in ExecuteQuery(typeof(TResult), query, parameters)) yield return result; } public IEnumerable ExecuteQuery(Type elementType, string query, params object[] parameters) { if (elementType == null) throw new ArgumentNullException("elementType"); if (query == null) throw new ArgumentNullException("query"); var queryContext = new QueryContext(this); var directQuery = QueryBuilder.GetDirectQuery(query, queryContext); return QueryRunner.ExecuteSelect(elementType, directQuery, parameters); } /// <summary> /// Gets or sets the load options /// </summary> [DbLinqToDo] public DataLoadOptions LoadOptions { get { throw new NotImplementedException(); } set { throw new 
NotImplementedException(); } } public DbTransaction Transaction { get { return (DbTransaction) DatabaseContext.CurrentTransaction; } set { DatabaseContext.CurrentTransaction = value; } } /// <summary> /// Runs the given reader and returns columns. /// </summary> /// <typeparam name="TResult">The type of the result.</typeparam> /// <param name="reader">The reader.</param> /// <returns></returns> public IEnumerable<TResult> Translate<TResult>(DbDataReader reader) { if (reader == null) throw new ArgumentNullException("reader"); return CreateTranslateIterator<TResult>(reader); } IEnumerable<TResult> CreateTranslateIterator<TResult>(DbDataReader reader) { foreach (TResult result in Translate(typeof(TResult), reader)) yield return result; } public IMultipleResults Translate(DbDataReader reader) { throw new NotImplementedException(); } public IEnumerable Translate(Type elementType, DbDataReader reader) { if (elementType == null) throw new ArgumentNullException("elementType"); if (reader == null) throw new ArgumentNullException("reader"); return QueryRunner.EnumerateResult(elementType, reader, this); } public void Dispose() { //connection closing should not be done here. //read: http://msdn2.microsoft.com/en-us/library/bb292288.aspx //We own the instance of MemberModificationHandler - we must unregister listeners of entities we attached to MemberModificationHandler.UnregisterAll(); } [DbLinqToDo] protected virtual void Dispose(bool disposing) { throw new NotImplementedException(); } /// <summary> /// Creates a IDbDataAdapter. Used internally by Vendors /// </summary> /// <returns></returns> internal IDbDataAdapter CreateDataAdapter() { return DatabaseContext.CreateDataAdapter(); } /// <summary> /// Sets a TextWriter where generated SQL commands are written /// </summary> public TextWriter Log { get; set; } /// <summary> /// Writes text on Log (if not null) /// Internal helper /// </summary> /// <param name="text"></param> internal void WriteLog(string text) { if (Log != null) Log.WriteLine(text); } /// <summary> /// Write an IDbCommand to Log (if non null) /// </summary> /// <param name="command"></param> internal void WriteLog(IDbCommand command) { if (Log != null) { Log.WriteLine(command.CommandText); foreach (IDbDataParameter parameter in command.Parameters) WriteLog(parameter); Log.Write("--"); Log.Write(" Context: {0}", Vendor.VendorName); Log.Write(" Model: {0}", Mapping.GetType().Name); Log.Write(" Build: {0}", Assembly.GetExecutingAssembly().GetName().Version); Log.WriteLine(); } } /// <summary> /// Writes and IDbDataParameter to Log (if non null) /// </summary> /// <param name="parameter"></param> internal void WriteLog(IDbDataParameter parameter) { if (Log != null) { // -- @p0: Input Int (Size = 0; Prec = 0; Scale = 0) [2] // -- <name>: <direction> <type> (...) 
[<value>] Log.WriteLine("-- {0}: {1} {2} (Size = {3}; Prec = {4}; Scale = {5}) [{6}]", parameter.ParameterName, parameter.Direction, parameter.DbType, parameter.Size, parameter.Precision, parameter.Scale, parameter.Value); } } public bool ObjectTrackingEnabled { get { return this.objectTrackingEnabled; } set { if (this.currentTransactionEntities != null && value != this.objectTrackingEnabled) throw new InvalidOperationException("Data context options cannot be modified after results have been returned from a query."); this.objectTrackingEnabled = value; } } [DbLinqToDo] public int CommandTimeout { get { throw new NotImplementedException(); } set { throw new NotImplementedException(); } } public bool DeferredLoadingEnabled { get { return this.deferredLoadingEnabled; } set { if (this.currentTransactionEntities != null && value != this.deferredLoadingEnabled) throw new InvalidOperationException("Data context options cannot be modified after results have been returned from a query."); this.deferredLoadingEnabled = value; } } [DbLinqToDo] public ChangeConflictCollection ChangeConflicts { get { throw new NotImplementedException(); } } [DbLinqToDo] public DbCommand GetCommand(IQueryable query) { DbCommand dbCommand = GetIDbCommand(query) as DbCommand; if (dbCommand == null) throw new InvalidOperationException(); return dbCommand; } [DBLinqExtended] public IDbCommand GetIDbCommand(IQueryable query) { if (query == null) throw new ArgumentNullException("query"); var qp = query.Provider as QueryProvider; if (qp == null) throw new InvalidOperationException(); if (qp.ExpressionChain.Expressions.Count == 0) qp.ExpressionChain.Expressions.Add(CreateDefaultQuery(query)); return qp.GetQuery(null).GetCommand().Command; } private Expression CreateDefaultQuery(IQueryable query) { // Manually create the expression tree for: IQueryable<TableType>.Select(e => e) var identityParameter = Expression.Parameter(query.ElementType, "e"); var identityBody = Expression.Lambda( typeof(Func<,>).MakeGenericType(query.ElementType, query.ElementType), identityParameter, new[] { identityParameter } ); return Expression.Call( typeof(Queryable), "Select", new[] { query.ElementType, query.ElementType }, query.Expression, Expression.Quote(identityBody) ); } [DbLinqToDo] public void Refresh(RefreshMode mode, IEnumerable entities) { throw new NotImplementedException(); } [DbLinqToDo] public void Refresh(RefreshMode mode, params object[] entities) { throw new NotImplementedException(); } [DbLinqToDo] public void Refresh(RefreshMode mode, object entity) { throw new NotImplementedException(); } [DbLinqToDo] public void DeleteDatabase() { throw new NotImplementedException(); } [DbLinqToDo] public void CreateDatabase() { throw new NotImplementedException(); } [DbLinqToDo] protected internal IQueryable<TResult> CreateMethodCallQuery<TResult>(object instance, MethodInfo methodInfo, params object[] parameters) { throw new NotImplementedException(); } [DbLinqToDo] protected internal void ExecuteDynamicDelete(object entity) { throw new NotImplementedException(); } [DbLinqToDo] protected internal void ExecuteDynamicInsert(object entity) { throw new NotImplementedException(); } [DbLinqToDo] protected internal void ExecuteDynamicUpdate(object entity) { throw new NotImplementedException(); } } }
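// ---------------------------------------------------------------------------
// Illustrative usage sketch for the DataContext above (not part of the DbLinq
// sources). The `Product` entity, its table/column names and the connection
// string are hypothetical; DataContext/Table refer to the types defined above
// (DbLinq.Data.Linq, or System.Data.Linq when MONO_STRICT is set). The vendor
// is selected by the "DbLinqProvider=..." token handled by the
// DataContext(string) constructor, and SubmitChanges flushes tracked
// inserts/updates/deletes inside a single transaction.
// ---------------------------------------------------------------------------
using System;
using System.Linq;
using System.Data.Linq;          // ConflictMode
using System.Data.Linq.Mapping;  // Table/Column attributes

static class DataContextUsageExample
{
    [Table(Name = "products")]
    public class Product
    {
        [Column(IsPrimaryKey = true)] public int Id;
        [Column] public string Name;
        [Column] public decimal Price;
    }

    public static void Run()
    {
        // Hypothetical connection string: "DbLinqProvider=MySql" picks MySqlVendor
        // from DbLinq.MySql.dll and is stripped before the connection is opened.
        const string connectionString =
            "DbLinqProvider=MySql;Server=localhost;Database=shop;User Id=app;Password=secret";

        using (var db = new DataContext(connectionString))
        {
            db.Log = Console.Out;                        // echo generated SQL
            var products = db.GetTable<Product>();       // Table<Product> is cached per entity type

            foreach (var p in products.Where(x => x.Price < 10m))
                p.Price += 1m;                           // change tracked via MemberModificationHandler

            db.SubmitChanges(ConflictMode.FailOnFirstConflict);  // UPDATEs run in one transaction
        }
    }
}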
// Copyright (c) 2015, Outercurve Foundation. // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // - Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // - Neither the name of the Outercurve Foundation nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. using System; using System.IO; using System.Xml; using System.Configuration; using System.Windows.Forms; using System.Collections; using System.Text; using WebsitePanel.Setup.Web; using WebsitePanel.Setup.Actions; using System.Threading; namespace WebsitePanel.Setup { public class StandaloneServerSetup : BaseSetup { public static object Install(object obj) { return InstallBase(obj, "1.0.6"); } internal static object InstallBase(object obj, string minimalInstallerVersion) { Hashtable args = Utils.GetSetupParameters(obj); //check CS version string shellVersion = Utils.GetStringSetupParameter(args, Global.Parameters.ShellVersion); var shellMode = Utils.GetStringSetupParameter(args, Global.Parameters.ShellMode); Version version = new Version(shellVersion); //******************** Server **************** var serverSetup = new SetupVariables { ComponentId = Guid.NewGuid().ToString(), Instance = String.Empty, ComponentName = Global.Server.ComponentName, ComponentCode = Global.Server.ComponentCode, ComponentDescription = Global.Server.ComponentDescription, // ServerPassword = Guid.NewGuid().ToString("N").Substring(0, 10), // SetupAction = SetupActions.Install, IISVersion = Global.IISVersion, ApplicationName = Utils.GetStringSetupParameter(args, Global.Parameters.ApplicationName), Version = Utils.GetStringSetupParameter(args, Global.Parameters.Version), Installer = Utils.GetStringSetupParameter(args, Global.Parameters.Installer), InstallerPath = Utils.GetStringSetupParameter(args, Global.Parameters.InstallerPath), SetupXml = Utils.GetStringSetupParameter(args, Global.Parameters.SetupXml), // InstallerFolder = Path.Combine(Utils.GetStringSetupParameter(args, Global.Parameters.InstallerFolder), Global.Server.ComponentName), InstallerType = Utils.GetStringSetupParameter(args, Global.Parameters.InstallerType).Replace(Global.StandaloneServer.SetupController, Global.Server.SetupController), InstallationFolder = Path.Combine(Path.Combine(Utils.GetSystemDrive(), 
"WebsitePanel"), Global.Server.ComponentName), ConfigurationFile = "web.config", }; // Load config file AppConfig.LoadConfiguration(); // LoadComponentVariablesFromSetupXml(serverSetup.ComponentCode, serverSetup.SetupXml, serverSetup); // //serverSetup.ComponentConfig = AppConfig.CreateComponentConfig(serverSetup.ComponentId); //serverSetup.RemoteServerUrl = GetUrl(serverSetup.WebSiteDomain, serverSetup.WebSiteIP, serverSetup.WebSitePort); // //CreateComponentSettingsFromSetupVariables(serverSetup, serverSetup.ComponentId); //******************** Enterprise Server **************** var esServerSetup = new SetupVariables { ComponentId = Guid.NewGuid().ToString(), SetupAction = SetupActions.Install, IISVersion = Global.IISVersion, // Instance = String.Empty, ComponentName = Global.EntServer.ComponentName, ComponentCode = Global.EntServer.ComponentCode, ApplicationName = Utils.GetStringSetupParameter(args, Global.Parameters.ApplicationName), Version = Utils.GetStringSetupParameter(args, Global.Parameters.Version), ComponentDescription = Global.EntServer.ComponentDescription, Installer = Utils.GetStringSetupParameter(args, Global.Parameters.Installer), InstallerFolder = Path.Combine(Utils.GetStringSetupParameter(args, Global.Parameters.InstallerFolder), Global.EntServer.ComponentName), InstallerType = Utils.GetStringSetupParameter(args, Global.Parameters.InstallerType).Replace(Global.StandaloneServer.SetupController, Global.EntServer.SetupController), InstallationFolder = Path.Combine(Path.Combine(Utils.GetSystemDrive(), "WebsitePanel"), Global.EntServer.ComponentName), InstallerPath = Utils.GetStringSetupParameter(args, Global.Parameters.InstallerPath), SetupXml = Utils.GetStringSetupParameter(args, Global.Parameters.SetupXml), // ConfigurationFile = "web.config", ConnectionString = Global.EntServer.AspNetConnectionStringFormat, DatabaseServer = Global.EntServer.DefaultDbServer, Database = Global.EntServer.DefaultDatabase, CreateDatabase = true, UpdateServerAdminPassword = true, // WebSiteIP = Global.EntServer.DefaultIP, WebSitePort = Global.EntServer.DefaultPort, WebSiteDomain = String.Empty, }; // LoadComponentVariablesFromSetupXml(esServerSetup.ComponentCode, esServerSetup.SetupXml, esServerSetup); // //esServerSetup.ComponentConfig = AppConfig.CreateComponentConfig(esServerSetup.ComponentId); // //CreateComponentSettingsFromSetupVariables(esServerSetup, esServerSetup.ComponentId); //******************** Portal **************** #region Portal Setup Variables var portalSetup = new SetupVariables { ComponentId = Guid.NewGuid().ToString(), SetupAction = SetupActions.Install, IISVersion = Global.IISVersion, // Instance = String.Empty, ComponentName = Global.WebPortal.ComponentName, ComponentCode = Global.WebPortal.ComponentCode, ApplicationName = Utils.GetStringSetupParameter(args, Global.Parameters.ApplicationName), Version = Utils.GetStringSetupParameter(args, Global.Parameters.Version), ComponentDescription = Global.WebPortal.ComponentDescription, Installer = Utils.GetStringSetupParameter(args, Global.Parameters.Installer), InstallerFolder = Path.Combine(Utils.GetStringSetupParameter(args, Global.Parameters.InstallerFolder), Global.WebPortal.ComponentName), InstallerType = Utils.GetStringSetupParameter(args, Global.Parameters.InstallerType).Replace(Global.StandaloneServer.SetupController, Global.WebPortal.SetupController), InstallationFolder = Path.Combine(Path.Combine(Utils.GetSystemDrive(), "WebsitePanel"), Global.WebPortal.ComponentName), InstallerPath = 
Utils.GetStringSetupParameter(args, Global.Parameters.InstallerPath), SetupXml = Utils.GetStringSetupParameter(args, Global.Parameters.SetupXml), // ConfigurationFile = "web.config", EnterpriseServerURL = Global.WebPortal.DefaultEntServURL, }; // LoadComponentVariablesFromSetupXml(portalSetup.ComponentCode, portalSetup.SetupXml, portalSetup); // //portalSetup.ComponentConfig = AppConfig.CreateComponentConfig(portalSetup.ComponentId); // //CreateComponentSettingsFromSetupVariables(portalSetup, portalSetup.ComponentId); #endregion // var stdssam = new StandaloneServerActionManager(serverSetup, esServerSetup, portalSetup); // stdssam.PrepareDistributiveDefaults(); // if (shellMode.Equals(Global.SilentInstallerShell, StringComparison.OrdinalIgnoreCase)) { // Validate the setup controller's bootstrapper version if (version < new Version(minimalInstallerVersion)) { Utils.ShowConsoleErrorMessage(Global.Messages.InstallerVersionIsObsolete, minimalInstallerVersion); // return false; } try { var success = true; // Retrieve WebsitePanel Enterprise Server component's settings from the command-line var adminPassword = Utils.GetStringSetupParameter(args, Global.Parameters.ServerAdminPassword); // This has been designed to make an installation process via Web PI more secure if (String.IsNullOrEmpty(adminPassword)) { // Set serveradmin password esServerSetup.ServerAdminPassword = Guid.NewGuid().ToString(); // Set peer admin password esServerSetup.PeerAdminPassword = Guid.NewGuid().ToString(); // Instruct provisioning scenario to enter the application in SCPA mode (Setup Control Panel Acounts) esServerSetup.EnableScpaMode = true; } else { esServerSetup.ServerAdminPassword = esServerSetup.PeerAdminPassword = adminPassword; } // esServerSetup.Database = Utils.GetStringSetupParameter(args, Global.Parameters.DatabaseName); esServerSetup.DatabaseServer = Utils.GetStringSetupParameter(args, Global.Parameters.DatabaseServer); esServerSetup.DbInstallConnectionString = SqlUtils.BuildDbServerMasterConnectionString( esServerSetup.DatabaseServer, Utils.GetStringSetupParameter(args, Global.Parameters.DbServerAdmin), Utils.GetStringSetupParameter(args, Global.Parameters.DbServerAdminPassword) ); // stdssam.ActionError += new EventHandler<ActionErrorEventArgs>((object sender, ActionErrorEventArgs e) => { Utils.ShowConsoleErrorMessage(e.ErrorMessage); // Log.WriteError(e.ErrorMessage); // success = false; }); // stdssam.Start(); // return success; } catch (Exception ex) { Log.WriteError("Failed to install the component", ex); // return false; } } else { // Validate the setup controller's bootstrapper version if (version < new Version(minimalInstallerVersion)) { MessageBox.Show(String.Format(Global.Messages.InstallerVersionIsObsolete, minimalInstallerVersion), "Setup Wizard", MessageBoxButtons.OK, MessageBoxIcon.Warning); // return DialogResult.Cancel; } // NOTE: there is no assignment to SetupVariables property of the wizard as usually because we have three components // to setup here and thus we have created SwapSetupVariablesAction setup action to swap corresponding variables // back and forth while executing the installation scenario. InstallerForm form = new InstallerForm(); Wizard wizard = form.Wizard; wizard.SetupVariables = serverSetup; // Assign corresponding action manager to the wizard. 
wizard.ActionManager = stdssam; // Initialize wizard pages and their properties var introPage = new IntroductionPage(); var licPage = new LicenseAgreementPage(); var page2 = new ConfigurationCheckPage(); // Setup prerequisites validation page2.Checks.AddRange(new ConfigurationCheck[] { new ConfigurationCheck(CheckTypes.OperationSystem, "Operating System Requirement"){ SetupVariables = serverSetup }, new ConfigurationCheck(CheckTypes.IISVersion, "IIS Requirement"){ SetupVariables = serverSetup }, new ConfigurationCheck(CheckTypes.ASPNET, "ASP.NET Requirement"){ SetupVariables = serverSetup }, // Validate Server installation prerequisites new ConfigurationCheck(CheckTypes.WPServer, "WebsitePanel Server Requirement") { SetupVariables = serverSetup }, // Validate EnterpriseServer installation prerequisites new ConfigurationCheck(CheckTypes.WPEnterpriseServer, "WebsitePanel Enterprise Server Requirement") { SetupVariables = esServerSetup }, // Validate WebPortal installation prerequisites new ConfigurationCheck(CheckTypes.WPPortal, "WebsitePanel Portal Requirement") { SetupVariables = portalSetup } }); // Assign WebPortal setup variables set to acquire corresponding settings var page3 = new WebPage { SetupVariables = portalSetup }; // Assign EnterpriseServer setup variables set to acquire corresponding settings var page4 = new DatabasePage { SetupVariables = esServerSetup }; // Assign EnterpriseServer setup variables set to acquire corresponding settings var page5 = new ServerAdminPasswordPage { SetupVariables = esServerSetup, NoteText = "Note: Both serveradmin and admin accounts will use this password. You can always change password for serveradmin or admin accounts through control panel." }; // var page6 = new ExpressInstallPage2(); // Assign WebPortal setup variables set to acquire corresponding settings var page7 = new SetupCompletePage { SetupVariables = portalSetup }; // wizard.Controls.AddRange(new Control[] { introPage, licPage, page2, page3, page4, page5, page6, page7 }); wizard.LinkPages(); wizard.SelectedPage = introPage; // Run wizard IWin32Window owner = args[Global.Parameters.ParentForm] as IWin32Window; return form.ShowModal(owner); } } public static DialogResult Uninstall(object obj) { MessageBox.Show("Functionality is not supported.", "Setup Wizard", MessageBoxButtons.OK, MessageBoxIcon.Warning); return DialogResult.Cancel; } public static DialogResult Setup(object obj) { MessageBox.Show("Functionality is not supported.", "Setup Wizard", MessageBoxButtons.OK, MessageBoxIcon.Warning); return DialogResult.Cancel; } public static DialogResult Update(object obj) { MessageBox.Show("Functionality is not supported.", "Setup Wizard", MessageBoxButtons.OK, MessageBoxIcon.Warning); return DialogResult.Cancel; } protected static void LoadComponentVariablesFromSetupXml(string componentCode, string xml, SetupVariables setupVariables) { if (string.IsNullOrEmpty(componentCode)) return; if (string.IsNullOrEmpty(xml)) return; try { XmlDocument doc = new XmlDocument(); doc.LoadXml(xml); string xpath = string.Format("components/component[@code=\"{0}\"]", componentCode); XmlNode componentNode = doc.SelectSingleNode(xpath); if (componentNode != null) { LoadSetupVariablesFromSetupXml(componentNode.InnerXml, setupVariables); } } catch (Exception ex) { Log.WriteError("Unattended setup error", ex); throw; } } private static string GetUrl(string domain, string ip, string port) { string address = "http://"; string server = string.Empty; string ipPort = string.Empty; //server if (domain != null && 
domain.Trim().Length > 0)
            {
                //domain
                server = domain.Trim();
            }
            else
            {
                //ip
                if (ip != null && ip.Trim().Length > 0)
                {
                    server = ip.Trim();
                }
            }
            //port (the default port 80 is omitted)
            if (server.Length > 0 && port != null && port.Trim().Length > 0 && port.Trim() != "80")
            {
                ipPort = ":" + port.Trim();
            }
            //address
            address += server + ipPort;
            return address;
        }
    }
}
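// ---------------------------------------------------------------------------
// Illustrative note (not part of the setup sources): sample results of the
// GetUrl helper above, assuming the default port 80 is omitted from the URL.
// Host names and ports are hypothetical.
//
//   GetUrl("panel.example.com", "10.0.0.5", "80")   -> "http://panel.example.com"
//   GetUrl("",                  "10.0.0.5", "9001") -> "http://10.0.0.5:9001"
//   GetUrl(null,                "10.0.0.5", "9001") -> "http://10.0.0.5:9001"
// ---------------------------------------------------------------------------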
// Copyright (C) 2004-2007 MySQL AB // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License version 2 as published by // the Free Software Foundation // // There are special exceptions to the terms and conditions of the GPL // as it is applied to this software. View the full text of the // exception in file EXCEPTIONS in the directory of this software // distribution. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA using System; using System.Data.Common; using System.Collections; using System.ComponentModel; using MySql.Data.MySqlClient.Properties; namespace MySql.Data.MySqlClient { /// <summary> /// Represents a collection of parameters relevant to a <see cref="MySqlCommand"/> as well as their respective mappings to columns in a <see cref="System.Data.DataSet"/>. This class cannot be inherited. /// </summary> /// <include file='docs/MySqlParameterCollection.xml' path='MyDocs/MyMembers[@name="Class"]/*'/> public sealed class MySqlParameterCollection : DbParameterCollection { private ArrayList items = new ArrayList(); private Hashtable indexHash; private MySqlCommand owningCommand; internal MySqlParameterCollection(MySqlCommand cmd) { indexHash = new Hashtable(StringComparer.CurrentCultureIgnoreCase); Clear(); owningCommand = cmd; } #region Public Methods /// <summary> /// Gets the <see cref="MySqlParameter"/> at the specified index. /// </summary> /// <overloads>Gets the <see cref="MySqlParameter"/> with a specified attribute. /// [C#] In C#, this property is the indexer for the <see cref="MySqlParameterCollection"/> class. /// </overloads> public new MySqlParameter this[int index] { get { return (MySqlParameter)GetParameter(index); } set { SetParameter(index, value); } } /// <summary> /// Gets the <see cref="MySqlParameter"/> with the specified name. /// </summary> public new MySqlParameter this[string name] { get { return (MySqlParameter)GetParameter(name); } set { SetParameter(name, value); } } /// <summary> /// Adds the specified <see cref="MySqlParameter"/> object to the <see cref="MySqlParameterCollection"/>. /// </summary> /// <param name="value">The <see cref="MySqlParameter"/> to add to the collection.</param> /// <returns>The newly added <see cref="MySqlParameter"/> object.</returns> public MySqlParameter Add(MySqlParameter value) { return InternalAdd(value, -1); } /// <summary> /// Adds a <see cref="MySqlParameter"/> to the <see cref="MySqlParameterCollection"/> given the specified parameter name and value. /// </summary> /// <param name="parameterName">The name of the parameter.</param> /// <param name="value">The <see cref="MySqlParameter.Value"/> of the <see cref="MySqlParameter"/> to add to the collection.</param> /// <returns>The newly added <see cref="MySqlParameter"/> object.</returns> [Obsolete("Add(String parameterName, Object value) has been deprecated. 
Use AddWithValue(String parameterName, Object value)")] public MySqlParameter Add(string parameterName, object value) { return Add(new MySqlParameter(parameterName, value)); } public MySqlParameter AddWithValue(string parameterName, object value) { return Add(new MySqlParameter(parameterName, value)); } /// <summary> /// Adds a <see cref="MySqlParameter"/> to the <see cref="MySqlParameterCollection"/> given the parameter name and the data type. /// </summary> /// <param name="parameterName">The name of the parameter.</param> /// <param name="dbType">One of the <see cref="MySqlDbType"/> values. </param> /// <returns>The newly added <see cref="MySqlParameter"/> object.</returns> public MySqlParameter Add(string parameterName, MySqlDbType dbType) { return Add(new MySqlParameter(parameterName, dbType)); } /// <summary> /// Adds a <see cref="MySqlParameter"/> to the <see cref="MySqlParameterCollection"/> with the parameter name, the data type, and the column length. /// </summary> /// <param name="parameterName">The name of the parameter.</param> /// <param name="dbType">One of the <see cref="MySqlDbType"/> values. </param> /// <param name="size">The length of the column.</param> /// <returns>The newly added <see cref="MySqlParameter"/> object.</returns> public MySqlParameter Add(string parameterName, MySqlDbType dbType, int size) { return Add(new MySqlParameter(parameterName, dbType, size)); } /// <summary> /// Adds a <see cref="MySqlParameter"/> to the <see cref="MySqlParameterCollection"/> with the parameter name, the data type, the column length, and the source column name. /// </summary> /// <param name="parameterName">The name of the parameter.</param> /// <param name="dbType">One of the <see cref="MySqlDbType"/> values. </param> /// <param name="size">The length of the column.</param> /// <param name="sourceColumn">The name of the source column.</param> /// <returns>The newly added <see cref="MySqlParameter"/> object.</returns> public MySqlParameter Add(string parameterName, MySqlDbType dbType, int size, string sourceColumn) { return Add(new MySqlParameter(parameterName, dbType, size, sourceColumn)); } #endregion #region DbParameterCollection Implementation /// <summary> /// Adds an array of values to the end of the <see cref="MySqlParameterCollection"/>. /// </summary> /// <param name="values"></param> public override void AddRange(Array values) { foreach (DbParameter p in values) Add(p); } void CheckIndex(int index) { if (index < 0 || index >= Count) throw new IndexOutOfRangeException("Parameter index is out of range."); } /// <summary> /// Retrieve the parameter with the given name. /// </summary> /// <param name="parameterName"></param> /// <returns></returns> protected override DbParameter GetParameter(string parameterName) { int index = IndexOf(parameterName); if (index < 0) { // check to see if the user has added the parameter without a // parameter marker. If so, kindly tell them what they did. 
if (parameterName.StartsWith("@") || parameterName.StartsWith("?")) { string newParameterName = parameterName.Substring(1); index = IndexOf(newParameterName); if (index != -1) return (DbParameter)items[index]; } throw new ArgumentException("Parameter '" + parameterName + "' not found in the collection."); } return (DbParameter)items[index]; } protected override DbParameter GetParameter(int index) { CheckIndex(index); return (DbParameter)items[index]; } protected override void SetParameter(string parameterName, DbParameter value) { int index = IndexOf(parameterName); if (index < 0) throw new ArgumentException("Parameter '" + parameterName + "' not found in the collection."); SetParameter(index, value); } protected override void SetParameter(int index, DbParameter value) { CheckIndex(index); MySqlParameter p = (MySqlParameter)items[index]; indexHash.Remove(p.ParameterName); items[index] = value; indexHash.Add(value.ParameterName, index); } /// <summary> /// Adds the specified <see cref="MySqlParameter"/> object to the <see cref="MySqlParameterCollection"/>. /// </summary> /// <param name="value">The <see cref="MySqlParameter"/> to add to the collection.</param> /// <returns>The index of the new <see cref="MySqlParameter"/> object.</returns> public override int Add(object value) { if (!(value is MySqlParameter)) throw new MySqlException("Only MySqlParameter objects may be stored"); MySqlParameter p = (MySqlParameter)value; if (p.ParameterName == null || p.ParameterName == String.Empty) throw new MySqlException("Parameters must be named"); p = Add(p); return IndexOf(p); } /// <summary> /// Removes all items from the collection. /// </summary> public override void Clear() { foreach (MySqlParameter p in items) p.Collection = null; items.Clear(); indexHash.Clear(); } /// <summary> /// Gets a value indicating whether a <see cref="MySqlParameter"/> with the specified parameter name exists in the collection. /// </summary> /// <param name="parameterName">The name of the <see cref="MySqlParameter"/> object to find.</param> /// <returns>true if the collection contains the parameter; otherwise, false.</returns> public override bool Contains(string parameterName) { return IndexOf(parameterName) != -1; } /// <summary> /// Gets a value indicating whether a MySqlParameter exists in the collection. /// </summary> /// <param name="value">The value of the <see cref="MySqlParameter"/> object to find. </param> /// <returns>true if the collection contains the <see cref="MySqlParameter"/> object; otherwise, false.</returns> /// <overloads>Gets a value indicating whether a <see cref="MySqlParameter"/> exists in the collection.</overloads> public override bool Contains(object value) { return items.Contains(value); } /// <summary> /// Copies MySqlParameter objects from the MySqlParameterCollection to the specified array. /// </summary> /// <param name="array"></param> /// <param name="index"></param> public override void CopyTo(Array array, int index) { items.CopyTo(array, index); } /// <summary> /// Gets the number of MySqlParameter objects in the collection. /// </summary> public override int Count { get { return items.Count; } } /// <summary> /// Returns an enumerator that iterates through the <see cref="MySqlParameterCollection"/>. /// </summary> /// <returns></returns> public override IEnumerator GetEnumerator() { return items.GetEnumerator(); } /// <summary> /// Gets the location of the <see cref="MySqlParameter"/> in the collection with a specific parameter name. 
/// </summary> /// <param name="parameterName">The name of the <see cref="MySqlParameter"/> object to retrieve. </param> /// <returns>The zero-based location of the <see cref="MySqlParameter"/> in the collection.</returns> public override int IndexOf(string parameterName) { object o = indexHash[parameterName]; if (o == null) return -1; return (int)o; } /// <summary> /// Gets the location of a <see cref="MySqlParameter"/> in the collection. /// </summary> /// <param name="value">The <see cref="MySqlParameter"/> object to locate. </param> /// <returns>The zero-based location of the <see cref="MySqlParameter"/> in the collection.</returns> /// <overloads>Gets the location of a <see cref="MySqlParameter"/> in the collection.</overloads> public override int IndexOf(object value) { return items.IndexOf(value); } /// <summary> /// Inserts a MySqlParameter into the collection at the specified index. /// </summary> /// <param name="index"></param> /// <param name="value"></param> public override void Insert(int index, object value) { if (!(value is MySqlParameter)) throw new MySqlException("Only MySqlParameter objects may be stored"); InternalAdd((MySqlParameter)value, index); } /// <summary> /// Removes the specified MySqlParameter from the collection. /// </summary> /// <param name="value"></param> public override void Remove(object value) { MySqlParameter p = (value as MySqlParameter); p.Collection = null; int index = IndexOf(p); items.Remove(p); indexHash.Remove(p.ParameterName); AdjustHash(index, false); } /// <summary> /// Removes the specified <see cref="MySqlParameter"/> from the collection using the parameter name. /// </summary> /// <param name="parameterName">The name of the <see cref="MySqlParameter"/> object to retrieve. </param> public override void RemoveAt(string parameterName) { DbParameter p = GetParameter(parameterName); Remove(p); } /// <summary> /// Removes the specified <see cref="MySqlParameter"/> from the collection using a specific index. /// </summary> /// <param name="index">The zero-based index of the parameter. </param> /// <overloads>Removes the specified <see cref="MySqlParameter"/> from the collection.</overloads> public override void RemoveAt(int index) { object o = items[index]; Remove(o); } /// <summary> /// Gets an object that can be used to synchronize access to the /// <see cref="MySqlParameterCollection"/>. 
/// </summary> public override object SyncRoot { get { return items.SyncRoot; } } #endregion internal void ParameterNameChanged(MySqlParameter p, string oldName, string newName) { int index = IndexOf(oldName); indexHash.Remove(oldName); indexHash.Add(newName, index); } private MySqlParameter InternalAdd(MySqlParameter value, int index) { if (value == null) throw new ArgumentException("The MySqlParameterCollection only accepts non-null MySqlParameter type objects.", "value"); // make sure we don't already have a parameter with this name string inComingName = value.ParameterName; if (indexHash.ContainsKey(inComingName)) throw new MySqlException( String.Format(Resources.ParameterAlreadyDefined, value.ParameterName)); if (inComingName[0] == '@' || inComingName[0] == '?') inComingName = inComingName.Substring(1, inComingName.Length - 1); if (indexHash.ContainsKey(inComingName)) throw new MySqlException( String.Format(Resources.ParameterAlreadyDefined, value.ParameterName)); if (index == -1) { index = items.Add(value); indexHash.Add(value.ParameterName, index); } else { items.Insert(index, value); AdjustHash(index, true); indexHash.Add(value.ParameterName, index); } value.Collection = this; return value; } private void AdjustHash(int keyIndex, bool addEntry) { for (int i=0; i < Count; i++) { MySqlParameter p = (MySqlParameter)items[i]; if (!indexHash.ContainsKey(p.ParameterName)) return; int index = (int)indexHash[p.ParameterName]; if (index < keyIndex) continue; indexHash[p.ParameterName] = addEntry ? ++index : --index; } } internal MySqlParameter GetParameterFlexible(string parameterName, bool throwOnNotFound) { int index = IndexOf(parameterName); if (-1 == index) index = IndexOf("?" + parameterName); if (-1 == index) index = IndexOf("@" + parameterName); if (-1 == index) { if (parameterName.StartsWith("@") || parameterName.StartsWith("?")) index = IndexOf(parameterName.Substring(1)); } if (-1 != index) return this[index]; if (throwOnNotFound) throw new ArgumentException("Parameter '" + parameterName + "' not found in the collection."); return null; } } }
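// Usage sketch (not part of the Connector/NET file above): a minimal example of how the
// parameter collection is typically exercised through MySqlCommand.Parameters. The
// connection string, table, and column names are hypothetical placeholders.
using System;
using MySql.Data.MySqlClient;

internal static class MySqlParameterCollectionUsage
{
    public static void Main()
    {
        using (var connection = new MySqlConnection("server=localhost;database=test;uid=user;pwd=secret"))
        {
            connection.Open();
            var command = new MySqlCommand("SELECT name FROM people WHERE id = @id", connection);

            // AddWithValue replaces the obsolete Add(string, object) overload shown above.
            command.Parameters.AddWithValue("@id", 42);

            // The string indexer resolves names case-insensitively, and GetParameter falls back
            // to retrying without the '@'/'?' marker when a direct lookup fails.
            command.Parameters["@id"].Value = 43;

            Console.WriteLine(command.ExecuteScalar());
        }
    }
}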
#region Copyright notice and license // Copyright 2015-2016, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using CommandLine; using CommandLine.Text; using Google.Apis.Auth.OAuth2; using Google.Protobuf; using Grpc.Auth; using Grpc.Core; using Grpc.Core.Utils; using Grpc.Testing; using Newtonsoft.Json.Linq; using NUnit.Framework; namespace Grpc.IntegrationTesting { public class InteropClient { private class ClientOptions { [Option("server_host", Default = "127.0.0.1")] public string ServerHost { get; set; } [Option("server_host_override", Default = TestCredentials.DefaultHostOverride)] public string ServerHostOverride { get; set; } [Option("server_port", Required = true)] public int ServerPort { get; set; } [Option("test_case", Default = "large_unary")] public string TestCase { get; set; } // Deliberately using nullable bool type to allow --use_tls=true syntax (as opposed to --use_tls) [Option("use_tls", Default = false)] public bool? UseTls { get; set; } // Deliberately using nullable bool type to allow --use_test_ca=true syntax (as opposed to --use_test_ca) [Option("use_test_ca", Default = false)] public bool? 
UseTestCa { get; set; } [Option("default_service_account", Required = false)] public string DefaultServiceAccount { get; set; } [Option("oauth_scope", Required = false)] public string OAuthScope { get; set; } [Option("service_account_key_file", Required = false)] public string ServiceAccountKeyFile { get; set; } } ClientOptions options; private InteropClient(ClientOptions options) { this.options = options; } public static void Run(string[] args) { var parserResult = Parser.Default.ParseArguments<ClientOptions>(args) .WithNotParsed(errors => Environment.Exit(1)) .WithParsed(options => { var interopClient = new InteropClient(options); interopClient.Run().Wait(); }); } private async Task Run() { var credentials = await CreateCredentialsAsync(); List<ChannelOption> channelOptions = null; if (!string.IsNullOrEmpty(options.ServerHostOverride)) { channelOptions = new List<ChannelOption> { new ChannelOption(ChannelOptions.SslTargetNameOverride, options.ServerHostOverride) }; } var channel = new Channel(options.ServerHost, options.ServerPort, credentials, channelOptions); await RunTestCaseAsync(channel, options); await channel.ShutdownAsync(); } private async Task<ChannelCredentials> CreateCredentialsAsync() { var credentials = ChannelCredentials.Insecure; if (options.UseTls.Value) { credentials = options.UseTestCa.Value ? TestCredentials.CreateSslCredentials() : new SslCredentials(); } if (options.TestCase == "jwt_token_creds") { var googleCredential = await GoogleCredential.GetApplicationDefaultAsync(); Assert.IsTrue(googleCredential.IsCreateScopedRequired); credentials = ChannelCredentials.Create(credentials, googleCredential.ToCallCredentials()); } if (options.TestCase == "compute_engine_creds") { var googleCredential = await GoogleCredential.GetApplicationDefaultAsync(); Assert.IsFalse(googleCredential.IsCreateScopedRequired); credentials = ChannelCredentials.Create(credentials, googleCredential.ToCallCredentials()); } return credentials; } private async Task RunTestCaseAsync(Channel channel, ClientOptions options) { var client = new TestService.TestServiceClient(channel); switch (options.TestCase) { case "empty_unary": RunEmptyUnary(client); break; case "large_unary": RunLargeUnary(client); break; case "client_streaming": await RunClientStreamingAsync(client); break; case "server_streaming": await RunServerStreamingAsync(client); break; case "ping_pong": await RunPingPongAsync(client); break; case "empty_stream": await RunEmptyStreamAsync(client); break; case "compute_engine_creds": RunComputeEngineCreds(client, options.DefaultServiceAccount, options.OAuthScope); break; case "jwt_token_creds": RunJwtTokenCreds(client); break; case "oauth2_auth_token": await RunOAuth2AuthTokenAsync(client, options.OAuthScope); break; case "per_rpc_creds": await RunPerRpcCredsAsync(client, options.OAuthScope); break; case "cancel_after_begin": await RunCancelAfterBeginAsync(client); break; case "cancel_after_first_response": await RunCancelAfterFirstResponseAsync(client); break; case "timeout_on_sleeping_server": await RunTimeoutOnSleepingServerAsync(client); break; case "custom_metadata": await RunCustomMetadataAsync(client); break; case "status_code_and_message": await RunStatusCodeAndMessageAsync(client); break; case "unimplemented_method": RunUnimplementedMethod(new UnimplementedService.UnimplementedServiceClient(channel)); break; case "client_compressed_unary": RunClientCompressedUnary(client); break; case "client_compressed_streaming": await RunClientCompressedStreamingAsync(client); break; default: throw 
new ArgumentException("Unknown test case " + options.TestCase); } } public static void RunEmptyUnary(TestService.TestServiceClient client) { Console.WriteLine("running empty_unary"); var response = client.EmptyCall(new Empty()); Assert.IsNotNull(response); Console.WriteLine("Passed!"); } public static void RunLargeUnary(TestService.TestServiceClient client) { Console.WriteLine("running large_unary"); var request = new SimpleRequest { ResponseSize = 314159, Payload = CreateZerosPayload(271828) }; var response = client.UnaryCall(request); Assert.AreEqual(314159, response.Payload.Body.Length); Console.WriteLine("Passed!"); } public static async Task RunClientStreamingAsync(TestService.TestServiceClient client) { Console.WriteLine("running client_streaming"); var bodySizes = new List<int> { 27182, 8, 1828, 45904 }.Select((size) => new StreamingInputCallRequest { Payload = CreateZerosPayload(size) }); using (var call = client.StreamingInputCall()) { await call.RequestStream.WriteAllAsync(bodySizes); var response = await call.ResponseAsync; Assert.AreEqual(74922, response.AggregatedPayloadSize); } Console.WriteLine("Passed!"); } public static async Task RunServerStreamingAsync(TestService.TestServiceClient client) { Console.WriteLine("running server_streaming"); var bodySizes = new List<int> { 31415, 9, 2653, 58979 }; var request = new StreamingOutputCallRequest { ResponseParameters = { bodySizes.Select((size) => new ResponseParameters { Size = size }) } }; using (var call = client.StreamingOutputCall(request)) { var responseList = await call.ResponseStream.ToListAsync(); CollectionAssert.AreEqual(bodySizes, responseList.Select((item) => item.Payload.Body.Length)); } Console.WriteLine("Passed!"); } public static async Task RunPingPongAsync(TestService.TestServiceClient client) { Console.WriteLine("running ping_pong"); using (var call = client.FullDuplexCall()) { await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 31415 } }, Payload = CreateZerosPayload(27182) }); Assert.IsTrue(await call.ResponseStream.MoveNext()); Assert.AreEqual(31415, call.ResponseStream.Current.Payload.Body.Length); await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 9 } }, Payload = CreateZerosPayload(8) }); Assert.IsTrue(await call.ResponseStream.MoveNext()); Assert.AreEqual(9, call.ResponseStream.Current.Payload.Body.Length); await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 2653 } }, Payload = CreateZerosPayload(1828) }); Assert.IsTrue(await call.ResponseStream.MoveNext()); Assert.AreEqual(2653, call.ResponseStream.Current.Payload.Body.Length); await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 58979 } }, Payload = CreateZerosPayload(45904) }); Assert.IsTrue(await call.ResponseStream.MoveNext()); Assert.AreEqual(58979, call.ResponseStream.Current.Payload.Body.Length); await call.RequestStream.CompleteAsync(); Assert.IsFalse(await call.ResponseStream.MoveNext()); } Console.WriteLine("Passed!"); } public static async Task RunEmptyStreamAsync(TestService.TestServiceClient client) { Console.WriteLine("running empty_stream"); using (var call = client.FullDuplexCall()) { await call.RequestStream.CompleteAsync(); var responseList = await call.ResponseStream.ToListAsync(); Assert.AreEqual(0, responseList.Count); } Console.WriteLine("Passed!"); 
} public static void RunComputeEngineCreds(TestService.TestServiceClient client, string defaultServiceAccount, string oauthScope) { Console.WriteLine("running compute_engine_creds"); var request = new SimpleRequest { ResponseSize = 314159, Payload = CreateZerosPayload(271828), FillUsername = true, FillOauthScope = true }; // not setting credentials here because they were set on channel already var response = client.UnaryCall(request); Assert.AreEqual(314159, response.Payload.Body.Length); Assert.False(string.IsNullOrEmpty(response.OauthScope)); Assert.True(oauthScope.Contains(response.OauthScope)); Assert.AreEqual(defaultServiceAccount, response.Username); Console.WriteLine("Passed!"); } public static void RunJwtTokenCreds(TestService.TestServiceClient client) { Console.WriteLine("running jwt_token_creds"); var request = new SimpleRequest { ResponseSize = 314159, Payload = CreateZerosPayload(271828), FillUsername = true, }; // not setting credentials here because they were set on channel already var response = client.UnaryCall(request); Assert.AreEqual(314159, response.Payload.Body.Length); Assert.AreEqual(GetEmailFromServiceAccountFile(), response.Username); Console.WriteLine("Passed!"); } public static async Task RunOAuth2AuthTokenAsync(TestService.TestServiceClient client, string oauthScope) { Console.WriteLine("running oauth2_auth_token"); ITokenAccess credential = (await GoogleCredential.GetApplicationDefaultAsync()).CreateScoped(new[] { oauthScope }); string oauth2Token = await credential.GetAccessTokenForRequestAsync(); var credentials = GoogleGrpcCredentials.FromAccessToken(oauth2Token); var request = new SimpleRequest { FillUsername = true, FillOauthScope = true }; var response = client.UnaryCall(request, new CallOptions(credentials: credentials)); Assert.False(string.IsNullOrEmpty(response.OauthScope)); Assert.True(oauthScope.Contains(response.OauthScope)); Assert.AreEqual(GetEmailFromServiceAccountFile(), response.Username); Console.WriteLine("Passed!"); } public static async Task RunPerRpcCredsAsync(TestService.TestServiceClient client, string oauthScope) { Console.WriteLine("running per_rpc_creds"); ITokenAccess googleCredential = await GoogleCredential.GetApplicationDefaultAsync(); var credentials = googleCredential.ToCallCredentials(); var request = new SimpleRequest { FillUsername = true, }; var response = client.UnaryCall(request, new CallOptions(credentials: credentials)); Assert.AreEqual(GetEmailFromServiceAccountFile(), response.Username); Console.WriteLine("Passed!"); } public static async Task RunCancelAfterBeginAsync(TestService.TestServiceClient client) { Console.WriteLine("running cancel_after_begin"); var cts = new CancellationTokenSource(); using (var call = client.StreamingInputCall(cancellationToken: cts.Token)) { // TODO(jtattermusch): we need this to ensure call has been initiated once we cancel it. 
await Task.Delay(1000); cts.Cancel(); var ex = Assert.ThrowsAsync<RpcException>(async () => await call.ResponseAsync); Assert.AreEqual(StatusCode.Cancelled, ex.Status.StatusCode); } Console.WriteLine("Passed!"); } public static async Task RunCancelAfterFirstResponseAsync(TestService.TestServiceClient client) { Console.WriteLine("running cancel_after_first_response"); var cts = new CancellationTokenSource(); using (var call = client.FullDuplexCall(cancellationToken: cts.Token)) { await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 31415 } }, Payload = CreateZerosPayload(27182) }); Assert.IsTrue(await call.ResponseStream.MoveNext()); Assert.AreEqual(31415, call.ResponseStream.Current.Payload.Body.Length); cts.Cancel(); try { // cannot use Assert.ThrowsAsync because it uses Task.Wait and would deadlock. await call.ResponseStream.MoveNext(); Assert.Fail(); } catch (RpcException ex) { Assert.AreEqual(StatusCode.Cancelled, ex.Status.StatusCode); } } Console.WriteLine("Passed!"); } public static async Task RunTimeoutOnSleepingServerAsync(TestService.TestServiceClient client) { Console.WriteLine("running timeout_on_sleeping_server"); var deadline = DateTime.UtcNow.AddMilliseconds(1); using (var call = client.FullDuplexCall(deadline: deadline)) { try { await call.RequestStream.WriteAsync(new StreamingOutputCallRequest { Payload = CreateZerosPayload(27182) }); } catch (InvalidOperationException) { // Deadline was reached before write has started. Eat the exception and continue. } catch (RpcException) { // Deadline was reached before write has started. Eat the exception and continue. } try { await call.ResponseStream.MoveNext(); Assert.Fail(); } catch (RpcException ex) { // We can't guarantee the status code always DeadlineExceeded. See issue #2685. 
Assert.Contains(ex.Status.StatusCode, new[] { StatusCode.DeadlineExceeded, StatusCode.Internal }); } } Console.WriteLine("Passed!"); } public static async Task RunCustomMetadataAsync(TestService.TestServiceClient client) { Console.WriteLine("running custom_metadata"); { // step 1: test unary call var request = new SimpleRequest { ResponseSize = 314159, Payload = CreateZerosPayload(271828) }; var call = client.UnaryCallAsync(request, headers: CreateTestMetadata()); await call.ResponseAsync; var responseHeaders = await call.ResponseHeadersAsync; var responseTrailers = call.GetTrailers(); Assert.AreEqual("test_initial_metadata_value", responseHeaders.First((entry) => entry.Key == "x-grpc-test-echo-initial").Value); CollectionAssert.AreEqual(new byte[] { 0xab, 0xab, 0xab }, responseTrailers.First((entry) => entry.Key == "x-grpc-test-echo-trailing-bin").ValueBytes); } { // step 2: test full duplex call var request = new StreamingOutputCallRequest { ResponseParameters = { new ResponseParameters { Size = 31415 } }, Payload = CreateZerosPayload(27182) }; var call = client.FullDuplexCall(headers: CreateTestMetadata()); await call.RequestStream.WriteAsync(request); await call.RequestStream.CompleteAsync(); await call.ResponseStream.ToListAsync(); var responseHeaders = await call.ResponseHeadersAsync; var responseTrailers = call.GetTrailers(); Assert.AreEqual("test_initial_metadata_value", responseHeaders.First((entry) => entry.Key == "x-grpc-test-echo-initial").Value); CollectionAssert.AreEqual(new byte[] { 0xab, 0xab, 0xab }, responseTrailers.First((entry) => entry.Key == "x-grpc-test-echo-trailing-bin").ValueBytes); } Console.WriteLine("Passed!"); } public static async Task RunStatusCodeAndMessageAsync(TestService.TestServiceClient client) { Console.WriteLine("running status_code_and_message"); var echoStatus = new EchoStatus { Code = 2, Message = "test status message" }; { // step 1: test unary call var request = new SimpleRequest { ResponseStatus = echoStatus }; var e = Assert.Throws<RpcException>(() => client.UnaryCall(request)); Assert.AreEqual(StatusCode.Unknown, e.Status.StatusCode); Assert.AreEqual(echoStatus.Message, e.Status.Detail); } { // step 2: test full duplex call var request = new StreamingOutputCallRequest { ResponseStatus = echoStatus }; var call = client.FullDuplexCall(); await call.RequestStream.WriteAsync(request); await call.RequestStream.CompleteAsync(); try { // cannot use Assert.ThrowsAsync because it uses Task.Wait and would deadlock. 
await call.ResponseStream.ToListAsync(); Assert.Fail(); } catch (RpcException e) { Assert.AreEqual(StatusCode.Unknown, e.Status.StatusCode); Assert.AreEqual(echoStatus.Message, e.Status.Detail); } } Console.WriteLine("Passed!"); } public static void RunUnimplementedMethod(UnimplementedService.UnimplementedServiceClient client) { Console.WriteLine("running unimplemented_method"); var e = Assert.Throws<RpcException>(() => client.UnimplementedCall(new Empty())); Assert.AreEqual(StatusCode.Unimplemented, e.Status.StatusCode); Console.WriteLine("Passed!"); } public static void RunClientCompressedUnary(TestService.TestServiceClient client) { Console.WriteLine("running client_compressed_unary"); var probeRequest = new SimpleRequest { ExpectCompressed = new BoolValue { Value = true // lie about compression }, ResponseSize = 314159, Payload = CreateZerosPayload(271828) }; var e = Assert.Throws<RpcException>(() => client.UnaryCall(probeRequest, CreateClientCompressionMetadata(false))); Assert.AreEqual(StatusCode.InvalidArgument, e.Status.StatusCode); var compressedRequest = new SimpleRequest { ExpectCompressed = new BoolValue { Value = true }, ResponseSize = 314159, Payload = CreateZerosPayload(271828) }; var response1 = client.UnaryCall(compressedRequest, CreateClientCompressionMetadata(true)); Assert.AreEqual(314159, response1.Payload.Body.Length); var uncompressedRequest = new SimpleRequest { ExpectCompressed = new BoolValue { Value = false }, ResponseSize = 314159, Payload = CreateZerosPayload(271828) }; var response2 = client.UnaryCall(uncompressedRequest, CreateClientCompressionMetadata(false)); Assert.AreEqual(314159, response2.Payload.Body.Length); Console.WriteLine("Passed!"); } public static async Task RunClientCompressedStreamingAsync(TestService.TestServiceClient client) { Console.WriteLine("running client_compressed_streaming"); try { var probeCall = client.StreamingInputCall(CreateClientCompressionMetadata(false)); await probeCall.RequestStream.WriteAsync(new StreamingInputCallRequest { ExpectCompressed = new BoolValue { Value = true }, Payload = CreateZerosPayload(27182) }); // cannot use Assert.ThrowsAsync because it uses Task.Wait and would deadlock. await probeCall; Assert.Fail(); } catch (RpcException e) { Assert.AreEqual(StatusCode.InvalidArgument, e.Status.StatusCode); } var call = client.StreamingInputCall(CreateClientCompressionMetadata(true)); await call.RequestStream.WriteAsync(new StreamingInputCallRequest { ExpectCompressed = new BoolValue { Value = true }, Payload = CreateZerosPayload(27182) }); call.RequestStream.WriteOptions = new WriteOptions(WriteFlags.NoCompress); await call.RequestStream.WriteAsync(new StreamingInputCallRequest { ExpectCompressed = new BoolValue { Value = false }, Payload = CreateZerosPayload(45904) }); await call.RequestStream.CompleteAsync(); var response = await call.ResponseAsync; Assert.AreEqual(73086, response.AggregatedPayloadSize); Console.WriteLine("Passed!"); } private static Payload CreateZerosPayload(int size) { return new Payload { Body = ByteString.CopyFrom(new byte[size]) }; } private static Metadata CreateClientCompressionMetadata(bool compressed) { var algorithmName = compressed ? 
"gzip" : "identity"; return new Metadata { { new Metadata.Entry(Metadata.CompressionRequestAlgorithmMetadataKey, algorithmName) } }; } // extracts the client_email field from service account file used for auth test cases private static string GetEmailFromServiceAccountFile() { string keyFile = Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS"); Assert.IsNotNull(keyFile); var jobject = JObject.Parse(File.ReadAllText(keyFile)); string email = jobject.GetValue("client_email").Value<string>(); Assert.IsTrue(email.Length > 0); // spec requires nonempty client email. return email; } private static Metadata CreateTestMetadata() { return new Metadata { {"x-grpc-test-echo-initial", "test_initial_metadata_value"}, {"x-grpc-test-echo-trailing-bin", new byte[] {0xab, 0xab, 0xab}} }; } } }
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/ // Portions Copyright 2000-2004 Jonathan de Halleux // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; #pragma warning disable 67 namespace MbUnit.TestResources.Reflection { /// <summary> /// This class contains a variety of different types that are used by /// the reflection policy tests. /// </summary> public class ReflectionPolicySample { public abstract class Class1 { static Class1() { } public void Method1<T>(T param) { Event1 += null; } [return: Sample(typeof(int))] protected abstract int Method2(); public int Field1; internal object Field2 = null; public int Property1 { get { return 0; } } public int Property2 { get { return 0; } set { } } protected abstract string Property3 { set; } [Sample(typeof(int))] public event EventHandler Event1; protected abstract event EventHandler Event2; public static bool operator ==(Class1 x, Class1 y) { return true; } public static bool operator !=(Class1 x, Class1 y) { return false; } public override bool Equals(object obj) { return true; } public override int GetHashCode() { return 0; } } [Sample(typeof(int))] internal class Class2 : Class1 { protected override event EventHandler Event2 { add { } remove { } } protected override string Property3 { set { } } protected override int Method2() { return 0; } } [Sample(typeof(string[]), Field = 2, Property = "foo")] internal class Class3 : Class2 { } public struct Struct1<[Sample(typeof(int))] S, T> : Interface1 { public Struct1(S s, [Sample(typeof(string[]))] T t) { } string Interface1.Method1([Sample(typeof(int), Field = 5)] string s, int x) { return ""; } } [Sample(typeof(string[]), Field = 2, Property = "foo")] public interface Interface1 { string Method1(string s, int x); } [Sample(typeof(int))] public interface Interface2 { } /// <summary> /// This sample is a bit of a torture test for the exhaustive test case. /// </summary> [Sample(typeof(int))] [Serializable] public class TortureTest<[Sample(typeof(int))] T> { [Sample(typeof(int))] public int Field; public int Field2; private int nonInheritedField = 0; public const int Constant = 42; // Causes Cecil to throw NotSupportedException. 
//public volatile int VolatileField; public readonly int ReadOnlyField = 42; [return: Sample(typeof(int))] [Sample(typeof(int))] public virtual S InheritedMethod<[Sample(typeof(int))] S>([Sample(typeof(int))] S s, T t) { return default(S); } [return: Sample(typeof(string[]), Field = 2, Property = "foo")] [Sample(typeof(string[]), Field = 2, Property = "foo")] public virtual T InheritedMethod<[Sample(typeof(string[]), Field = 2, Property = "foo")] S>(T t, [Sample(typeof(string[]), Field = 2, Property = "foo")] S s) { return default(T); } [return: Sample(typeof(int))] [Sample(typeof(int))] public S NonInheritedMethod<[Sample(typeof(int))] S>([Sample(typeof(int))] S x) { return x; } [return: Sample(typeof(int))] [Sample(typeof(int))] public virtual S NonInheritedMethod2<[Sample(typeof(int))] S>([Sample(typeof(int))] S x) { return x; } [Sample(typeof(int))] public virtual event EventHandler InheritedEvent { add { } remove { } } [Sample(typeof(int))] public event EventHandler NonInheritedEvent { add { } remove { } } [Sample(typeof(int))] public virtual event EventHandler NonInheritedEvent2 { add { } remove { } } [Sample(typeof(int))] public virtual int InheritedProperty { get { return 0; } } public virtual int InheritedProperty2 { set { } } public virtual int InheritedProperty3 { get { return nonInheritedField; } protected set { } } [Sample(typeof(int))] public int NonInherited { get { return 0; } } public virtual int NonInherited2 { get { return 0; } } public int this[string index] { get { return 0; } set { } } public string this[int index1, int index2] { get { return ""; } set { } } public class NestedType { public class DirectlyRecursiveNestedType : NestedType { } public class MiddleType { public class IndirectlyRecursiveNestedType : NestedType { } } } public class GenericDoublyNestedType<S> { } } public class TortureTest2<[Sample(typeof(string[]), Field = 2, Property = "foo")] T> : TortureTest<T[]> { new public string Field2; ~TortureTest2() { } [Sample(typeof(int))] public override S InheritedMethod<S>(S s, [Sample(typeof(int))] T[] t) { return default(S); } [Sample(typeof(int))] public override T[] InheritedMethod<S>([Sample(typeof(int))] T[] t, S s) { return null; } [return: Sample(typeof(int))] [Sample(typeof(int))] new public S NonInheritedMethod<[Sample(typeof(int))] S>([Sample(typeof(int))] S x) { return x; } [return: Sample(typeof(int))] [Sample(typeof(int))] new public virtual S NonInheritedMethod2<[Sample(typeof(int))] S>([Sample(typeof(int))] S x) { return x; } public override event EventHandler InheritedEvent { add { } remove { } } new public event EventHandler NonInheritedEvent { add { } remove { } } new public virtual event EventHandler NonInheritedEvent2 { add { } remove { } } public override int InheritedProperty { get { return 0; } } public override int InheritedProperty2 { set { } } public override int InheritedProperty3 { get { return 0; } protected set { } } new public int NonInherited { get { return 0; } } new public virtual int NonInherited2 { get { return 0; } } } } } /// <summary> /// A test type in the global namespace. /// </summary> internal class ReflectionPolicySampleInGlobalNamespace { }
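// Usage sketch (not part of the original test resources): a minimal reflection pass of the
// kind the reflection policy tests perform over the sample types above. Assumes a reference
// to the MbUnit.TestResources assembly; the walker class itself is hypothetical.
using System;
using System.Reflection;
using MbUnit.TestResources.Reflection;

internal static class ReflectionPolicySampleWalker
{
    public static void Main()
    {
        // Enumerate every nested sample type and report its declared members and how many
        // custom attributes (e.g. [Sample]) are applied to each.
        foreach (Type type in typeof(ReflectionPolicySample).GetNestedTypes(
            BindingFlags.Public | BindingFlags.NonPublic))
        {
            Console.WriteLine("Type: {0}", type.Name);
            foreach (MemberInfo member in type.GetMembers(
                BindingFlags.Public | BindingFlags.NonPublic |
                BindingFlags.Instance | BindingFlags.Static | BindingFlags.DeclaredOnly))
            {
                object[] attributes = member.GetCustomAttributes(false);
                Console.WriteLine("  {0} {1} ({2} attribute(s))", member.MemberType, member.Name, attributes.Length);
            }
        }
    }
}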
using Lucene.Net.Diagnostics; using System.Diagnostics; using System; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // javadocs using IBits = Lucene.Net.Util.IBits; /// <summary> /// <see cref="AtomicReader"/> is an abstract class, providing an interface for accessing an /// index. Search of an index is done entirely through this abstract interface, /// so that any subclass which implements it is searchable. <see cref="IndexReader"/>s implemented /// by this subclass do not consist of several sub-readers, /// they are atomic. They support retrieval of stored fields, doc values, terms, /// and postings. /// /// <para/>For efficiency, in this API documents are often referred to via /// <i>document numbers</i>, non-negative integers which each name a unique /// document in the index. These document numbers are ephemeral -- they may change /// as documents are added to and deleted from an index. Clients should thus not /// rely on a given document having the same number between sessions. /// /// <para/> /// <b>NOTE</b>: <see cref="IndexReader"/> /// instances are completely thread /// safe, meaning multiple threads can call any of its methods, /// concurrently. If your application requires external /// synchronization, you should <b>not</b> synchronize on the /// <see cref="IndexReader"/> instance; use your own /// (non-Lucene) objects instead. /// </summary> public abstract class AtomicReader : IndexReader { private readonly AtomicReaderContext readerContext; // LUCENENET: marked readonly /// <summary> /// Sole constructor. (For invocation by subclass /// constructors, typically implicit.) /// </summary> protected AtomicReader() : base() { readerContext = new AtomicReaderContext(this); } public sealed override IndexReaderContext Context { get { EnsureOpen(); return readerContext; } } /// <summary> /// LUCENENET specific property that allows access to /// the context as <see cref="AtomicReaderContext"/>, /// which prevents the need to cast. /// </summary> public AtomicReaderContext AtomicContext { get { EnsureOpen(); return readerContext; } } /// <summary> /// Returns true if there are norms stored for this <paramref name="field"/>. /// </summary> [Obsolete("(4.0) use FieldInfos and check FieldInfo.HasNorms for the field instead.")] public bool HasNorms(string field) { EnsureOpen(); // note: using normValues(field) != null would potentially cause i/o FieldInfo fi = FieldInfos.FieldInfo(field); return fi != null && fi.HasNorms; } /// <summary> /// Returns <see cref="Index.Fields"/> for this reader. /// This property may return <c>null</c> if the reader has no /// postings.
/// </summary> public abstract Fields Fields { get; } public override sealed int DocFreq(Term term) { Fields fields = Fields; if (fields == null) { return 0; } Terms terms = fields.GetTerms(term.Field); if (terms == null) { return 0; } TermsEnum termsEnum = terms.GetEnumerator(); if (termsEnum.SeekExact(term.Bytes)) { return termsEnum.DocFreq; } else { return 0; } } /// <summary> /// Returns the number of documents containing the <paramref name="term"/>. /// This method returns 0 if the term or /// field does not exist. This method does not take into /// account deleted documents that have not yet been merged /// away. /// </summary> public override sealed long TotalTermFreq(Term term) { Fields fields = Fields; if (fields == null) { return 0; } Terms terms = fields.GetTerms(term.Field); if (terms == null) { return 0; } TermsEnum termsEnum = terms.GetEnumerator(); if (termsEnum.SeekExact(term.Bytes)) { return termsEnum.TotalTermFreq; } else { return 0; } } public override sealed long GetSumDocFreq(string field) { Terms terms = GetTerms(field); if (terms == null) { return 0; } return terms.SumDocFreq; } public override sealed int GetDocCount(string field) { Terms terms = GetTerms(field); if (terms == null) { return 0; } return terms.DocCount; } public override sealed long GetSumTotalTermFreq(string field) { Terms terms = GetTerms(field); if (terms == null) { return 0; } return terms.SumTotalTermFreq; } /// <summary> /// This may return <c>null</c> if the field does not exist. </summary> public Terms GetTerms(string field) // LUCENENET specific: Renamed from Terms() { Fields fields = Fields; if (fields == null) { return null; } return fields.GetTerms(field); } /// <summary> /// Returns <see cref="DocsEnum"/> for the specified term. /// This will return <c>null</c> if either the field or /// term does not exist. /// </summary> /// <seealso cref="TermsEnum.Docs(IBits, DocsEnum)"/> public DocsEnum GetTermDocsEnum(Term term) // LUCENENET specific: Renamed from TermDocsEnum() { if (Debugging.AssertsEnabled) { Debugging.Assert(term.Field != null); Debugging.Assert(term.Bytes != null); } Fields fields = Fields; if (fields != null) { Terms terms = fields.GetTerms(term.Field); if (terms != null) { TermsEnum termsEnum = terms.GetEnumerator(); if (termsEnum.SeekExact(term.Bytes)) { return termsEnum.Docs(LiveDocs, null); } } } return null; } /// <summary> /// Returns <see cref="DocsAndPositionsEnum"/> for the specified /// term. This will return <c>null</c> if the /// field or term does not exist or positions weren't indexed. </summary> /// <seealso cref="TermsEnum.DocsAndPositions(IBits, DocsAndPositionsEnum)"/> public DocsAndPositionsEnum GetTermPositionsEnum(Term term) // LUCENENET specific: Renamed from TermPositionsEnum() { if (Debugging.AssertsEnabled) Debugging.Assert(term.Field != null); if (Debugging.AssertsEnabled) Debugging.Assert(term.Bytes != null); Fields fields = Fields; if (fields != null) { Terms terms = fields.GetTerms(term.Field); if (terms != null) { TermsEnum termsEnum = terms.GetEnumerator(); if (termsEnum.SeekExact(term.Bytes)) { return termsEnum.DocsAndPositions(LiveDocs, null); } } } return null; } /// <summary> /// Returns <see cref="NumericDocValues"/> for this field, or /// null if no <see cref="NumericDocValues"/> were indexed for /// this field. The returned instance should only be /// used by a single thread. 
/// </summary> public abstract NumericDocValues GetNumericDocValues(string field); /// <summary> /// Returns <see cref="BinaryDocValues"/> for this field, or /// <c>null</c> if no <see cref="BinaryDocValues"/> were indexed for /// this field. The returned instance should only be /// used by a single thread. /// </summary> public abstract BinaryDocValues GetBinaryDocValues(string field); /// <summary> /// Returns <see cref="SortedDocValues"/> for this field, or /// <c>null</c> if no <see cref="SortedDocValues"/> were indexed for /// this field. The returned instance should only be /// used by a single thread. /// </summary> public abstract SortedDocValues GetSortedDocValues(string field); /// <summary> /// Returns <see cref="SortedSetDocValues"/> for this field, or /// <c>null</c> if no <see cref="SortedSetDocValues"/> were indexed for /// this field. The returned instance should only be /// used by a single thread. /// </summary> public abstract SortedSetDocValues GetSortedSetDocValues(string field); /// <summary> /// Returns a <see cref="IBits"/> at the size of <c>reader.MaxDoc</c>, /// with turned on bits for each docid that does have a value for this field, /// or <c>null</c> if no <see cref="DocValues"/> were indexed for this field. The /// returned instance should only be used by a single thread. /// </summary> public abstract IBits GetDocsWithField(string field); /// <summary> /// Returns <see cref="NumericDocValues"/> representing norms /// for this field, or <c>null</c> if no <see cref="NumericDocValues"/> /// were indexed. The returned instance should only be /// used by a single thread. /// </summary> public abstract NumericDocValues GetNormValues(string field); /// <summary> /// Get the <see cref="Index.FieldInfos"/> describing all fields in /// this reader. /// <para/> /// @lucene.experimental /// </summary> public abstract FieldInfos FieldInfos { get; } /// <summary> /// Returns the <see cref="IBits"/> representing live (not /// deleted) docs. A set bit indicates the doc ID has not /// been deleted. If this method returns <c>null</c> it means /// there are no deleted documents (all documents are /// live). /// <para/> /// The returned instance has been safely published for /// use by multiple threads without additional /// synchronization. /// </summary> public abstract IBits LiveDocs { get; } /// <summary> /// Checks consistency of this reader. /// <para/> /// Note that this may be costly in terms of I/O, e.g. /// may involve computing a checksum value against large data files. /// <para/> /// @lucene.internal /// </summary> public abstract void CheckIntegrity(); } }
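// Usage sketch (not part of the Lucene.NET file above): AtomicReader instances are usually
// obtained from the leaves of a composite DirectoryReader. The index path, field name, and
// term text below are placeholder assumptions.
using System;
using System.IO;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;

internal static class AtomicReaderUsage
{
    public static void Main()
    {
        using (var dir = FSDirectory.Open(new DirectoryInfo("/path/to/index")))
        using (DirectoryReader reader = DirectoryReader.Open(dir))
        {
            var term = new Term("body", "lucene");

            // A DirectoryReader is composite; the per-segment AtomicReaders hang off its leaves.
            foreach (AtomicReaderContext context in reader.Leaves)
            {
                AtomicReader atomicReader = context.AtomicReader;

                // DocFreq and GetTermDocsEnum are the members defined on AtomicReader above.
                Console.WriteLine("segment docFreq({0}) = {1}", term, atomicReader.DocFreq(term));

                DocsEnum docs = atomicReader.GetTermDocsEnum(term);
                if (docs == null) continue; // field or term absent in this segment

                int doc;
                while ((doc = docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                {
                    Console.WriteLine("  doc {0}", doc);
                }
            }
        }
    }
}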
// Copyright 2019 Esri // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using ArcGIS.Core.CIM; using ArcGIS.Desktop.Core; using ArcGIS.Desktop.Framework; using ArcGIS.Desktop.Framework.Contracts; using ArcGIS.Desktop.Framework.Threading.Tasks; using ArcGIS.Desktop.Mapping; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Windows; using ComboBox = ArcGIS.Desktop.Framework.Contracts.ComboBox; namespace ScientificDataStatisticalAnalysis { internal class CellStatisticsComboBox : ComboBox { // The relative path of the raster function template. // Note: Before using the add-in, please import the Scientific_data_calculation raster function template (RFT) // in the Project1 subcategory of the Project category on the Raster Functions pane, and save the ArcGIS Pro project. // The Scientific_data_calculation RFT can be located in the add-in's Visual Studio project folder. public static string fileRelativePath = @"\RasterFunctionTemplates\Project1\Scientific_data_calculation.rft.xml"; // Defines an instance that is used to save the default rendering rule of the selected layer. CIMRenderingRule renderingRule_default = null; // Defines an instance for the selected combo box item. public static ComboBoxItem selectedComboBoxItem = null; /// <summary> /// Enumeration of the operations provided in the combo box. /// </summary> public enum CellStatistics_Operations { Majority = 38, Maximum = 39, Mean = 40, Median = 41, Minimum = 42, Minority = 43, Range = 47, StandardDeviation = 54, Sum = 55, Variety = 58, // Other operations that are also included in the cell statistics raster function. //MajorityIgnoreNoData = 66, //MaximumIgnoreNoData = 67, //MeanIgnoreNoData = 68, //MedianIgnoreNoData = 69, //MinimumIgnoreNoData = 70, //MinorityIgnoreNoData = 71, //RangeIgnoreNoData = 72, //StandardDeviationIgnoreNoData = 73, //SumIgnoreNoData = 74, //VarietyIgnoreNoData = 75 } /// <summary> /// Constructor of the combo box. /// Subscribes the SelectedLayersChanged handler to the TOC selection changed event. /// </summary> public CellStatisticsComboBox() { try { // Subscribes to the layer selection changed event. ArcGIS.Desktop.Mapping.Events.TOCSelectionChangedEvent.Subscribe(SelectedLayersChanged); SelectedLayersChanged(null); } catch (Exception ex) { MessageBox.Show("Exception caught on Updating combo box:" + ex.Message, "Exception", MessageBoxButton.OK, MessageBoxImage.Error); } // When the DefQueryEditBox text changes, calls the onEditBoxChanged handler. DefQueryEditBox.TextChanged += onEditBoxChanged; // Initializes the combo box selected item. InitializeComboBox(); } /// <summary> /// Initializes the combo box items. /// </summary> private void InitializeComboBox() { // Resets the combo box. SelectedItem = -1; // Adds an option to reset the layer to its default status. Add(new ComboBoxItem("None")); // Adds the values in the CellStatistics_Operations enumeration as the combo box items. foreach (Enum _operation in Enum.GetValues(typeof(CellStatistics_Operations))) { // Adds the enum items by name.
string _operationName = Convert.ToString(_operation); Add(new ComboBoxItem(_operationName)); } // Selects the first combo box item. //SelectedIndex = 0; } /// <summary> /// Event handler for layer selection change. /// </summary> /// <param name="mapViewArgs">An instance of the MapViewEventArgs.</param> private async void SelectedLayersChanged(ArcGIS.Desktop.Mapping.Events.MapViewEventArgs mapViewArgs) { // Check if there is an active map view. if (MapView.Active != null) { // Gets the selected layers from the current Map. IReadOnlyList<Layer> selectedLayers = MapView.Active.GetSelectedLayers(); // The combo box will update only if one layer is selected. if (selectedLayers.Count == 1) { // Gets the selected layer. Layer firstSelectedLayer = selectedLayers.First(); // Make sure the selected layer is an image service layer. if (firstSelectedLayer != null && firstSelectedLayer is ImageServiceLayer) { // Initializes the combo box selected item. //InitializeComboBox(); await QueuedTask.Run(() => { // Get and store the rendering rule of the selected image service layer. renderingRule_default = (firstSelectedLayer as ImageServiceLayer).GetRenderingRule(); }); } } } else MessageBox.Show("There is no active map.", "Error", MessageBoxButton.OK, MessageBoxImage.Error); } /// <summary> /// Called when the combo box item selection changes. /// </summary> /// <param name="item"> The selected combo box item. </param> protected override async void OnSelectionChange(ComboBoxItem item) { // If the selected combo box item is null, return. if (item == null) return; // Passes the current selected combo box item to the selectedComboBoxItem. selectedComboBoxItem = item; // If the item text is empty, return. if (string.IsNullOrEmpty(item.Text)) return; // Try and get the first selected layer. Layer firstSelectedLayer = null; try { firstSelectedLayer = MapView.Active.GetSelectedLayers().First(); } catch (Exception) { } // Check if there are any selected layers and if the first selected layer is an image service layer. if (!(firstSelectedLayer !=null && firstSelectedLayer is ImageServiceLayer)) { MessageBox.Show("Please select an image service layer."); return; } ImageServiceLayer selectedLayer = firstSelectedLayer as ImageServiceLayer; // Enters if the selected combo box item is not the default 'None' item; otherwise restores the original rendering rule on the selected layer. if (item.Text != "None" && item.Text !=null) { // Gets the operation enum item from its name. CellStatistics_Operations operation = (CellStatistics_Operations)Enum.Parse(typeof(CellStatistics_Operations), item.Text); try { string rftFilePath = Project.Current.HomeFolderPath + fileRelativePath; // Customizes the raster function template XML file using the user-defined definition query and operation. string xmlFilePath = Process.CustomRFTXmlFile(rftFilePath, operation, DefQueryEditBox.passingText); // Applies the custom raster function template to the selected layer. await Process.ApplyRFTXmlFile(selectedLayer, operation, xmlFilePath); } catch (Exception ex) { MessageBox.Show("Exception caught in OnSelectionChange:" + ex.Message, "Exception", MessageBoxButton.OK, MessageBoxImage.Error); } } else { await QueuedTask.Run(() => { // Sets the default rendering rule back on the selected image service layer. selectedLayer.SetRenderingRule(renderingRule_default); }); } } /// <summary> /// Called when the text is changed in the Definition query edit box.
/// </summary> /// <param name="source">The event source.</param> /// <param name="e">The text event arguments.</param> public void onEditBoxChanged(object source, TextEventArgs e) { // When the edit box text is updated, applies the rendering rule to the selected layer again. OnSelectionChange(selectedComboBoxItem); } } }
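// Usage sketch (not part of the original add-in): the core pattern the combo box relies on,
// reduced to a standalone helper. Getting and setting an image service layer's rendering
// rule must run on the MCT via QueuedTask.Run; the helper names are hypothetical.
using System.Linq;
using System.Threading.Tasks;
using ArcGIS.Core.CIM;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using ArcGIS.Desktop.Mapping;

namespace ScientificDataStatisticalAnalysis
{
    internal static class RenderingRuleHelper
    {
        // Captures the current rendering rule of the first selected image service layer so it
        // can be restored later (mirrors renderingRule_default in CellStatisticsComboBox).
        public static Task<CIMRenderingRule> CaptureDefaultRuleAsync()
        {
            return QueuedTask.Run(() =>
            {
                var layer = MapView.Active?.GetSelectedLayers()
                    .OfType<ImageServiceLayer>().FirstOrDefault();
                return layer?.GetRenderingRule();
            });
        }

        // Restores a previously captured rendering rule, as the "None" combo box item does.
        public static Task RestoreRuleAsync(ImageServiceLayer layer, CIMRenderingRule rule)
        {
            return QueuedTask.Run(() => layer.SetRenderingRule(rule));
        }
    }
}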
/* Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT License. See License.txt in the project root for license information. */ using System; using System.Collections.Generic; using System.Configuration; using System.Linq; using Microsoft.Xrm.Sdk.Metadata; namespace Microsoft.Xrm.Portal.IdentityModel.Configuration { /// <summary> /// The configuration settings for user registration. /// </summary> /// <seealso cref="FederationCrmConfigurationManager"/> public sealed class UserRegistrationElement : ConfigurationElement, IUserRegistrationSettings { private static readonly ConfigurationPropertyCollection _properties; private static readonly ConfigurationProperty _propPortalName; private static readonly ConfigurationProperty _propEnabled; private static readonly ConfigurationProperty _propRequiresInvitation; private static readonly ConfigurationProperty _propRequiresChallengeAnswer; private static readonly ConfigurationProperty _propRequiresConfirmation; private static readonly ConfigurationProperty _propAttributeMapInvitationCode; private static readonly ConfigurationProperty _propAttributeMapInvitationCodeExpiryDate; private static readonly ConfigurationProperty _propAttributeMapChallengeAnswer; private static readonly ConfigurationProperty _propAttributeMapLogonEnabled; private static readonly ConfigurationProperty _propAttributeMapEmail; private static readonly ConfigurationProperty _propAttributeMapDisplayName; private static readonly ConfigurationProperty _propAttributeMapLastSuccessfulLogon; private static readonly ConfigurationProperty _propAttributeMapIdentityProvider; private static readonly ConfigurationProperty _propEmailClaimType; private static readonly ConfigurationProperty _propDisplayNameClaimType; private static readonly ConfigurationProperty _propReturnUrlKey; private static readonly ConfigurationProperty _propInvitationCodeKey; private static readonly ConfigurationProperty _propChallengeAnswerKey; private static readonly ConfigurationProperty _propLiveIdTokenKey; private static readonly ConfigurationProperty _propResultCodeKey; private static readonly ConfigurationProperty _propDefaultReturnPath; private static readonly ConfigurationProperty _propProfilePath; private static readonly ConfigurationProperty _propRegistrationPath; private static readonly ConfigurationProperty _propConfirmationPath; private static readonly ConfigurationProperty _propErrorPath; private static readonly ConfigurationProperty _propAccountTransferPath; private static readonly ConfigurationProperty _propUnregisteredUserPath; private static readonly ConfigurationProperty _propInvitationCodeDuration; private static readonly ConfigurationProperty _propRequiredLevel; private static readonly ConfigurationProperty _propSignUpAttributes; static UserRegistrationElement() { _propPortalName = new ConfigurationProperty("portalName", typeof(string), null, ConfigurationPropertyOptions.None); _propEnabled = new ConfigurationProperty("enabled", typeof(bool), false, ConfigurationPropertyOptions.None); _propRequiresInvitation = new ConfigurationProperty("requiresInvitation", typeof(bool), true, ConfigurationPropertyOptions.None); _propRequiresChallengeAnswer = new ConfigurationProperty("requiresChallengeAnswer", typeof(bool), true, ConfigurationPropertyOptions.None); _propRequiresConfirmation = new ConfigurationProperty("requiresConfirmation", typeof(bool), true, ConfigurationPropertyOptions.None); _propAttributeMapInvitationCode = new ConfigurationProperty("attributeMapInvitationCode", 
typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapInvitationCodeExpiryDate = new ConfigurationProperty("atributeMapInvitationCodeExpiryDate", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapChallengeAnswer = new ConfigurationProperty("attributeMapChallengeAnswer", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapLogonEnabled = new ConfigurationProperty("attributeMapLogonEnabled", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapEmail = new ConfigurationProperty("attributeMapEmail", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapDisplayName = new ConfigurationProperty("attributeMapDisplayName", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapLastSuccessfulLogon = new ConfigurationProperty("attributeMapLastSuccessfulLogon", typeof(string), null, ConfigurationPropertyOptions.None); _propAttributeMapIdentityProvider = new ConfigurationProperty("attributeMapIdentityProvider", typeof(string), null, ConfigurationPropertyOptions.None); _propEmailClaimType = new ConfigurationProperty("emailClaimType", typeof(string), null, ConfigurationPropertyOptions.None); _propDisplayNameClaimType = new ConfigurationProperty("displayNameClaimType", typeof(string), null, ConfigurationPropertyOptions.None); _propReturnUrlKey = new ConfigurationProperty("returnUrlKey", typeof(string), null, ConfigurationPropertyOptions.None); _propInvitationCodeKey = new ConfigurationProperty("invitationCodeKey", typeof(string), null, ConfigurationPropertyOptions.None); _propChallengeAnswerKey = new ConfigurationProperty("challengeAnswerKey", typeof(string), null, ConfigurationPropertyOptions.None); _propLiveIdTokenKey = new ConfigurationProperty("liveIdTokenKey", typeof(string), null, ConfigurationPropertyOptions.None); _propResultCodeKey = new ConfigurationProperty("resultCodeKey", typeof(string), null, ConfigurationPropertyOptions.None); _propDefaultReturnPath = new ConfigurationProperty("defaultReturnPath", typeof(string), null, ConfigurationPropertyOptions.None); _propProfilePath = new ConfigurationProperty("profilePath", typeof(string), null, ConfigurationPropertyOptions.None); _propRegistrationPath = new ConfigurationProperty("registrationPath", typeof(string), null, ConfigurationPropertyOptions.None); _propConfirmationPath = new ConfigurationProperty("confirmationPath", typeof(string), null, ConfigurationPropertyOptions.None); _propErrorPath = new ConfigurationProperty("errorPath", typeof(string), null, ConfigurationPropertyOptions.None); _propAccountTransferPath = new ConfigurationProperty("accountTransferPath", typeof(string), null, ConfigurationPropertyOptions.None); _propUnregisteredUserPath = new ConfigurationProperty("unregisteredUserPath", typeof(string), null, ConfigurationPropertyOptions.None); _propInvitationCodeDuration = new ConfigurationProperty("invitationCodeDuration", typeof(TimeSpan?), null, ConfigurationPropertyOptions.None); _propRequiredLevel = new ConfigurationProperty("requiredLevel", typeof(AttributeRequiredLevel), AttributeRequiredLevel.ApplicationRequired, ConfigurationPropertyOptions.None); _propSignUpAttributes = new ConfigurationProperty(SignUpAttributeElementCollection.Name, typeof(SignUpAttributeElementCollection), new SignUpAttributeElementCollection(), ConfigurationPropertyOptions.None); _properties = new ConfigurationPropertyCollection { _propPortalName, _propEnabled, _propRequiresInvitation, _propRequiresChallengeAnswer, 
_propRequiresConfirmation, _propAttributeMapInvitationCode, _propAttributeMapInvitationCodeExpiryDate, _propAttributeMapChallengeAnswer, _propAttributeMapLogonEnabled, _propAttributeMapEmail, _propAttributeMapDisplayName, _propAttributeMapLastSuccessfulLogon, _propAttributeMapIdentityProvider, _propEmailClaimType, _propDisplayNameClaimType, _propReturnUrlKey, _propInvitationCodeKey, _propChallengeAnswerKey, _propLiveIdTokenKey, _propResultCodeKey, _propDefaultReturnPath, _propProfilePath, _propRegistrationPath, _propConfirmationPath, _propErrorPath, _propAccountTransferPath, _propUnregisteredUserPath, _propInvitationCodeDuration, _propRequiredLevel, _propSignUpAttributes, }; } protected override ConfigurationPropertyCollection Properties { get { return _properties; } } public override bool IsReadOnly() { return false; } /// <summary> /// The name of the associated portal context for retrieving portal related settings. /// </summary> public string PortalName { get { return (string)base[_propPortalName]; } set { base[_propPortalName] = value; } } /// <summary> /// Allows all user registration processing to be globally enabled or disabled. /// </summary> public bool Enabled { get { return (bool)base[_propEnabled]; } set { base[_propEnabled] = value; } } /// <summary> /// Forces an invitation code to be provided before a user is registered. /// </summary> public bool RequiresInvitation { get { return (bool)base[_propRequiresInvitation]; } set { base[_propRequiresInvitation] = value; } } /// <summary> /// Forces a challenge answer to be provided in addition to an invitation code before a user is registered. /// </summary> public bool RequiresChallengeAnswer { get { return (bool)base[_propRequiresChallengeAnswer]; } set { base[_propRequiresChallengeAnswer] = value; } } /// <summary> /// Enables an email workflow to be triggered to confirm that the registering user possesses a valid email address. Enabling this prevents open registration from occurring. /// </summary> public bool RequiresConfirmation { get { return (bool)base[_propRequiresConfirmation]; } set { base[_propRequiresConfirmation] = value; } } /// <summary> /// The logical name of the invitation code attribute. /// </summary> public string AttributeMapInvitationCode { get { return (string)base[_propAttributeMapInvitationCode]; } set { base[_propAttributeMapInvitationCode] = value; } } /// <summary> /// The logical name of the invitation code expiry date attribute. /// </summary> public string AttributeMapInvitationCodeExpiryDate { get { return (string)base[_propAttributeMapInvitationCodeExpiryDate]; } set { base[_propAttributeMapInvitationCodeExpiryDate] = value; } } /// <summary> /// The logical name of the challenge answer attribute. /// </summary> public string AttributeMapChallengeAnswer { get { return (string)base[_propAttributeMapChallengeAnswer]; } set { base[_propAttributeMapChallengeAnswer] = value; } } /// <summary> /// The logical name of the logon enabled attribute. /// </summary> public string AttributeMapLogonEnabled { get { return (string)base[_propAttributeMapLogonEnabled]; } set { base[_propAttributeMapLogonEnabled] = value; } } /// <summary> /// The logical name of the email attribute. /// </summary> public string AttributeMapEmail { get { return (string)base[_propAttributeMapEmail]; } set { base[_propAttributeMapEmail] = value; } } /// <summary> /// The logical name of the display name attribute. 
/// </summary> public string AttributeMapDisplayName { get { return (string)base[_propAttributeMapDisplayName]; } set { base[_propAttributeMapDisplayName] = value; } } /// <summary> /// The logical name of the last successful logon attribute. /// </summary> public string AttributeMapLastSuccessfulLogon { get { return (string)base[_propAttributeMapLastSuccessfulLogon]; } set { base[_propAttributeMapLastSuccessfulLogon] = value; } } /// <summary> /// The logical name of the identity provider attribute. /// </summary> public string AttributeMapIdentityProvider { get { return (string)base[_propAttributeMapIdentityProvider]; } set { base[_propAttributeMapIdentityProvider] = value; } } /// <summary> /// The claim type of the 'email' claim. /// </summary> public string EmailClaimType { get { return (string)base[_propEmailClaimType]; } set { base[_propEmailClaimType] = value; } } /// <summary> /// The claim type of the 'name' claim. /// </summary> public string DisplayNameClaimType { get { return (string)base[_propDisplayNameClaimType]; } set { base[_propDisplayNameClaimType] = value; } } /// <summary> /// The query string name for the Live ID token. /// </summary> public string LiveIdTokenKey { get { return (string)base[_propLiveIdTokenKey]; } set { base[_propLiveIdTokenKey] = value; } } /// <summary> /// The query string name for the invitation code. /// </summary> public string InvitationCodeKey { get { return (string)base[_propInvitationCodeKey]; } set { base[_propInvitationCodeKey] = value; } } /// <summary> /// The query string name for the challenge answer. /// </summary> public string ChallengeAnswerKey { get { return (string)base[_propChallengeAnswerKey]; } set { base[_propChallengeAnswerKey] = value; } } /// <summary> /// The query string name for the return URL. /// </summary> public string ReturnUrlKey { get { return (string)base[_propReturnUrlKey]; } set { base[_propReturnUrlKey] = value; } } /// <summary> /// The query string name for the result code. /// </summary> public string ResultCodeKey { get { return (string)base[_propResultCodeKey]; } set { base[_propResultCodeKey] = value; } } /// <summary> /// The return path, used by the federation authentication handler, when no explicit path is provided. /// </summary> public string DefaultReturnPath { get { return (string)base[_propDefaultReturnPath]; } set { base[_propDefaultReturnPath] = value; } } /// <summary> /// The location the user is redirected to after a new contact registration is created. If a contact record is missing values for required fields and the 'requiredLevel' attribute is set to the "ApplicationRequired" value, then the user is redirected to this page after every sign-in. This redirect indicates a successful registration or sign-in. If no value is provided, the user is redirected to the 'defaultReturnPath'. /// </summary> public string ProfilePath { get { return (string)base[_propProfilePath]; } set { base[_propProfilePath] = value; } } /// <summary> /// The path to the registration page that renders the form for providing invitation code, challenge question/answer, and open registration details. A user is redirected here if he or she is signing into the portal but is not yet registered. In this case, the sign-in attempt has either failed or is still in progress. If no value is provided, the user is redirected to the 'defaultReturnPath'. 
/// </summary> public string RegistrationPath { get { return (string)base[_propRegistrationPath]; } set { base[_propRegistrationPath] = value; } } /// <summary> /// The location the user is redirected to after a email confirmation workflow is triggered to provide further instructions on completing the registration. If no value is provided, the user is redirected to the 'defaultReturnPath'. /// </summary> public string ConfirmationPath { get { return (string)base[_propConfirmationPath]; } set { base[_propConfirmationPath] = value; } } /// <summary> /// The location the user is redirected to when the federation authentication handler throws an exception. If no value is provided, the user is redirected to the 'defaultReturnPath'. /// </summary> public string ErrorPath { get { return (string)base[_propErrorPath]; } set { base[_propErrorPath] = value; } } /// <summary> /// When the LiveIdAccountTransferHandler (rather than the default LiveIdWebAuthenticationHandler) is wired up as the Windows Live ID authentication sign-in response handler (LiveID.axd), users are redirected to this account transfer page. This page should prompt the user to continue to the next step of signing into an AppFabric ACS account. /// </summary> public string AccountTransferPath { get { return (string)base[_propAccountTransferPath]; } set { base[_propAccountTransferPath] = value; } } /// <summary> /// When the LiveIdAccountTransferHandler is unable to find the Windows Live ID account that a user is attempting to transfer, the user is redirected to this path. /// </summary> public string UnregisteredUserPath { get { return (string)base[_propUnregisteredUserPath]; } set { base[_propUnregisteredUserPath] = value; } } /// <summary> /// When a confirmation email workflow is generated, this value specifies the duration that the invitation code is considered valid. /// </summary> public TimeSpan? InvitationCodeDuration { get { return (TimeSpan?)base[_propInvitationCodeDuration]; } set { base[_propInvitationCodeDuration] = value; } } /// <summary> /// Controls the level of validation when determining if a contact entity contains all the attribute values deemed required. /// </summary> public AttributeRequiredLevel RequiredLevel { get { return (AttributeRequiredLevel)base[_propRequiredLevel]; } set { base[_propRequiredLevel] = value; } } /// <summary> /// A filter for the set of attributes to be accepted as user sign-up attributes. /// </summary> public SignUpAttributeElementCollection SignUpAttributes { get { return (SignUpAttributeElementCollection)base[_propSignUpAttributes]; } set { base[_propSignUpAttributes] = value; } } /// <summary> /// A filter for the set of attributes to be accepted as user sign-up attributes. /// </summary> IEnumerable<string> IUserRegistrationSettings.SignUpAttributes { get { return SignUpAttributes.Cast<SignUpAttributeElement>().Select(attrib => attrib.LogicalName); } } } }
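// ---------------------------------------------------------------------------
// Illustrative usage (not part of the original source): a minimal sketch of a
// consumer of the UserRegistrationElement settings defined above. The
// RegistrationGate class and its method names are hypothetical, and the 7-day
// fallback invitation duration is an assumption made for this example only.
namespace Microsoft.Xrm.Portal.IdentityModel.Configuration.Samples
{
	using System;

	public static class RegistrationGate
	{
		// Decides whether an anonymous visitor may proceed to the registration form,
		// based on the configured registration settings.
		public static bool CanBeginRegistration(UserRegistrationElement settings, bool hasInvitationCode)
		{
			if (!settings.Enabled) return false;                                  // registration globally disabled
			if (settings.RequiresInvitation && !hasInvitationCode) return false;  // invitation-only portal
			return true;
		}

		// Computes when a newly issued invitation code should expire, falling back
		// to an assumed default when no 'invitationCodeDuration' is configured.
		public static DateTime ComputeInvitationExpiry(UserRegistrationElement settings, DateTime issuedOnUtc)
		{
			TimeSpan duration = settings.InvitationCodeDuration ?? TimeSpan.FromDays(7);
			return issuedOnUtc.Add(duration);
		}
	}
}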
/* * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using Amazon.Runtime; namespace Amazon.S3.Model { /// <summary> /// Returns information about the ListVersions response and response metadata. /// </summary> public class ListVersionsResponse : AmazonWebServiceResponse { private bool? isTruncated; private string keyMarker; private string versionIdMarker; private string nextKeyMarker; private string nextVersionIdMarker; private List<S3ObjectVersion> versions = new List<S3ObjectVersion>(); private string name; private string prefix; private int? maxKeys; private List<string> commonPrefixes = new List<string>(); private string delimiter; /// <summary> /// A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria. If your results were /// truncated, you can make a follow-up paginated request using the NextKeyMarker and NextVersionIdMarker response parameters as a starting /// place in another request to return the rest of the results. /// /// </summary> public bool IsTruncated { get { return this.isTruncated ?? default(bool); } set { this.isTruncated = value; } } // Check to see if IsTruncated property is set internal bool IsSetIsTruncated() { return this.isTruncated.HasValue; } /// <summary> /// Marks the last Key returned in a truncated response. /// /// </summary> public string KeyMarker { get { return this.keyMarker; } set { this.keyMarker = value; } } // Check to see if KeyMarker property is set internal bool IsSetKeyMarker() { return this.keyMarker != null; } /// <summary> /// Gets and sets the VersionIdMarker property. /// Marks the last Version-Id returned in a truncated response. /// </summary> public string VersionIdMarker { get { return this.versionIdMarker; } set { this.versionIdMarker = value; } } // Check to see if VersionIdMarker property is set internal bool IsSetVersionIdMarker() { return this.versionIdMarker != null; } /// <summary> /// Use this value for the key marker request parameter in a subsequent request. /// /// </summary> public string NextKeyMarker { get { return this.nextKeyMarker; } set { this.nextKeyMarker = value; } } // Check to see if NextKeyMarker property is set internal bool IsSetNextKeyMarker() { return this.nextKeyMarker != null; } /// <summary> /// Use this value for the next version id marker parameter in a subsequent request. /// /// </summary> public string NextVersionIdMarker { get { return this.nextVersionIdMarker; } set { this.nextVersionIdMarker = value; } } // Check to see if NextVersionIdMarker property is set internal bool IsSetNextVersionIdMarker() { return this.nextVersionIdMarker != null; } /// <summary> /// Gets and sets the Versions property. This is a list of /// object versions in the bucket that match your search criteria. 
/// </summary> public List<S3ObjectVersion> Versions { get { return this.versions; } set { this.versions = value; } } // Check to see if Versions property is set internal bool IsSetVersions() { return this.versions.Count > 0; } /// <summary> /// Gets and sets the Name property. /// The bucket's name. /// </summary> public string Name { get { return this.name; } set { this.name = value; } } // Check to see if Name property is set internal bool IsSetName() { return this.name != null; } /// <summary> /// Gets and sets the Prefix property. /// Keys that begin with the indicated prefix are listed. /// </summary> public string Prefix { get { return this.prefix; } set { this.prefix = value; } } // Check to see if Prefix property is set internal bool IsSetPrefix() { return this.prefix != null; } /// <summary> /// Gets and sets the MaxKeys property. /// This is the maximum number of keys in the S3ObjectVersions collection. /// The value is derived from the MaxKeys parameter to ListVersionsRequest. /// </summary> public int MaxKeys { get { return this.maxKeys ?? default(int); } set { this.maxKeys = value; } } // Check to see if MaxKeys property is set internal bool IsSetMaxKeys() { return this.maxKeys.HasValue; } /// <summary> /// Gets the CommonPrefixes property. /// A response can contain CommonPrefixes only if you specify a delimiter. /// When you do, CommonPrefixes contains all (if there are any) keys between /// Prefix and the next occurrence of the string specified by delimiter. /// </summary> public List<string> CommonPrefixes { get { return this.commonPrefixes; } set { this.commonPrefixes = value; } } // Check to see if CommonPrefixes property is set internal bool IsSetCommonPrefixes() { return this.commonPrefixes.Count > 0; } /// <summary> /// Gets and sets the Delimiter property. /// Causes keys that contain the same string between the prefix and the /// first occurrence of the delimiter to be rolled up into a single result /// element in the CommonPrefixes collection. /// </summary> /// <remarks> /// These rolled-up keys are not returned elsewhere in the response. /// </remarks> public string Delimiter { get { return this.delimiter; } set { this.delimiter = value; } } } }
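// ---------------------------------------------------------------------------
// Illustrative usage (not part of the original source): a minimal pagination
// sketch over ListVersionsResponse. It assumes the companion AmazonS3Client,
// ListVersionsRequest (BucketName, KeyMarker, VersionIdMarker) and
// S3ObjectVersion (Key, VersionId) types from the same SDK generation; verify
// the exact request surface against the SDK version actually in use.
namespace Amazon.S3.Model.Samples
{
	using System;
	using Amazon.S3;

	public static class ListVersionsSample
	{
		// Enumerates every object version in a bucket by following the
		// NextKeyMarker / NextVersionIdMarker continuation tokens until the
		// response is no longer truncated.
		public static void PrintAllVersions(AmazonS3Client client, string bucketName)
		{
			ListVersionsRequest request = new ListVersionsRequest { BucketName = bucketName };

			ListVersionsResponse response;
			do
			{
				response = client.ListVersions(request);

				foreach (S3ObjectVersion version in response.Versions)
				{
					Console.WriteLine("{0} (version {1})", version.Key, version.VersionId);
				}

				// Resume the next page where the truncated page stopped.
				request.KeyMarker = response.NextKeyMarker;
				request.VersionIdMarker = response.NextVersionIdMarker;
			}
			while (response.IsTruncated);
		}
	}
}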
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // A simple coordination data structure that we use for fork/join style parallelism. // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System.Diagnostics; namespace System.Threading { /// <summary> /// Represents a synchronization primitive that is signaled when its count reaches zero. /// </summary> /// <remarks> /// All public and protected members of <see cref="CountdownEvent"/> are thread-safe and may be used /// concurrently from multiple threads, with the exception of Dispose, which /// must only be used when all other operations on the <see cref="CountdownEvent"/> have /// completed, and Reset, which should only be used when no other threads are /// accessing the event. /// </remarks> [DebuggerDisplay("Initial Count={InitialCount}, Current Count={CurrentCount}")] public class CountdownEvent : IDisposable { // CountdownEvent is a simple synchronization primitive used for fork/join parallelism. We create a // latch with a count of N; threads then signal the latch, which decrements N by 1; other threads can // wait on the latch at any point; when the latch count reaches 0, all threads are woken and // subsequent waiters return without waiting. The implementation internally lazily creates a true // Win32 event as needed. We also use some amount of spinning on MP machines before falling back to a // wait. private int _initialCount; // The original # of signals the latch was instantiated with. private volatile int _currentCount; // The # of outstanding signals before the latch transitions to a signaled state. private ManualResetEventSlim _event; // An event used to manage blocking and signaling. private volatile bool _disposed; // Whether the latch has been disposed. /// <summary> /// Initializes a new instance of <see cref="T:System.Threading.CountdownEvent"/> class with the /// specified count. /// </summary> /// <param name="initialCount">The number of signals required to set the <see /// cref="T:System.Threading.CountdownEvent"/>.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="initialCount"/> is less /// than 0.</exception> public CountdownEvent(int initialCount) { if (initialCount < 0) { throw new ArgumentOutOfRangeException("initialCount"); } _initialCount = initialCount; _currentCount = initialCount; // Allocate a thin event, which internally defers creation of an actual Win32 event. _event = new ManualResetEventSlim(); // If the latch was created with a count of 0, then it's already in the signaled state. if (initialCount == 0) { _event.Set(); } } /// <summary> /// Gets the number of remaining signals required to set the event. /// </summary> /// <value> /// The number of remaining signals required to set the event. /// </value> public int CurrentCount { get { int observedCount = _currentCount; return observedCount < 0 ? 0 : observedCount; } } /// <summary> /// Gets the numbers of signals initially required to set the event. /// </summary> /// <value> /// The number of signals initially required to set the event. /// </value> public int InitialCount { get { return _initialCount; } } /// <summary> /// Determines whether the event is set. 
/// </summary> /// <value>true if the event is set; otherwise, false.</value> public bool IsSet { get { // The latch is "completed" if its current count has reached 0. Note that this is NOT // the same thing is checking the event's IsCompleted property. There is a tiny window // of time, after the final decrement of the current count to 0 and before setting the // event, where the two values are out of sync. return (_currentCount <= 0); } } /// <summary> /// Gets a <see cref="T:System.Threading.WaitHandle"/> that is used to wait for the event to be set. /// </summary> /// <value>A <see cref="T:System.Threading.WaitHandle"/> that is used to wait for the event to be set.</value> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been disposed.</exception> /// <remarks> /// <see cref="WaitHandle"/> should only be used if it's needed for integration with code bases /// that rely on having a WaitHandle. If all that's needed is to wait for the <see cref="CountdownEvent"/> /// to be set, the <see cref="Wait()"/> method should be preferred. /// </remarks> public WaitHandle WaitHandle { get { ThrowIfDisposed(); return _event.WaitHandle; } } /// <summary> /// Releases all resources used by the current instance of <see cref="T:System.Threading.CountdownEvent"/>. /// </summary> /// <remarks> /// Unlike most of the members of <see cref="CountdownEvent"/>, <see cref="Dispose()"/> is not /// thread-safe and may not be used concurrently with other members of this instance. /// </remarks> public void Dispose() { // Gets rid of this latch's associated resources. This can consist of a Win32 event // which is (lazily) allocated by the underlying thin event. This method is not safe to // call concurrently -- i.e. a caller must coordinate to ensure only one thread is using // the latch at the time of the call to Dispose. Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// When overridden in a derived class, releases the unmanaged resources used by the /// <see cref="T:System.Threading.CountdownEvent"/>, and optionally releases the managed resources. /// </summary> /// <param name="disposing">true to release both managed and unmanaged resources; false to release /// only unmanaged resources.</param> /// <remarks> /// Unlike most of the members of <see cref="CountdownEvent"/>, <see cref="Dispose()"/> is not /// thread-safe and may not be used concurrently with other members of this instance. /// </remarks> protected virtual void Dispose(bool disposing) { if (disposing) { _event.Dispose(); _disposed = true; } } /// <summary> /// Registers a signal with the <see cref="T:System.Threading.CountdownEvent"/>, decrementing its /// count. /// </summary> /// <returns>true if the signal caused the count to reach zero and the event was set; otherwise, /// false.</returns> /// <exception cref="T:System.InvalidOperationException">The current instance is already set. 
/// </exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool Signal() { ThrowIfDisposed(); Debug.Assert(_event != null); if (_currentCount <= 0) { throw new InvalidOperationException(SR.CountdownEvent_Decrement_BelowZero); } #pragma warning disable 0420 int newCount = Interlocked.Decrement(ref _currentCount); #pragma warning restore 0420 if (newCount == 0) { _event.Set(); return true; } else if (newCount < 0) { //if the count is decremented below zero, then throw, it's OK to keep the count negative, and we shouldn't set the event here //because there was a thread already which decremented it to zero and set the event throw new InvalidOperationException(SR.CountdownEvent_Decrement_BelowZero); } return false; } /// <summary> /// Registers multiple signals with the <see cref="T:System.Threading.CountdownEvent"/>, /// decrementing its count by the specified amount. /// </summary> /// <param name="signalCount">The number of signals to register.</param> /// <returns>true if the signals caused the count to reach zero and the event was set; otherwise, /// false.</returns> /// <exception cref="T:System.InvalidOperationException"> /// The current instance is already set. -or- Or <paramref name="signalCount"/> is greater than <see /// cref="CurrentCount"/>. /// </exception> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="signalCount"/> is less /// than 1.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool Signal(int signalCount) { if (signalCount <= 0) { throw new ArgumentOutOfRangeException("signalCount"); } ThrowIfDisposed(); Debug.Assert(_event != null); int observedCount; SpinWait spin = new SpinWait(); while (true) { observedCount = _currentCount; // If the latch is already signaled, we will fail. if (observedCount < signalCount) { throw new InvalidOperationException(SR.CountdownEvent_Decrement_BelowZero); } // This disables the "CS0420: a reference to a volatile field will not be treated as volatile" warning // for this statement. This warning is clearly senseless for Interlocked operations. #pragma warning disable 0420 if (Interlocked.CompareExchange(ref _currentCount, observedCount - signalCount, observedCount) == observedCount) #pragma warning restore 0420 { break; } // The CAS failed. Spin briefly and try again. spin.SpinOnce(); } // If we were the last to signal, set the event. if (observedCount == signalCount) { _event.Set(); return true; } Debug.Assert(_currentCount >= 0, "latch was decremented below zero"); return false; } /// <summary> /// Increments the <see cref="T:System.Threading.CountdownEvent"/>'s current count by one. /// </summary> /// <exception cref="T:System.InvalidOperationException">The current instance is already /// set.</exception> /// <exception cref="T:System.InvalidOperationException"><see cref="CurrentCount"/> is equal to <see /// cref="T:System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException"> /// The current instance has already been disposed. /// </exception> public void AddCount() { AddCount(1); } /// <summary> /// Attempts to increment the <see cref="T:System.Threading.CountdownEvent"/>'s current count by one. /// </summary> /// <returns>true if the increment succeeded; otherwise, false. If <see cref="CurrentCount"/> is /// already at zero. 
this will return false.</returns> /// <exception cref="T:System.InvalidOperationException"><see cref="CurrentCount"/> is equal to <see /// cref="T:System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool TryAddCount() { return TryAddCount(1); } /// <summary> /// Increments the <see cref="T:System.Threading.CountdownEvent"/>'s current count by a specified /// value. /// </summary> /// <param name="signalCount">The value by which to increase <see cref="CurrentCount"/>.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="signalCount"/> is less than /// 0.</exception> /// <exception cref="T:System.InvalidOperationException">The current instance is already /// set.</exception> /// <exception cref="T:System.InvalidOperationException"><see cref="CurrentCount"/> is equal to <see /// cref="T:System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public void AddCount(int signalCount) { if (!TryAddCount(signalCount)) { throw new InvalidOperationException(SR.CountdownEvent_Increment_AlreadyZero); } } /// <summary> /// Attempts to increment the <see cref="T:System.Threading.CountdownEvent"/>'s current count by a /// specified value. /// </summary> /// <param name="signalCount">The value by which to increase <see cref="CurrentCount"/>.</param> /// <returns>true if the increment succeeded; otherwise, false. If <see cref="CurrentCount"/> is /// already at zero this will return false.</returns> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="signalCount"/> is less /// than 0.</exception> /// <exception cref="T:System.InvalidOperationException">The current instance is already /// set.</exception> /// <exception cref="T:System.InvalidOperationException"><see cref="CurrentCount"/> is equal to <see /// cref="T:System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool TryAddCount(int signalCount) { if (signalCount <= 0) { throw new ArgumentOutOfRangeException("signalCount"); } ThrowIfDisposed(); // Loop around until we successfully increment the count. int observedCount; SpinWait spin = new SpinWait(); while (true) { observedCount = _currentCount; if (observedCount <= 0) { return false; } else if (observedCount > (Int32.MaxValue - signalCount)) { throw new InvalidOperationException(SR.CountdownEvent_Increment_AlreadyMax); } // This disables the "CS0420: a reference to a volatile field will not be treated as volatile" warning // for this statement. This warning is clearly senseless for Interlocked operations. #pragma warning disable 0420 if (Interlocked.CompareExchange(ref _currentCount, observedCount + signalCount, observedCount) == observedCount) #pragma warning restore 0420 { break; } // The CAS failed. Spin briefly and try again. spin.SpinOnce(); } return true; } /// <summary> /// Resets the <see cref="CurrentCount"/> to the value of <see cref="InitialCount"/>. /// </summary> /// <remarks> /// Unlike most of the members of <see cref="CountdownEvent"/>, Reset is not /// thread-safe and may not be used concurrently with other members of this instance. 
/// </remarks> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public void Reset() { Reset(_initialCount); } /// <summary> /// Resets the <see cref="CurrentCount"/> to a specified value. /// </summary> /// <param name="count">The number of signals required to set the <see /// cref="T:System.Threading.CountdownEvent"/>.</param> /// <remarks> /// Unlike most of the members of <see cref="CountdownEvent"/>, Reset is not /// thread-safe and may not be used concurrently with other members of this instance. /// </remarks> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="count"/> is /// less than 0.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been disposed.</exception> public void Reset(int count) { ThrowIfDisposed(); if (count < 0) { throw new ArgumentOutOfRangeException("count"); } _currentCount = count; _initialCount = count; if (count == 0) { _event.Set(); } else { _event.Reset(); } } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set. /// </summary> /// <remarks> /// The caller of this method blocks indefinitely until the current instance is set. The caller will /// return immediately if the event is currently in a set state. /// </remarks> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public void Wait() { Wait(Timeout.Infinite, new CancellationToken()); } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set, while /// observing a <see cref="T:System.Threading.CancellationToken"/>. /// </summary> /// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to /// observe.</param> /// <remarks> /// The caller of this method blocks indefinitely until the current instance is set. The caller will /// return immediately if the event is currently in a set state. If the /// <see cref="T:System.Threading.CancellationToken">CancellationToken</see> being observed /// is canceled during the wait operation, an <see cref="T:System.OperationCanceledException"/> /// will be thrown. /// </remarks> /// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has been /// canceled.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public void Wait(CancellationToken cancellationToken) { Wait(Timeout.Infinite, cancellationToken); } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set, using a /// <see cref="T:System.TimeSpan"/> to measure the time interval. 
/// </summary> /// <param name="timeout">A <see cref="T:System.TimeSpan"/> that represents the number of /// milliseconds to wait, or a <see cref="T:System.TimeSpan"/> that represents -1 milliseconds to /// wait indefinitely.</param> /// <returns>true if the <see cref="System.Threading.CountdownEvent"/> was set; otherwise, /// false.</returns> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/> is a negative /// number other than -1 milliseconds, which represents an infinite time-out -or- timeout is greater /// than <see cref="System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool Wait(TimeSpan timeout) { long totalMilliseconds = (long)timeout.TotalMilliseconds; if (totalMilliseconds < -1 || totalMilliseconds > int.MaxValue) { throw new ArgumentOutOfRangeException("timeout"); } return Wait((int)totalMilliseconds, new CancellationToken()); } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set, using /// a <see cref="T:System.TimeSpan"/> to measure the time interval, while observing a /// <see cref="T:System.Threading.CancellationToken"/>. /// </summary> /// <param name="timeout">A <see cref="T:System.TimeSpan"/> that represents the number of /// milliseconds to wait, or a <see cref="T:System.TimeSpan"/> that represents -1 milliseconds to /// wait indefinitely.</param> /// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to /// observe.</param> /// <returns>true if the <see cref="System.Threading.CountdownEvent"/> was set; otherwise, /// false.</returns> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/> is a negative /// number other than -1 milliseconds, which represents an infinite time-out -or- timeout is greater /// than <see cref="System.Int32.MaxValue"/>.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> /// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has /// been canceled.</exception> public bool Wait(TimeSpan timeout, CancellationToken cancellationToken) { long totalMilliseconds = (long)timeout.TotalMilliseconds; if (totalMilliseconds < -1 || totalMilliseconds > int.MaxValue) { throw new ArgumentOutOfRangeException("timeout"); } return Wait((int)totalMilliseconds, cancellationToken); } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set, using a /// 32-bit signed integer to measure the time interval. 
/// </summary> /// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see /// cref="Timeout.Infinite"/>(-1) to wait indefinitely.</param> /// <returns>true if the <see cref="System.Threading.CountdownEvent"/> was set; otherwise, /// false.</returns> /// <exception cref="ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a /// negative number other than -1, which represents an infinite time-out.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> public bool Wait(int millisecondsTimeout) { return Wait(millisecondsTimeout, new CancellationToken()); } /// <summary> /// Blocks the current thread until the <see cref="T:System.Threading.CountdownEvent"/> is set, using a /// 32-bit signed integer to measure the time interval, while observing a /// <see cref="T:System.Threading.CancellationToken"/>. /// </summary> /// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see /// cref="Timeout.Infinite"/>(-1) to wait indefinitely.</param> /// <param name="cancellationToken">The <see cref="T:System.Threading.CancellationToken"/> to /// observe.</param> /// <returns>true if the <see cref="System.Threading.CountdownEvent"/> was set; otherwise, /// false.</returns> /// <exception cref="ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is a /// negative number other than -1, which represents an infinite time-out.</exception> /// <exception cref="T:System.ObjectDisposedException">The current instance has already been /// disposed.</exception> /// <exception cref="T:System.OperationCanceledException"><paramref name="cancellationToken"/> has /// been canceled.</exception> public bool Wait(int millisecondsTimeout, CancellationToken cancellationToken) { if (millisecondsTimeout < -1) { throw new ArgumentOutOfRangeException("millisecondsTimeout"); } ThrowIfDisposed(); cancellationToken.ThrowIfCancellationRequested(); bool returnValue = IsSet; // If not completed yet, wait on the event. if (!returnValue) { // ** the actual wait returnValue = _event.Wait(millisecondsTimeout, cancellationToken); //the Wait will throw OCE itself if the token is canceled. } return returnValue; } // -------------------------------------- // Private methods /// <summary> /// Throws an exception if the latch has been disposed. /// </summary> private void ThrowIfDisposed() { if (_disposed) { throw new ObjectDisposedException("CountdownEvent"); } } } }
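// ---------------------------------------------------------------------------
// Illustrative usage (not part of the original source): a minimal fork/join
// sketch for the CountdownEvent above. The worker count and console output are
// arbitrary; a real workload would replace the body of the queued delegate.
namespace System.Threading.Samples
{
	public static class CountdownEventSample
	{
		public static void Run()
		{
			const int workerCount = 4;

			using (CountdownEvent latch = new CountdownEvent(workerCount))
			{
				for (int i = 0; i < workerCount; i++)
				{
					int workerId = i;
					ThreadPool.QueueUserWorkItem(_ =>
					{
						try
						{
							Console.WriteLine("worker {0} running", workerId);
						}
						finally
						{
							// Each worker registers exactly one signal; the last
							// one transitions the latch to the signaled state.
							latch.Signal();
						}
					});
				}

				// The joining thread blocks here until all workers have signaled.
				latch.Wait();
				Console.WriteLine("all workers finished");
			}
		}
	}
}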
/***************************************************************************** * Skeleton Utility created by Mitch Thompson * Full irrevocable rights and permissions granted to Esoteric Software *****************************************************************************/ using UnityEngine; using System.Collections; using System.Collections.Generic; using Spine; [RequireComponent(typeof(ISkeletonAnimation))] [ExecuteInEditMode] public class SkeletonUtility : MonoBehaviour { public static T GetInParent<T> (Transform origin) where T : Component { #if UNITY_4_3 Transform parent = origin.parent; while(parent.GetComponent<T>() == null){ parent = parent.parent; if(parent == null) return default(T); } return parent.GetComponent<T>(); #else return origin.GetComponentInParent<T>(); #endif } public static PolygonCollider2D AddBoundingBox (Skeleton skeleton, string skinName, string slotName, string attachmentName, Transform parent, bool isTrigger = true) { // List<Attachment> attachments = new List<Attachment>(); Skin skin; if (skinName == "") skinName = skeleton.Data.DefaultSkin.Name; skin = skeleton.Data.FindSkin(skinName); if (skin == null) { Debug.LogError("Skin " + skinName + " not found!"); return null; } var attachment = skin.GetAttachment(skeleton.FindSlotIndex(slotName), attachmentName); if (attachment is BoundingBoxAttachment) { GameObject go = new GameObject("[BoundingBox]" + attachmentName); go.transform.parent = parent; go.transform.localPosition = Vector3.zero; go.transform.localRotation = Quaternion.identity; go.transform.localScale = Vector3.one; var collider = go.AddComponent<PolygonCollider2D>(); collider.isTrigger = isTrigger; var boundingBox = (BoundingBoxAttachment)attachment; float[] floats = boundingBox.Vertices; int floatCount = floats.Length; int vertCount = floatCount / 2; Vector2[] verts = new Vector2[vertCount]; int v = 0; for (int i = 0; i < floatCount; i += 2, v++) { verts[v].x = floats[i]; verts[v].y = floats[i + 1]; } collider.SetPath(0, verts); return collider; } return null; } public static PolygonCollider2D AddBoundingBoxAsComponent (BoundingBoxAttachment boundingBox, GameObject gameObject, bool isTrigger = true) { if (boundingBox == null) return null; var collider = gameObject.AddComponent<PolygonCollider2D>(); collider.isTrigger = isTrigger; float[] floats = boundingBox.Vertices; int floatCount = floats.Length; int vertCount = floatCount / 2; Vector2[] verts = new Vector2[vertCount]; int v = 0; for (int i = 0; i < floatCount; i += 2, v++) { verts[v].x = floats[i]; verts[v].y = floats[i + 1]; } collider.SetPath(0, verts); return collider; } public static Bounds GetBoundingBoxBounds (BoundingBoxAttachment boundingBox, float depth = 0) { float[] floats = boundingBox.Vertices; int floatCount = floats.Length; Bounds bounds = new Bounds(); bounds.center = new Vector3(floats[0], floats[1], 0); for (int i = 2; i < floatCount; i += 2) { bounds.Encapsulate(new Vector3(floats[i], floats[i + 1], 0)); } Vector3 size = bounds.size; size.z = depth; bounds.size = size; return bounds; } public delegate void SkeletonUtilityDelegate (); public event SkeletonUtilityDelegate OnReset; public Transform boneRoot; void Update () { if (boneRoot != null && skeletonRenderer.skeleton != null) { Vector3 flipScale = Vector3.one; if (skeletonRenderer.skeleton.FlipX) flipScale.x = -1; if (skeletonRenderer.skeleton.FlipY) flipScale.y = -1; boneRoot.localScale = flipScale; } } [HideInInspector] public SkeletonRenderer skeletonRenderer; [HideInInspector] public ISkeletonAnimation skeletonAnimation; 
[System.NonSerialized] public List<SkeletonUtilityBone> utilityBones = new List<SkeletonUtilityBone>(); [System.NonSerialized] public List<SkeletonUtilityConstraint> utilityConstraints = new List<SkeletonUtilityConstraint>(); // Dictionary<Bone, SkeletonUtilityBone> utilityBoneTable; protected bool hasTransformBones; protected bool hasUtilityConstraints; protected bool needToReprocessBones; void OnEnable () { if (skeletonRenderer == null) { skeletonRenderer = GetComponent<SkeletonRenderer>(); } if (skeletonAnimation == null) { skeletonAnimation = GetComponent<SkeletonAnimation>(); if (skeletonAnimation == null) skeletonAnimation = GetComponent<SkeletonAnimator>(); } skeletonRenderer.OnRebuild -= HandleRendererReset; skeletonRenderer.OnRebuild += HandleRendererReset; if (skeletonAnimation != null) { skeletonAnimation.UpdateLocal -= UpdateLocal; skeletonAnimation.UpdateLocal += UpdateLocal; } CollectBones(); } void Start () { //recollect because order of operations failure when switching between game mode and edit mode... // CollectBones(); } void OnDisable () { skeletonRenderer.OnRebuild -= HandleRendererReset; if (skeletonAnimation != null) { skeletonAnimation.UpdateLocal -= UpdateLocal; skeletonAnimation.UpdateWorld -= UpdateWorld; skeletonAnimation.UpdateComplete -= UpdateComplete; } } void HandleRendererReset (SkeletonRenderer r) { if (OnReset != null) OnReset(); CollectBones(); } public void RegisterBone (SkeletonUtilityBone bone) { if (utilityBones.Contains(bone)) return; else { utilityBones.Add(bone); needToReprocessBones = true; } } public void UnregisterBone (SkeletonUtilityBone bone) { utilityBones.Remove(bone); } public void RegisterConstraint (SkeletonUtilityConstraint constraint) { if (utilityConstraints.Contains(constraint)) return; else { utilityConstraints.Add(constraint); needToReprocessBones = true; } } public void UnregisterConstraint (SkeletonUtilityConstraint constraint) { utilityConstraints.Remove(constraint); } public void CollectBones () { if (skeletonRenderer.skeleton == null) return; if (boneRoot != null) { List<string> constraintTargetNames = new List<string>(); ExposedList<IkConstraint> ikConstraints = skeletonRenderer.skeleton.IkConstraints; for (int i = 0, n = ikConstraints.Count; i < n; i++) constraintTargetNames.Add(ikConstraints.Items[i].Target.Data.Name); foreach (var b in utilityBones) { if (b.bone == null) { return; } if (b.mode == SkeletonUtilityBone.Mode.Override) { hasTransformBones = true; } if (constraintTargetNames.Contains(b.bone.Data.Name)) { hasUtilityConstraints = true; } } if (utilityConstraints.Count > 0) hasUtilityConstraints = true; if (skeletonAnimation != null) { skeletonAnimation.UpdateWorld -= UpdateWorld; skeletonAnimation.UpdateComplete -= UpdateComplete; if (hasTransformBones || hasUtilityConstraints) { skeletonAnimation.UpdateWorld += UpdateWorld; } if (hasUtilityConstraints) { skeletonAnimation.UpdateComplete += UpdateComplete; } } needToReprocessBones = false; } else { utilityBones.Clear(); utilityConstraints.Clear(); } } void UpdateLocal (ISkeletonAnimation anim) { if (needToReprocessBones) CollectBones(); if (utilityBones == null) return; foreach (SkeletonUtilityBone b in utilityBones) { b.transformLerpComplete = false; } UpdateAllBones(); } void UpdateWorld (ISkeletonAnimation anim) { UpdateAllBones(); foreach (SkeletonUtilityConstraint c in utilityConstraints) c.DoUpdate(); } void UpdateComplete (ISkeletonAnimation anim) { UpdateAllBones(); } void UpdateAllBones () { if (boneRoot == null) { CollectBones(); } if (utilityBones == 
null) return; foreach (SkeletonUtilityBone b in utilityBones) { b.DoUpdate(); } } public Transform GetBoneRoot () { if (boneRoot != null) return boneRoot; boneRoot = new GameObject("SkeletonUtility-Root").transform; boneRoot.parent = transform; boneRoot.localPosition = Vector3.zero; boneRoot.localRotation = Quaternion.identity; boneRoot.localScale = Vector3.one; return boneRoot; } public GameObject SpawnRoot (SkeletonUtilityBone.Mode mode, bool pos, bool rot, bool sca) { GetBoneRoot(); Skeleton skeleton = this.skeletonRenderer.skeleton; GameObject go = SpawnBone(skeleton.RootBone, boneRoot, mode, pos, rot, sca); CollectBones(); return go; } public GameObject SpawnHierarchy (SkeletonUtilityBone.Mode mode, bool pos, bool rot, bool sca) { GetBoneRoot(); Skeleton skeleton = this.skeletonRenderer.skeleton; GameObject go = SpawnBoneRecursively(skeleton.RootBone, boneRoot, mode, pos, rot, sca); CollectBones(); return go; } public GameObject SpawnBoneRecursively (Bone bone, Transform parent, SkeletonUtilityBone.Mode mode, bool pos, bool rot, bool sca) { GameObject go = SpawnBone(bone, parent, mode, pos, rot, sca); ExposedList<Bone> childrenBones = bone.Children; for (int i = 0, n = childrenBones.Count; i < n; i++) { Bone child = childrenBones.Items[i]; SpawnBoneRecursively(child, go.transform, mode, pos, rot, sca); } return go; } public GameObject SpawnBone (Bone bone, Transform parent, SkeletonUtilityBone.Mode mode, bool pos, bool rot, bool sca) { GameObject go = new GameObject(bone.Data.Name); go.transform.parent = parent; SkeletonUtilityBone b = go.AddComponent<SkeletonUtilityBone>(); b.skeletonUtility = this; b.position = pos; b.rotation = rot; b.scale = sca; b.mode = mode; b.zPosition = true; b.Reset(); b.bone = bone; b.boneName = bone.Data.Name; b.valid = true; if (mode == SkeletonUtilityBone.Mode.Override) { if (rot) go.transform.localRotation = Quaternion.Euler(0, 0, b.bone.AppliedRotation); if (pos) go.transform.localPosition = new Vector3(b.bone.X, b.bone.Y, 0); go.transform.localScale = new Vector3(b.bone.scaleX, b.bone.scaleY, 0); } return go; } public void SpawnSubRenderers (bool disablePrimaryRenderer) { int submeshCount = GetComponent<MeshFilter>().sharedMesh.subMeshCount; for (int i = 0; i < submeshCount; i++) { GameObject go = new GameObject("Submesh " + i, typeof(MeshFilter), typeof(MeshRenderer)); go.transform.parent = transform; go.transform.localPosition = Vector3.zero; go.transform.localRotation = Quaternion.identity; go.transform.localScale = Vector3.one; SkeletonUtilitySubmeshRenderer s = go.AddComponent<SkeletonUtilitySubmeshRenderer>(); s.GetComponent<Renderer>().sortingOrder = i * 10; s.submeshIndex = i; } skeletonRenderer.CollectSubmeshRenderers(); if (disablePrimaryRenderer) GetComponent<Renderer>().enabled = false; } }
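// ---------------------------------------------------------------------------
// Illustrative usage (not part of the original source): a minimal sketch that
// spawns a bone hierarchy from the SkeletonUtility above at runtime. The
// SkeletonUtilityExample component name is made up, and Mode.Follow is assumed
// to exist on the companion SkeletonUtilityBone script.
using UnityEngine;

public class SkeletonUtilityExample : MonoBehaviour {
	void Start () {
		SkeletonUtility utility = GetComponent<SkeletonUtility>();
		if (utility == null) {
			Debug.LogWarning("SkeletonUtility component not found.");
			return;
		}

		// Create one GameObject per bone, driven by the skeleton (follow mode),
		// tracking position and rotation but not scale.
		GameObject hierarchyRoot = utility.SpawnHierarchy(SkeletonUtilityBone.Mode.Follow, true, true, false);
		Debug.Log("Spawned bone hierarchy under " + hierarchyRoot.name);
	}
}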
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Xml; using System.Text; using System.Diagnostics; using System.Runtime.Serialization; using System.Security; namespace System.Xml { public class UniqueId { private Int64 _idLow; private Int64 _idHigh; [SecurityCritical] /// <SecurityNote> /// Critical - some SecurityCritical unsafe code assumes that this field has been validated /// </SecurityNote> private string _s; private const int guidLength = 16; private const int uuidLength = 45; private static short[] s_char2val = new short[256] { /* 0-15 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 16-31 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 32-47 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 48-63 */ 0x000, 0x010, 0x020, 0x030, 0x040, 0x050, 0x060, 0x070, 0x080, 0x090, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 64-79 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 80-95 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 96-111 */ 0x100, 0x0A0, 0x0B0, 0x0C0, 0x0D0, 0x0E0, 0x0F0, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 112-127 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 0-15 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 16-31 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 32-47 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 48-63 */ 0x000, 0x001, 0x002, 0x003, 0x004, 0x005, 0x006, 0x007, 0x008, 0x009, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 64-79 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 80-95 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 96-111 */ 0x100, 0x00A, 0x00B, 0x00C, 0x00D, 0x00E, 0x00F, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, /* 112-127 */ 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, 0x100, }; private const string val2char = "0123456789abcdef"; public UniqueId() : this(Guid.NewGuid()) { } public UniqueId(Guid guid) : this(guid.ToByteArray()) { } public UniqueId(byte[] guid) : this(guid, 0) { } /// <SecurityNote> /// Critical - contains unsafe code /// Safe - unsafe code is effectively encapsulated, all inputs are validated /// </SecurityNote> [SecuritySafeCritical] unsafe public UniqueId(byte[] guid, int offset) { if (guid == null) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("guid")); if (offset < 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.ValueMustBeNonNegative))); if (offset > guid.Length) throw 
System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.OffsetExceedsBufferSize, guid.Length))); if (guidLength > guid.Length - offset) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentException(SR.Format(SR.XmlArrayTooSmallInput, guidLength), "guid")); fixed (byte* pb = &guid[offset]) { _idLow = UnsafeGetInt64(pb); _idHigh = UnsafeGetInt64(&pb[8]); } } /// <SecurityNote> /// Critical - contains unsafe code /// Safe - unsafe code is effectively encapsulated, all inputs are validated /// </SecurityNote> [SecuritySafeCritical] unsafe public UniqueId(string value) { if (value == null) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("value"); if (value.Length == 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new FormatException(SR.Format(SR.XmlInvalidUniqueId))); fixed (char* pch = value) { UnsafeParse(pch, value.Length); } _s = value; } /// <SecurityNote> /// Critical - contains unsafe code /// Safe - unsafe code is effectively encapsulated, all inputs are validated /// </SecurityNote> [SecuritySafeCritical] unsafe public UniqueId(char[] chars, int offset, int count) { if (chars == null) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("chars")); if (offset < 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.ValueMustBeNonNegative))); if (offset > chars.Length) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.OffsetExceedsBufferSize, chars.Length))); if (count < 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("count", SR.Format(SR.ValueMustBeNonNegative))); if (count > chars.Length - offset) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("count", SR.Format(SR.SizeExceedsRemainingBufferSpace, chars.Length - offset))); if (count == 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new FormatException(SR.Format(SR.XmlInvalidUniqueId))); fixed (char* pch = &chars[offset]) { UnsafeParse(pch, count); } if (!IsGuid) { _s = new string(chars, offset, count); } } public int CharArrayLength { /// <SecurityNote> /// Critical - accesses critical field 's'. /// Safe - doesn't leak any control or data /// </SecurityNote> [SecuritySafeCritical] get { if (_s != null) return _s.Length; return uuidLength; } } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe int UnsafeDecode(short* char2val, char ch1, char ch2) { if ((ch1 | ch2) >= 0x80) return 0x100; return char2val[ch1] | char2val[0x80 + ch2]; } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe void UnsafeEncode(char* val2char, byte b, char* pch) { pch[0] = val2char[b >> 4]; pch[1] = val2char[b & 0x0F]; } public bool IsGuid { get { return ((_idLow | _idHigh) != 0); } } // It must be the case that comparing UniqueId's as strings yields the same result as comparing UniqueId's as // their binary equivalent. 
This means that there must be a 1-1 relationship between a string and its binary // equivalent. Therefore, for example, we cannot accept both upper and lower case hex chars since there would // then be more than 1 string that mapped to a binary equivalent. /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe void UnsafeParse(char* chars, int charCount) { // 1 2 3 4 // 012345678901234567890123456789012345678901234 // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx if (charCount != uuidLength || chars[0] != 'u' || chars[1] != 'r' || chars[2] != 'n' || chars[3] != ':' || chars[4] != 'u' || chars[5] != 'u' || chars[6] != 'i' || chars[7] != 'd' || chars[8] != ':' || chars[17] != '-' || chars[22] != '-' || chars[27] != '-' || chars[32] != '-') { return; } byte* bytes = stackalloc byte[guidLength]; int i = 0; int j = 0; fixed (short* ps = s_char2val) { short* _char2val = ps; // 0 1 2 3 4 // 012345678901234567890123456789012345678901234 // urn:uuid:aabbccdd-eeff-gghh-0011-223344556677 // // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 // ddccbbaaffeehhgg0011223344556677 i = UnsafeDecode(_char2val, chars[15], chars[16]); bytes[0] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[13], chars[14]); bytes[1] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[11], chars[12]); bytes[2] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[9], chars[10]); bytes[3] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[20], chars[21]); bytes[4] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[18], chars[19]); bytes[5] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[25], chars[26]); bytes[6] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[23], chars[24]); bytes[7] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[28], chars[29]); bytes[8] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[30], chars[31]); bytes[9] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[33], chars[34]); bytes[10] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[35], chars[36]); bytes[11] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[37], chars[38]); bytes[12] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[39], chars[40]); bytes[13] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[41], chars[42]); bytes[14] = (byte)i; j |= i; i = UnsafeDecode(_char2val, chars[43], chars[44]); bytes[15] = (byte)i; j |= i; if (j >= 0x100) return; _idLow = UnsafeGetInt64(bytes); _idHigh = UnsafeGetInt64(&bytes[8]); } } /// <SecurityNote> /// Critical - contains unsafe code /// Safe - unsafe code is effectively encapsulated, all inputs are validated /// </SecurityNote> [SecuritySafeCritical] unsafe public int ToCharArray(char[] chars, int offset) { int count = CharArrayLength; if (chars == null) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("chars")); if (offset < 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.ValueMustBeNonNegative))); if (offset > chars.Length) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.OffsetExceedsBufferSize, chars.Length))); if (count > chars.Length - offset) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("chars", SR.Format(SR.XmlArrayTooSmallOutput, count))); if (_s != null) { 
_s.CopyTo(0, chars, offset, count); } else { byte* bytes = stackalloc byte[guidLength]; UnsafeSetInt64(_idLow, bytes); UnsafeSetInt64(_idHigh, &bytes[8]); fixed (char* _pch = &chars[offset]) { char* pch = _pch; pch[0] = 'u'; pch[1] = 'r'; pch[2] = 'n'; pch[3] = ':'; pch[4] = 'u'; pch[5] = 'u'; pch[6] = 'i'; pch[7] = 'd'; pch[8] = ':'; pch[17] = '-'; pch[22] = '-'; pch[27] = '-'; pch[32] = '-'; fixed (char* ps = val2char) { char* _val2char = ps; UnsafeEncode(_val2char, bytes[0], &pch[15]); UnsafeEncode(_val2char, bytes[1], &pch[13]); UnsafeEncode(_val2char, bytes[2], &pch[11]); UnsafeEncode(_val2char, bytes[3], &pch[9]); UnsafeEncode(_val2char, bytes[4], &pch[20]); UnsafeEncode(_val2char, bytes[5], &pch[18]); UnsafeEncode(_val2char, bytes[6], &pch[25]); UnsafeEncode(_val2char, bytes[7], &pch[23]); UnsafeEncode(_val2char, bytes[8], &pch[28]); UnsafeEncode(_val2char, bytes[9], &pch[30]); UnsafeEncode(_val2char, bytes[10], &pch[33]); UnsafeEncode(_val2char, bytes[11], &pch[35]); UnsafeEncode(_val2char, bytes[12], &pch[37]); UnsafeEncode(_val2char, bytes[13], &pch[39]); UnsafeEncode(_val2char, bytes[14], &pch[41]); UnsafeEncode(_val2char, bytes[15], &pch[43]); } } } return count; } public bool TryGetGuid(out Guid guid) { byte[] buffer = new byte[guidLength]; if (!TryGetGuid(buffer, 0)) { guid = Guid.Empty; return false; } guid = new Guid(buffer); return true; } /// <SecurityNote> /// Critical - contains unsafe code /// Safe - unsafe code is effectively encapsulated, all inputs are validated /// </SecurityNote> [SecuritySafeCritical] unsafe public bool TryGetGuid(byte[] buffer, int offset) { if (!IsGuid) return false; if (buffer == null) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("buffer")); if (offset < 0) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.ValueMustBeNonNegative))); if (offset > buffer.Length) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", SR.Format(SR.OffsetExceedsBufferSize, buffer.Length))); if (guidLength > buffer.Length - offset) throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("buffer", SR.Format(SR.XmlArrayTooSmallOutput, guidLength))); fixed (byte* pb = &buffer[offset]) { UnsafeSetInt64(_idLow, pb); UnsafeSetInt64(_idHigh, &pb[8]); } return true; } /// <SecurityNote> /// Critical - accesses critical field 's'. /// Safe - doesn't allow unchecked write access to the field /// </SecurityNote> [SecuritySafeCritical] unsafe public override string ToString() { if (_s == null) { int length = CharArrayLength; char[] chars = new char[length]; ToCharArray(chars, 0); _s = new string(chars, 0, length); } return _s; } static public bool operator ==(UniqueId id1, UniqueId id2) { if (object.ReferenceEquals(id1, id2)) return true; if (object.ReferenceEquals(id1, null) || object.ReferenceEquals(id2, null)) return false; #pragma warning suppress 56506 // Microsoft, checks for whether id1 and id2 are null done above. 
if (id1.IsGuid && id2.IsGuid) { return id1._idLow == id2._idLow && id1._idHigh == id2._idHigh; } return id1.ToString() == id2.ToString(); } static public bool operator !=(UniqueId id1, UniqueId id2) { return !(id1 == id2); } public override bool Equals(object obj) { return this == (obj as UniqueId); } public override int GetHashCode() { if (IsGuid) { Int64 hash = (_idLow ^ _idHigh); return ((int)(hash >> 32)) ^ ((int)hash); } else { return ToString().GetHashCode(); } } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe Int64 UnsafeGetInt64(byte* pb) { Int32 idLow = UnsafeGetInt32(pb); Int32 idHigh = UnsafeGetInt32(&pb[4]); return (((Int64)idHigh) << 32) | ((UInt32)idLow); } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe Int32 UnsafeGetInt32(byte* pb) { int value = pb[3]; value <<= 8; value |= pb[2]; value <<= 8; value |= pb[1]; value <<= 8; value |= pb[0]; return value; } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe void UnsafeSetInt64(Int64 value, byte* pb) { UnsafeSetInt32((int)value, pb); UnsafeSetInt32((int)(value >> 32), &pb[4]); } /// <SecurityNote> /// Critical - contains unsafe code /// caller needs to validate arguments /// </SecurityNote> [SecurityCritical] private unsafe void UnsafeSetInt32(Int32 value, byte* pb) { pb[0] = (byte)value; value >>= 8; pb[1] = (byte)value; value >>= 8; pb[2] = (byte)value; value >>= 8; pb[3] = (byte)value; } } }
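using System;

namespace UniqueIdSketches
{
    // Minimal managed sketch of the byte ordering that the unsafe UnsafeParse/ToCharArray
    // code above implements: a string such as urn:uuid:aabbccdd-eeff-gghh-0011-223344556677
    // is stored as the byte sequence dd cc bb aa ff ee hh gg 00 11 22 33 44 55 66 77, which
    // is the same layout Guid.ToByteArray() produces (the first three GUID fields are
    // little-endian, the trailing eight bytes keep string order). The type and method names
    // here are illustrative only and are not part of the UniqueId source.
    internal static class UniqueIdByteOrderSketch
    {
        // Parses a "urn:uuid:..." string with the managed Guid parser and returns the
        // sixteen GUID bytes in Guid.ToByteArray() order.
        public static byte[] ParseUrnUuid(string s)
        {
            const string prefix = "urn:uuid:";
            if (s == null || !s.StartsWith(prefix, StringComparison.Ordinal))
                throw new FormatException("Not a urn:uuid string.");

            // Guid.ParseExact with the "D" format accepts the 8-4-4-4-12 form that follows
            // the prefix. Unlike UnsafeParse it also accepts upper-case hex digits, so it is
            // a looser parser rather than a drop-in replacement for the code above.
            Guid guid = Guid.ParseExact(s.Substring(prefix.Length), "D");
            return guid.ToByteArray();
        }
    }
}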
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Apis.Webmasters.v3 { /// <summary>The Webmasters Service.</summary> public class WebmastersService : Google.Apis.Services.BaseClientService { /// <summary>The API version.</summary> public const string Version = "v3"; /// <summary>The discovery version used to generate this service.</summary> public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0; /// <summary>Constructs a new service.</summary> public WebmastersService() : this(new Google.Apis.Services.BaseClientService.Initializer()) { } /// <summary>Constructs a new service.</summary> /// <param name="initializer">The service initializer.</param> public WebmastersService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer) { Searchanalytics = new SearchanalyticsResource(this); Sitemaps = new SitemapsResource(this); Sites = new SitesResource(this); } /// <summary>Gets the service supported features.</summary> public override System.Collections.Generic.IList<string> Features => new string[0]; /// <summary>Gets the service name.</summary> public override string Name => "webmasters"; /// <summary>Gets the service base URI.</summary> public override string BaseUri => #if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45 BaseUriOverride ?? 
"https://www.googleapis.com/webmasters/v3/"; #else "https://www.googleapis.com/webmasters/v3/"; #endif /// <summary>Gets the service base path.</summary> public override string BasePath => "webmasters/v3/"; #if !NET40 /// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary> public override string BatchUri => "https://www.googleapis.com/batch/webmasters/v3"; /// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary> public override string BatchPath => "batch/webmasters/v3"; #endif /// <summary>Available OAuth 2.0 scopes for use with the Search Console API.</summary> public class Scope { /// <summary>View and manage Search Console data for your verified sites</summary> public static string Webmasters = "https://www.googleapis.com/auth/webmasters"; /// <summary>View Search Console data for your verified sites</summary> public static string WebmastersReadonly = "https://www.googleapis.com/auth/webmasters.readonly"; } /// <summary>Available OAuth 2.0 scope constants for use with the Search Console API.</summary> public static class ScopeConstants { /// <summary>View and manage Search Console data for your verified sites</summary> public const string Webmasters = "https://www.googleapis.com/auth/webmasters"; /// <summary>View Search Console data for your verified sites</summary> public const string WebmastersReadonly = "https://www.googleapis.com/auth/webmasters.readonly"; } /// <summary>Gets the Searchanalytics resource.</summary> public virtual SearchanalyticsResource Searchanalytics { get; } /// <summary>Gets the Sitemaps resource.</summary> public virtual SitemapsResource Sitemaps { get; } /// <summary>Gets the Sites resource.</summary> public virtual SitesResource Sites { get; } } /// <summary>A base abstract class for Webmasters requests.</summary> public abstract class WebmastersBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse> { /// <summary>Constructs a new WebmastersBaseServiceRequest instance.</summary> protected WebmastersBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service) { } /// <summary>Data format for the response.</summary> [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<AltEnum> Alt { get; set; } /// <summary>Data format for the response.</summary> public enum AltEnum { /// <summary>Responses with Content-Type of application/json</summary> [Google.Apis.Util.StringValueAttribute("json")] Json = 0, } /// <summary>Selector specifying which fields to include in a partial response.</summary> [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)] public virtual string Fields { get; set; } /// <summary> /// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required /// unless you provide an OAuth 2.0 token. 
/// </summary> [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)] public virtual string Key { get; set; } /// <summary>OAuth 2.0 token for the current user.</summary> [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string OauthToken { get; set; } /// <summary>Returns response with indentations and line breaks.</summary> [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> PrettyPrint { get; set; } /// <summary> /// An opaque string that represents a user for quota purposes. Must not exceed 40 characters. /// </summary> [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)] public virtual string QuotaUser { get; set; } /// <summary>Deprecated. Please use quotaUser instead.</summary> [Google.Apis.Util.RequestParameterAttribute("userIp", Google.Apis.Util.RequestParameterType.Query)] public virtual string UserIp { get; set; } /// <summary>Initializes Webmasters parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter { Name = "alt", IsRequired = false, ParameterType = "query", DefaultValue = "json", Pattern = null, }); RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter { Name = "fields", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("key", new Google.Apis.Discovery.Parameter { Name = "key", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter { Name = "oauth_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter { Name = "prettyPrint", IsRequired = false, ParameterType = "query", DefaultValue = "true", Pattern = null, }); RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter { Name = "quotaUser", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add("userIp", new Google.Apis.Discovery.Parameter { Name = "userIp", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); } } /// <summary>The "searchanalytics" collection of methods.</summary> public class SearchanalyticsResource { private const string Resource = "searchanalytics"; /// <summary>The service which this resource belongs to.</summary> private readonly Google.Apis.Services.IClientService service; /// <summary>Constructs a new resource.</summary> public SearchanalyticsResource(Google.Apis.Services.IClientService service) { this.service = service; } /// <summary> /// Query your data with filters and parameters that you define. Returns zero or more rows grouped by the row /// keys that you define. You must define a date range of one or more days. When date is one of the group by /// values, any days without data are omitted from the result list. If you need to know which days have data, /// issue a broad date range query grouped by date for any metric, and see which day rows are returned. /// </summary> /// <param name="body">The body of the request.</param> /// <param name="siteUrl">The site's URL, including protocol. 
For example: http://www.example.com/</param> public virtual QueryRequest Query(Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryRequest body, string siteUrl) { return new QueryRequest(service, body, siteUrl); } /// <summary> /// Query your data with filters and parameters that you define. Returns zero or more rows grouped by the row /// keys that you define. You must define a date range of one or more days. When date is one of the group by /// values, any days without data are omitted from the result list. If you need to know which days have data, /// issue a broad date range query grouped by date for any metric, and see which day rows are returned. /// </summary> public class QueryRequest : WebmastersBaseServiceRequest<Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryResponse> { /// <summary>Constructs a new Query request.</summary> public QueryRequest(Google.Apis.Services.IClientService service, Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryRequest body, string siteUrl) : base(service) { SiteUrl = siteUrl; Body = body; InitParameters(); } /// <summary>The site's URL, including protocol. For example: http://www.example.com/</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>Gets or sets the body of this request.</summary> Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryRequest Body { get; set; } /// <summary>Returns the body of the request.</summary> protected override object GetBody() => Body; /// <summary>Gets the method name.</summary> public override string MethodName => "query"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "POST"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}/searchAnalytics/query"; /// <summary>Initializes Query parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } } /// <summary>The "sitemaps" collection of methods.</summary> public class SitemapsResource { private const string Resource = "sitemaps"; /// <summary>The service which this resource belongs to.</summary> private readonly Google.Apis.Services.IClientService service; /// <summary>Constructs a new resource.</summary> public SitemapsResource(Google.Apis.Services.IClientService service) { this.service = service; } /// <summary>Deletes a sitemap from this site.</summary> /// <param name="siteUrl">The site's URL, including protocol. For example: http://www.example.com/</param> /// <param name="feedpath"> /// The URL of the actual sitemap. For example: http://www.example.com/sitemap.xml /// </param> public virtual DeleteRequest Delete(string siteUrl, string feedpath) { return new DeleteRequest(service, siteUrl, feedpath); } /// <summary>Deletes a sitemap from this site.</summary> public class DeleteRequest : WebmastersBaseServiceRequest<string> { /// <summary>Constructs a new Delete request.</summary> public DeleteRequest(Google.Apis.Services.IClientService service, string siteUrl, string feedpath) : base(service) { SiteUrl = siteUrl; Feedpath = feedpath; InitParameters(); } /// <summary>The site's URL, including protocol. 
For example: http://www.example.com/</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>The URL of the actual sitemap. For example: http://www.example.com/sitemap.xml</summary> [Google.Apis.Util.RequestParameterAttribute("feedpath", Google.Apis.Util.RequestParameterType.Path)] public virtual string Feedpath { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "delete"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "DELETE"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}/sitemaps/{feedpath}"; /// <summary>Initializes Delete parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); RequestParameters.Add("feedpath", new Google.Apis.Discovery.Parameter { Name = "feedpath", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary>Retrieves information about a specific sitemap.</summary> /// <param name="siteUrl">The site's URL, including protocol. For example: http://www.example.com/</param> /// <param name="feedpath"> /// The URL of the actual sitemap. For example: http://www.example.com/sitemap.xml /// </param> public virtual GetRequest Get(string siteUrl, string feedpath) { return new GetRequest(service, siteUrl, feedpath); } /// <summary>Retrieves information about a specific sitemap.</summary> public class GetRequest : WebmastersBaseServiceRequest<Google.Apis.Webmasters.v3.Data.WmxSitemap> { /// <summary>Constructs a new Get request.</summary> public GetRequest(Google.Apis.Services.IClientService service, string siteUrl, string feedpath) : base(service) { SiteUrl = siteUrl; Feedpath = feedpath; InitParameters(); } /// <summary>The site's URL, including protocol. For example: http://www.example.com/</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>The URL of the actual sitemap. For example: http://www.example.com/sitemap.xml</summary> [Google.Apis.Util.RequestParameterAttribute("feedpath", Google.Apis.Util.RequestParameterType.Path)] public virtual string Feedpath { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "get"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "GET"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}/sitemaps/{feedpath}"; /// <summary>Initializes Get parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); RequestParameters.Add("feedpath", new Google.Apis.Discovery.Parameter { Name = "feedpath", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary> /// Lists the sitemaps-entries submitted for this site, or included in the sitemap index file (if sitemapIndex /// is specified in the request). 
/// </summary> /// <param name="siteUrl">The site's URL, including protocol. For example: http://www.example.com/</param> public virtual ListRequest List(string siteUrl) { return new ListRequest(service, siteUrl); } /// <summary> /// Lists the sitemaps-entries submitted for this site, or included in the sitemap index file (if sitemapIndex /// is specified in the request). /// </summary> public class ListRequest : WebmastersBaseServiceRequest<Google.Apis.Webmasters.v3.Data.SitemapsListResponse> { /// <summary>Constructs a new List request.</summary> public ListRequest(Google.Apis.Services.IClientService service, string siteUrl) : base(service) { SiteUrl = siteUrl; InitParameters(); } /// <summary>The site's URL, including protocol. For example: http://www.example.com/</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>A URL of a site's sitemap index. For example: http://www.example.com/sitemapindex.xml</summary> [Google.Apis.Util.RequestParameterAttribute("sitemapIndex", Google.Apis.Util.RequestParameterType.Query)] public virtual string SitemapIndex { get; set; } /// <summary>Gets the method name.</summary> public override string MethodName => "list"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "GET"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}/sitemaps"; /// <summary>Initializes List parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); RequestParameters.Add("sitemapIndex", new Google.Apis.Discovery.Parameter { Name = "sitemapIndex", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); } } /// <summary>Submits a sitemap for a site.</summary> /// <param name="siteUrl">The site's URL, including protocol. For example: http://www.example.com/</param> /// <param name="feedpath"> /// The URL of the sitemap to add. For example: http://www.example.com/sitemap.xml /// </param> public virtual SubmitRequest Submit(string siteUrl, string feedpath) { return new SubmitRequest(service, siteUrl, feedpath); } /// <summary>Submits a sitemap for a site.</summary> public class SubmitRequest : WebmastersBaseServiceRequest<string> { /// <summary>Constructs a new Submit request.</summary> public SubmitRequest(Google.Apis.Services.IClientService service, string siteUrl, string feedpath) : base(service) { SiteUrl = siteUrl; Feedpath = feedpath; InitParameters(); } /// <summary>The site's URL, including protocol. For example: http://www.example.com/</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>The URL of the sitemap to add. 
For example: http://www.example.com/sitemap.xml</summary> [Google.Apis.Util.RequestParameterAttribute("feedpath", Google.Apis.Util.RequestParameterType.Path)] public virtual string Feedpath { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "submit"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "PUT"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}/sitemaps/{feedpath}"; /// <summary>Initializes Submit parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); RequestParameters.Add("feedpath", new Google.Apis.Discovery.Parameter { Name = "feedpath", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } } /// <summary>The "sites" collection of methods.</summary> public class SitesResource { private const string Resource = "sites"; /// <summary>The service which this resource belongs to.</summary> private readonly Google.Apis.Services.IClientService service; /// <summary>Constructs a new resource.</summary> public SitesResource(Google.Apis.Services.IClientService service) { this.service = service; } /// <summary>Adds a site to the set of the user's sites in Search Console.</summary> /// <param name="siteUrl">The URL of the site to add.</param> public virtual AddRequest Add(string siteUrl) { return new AddRequest(service, siteUrl); } /// <summary>Adds a site to the set of the user's sites in Search Console.</summary> public class AddRequest : WebmastersBaseServiceRequest<string> { /// <summary>Constructs a new Add request.</summary> public AddRequest(Google.Apis.Services.IClientService service, string siteUrl) : base(service) { SiteUrl = siteUrl; InitParameters(); } /// <summary>The URL of the site to add.</summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "add"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "PUT"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}"; /// <summary>Initializes Add parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary>Removes a site from the set of the user's Search Console sites.</summary> /// <param name="siteUrl"> /// The URI of the property as defined in Search Console. Examples: http://www.example.com/ or /// android-app://com.example/ Note: for property-sets, use the URI that starts with sc-set: which is used in /// Search Console URLs. 
/// </param> public virtual DeleteRequest Delete(string siteUrl) { return new DeleteRequest(service, siteUrl); } /// <summary>Removes a site from the set of the user's Search Console sites.</summary> public class DeleteRequest : WebmastersBaseServiceRequest<string> { /// <summary>Constructs a new Delete request.</summary> public DeleteRequest(Google.Apis.Services.IClientService service, string siteUrl) : base(service) { SiteUrl = siteUrl; InitParameters(); } /// <summary> /// The URI of the property as defined in Search Console. Examples: http://www.example.com/ or /// android-app://com.example/ Note: for property-sets, use the URI that starts with sc-set: which is used /// in Search Console URLs. /// </summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "delete"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "DELETE"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}"; /// <summary>Initializes Delete parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary>Retrieves information about specific site.</summary> /// <param name="siteUrl"> /// The URI of the property as defined in Search Console. Examples: http://www.example.com/ or /// android-app://com.example/ Note: for property-sets, use the URI that starts with sc-set: which is used in /// Search Console URLs. /// </param> public virtual GetRequest Get(string siteUrl) { return new GetRequest(service, siteUrl); } /// <summary>Retrieves information about specific site.</summary> public class GetRequest : WebmastersBaseServiceRequest<Google.Apis.Webmasters.v3.Data.WmxSite> { /// <summary>Constructs a new Get request.</summary> public GetRequest(Google.Apis.Services.IClientService service, string siteUrl) : base(service) { SiteUrl = siteUrl; InitParameters(); } /// <summary> /// The URI of the property as defined in Search Console. Examples: http://www.example.com/ or /// android-app://com.example/ Note: for property-sets, use the URI that starts with sc-set: which is used /// in Search Console URLs. 
/// </summary> [Google.Apis.Util.RequestParameterAttribute("siteUrl", Google.Apis.Util.RequestParameterType.Path)] public virtual string SiteUrl { get; private set; } /// <summary>Gets the method name.</summary> public override string MethodName => "get"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "GET"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites/{siteUrl}"; /// <summary>Initializes Get parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add("siteUrl", new Google.Apis.Discovery.Parameter { Name = "siteUrl", IsRequired = true, ParameterType = "path", DefaultValue = null, Pattern = null, }); } } /// <summary>Lists the user's Search Console sites.</summary> public virtual ListRequest List() { return new ListRequest(service); } /// <summary>Lists the user's Search Console sites.</summary> public class ListRequest : WebmastersBaseServiceRequest<Google.Apis.Webmasters.v3.Data.SitesListResponse> { /// <summary>Constructs a new List request.</summary> public ListRequest(Google.Apis.Services.IClientService service) : base(service) { InitParameters(); } /// <summary>Gets the method name.</summary> public override string MethodName => "list"; /// <summary>Gets the HTTP method.</summary> public override string HttpMethod => "GET"; /// <summary>Gets the REST path.</summary> public override string RestPath => "sites"; /// <summary>Initializes List parameter list.</summary> protected override void InitParameters() { base.InitParameters(); } } } } namespace Google.Apis.Webmasters.v3.Data { public class ApiDataRow : Google.Apis.Requests.IDirectResponseSchema { [Newtonsoft.Json.JsonPropertyAttribute("clicks")] public virtual System.Nullable<double> Clicks { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("ctr")] public virtual System.Nullable<double> Ctr { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("impressions")] public virtual System.Nullable<double> Impressions { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("keys")] public virtual System.Collections.Generic.IList<string> Keys { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("position")] public virtual System.Nullable<double> Position { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } public class ApiDimensionFilter : Google.Apis.Requests.IDirectResponseSchema { [Newtonsoft.Json.JsonPropertyAttribute("dimension")] public virtual string Dimension { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("expression")] public virtual string Expression { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("operator")] public virtual string Operator__ { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } public class ApiDimensionFilterGroup : Google.Apis.Requests.IDirectResponseSchema { [Newtonsoft.Json.JsonPropertyAttribute("filters")] public virtual System.Collections.Generic.IList<ApiDimensionFilter> Filters { get; set; } [Newtonsoft.Json.JsonPropertyAttribute("groupType")] public virtual string GroupType { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } public class SearchAnalyticsQueryRequest : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// [Optional; Default is "auto"] How data is aggregated. If aggregated by property, all data for the same /// property is aggregated; if aggregated by page, all data is aggregated by canonical URI. 
If you filter or /// group by page, choose AUTO; otherwise you can aggregate either by property or by page, depending on how you /// want your data calculated; see the help documentation to learn how data is calculated differently by site /// versus by page. Note: If you group or filter by page, you cannot aggregate by property. If you specify any /// value other than AUTO, the aggregation type in the result will match the requested type, or if you request /// an invalid type, you will get an error. The API will never change your aggregation type if the requested /// type is invalid. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("aggregationType")] public virtual string AggregationType { get; set; } /// <summary> /// [Optional] If "all" (case-insensitive), data will include fresh data. If "final" (case-insensitive) or if /// this parameter is omitted, the returned data will include only finalized data. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("dataState")] public virtual string DataState { get; set; } /// <summary> /// [Optional] Zero or more filters to apply to the dimension grouping values; for example, 'query contains /// "buy"' to see only data where the query string contains the substring "buy" (not case-sensitive). You can /// filter by a dimension without grouping by it. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("dimensionFilterGroups")] public virtual System.Collections.Generic.IList<ApiDimensionFilterGroup> DimensionFilterGroups { get; set; } /// <summary> /// [Optional] Zero or more dimensions to group results by. Dimensions are the group-by values in the Search /// Analytics page. Dimensions are combined to create a unique row key for each row. Results are grouped in the /// order that you supply these dimensions. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("dimensions")] public virtual System.Collections.Generic.IList<string> Dimensions { get; set; } /// <summary> /// [Required] End date of the requested date range, in YYYY-MM-DD format, in PST (UTC - 8:00). Must be greater /// than or equal to the start date. This value is included in the range. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("endDate")] public virtual string EndDate { get; set; } /// <summary> /// [Optional; Default is 1000] The maximum number of rows to return. Must be a number from 1 to 5,000 /// (inclusive). /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("rowLimit")] public virtual System.Nullable<int> RowLimit { get; set; } /// <summary>[Optional; Default is "web"] The search type to filter for.</summary> [Newtonsoft.Json.JsonPropertyAttribute("searchType")] public virtual string SearchType { get; set; } /// <summary> /// [Required] Start date of the requested date range, in YYYY-MM-DD format, in PST time (UTC - 8:00). Must be /// less than or equal to the end date. This value is included in the range. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("startDate")] public virtual string StartDate { get; set; } /// <summary> /// [Optional; Default is 0] Zero-based index of the first row in the response. Must be a non-negative number. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("startRow")] public virtual System.Nullable<int> StartRow { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// A list of rows, one per result, grouped by key. 
Metrics in each row are aggregated for all data grouped by that /// key either by page or property, as specified by the aggregation type parameter. /// </summary> public class SearchAnalyticsQueryResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>How the results were aggregated.</summary> [Newtonsoft.Json.JsonPropertyAttribute("responseAggregationType")] public virtual string ResponseAggregationType { get; set; } /// <summary>A list of rows grouped by the key values in the order given in the query.</summary> [Newtonsoft.Json.JsonPropertyAttribute("rows")] public virtual System.Collections.Generic.IList<ApiDataRow> Rows { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>List of sitemaps.</summary> public class SitemapsListResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Contains detailed information about a specific URL submitted as a sitemap.</summary> [Newtonsoft.Json.JsonPropertyAttribute("sitemap")] public virtual System.Collections.Generic.IList<WmxSitemap> Sitemap { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>List of sites with access level information.</summary> public class SitesListResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary> /// Contains permission level information about a Search Console site. For more information, see Permissions in /// Search Console. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("siteEntry")] public virtual System.Collections.Generic.IList<WmxSite> SiteEntry { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary> /// Contains permission level information about a Search Console site. For more information, see Permissions in /// Search Console. /// </summary> public class WmxSite : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The user's permission level for the site.</summary> [Newtonsoft.Json.JsonPropertyAttribute("permissionLevel")] public virtual string PermissionLevel { get; set; } /// <summary>The URL of the site.</summary> [Newtonsoft.Json.JsonPropertyAttribute("siteUrl")] public virtual string SiteUrl { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Contains detailed information about a specific URL submitted as a sitemap.</summary> public class WmxSitemap : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The various content types in the sitemap.</summary> [Newtonsoft.Json.JsonPropertyAttribute("contents")] public virtual System.Collections.Generic.IList<WmxSitemapContent> Contents { get; set; } /// <summary> /// Number of errors in the sitemap. These are issues with the sitemap itself that need to be fixed before it /// can be processed correctly. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("errors")] public virtual System.Nullable<long> Errors { get; set; } /// <summary>If true, the sitemap has not been processed.</summary> [Newtonsoft.Json.JsonPropertyAttribute("isPending")] public virtual System.Nullable<bool> IsPending { get; set; } /// <summary>If true, the sitemap is a collection of sitemaps.</summary> [Newtonsoft.Json.JsonPropertyAttribute("isSitemapsIndex")] public virtual System.Nullable<bool> IsSitemapsIndex { get; set; } /// <summary> /// Date &amp;amp; time in which this sitemap was last downloaded. Date format is in RFC 3339 format /// (yyyy-mm-dd). 
/// </summary> [Newtonsoft.Json.JsonPropertyAttribute("lastDownloaded")] public virtual string LastDownloadedRaw { get; set; } /// <summary><seealso cref="System.DateTime"/> representation of <see cref="LastDownloadedRaw"/>.</summary> [Newtonsoft.Json.JsonIgnoreAttribute] public virtual System.Nullable<System.DateTime> LastDownloaded { get => Google.Apis.Util.Utilities.GetDateTimeFromString(LastDownloadedRaw); set => LastDownloadedRaw = Google.Apis.Util.Utilities.GetStringFromDateTime(value); } /// <summary> /// Date &amp;amp; time in which this sitemap was submitted. Date format is in RFC 3339 format (yyyy-mm-dd). /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("lastSubmitted")] public virtual string LastSubmittedRaw { get; set; } /// <summary><seealso cref="System.DateTime"/> representation of <see cref="LastSubmittedRaw"/>.</summary> [Newtonsoft.Json.JsonIgnoreAttribute] public virtual System.Nullable<System.DateTime> LastSubmitted { get => Google.Apis.Util.Utilities.GetDateTimeFromString(LastSubmittedRaw); set => LastSubmittedRaw = Google.Apis.Util.Utilities.GetStringFromDateTime(value); } /// <summary>The url of the sitemap.</summary> [Newtonsoft.Json.JsonPropertyAttribute("path")] public virtual string Path { get; set; } /// <summary>The type of the sitemap. For example: rssFeed.</summary> [Newtonsoft.Json.JsonPropertyAttribute("type")] public virtual string Type { get; set; } /// <summary> /// Number of warnings for the sitemap. These are generally non-critical issues with URLs in the sitemaps. /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("warnings")] public virtual System.Nullable<long> Warnings { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Information about the various content types in the sitemap.</summary> public class WmxSitemapContent : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The number of URLs from the sitemap that were indexed (of the content type).</summary> [Newtonsoft.Json.JsonPropertyAttribute("indexed")] public virtual System.Nullable<long> Indexed { get; set; } /// <summary>The number of URLs in the sitemap (of the content type).</summary> [Newtonsoft.Json.JsonPropertyAttribute("submitted")] public virtual System.Nullable<long> Submitted { get; set; } /// <summary>The specific type of content in this sitemap. For example: web.</summary> [Newtonsoft.Json.JsonPropertyAttribute("type")] public virtual string Type { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } }
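namespace Google.Apis.Webmasters.v3.Samples
{
    // Illustrative usage sketch for the generated client above: runs a Search Analytics
    // query grouped by date and prints clicks per day. The namespace, class name, dates and
    // siteUrl value are placeholders; the caller is assumed to have constructed a
    // WebmastersService with valid credentials (for example via a
    // Google.Apis.Services.BaseClientService.Initializer) for a verified property.
    public static class SearchAnalyticsSample
    {
        public static void PrintClicksByDay(WebmastersService service, string siteUrl)
        {
            var body = new Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryRequest
            {
                // Dates are inclusive, YYYY-MM-DD, PST, per the schema comments above.
                StartDate = "2021-01-01",
                EndDate = "2021-01-31",
                Dimensions = new System.Collections.Generic.List<string> { "date" },
                RowLimit = 1000
            };

            // Issues POST sites/{siteUrl}/searchAnalytics/query and deserializes the response.
            Google.Apis.Webmasters.v3.Data.SearchAnalyticsQueryResponse response =
                service.Searchanalytics.Query(body, siteUrl).Execute();

            // Days without data are omitted, so Rows may be null or shorter than the range.
            if (response.Rows == null)
                return;

            foreach (Google.Apis.Webmasters.v3.Data.ApiDataRow row in response.Rows)
            {
                // Keys holds the group-by values in the order requested (here just the date).
                System.Console.WriteLine("{0}: {1} clicks", row.Keys[0], row.Clicks);
            }
        }
    }
}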
using System; using NBitcoin.BouncyCastle.Crypto.Digests; using NBitcoin.BouncyCastle.Crypto.Parameters; using NBitcoin.BouncyCastle.Security; namespace NBitcoin.BouncyCastle.Crypto.Encodings { /** * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2. */ public class OaepEncoding : IAsymmetricBlockCipher { private byte[] defHash; private IDigest hash; private IDigest mgf1Hash; private IAsymmetricBlockCipher engine; private SecureRandom random; private bool forEncryption; public OaepEncoding( IAsymmetricBlockCipher cipher) : this(cipher, new Sha1Digest(), null) { } public OaepEncoding( IAsymmetricBlockCipher cipher, IDigest hash) : this(cipher, hash, null) { } public OaepEncoding( IAsymmetricBlockCipher cipher, IDigest hash, byte[] encodingParams) : this(cipher, hash, hash, encodingParams) { } public OaepEncoding( IAsymmetricBlockCipher cipher, IDigest hash, IDigest mgf1Hash, byte[] encodingParams) { this.engine = cipher; this.hash = hash; this.mgf1Hash = mgf1Hash; this.defHash = new byte[hash.GetDigestSize()]; if (encodingParams != null) { hash.BlockUpdate(encodingParams, 0, encodingParams.Length); } hash.DoFinal(defHash, 0); } public IAsymmetricBlockCipher GetUnderlyingCipher() { return engine; } public string AlgorithmName { get { return engine.AlgorithmName + "/OAEPPadding"; } } public void Init( bool forEncryption, ICipherParameters param) { if (param is ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.random = rParam.Random; } else { this.random = new SecureRandom(); } engine.Init(forEncryption, param); this.forEncryption = forEncryption; } public int GetInputBlockSize() { int baseBlockSize = engine.GetInputBlockSize(); if (forEncryption) { return baseBlockSize - 1 - 2 * defHash.Length; } else { return baseBlockSize; } } public int GetOutputBlockSize() { int baseBlockSize = engine.GetOutputBlockSize(); if (forEncryption) { return baseBlockSize; } else { return baseBlockSize - 1 - 2 * defHash.Length; } } public byte[] ProcessBlock( byte[] inBytes, int inOff, int inLen) { if (forEncryption) { return EncodeBlock(inBytes, inOff, inLen); } else { return DecodeBlock(inBytes, inOff, inLen); } } private byte[] EncodeBlock( byte[] inBytes, int inOff, int inLen) { byte[] block = new byte[GetInputBlockSize() + 1 + 2 * defHash.Length]; // // copy in the message // Array.Copy(inBytes, inOff, block, block.Length - inLen, inLen); // // add sentinel // block[block.Length - inLen - 1] = 0x01; // // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0) // // // add the hash of the encoding params. // Array.Copy(defHash, 0, block, defHash.Length, defHash.Length); // // generate the seed. // byte[] seed = random.GenerateSeed(defHash.Length); // // mask the message block. // byte[] mask = maskGeneratorFunction1(seed, 0, seed.Length, block.Length - defHash.Length); for (int i = defHash.Length; i != block.Length; i++) { block[i] ^= mask[i - defHash.Length]; } // // add in the seed // Array.Copy(seed, 0, block, 0, defHash.Length); // // mask the seed. // mask = maskGeneratorFunction1( block, defHash.Length, block.Length - defHash.Length, defHash.Length); for (int i = 0; i != defHash.Length; i++) { block[i] ^= mask[i]; } return engine.ProcessBlock(block, 0, block.Length); } /** * @exception InvalidCipherTextException if the decrypted block turns out to * be badly formatted. 
*/ private byte[] DecodeBlock( byte[] inBytes, int inOff, int inLen) { byte[] data = engine.ProcessBlock(inBytes, inOff, inLen); byte[] block; // // as we may have zeros in our leading bytes for the block we produced // on encryption, we need to make sure our decrypted block comes back // the same size. // if (data.Length < engine.GetOutputBlockSize()) { block = new byte[engine.GetOutputBlockSize()]; Array.Copy(data, 0, block, block.Length - data.Length, data.Length); } else { block = data; } if (block.Length < (2 * defHash.Length) + 1) { throw new InvalidCipherTextException("data too short"); } // // unmask the seed. // byte[] mask = maskGeneratorFunction1( block, defHash.Length, block.Length - defHash.Length, defHash.Length); for (int i = 0; i != defHash.Length; i++) { block[i] ^= mask[i]; } // // unmask the message block. // mask = maskGeneratorFunction1(block, 0, defHash.Length, block.Length - defHash.Length); for (int i = defHash.Length; i != block.Length; i++) { block[i] ^= mask[i - defHash.Length]; } // // check the hash of the encoding params. // long check to try to avoid this been a source of a timing attack. // { int diff = 0; for (int i = 0; i < defHash.Length; ++i) { diff |= (byte)(defHash[i] ^ block[defHash.Length + i]); } if (diff != 0) throw new InvalidCipherTextException("data hash wrong"); } // // find the data block // int start; for (start = 2 * defHash.Length; start != block.Length; start++) { if (block[start] != 0) { break; } } if (start >= (block.Length - 1) || block[start] != 1) { throw new InvalidCipherTextException("data start wrong " + start); } start++; // // extract the data block // byte[] output = new byte[block.Length - start]; Array.Copy(block, start, output, 0, output.Length); return output; } /** * int to octet string. */ private void ItoOSP( int i, byte[] sp) { sp[0] = (byte)((uint)i >> 24); sp[1] = (byte)((uint)i >> 16); sp[2] = (byte)((uint)i >> 8); sp[3] = (byte)((uint)i >> 0); } /** * mask generator function, as described in PKCS1v2. */ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { byte[] mask = new byte[length]; byte[] hashBuf = new byte[mgf1Hash.GetDigestSize()]; byte[] C = new byte[4]; int counter = 0; hash.Reset(); do { ItoOSP(counter, C); mgf1Hash.BlockUpdate(Z, zOff, zLen); mgf1Hash.BlockUpdate(C, 0, C.Length); mgf1Hash.DoFinal(hashBuf, 0); Array.Copy(hashBuf, 0, mask, counter * hashBuf.Length, hashBuf.Length); } while (++counter < (length / hashBuf.Length)); if ((counter * hashBuf.Length) < length) { ItoOSP(counter, C); mgf1Hash.BlockUpdate(Z, zOff, zLen); mgf1Hash.BlockUpdate(C, 0, C.Length); mgf1Hash.DoFinal(hashBuf, 0); Array.Copy(hashBuf, 0, mask, counter * hashBuf.Length, mask.Length - (counter * hashBuf.Length)); } return mask; } } }
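using System;
using System.Security.Cryptography;

namespace OaepSketches
{
    // Standalone sketch of the MGF1 mask generation that maskGeneratorFunction1 above
    // implements (PKCS #1 v2.1, appendix B.2.1): the mask is the concatenation of
    // Hash(seed || counter) blocks, with the 32-bit counter encoded big-endian, truncated
    // to the requested length. SHA-1 is used here to mirror the default digest above; the
    // class and method names are illustrative only and not part of the BouncyCastle source.
    internal static class Mgf1Sketch
    {
        public static byte[] GenerateMask(byte[] seed, int length)
        {
            byte[] mask = new byte[length];
            using (SHA1 sha1 = SHA1.Create())
            {
                int hashLen = sha1.HashSize / 8;
                for (int counter = 0, written = 0; written < length; counter++)
                {
                    // C = I2OSP(counter, 4), big-endian, matching ItoOSP above.
                    byte[] c = new byte[4];
                    c[0] = (byte)(counter >> 24);
                    c[1] = (byte)(counter >> 16);
                    c[2] = (byte)(counter >> 8);
                    c[3] = (byte)counter;

                    byte[] block = sha1.ComputeHash(Concat(seed, c));
                    int take = Math.Min(hashLen, length - written);
                    Array.Copy(block, 0, mask, written, take);
                    written += take;
                }
            }
            return mask;
        }

        private static byte[] Concat(byte[] a, byte[] b)
        {
            byte[] result = new byte[a.Length + b.Length];
            Array.Copy(a, 0, result, 0, a.Length);
            Array.Copy(b, 0, result, a.Length, b.Length);
            return result;
        }
    }
}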
using System; using System.Collections; using System.Collections.Generic; using System.Collections.Specialized; using System.ComponentModel.DataAnnotations; using System.Globalization; using System.Reflection; using System.Runtime.Serialization; using System.Web.Http; using System.Web.Http.Description; using System.Xml.Serialization; using Newtonsoft.Json; namespace AutoQueryable.Sample.AspNetFramework.Areas.HelpPage.ModelDescriptions { /// <summary> /// Generates model descriptions for given types. /// </summary> public class ModelDescriptionGenerator { // Modify this to support more data annotation attributes. private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>> { { typeof(RequiredAttribute), a => "Required" }, { typeof(RangeAttribute), a => { RangeAttribute range = (RangeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum); } }, { typeof(MaxLengthAttribute), a => { MaxLengthAttribute maxLength = (MaxLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length); } }, { typeof(MinLengthAttribute), a => { MinLengthAttribute minLength = (MinLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length); } }, { typeof(StringLengthAttribute), a => { StringLengthAttribute strLength = (StringLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength); } }, { typeof(DataTypeAttribute), a => { DataTypeAttribute dataType = (DataTypeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString()); } }, { typeof(RegularExpressionAttribute), a => { RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern); } }, }; // Modify this to add more default documentations. 
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string> { { typeof(Int16), "integer" }, { typeof(Int32), "integer" }, { typeof(Int64), "integer" }, { typeof(UInt16), "unsigned integer" }, { typeof(UInt32), "unsigned integer" }, { typeof(UInt64), "unsigned integer" }, { typeof(Byte), "byte" }, { typeof(Char), "character" }, { typeof(SByte), "signed byte" }, { typeof(Uri), "URI" }, { typeof(Single), "decimal number" }, { typeof(Double), "decimal number" }, { typeof(Decimal), "decimal number" }, { typeof(String), "string" }, { typeof(Guid), "globally unique identifier" }, { typeof(TimeSpan), "time interval" }, { typeof(DateTime), "date" }, { typeof(DateTimeOffset), "date" }, { typeof(Boolean), "boolean" }, }; private Lazy<IModelDocumentationProvider> _documentationProvider; public ModelDescriptionGenerator(HttpConfiguration config) { if (config == null) { throw new ArgumentNullException("config"); } _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider); GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase); } public Dictionary<string, ModelDescription> GeneratedModels { get; private set; } private IModelDocumentationProvider DocumentationProvider { get { return _documentationProvider.Value; } } public ModelDescription GetOrCreateModelDescription(Type modelType) { if (modelType == null) { throw new ArgumentNullException("modelType"); } Type underlyingType = Nullable.GetUnderlyingType(modelType); if (underlyingType != null) { modelType = underlyingType; } ModelDescription modelDescription; string modelName = ModelNameHelper.GetModelName(modelType); if (GeneratedModels.TryGetValue(modelName, out modelDescription)) { if (modelType != modelDescription.ModelType) { throw new InvalidOperationException( String.Format( CultureInfo.CurrentCulture, "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. 
" + "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.", modelName, modelDescription.ModelType.FullName, modelType.FullName)); } return modelDescription; } if (DefaultTypeDocumentation.ContainsKey(modelType)) { return GenerateSimpleTypeModelDescription(modelType); } if (modelType.IsEnum) { return GenerateEnumTypeModelDescription(modelType); } if (modelType.IsGenericType) { Type[] genericArguments = modelType.GetGenericArguments(); if (genericArguments.Length == 1) { Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments); if (enumerableType.IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, genericArguments[0]); } } if (genericArguments.Length == 2) { Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments); if (dictionaryType.IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]); } Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments); if (keyValuePairType.IsAssignableFrom(modelType)) { return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]); } } } if (modelType.IsArray) { Type elementType = modelType.GetElementType(); return GenerateCollectionModelDescription(modelType, elementType); } if (modelType == typeof(NameValueCollection)) { return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string)); } if (typeof(IDictionary).IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object)); } if (typeof(IEnumerable).IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, typeof(object)); } return GenerateComplexTypeModelDescription(modelType); } // Change this to provide different name for the member. private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute) { JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>(); if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName)) { return jsonProperty.PropertyName; } if (hasDataContractAttribute) { DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>(); if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name)) { return dataMember.Name; } } return member.Name; } private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute) { JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>(); XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>(); IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>(); NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>(); ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>(); bool hasMemberAttribute = member.DeclaringType.IsEnum ? 
member.GetCustomAttribute<EnumMemberAttribute>() != null : member.GetCustomAttribute<DataMemberAttribute>() != null; // Display member only if all the followings are true: // no JsonIgnoreAttribute // no XmlIgnoreAttribute // no IgnoreDataMemberAttribute // no NonSerializedAttribute // no ApiExplorerSettingsAttribute with IgnoreApi set to true // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute return jsonIgnore == null && xmlIgnore == null && ignoreDataMember == null && nonSerialized == null && (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) && (!hasDataContractAttribute || hasMemberAttribute); } private string CreateDefaultDocumentation(Type type) { string documentation; if (DefaultTypeDocumentation.TryGetValue(type, out documentation)) { return documentation; } if (DocumentationProvider != null) { documentation = DocumentationProvider.GetDocumentation(type); } return documentation; } private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel) { List<ParameterAnnotation> annotations = new List<ParameterAnnotation>(); IEnumerable<Attribute> attributes = property.GetCustomAttributes(); foreach (Attribute attribute in attributes) { Func<object, string> textGenerator; if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator)) { annotations.Add( new ParameterAnnotation { AnnotationAttribute = attribute, Documentation = textGenerator(attribute) }); } } // Rearrange the annotations annotations.Sort((x, y) => { // Special-case RequiredAttribute so that it shows up on top if (x.AnnotationAttribute is RequiredAttribute) { return -1; } if (y.AnnotationAttribute is RequiredAttribute) { return 1; } // Sort the rest based on alphabetic order of the documentation return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase); }); foreach (ParameterAnnotation annotation in annotations) { propertyModel.Annotations.Add(annotation); } } private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType) { ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType); if (collectionModelDescription != null) { return new CollectionModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, ElementDescription = collectionModelDescription }; } return null; } private ModelDescription GenerateComplexTypeModelDescription(Type modelType) { ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(complexModelDescription.Name, complexModelDescription); bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance); foreach (PropertyInfo property in properties) { if (ShouldDisplayMember(property, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(property, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(property); } GenerateAnnotations(property, propertyModel); complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType); } } FieldInfo[] fields = 
modelType.GetFields(BindingFlags.Public | BindingFlags.Instance); foreach (FieldInfo field in fields) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(field, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(field); } complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType); } } return complexModelDescription; } private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new DictionaryModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType) { EnumTypeModelDescription enumDescription = new EnumTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static)) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { EnumValueDescription enumValue = new EnumValueDescription { Name = field.Name, Value = field.GetRawConstantValue().ToString() }; if (DocumentationProvider != null) { enumValue.Documentation = DocumentationProvider.GetDocumentation(field); } enumDescription.Values.Add(enumValue); } } GeneratedModels.Add(enumDescription.Name, enumDescription); return enumDescription; } private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new KeyValuePairModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private ModelDescription GenerateSimpleTypeModelDescription(Type modelType) { SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription); return simpleModelDescription; } } }
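// A minimal usage sketch for the generator above: it is constructed from the Web API HttpConfiguration and asked for a description of a model type. 'SampleDto' and 'ModelDescriptionGeneratorSample' are hypothetical names introduced only for this illustration.
namespace matcher.Areas.HelpPage.ModelDescriptions
{
    using System.Web.Http;

    internal class SampleDto
    {
        public int Id { get; set; }
        public string Name { get; set; }
    }

    internal static class ModelDescriptionGeneratorSample
    {
        internal static ModelDescription DescribeSampleDto()
        {
            // GlobalConfiguration.Configuration is the ambient Web API configuration in a web-hosted project.
            var generator = new ModelDescriptionGenerator(GlobalConfiguration.Configuration);

            // Complex types are cached in GeneratedModels by model name, so repeated calls
            // for the same type return the same ModelDescription instance.
            return generator.GetOrCreateModelDescription(typeof(SampleDto));
        }
    }
}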
using System; using System.Diagnostics; using System.IO; using System.Threading.Tasks; namespace Plugin.AudioRecorder { /// <summary> /// A service that records audio on the device's microphone input. /// </summary> public partial class AudioRecorderService { const string DefaultFileName = "ARS_recording.wav"; const float NearZero = .00000000001F; WaveRecorder recorder; IAudioStream audioStream; bool audioDetected; DateTime? silenceTime; DateTime? startTime; TaskCompletionSource<string> recordTask; /// <summary> /// Gets the details of the underlying audio stream. /// </summary> /// <remarks>Accessible once <see cref="StartRecording"/> has been called.</remarks> public AudioStreamDetails AudioStreamDetails { get; private set; } /// <summary> /// Gets/sets the desired file path. If null it will be set automatically /// to a temporary file. /// </summary> public string FilePath { get; set; } /// <summary> /// Gets/sets the preferred sample rate to be used during recording. /// </summary> /// <remarks>This value may be overridden by platform-specific implementations, e.g. the Android AudioManager will be asked for its preferred sample rate and may override any user-set value here.</remarks> public int PreferredSampleRate { get; set; } = 44100; /// <summary> /// Returns a value indicating if the <see cref="AudioRecorderService"/> is currently recording audio /// </summary> public bool IsRecording => audioStream?.Active ?? false; /// <summary> /// If <see cref="StopRecordingOnSilence"/> is set to <c>true</c>, this <see cref="TimeSpan"/> indicates the amount of 'silent' time is required before recording is stopped. /// </summary> /// <remarks>Defaults to 2 seconds.</remarks> public TimeSpan AudioSilenceTimeout { get; set; } = TimeSpan.FromSeconds (2); /// <summary> /// If <see cref="StopRecordingAfterTimeout"/> is set to <c>true</c>, this <see cref="TimeSpan"/> indicates the total amount of time to record audio for before recording is stopped. Defaults to 30 seconds. /// </summary> /// <seealso cref="StopRecordingAfterTimeout"/> public TimeSpan TotalAudioTimeout { get; set; } = TimeSpan.FromSeconds (30); /// <summary> /// Gets/sets a value indicating if the <see cref="AudioRecorderService"/> should stop recording after silence (low audio signal) is detected. /// </summary> /// <remarks>Default is `true`</remarks> public bool StopRecordingOnSilence { get; set; } = true; /// <summary> /// Gets/sets a value indicating if the <see cref="AudioRecorderService"/> should stop recording after a certain amount of time. /// </summary> /// <remarks>Defaults to <c>true</c></remarks> /// <seealso cref="TotalAudioTimeout"/> public bool StopRecordingAfterTimeout { get; set; } = true; /// <summary> /// Gets/sets a value indicating the signal threshold that determines silence. If the recorder is being over or under aggressive when detecting silence, you can alter this value to achieve different results. /// </summary> /// <remarks>Defaults to .15. Value should be between 0 and 1.</remarks> public float SilenceThreshold { get; set; } = .15f; /// <summary> /// This event is raised when audio recording is complete and delivers a full filepath to the recorded audio file. /// </summary> /// <remarks>This event will be raised on a background thread to allow for any further processing needed. 
The audio file will be <c>null</c> in the case that no audio was recorded.</remarks> public event EventHandler<string> AudioInputReceived; partial void Init (); /// <summary> /// Creates a new instance of the <see cref="AudioRecorderService"/>. /// </summary> public AudioRecorderService () { Init (); } /// <summary> /// Starts recording audio. /// </summary> /// <returns>A <see cref="Task"/> that will complete when recording is finished. /// The task result will be the path to the recorded audio file, or null if no audio was recorded.</returns> public async Task<Task<string>> StartRecording () { if (FilePath == null) { FilePath = await GetDefaultFilePath (); } ResetAudioDetection (); OnRecordingStarting (); InitializeStream (PreferredSampleRate); await recorder.StartRecorder (audioStream, FilePath); AudioStreamDetails = new AudioStreamDetails { ChannelCount = audioStream.ChannelCount, SampleRate = audioStream.SampleRate, BitsPerSample = audioStream.BitsPerSample }; startTime = DateTime.Now; recordTask = new TaskCompletionSource<string> (); Debug.WriteLine ("AudioRecorderService.StartRecording() complete. Audio is being recorded."); return recordTask.Task; } /// <summary> /// Gets a new <see cref="Stream"/> to the recording audio file in readonly mode. /// </summary> /// <returns>A <see cref="Stream"/> object that can be used to read the audio file from the beginning.</returns> public Stream GetAudioFileStream () { return recorder.GetAudioFileStream (); } void ResetAudioDetection () { audioDetected = false; silenceTime = null; startTime = null; } void AudioStream_OnBroadcast (object sender, byte [] bytes) { var level = AudioFunctions.CalculateLevel (bytes); if (level < NearZero && !audioDetected) // discard any initial 0s so we don't jump the gun on timing out { Debug.WriteLine ("level == {0} && !audioDetected", level); return; } if (level > SilenceThreshold) // did we find a signal? { audioDetected = true; silenceTime = null; Debug.WriteLine ("AudioStream_OnBroadcast :: {0} :: level > SilenceThreshold :: bytes: {1}; level: {2}", DateTime.Now, bytes.Length, level); } else // no audio detected { // see if we've detected 'near' silence for more than <audioTimeout> if (StopRecordingOnSilence && silenceTime.HasValue) { var currentTime = DateTime.Now; if (currentTime.Subtract (silenceTime.Value).TotalMilliseconds > AudioSilenceTimeout.TotalMilliseconds) { Timeout ($"AudioStream_OnBroadcast :: {currentTime} :: AudioSilenceTimeout exceeded, stopping recording :: Near-silence detected at: {silenceTime}"); return; } } else { silenceTime = DateTime.Now; Debug.WriteLine ("AudioStream_OnBroadcast :: {0} :: Near-silence detected :: bytes: {1}; level: {2}", silenceTime, bytes.Length, level); } } if (StopRecordingAfterTimeout && DateTime.Now - startTime > TotalAudioTimeout) { Timeout ("AudioStream_OnBroadcast(): TotalAudioTimeout exceeded, stopping recording"); } } void Timeout (string reason) { Debug.WriteLine (reason); audioStream.OnBroadcast -= AudioStream_OnBroadcast; // need this to be immediate or we can try to stop more than once // since we're in the middle of handling a broadcast event when an audio timeout occurs, we need to break the StopRecording call on another thread // Otherwise, Bad. Things. Happen. _ = Task.Run (() => StopRecording ()); } /// <summary> /// Stops recording audio. /// </summary> /// <param name="continueProcessing"><c>true</c> (default) to finish recording and raise the <see cref="AudioInputReceived"/> event. 
/// Use <c>false</c> here to stop recording but do nothing further (from an error state, etc.).</param> public async Task StopRecording (bool continueProcessing = true) { audioStream.Flush (); // allow the stream to send any remaining data audioStream.OnBroadcast -= AudioStream_OnBroadcast; try { await audioStream.Stop (); // WaveRecorder will be stopped as a result of the stream stopping } catch (Exception ex) { Debug.WriteLine ("Error in StopRecording: {0}", ex); } OnRecordingStopped (); var returnedFilePath = GetAudioFilePath (); // complete the recording Task for anything waiting on this recordTask.TrySetResult (returnedFilePath); if (continueProcessing) { Debug.WriteLine ($"AudioRecorderService.StopRecording(): Recording stopped, raising AudioInputReceived event; audioDetected == {audioDetected}; filePath == {returnedFilePath}"); AudioInputReceived?.Invoke (this, returnedFilePath); } } void InitializeStream (int sampleRate) { try { if (audioStream != null) { audioStream.OnBroadcast -= AudioStream_OnBroadcast; } else { audioStream = new AudioStream (sampleRate); } audioStream.OnBroadcast += AudioStream_OnBroadcast; if (recorder == null) { recorder = new WaveRecorder (); } } catch (Exception ex) { Debug.WriteLine ("Error: {0}", ex); } } /// <summary> /// Gets the full filepath to the recorded audio file. /// </summary> /// <returns>The full filepath to the recorded audio file, or null if no audio was detected during the last record.</returns> public string GetAudioFilePath () { return audioDetected ? FilePath : null; } } }
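// A minimal usage sketch for the recorder above: StartRecording() returns a task whose result is itself a Task<string>. The outer await completes once recording has begun; the inner task completes when recording stops (silence, timeout, or an explicit StopRecording call) and yields the recorded file path. 'RecorderSample' is a hypothetical class introduced only for this illustration.
namespace Plugin.AudioRecorder
{
	using System;
	using System.Diagnostics;
	using System.Threading.Tasks;

	static class RecorderSample
	{
		public static async Task RecordOnceAsync ()
		{
			var recorder = new AudioRecorderService
			{
				StopRecordingOnSilence = true,        // stop after AudioSilenceTimeout of near-silence
				StopRecordingAfterTimeout = true,     // hard stop after TotalAudioTimeout
				TotalAudioTimeout = TimeSpan.FromSeconds (15)
			};

			Task<string> recordingTask = await recorder.StartRecording ();   // recording is now in progress
			string filePath = await recordingTask;                           // completes when recording stops

			// The result is null when no audio above SilenceThreshold was detected.
			Debug.WriteLine (filePath ?? "No audio detected.");
		}
	}
}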
/* THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. This is sample code and is freely distributable. */ #region #Quantizer# /// <summary> /// Quantizer /// </summary> public abstract class Quantizer { /// <summary> /// Construct the quantizer /// </summary> /// <param name="singlePass">If true, the quantization only needs to loop through the source pixels once</param> /// <remarks> /// If you construct this class with a true value for singlePass, then the code will, when quantizing your image, /// only call the 'QuantizeImage' function. If two passes are required, the code will call 'InitialQuantizeImage' /// and then 'QuantizeImage'. /// </remarks> public Quantizer(bool singlePass) { _singlePass = singlePass; _pixelSize = Marshal.SizeOf(typeof(Color32)); } /// <summary> /// Quantize an image and return the resulting output bitmap /// </summary> /// <param name="source">The image to quantize</param> /// <returns>A quantized version of the image</returns> public Bitmap Quantize(System.Drawing.Image source) { // Get the size of the source image int height = source.Height; int width = source.Width; // And construct a rectangle from these dimensions Rectangle bounds = new Rectangle(0, 0, width, height); // First off take a 32bpp copy of the image Bitmap copy = new Bitmap(width, height, PixelFormat.Format32bppArgb); // And construct an 8bpp version Bitmap output = new Bitmap(width, height, PixelFormat.Format8bppIndexed); // Now lock the bitmap into memory using (Graphics g = Graphics.FromImage(copy)) { g.PageUnit = GraphicsUnit.Pixel; // Draw the source image onto the copy bitmap, // which will effect a widening as appropriate. g.DrawImage(source, bounds); } // Define a pointer to the bitmap data BitmapData sourceData = null; try { // Get the source image bits and lock into memory sourceData = copy.LockBits(bounds, ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb); // Call the FirstPass function if not a single pass algorithm. // For something like an octree quantizer, this will run through // all image pixels, build a data structure, and create a palette. if (!_singlePass) FirstPass(sourceData, width, height); // Then set the color palette on the output bitmap. I'm passing in the current palette // as there's no way to construct a new, empty palette. output.Palette = GetPalette(output.Palette); // Then call the second pass which actually does the conversion SecondPass(sourceData, output, width, height, bounds); } finally { // Ensure that the bits are unlocked copy.UnlockBits(sourceData); } // Last but not least, return the output bitmap return output; } /// <summary> /// Execute the first pass through the pixels in the image /// </summary> /// <param name="sourceData">The source data</param> /// <param name="width">The width in pixels of the image</param> /// <param name="height">The height in pixels of the image</param> protected virtual void FirstPass(BitmapData sourceData, int width, int height) { // Define the source data pointers. 
The source row is a byte to // keep addition of the stride value easier (as this is in bytes) IntPtr pSourceRow = sourceData.Scan0; // Loop through each row for (int row = 0; row < height; row++) { // Set the source pixel to the first pixel in this row IntPtr pSourcePixel = pSourceRow; // And loop through each column for (int col = 0; col < width; col++) { InitialQuantizePixel(new Color32(pSourcePixel)); pSourcePixel = (IntPtr)((Int32)pSourcePixel + _pixelSize); } // Now I have the pixel, call the FirstPassQuantize function... // Add the stride to the source row pSourceRow = (IntPtr)((long)pSourceRow + sourceData.Stride); } } /// <summary> /// Execute a second pass through the bitmap /// </summary> /// <param name="sourceData">The source bitmap, locked into memory</param> /// <param name="output">The output bitmap</param> /// <param name="width">The width in pixels of the image</param> /// <param name="height">The height in pixels of the image</param> /// <param name="bounds">The bounding rectangle</param> protected virtual void SecondPass(BitmapData sourceData, Bitmap output, int width, int height, Rectangle bounds) { BitmapData outputData = null; try { // Lock the output bitmap into memory outputData = output.LockBits(bounds, ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed); // Define the source data pointers. The source row is a byte to // keep addition of the stride value easier (as this is in bytes) IntPtr pSourceRow = sourceData.Scan0; IntPtr pSourcePixel = pSourceRow; IntPtr pPreviousPixel = pSourcePixel; // Now define the destination data pointers IntPtr pDestinationRow = outputData.Scan0; IntPtr pDestinationPixel = pDestinationRow; // And convert the first pixel, so that I have values going into the loop byte pixelValue = QuantizePixel(new Color32(pSourcePixel)); // Assign the value of the first pixel Marshal.WriteByte(pDestinationPixel, pixelValue); // Loop through each row for (int row = 0; row < height; row++) { // Set the source pixel to the first pixel in this row pSourcePixel = pSourceRow; // And set the destination pixel pointer to the first pixel in the row pDestinationPixel = pDestinationRow; // Loop through each pixel on this scan line for (int col = 0; col < width; col++) { // Check if this is the same as the last pixel. If so use that value // rather than calculating it again. This is an inexpensive optimisation. if (Marshal.ReadInt32(pPreviousPixel) != Marshal.ReadInt32(pSourcePixel)) { // Quantize the pixel pixelValue = QuantizePixel(new Color32(pSourcePixel)); // And setup the previous pointer pPreviousPixel = pSourcePixel; } // And set the pixel in the output Marshal.WriteByte(pDestinationPixel, pixelValue); pSourcePixel = (IntPtr)((long)pSourcePixel + _pixelSize); pDestinationPixel = (IntPtr)((long)pDestinationPixel + 1); } // Add the stride to the source row pSourceRow = (IntPtr)((long)pSourceRow + sourceData.Stride); // And to the destination row pDestinationRow = (IntPtr)((long)pDestinationRow + outputData.Stride); } } finally { // Ensure that I unlock the output bits output.UnlockBits(outputData); } } /// <summary> /// Override this to process the pixel in the first pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <remarks> /// This function need only be overridden if your quantize algorithm needs two passes, /// such as an Octree quantizer. 
/// </remarks> protected virtual void InitialQuantizePixel(Color32 pixel) { } /// <summary> /// Override this to process the pixel in the second pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <returns>The quantized value</returns> protected abstract byte QuantizePixel(Color32 pixel); /// <summary> /// Retrieve the palette for the quantized image /// </summary> /// <param name="original">Any old palette, this is overwritten</param> /// <returns>The new color palette</returns> protected abstract ColorPalette GetPalette(ColorPalette original); /// <summary> /// Flag used to indicate whether a single pass or two passes are needed for quantization. /// </summary> private bool _singlePass; private int _pixelSize; /// <summary> /// Struct that defines a 32 bpp colour /// </summary> /// <remarks> /// This struct is used to read data from a 32 bits per pixel image /// in memory, and is ordered in this manner as this is the way that /// the data is laid out in memory /// </remarks> [StructLayout(LayoutKind.Explicit)] public struct Color32 { public Color32(IntPtr pSourcePixel) { this = (Color32)Marshal.PtrToStructure(pSourcePixel, typeof(Color32)); } /// <summary> /// Holds the blue component of the colour /// </summary> [FieldOffset(0)] public byte Blue; /// <summary> /// Holds the green component of the colour /// </summary> [FieldOffset(1)] public byte Green; /// <summary> /// Holds the red component of the colour /// </summary> [FieldOffset(2)] public byte Red; /// <summary> /// Holds the alpha component of the colour /// </summary> [FieldOffset(3)] public byte Alpha; /// <summary> /// Permits the color32 to be treated as an int32 /// </summary> [FieldOffset(0)] public int ARGB; /// <summary> /// Return the color for this Color32 object /// </summary> public Color Color { get { return Color.FromArgb(Alpha, Red, Green, Blue); } } } } #endregion #region #PaletteQuantizer# /// <summary> /// Quantizes an image to a fixed, caller-supplied color palette. /// </summary> public class PaletteQuantizer : Quantizer { /// <summary> /// Construct the palette quantizer /// </summary> /// <param name="palette">The color palette to quantize to</param> /// <remarks> /// Palette quantization only requires a single quantization step /// </remarks> public PaletteQuantizer(ArrayList palette) : base(true) { _colorMap = new Hashtable(); _colors = new Color[palette.Count]; palette.CopyTo(_colors); } /// <summary> /// Override this to process the pixel in the second pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <returns>The quantized value</returns> protected override byte QuantizePixel(Color32 pixel) { byte colorIndex = 0; int colorHash = pixel.ARGB; // Check if the color is in the lookup table if (_colorMap.ContainsKey(colorHash)) colorIndex = (byte)_colorMap[colorHash]; else { // Not found - loop through the palette and find the nearest match. // Firstly check the alpha value - if 0, lookup the transparent color if (0 == pixel.Alpha) { // Transparent. Lookup the first color with an alpha value of 0 for (int index = 0; index < _colors.Length; index++) { if (0 == _colors[index].A) { colorIndex = (byte)index; break; } } } else { // Not transparent...
int leastDistance = int.MaxValue; int red = pixel.Red; int green = pixel.Green; int blue = pixel.Blue; // Loop through the entire palette, looking for the closest color match for (int index = 0; index < _colors.Length; index++) { Color paletteColor = _colors[index]; int redDistance = paletteColor.R - red; int greenDistance = paletteColor.G - green; int blueDistance = paletteColor.B - blue; int distance = (redDistance * redDistance) + (greenDistance * greenDistance) + (blueDistance * blueDistance); if (distance < leastDistance) { colorIndex = (byte)index; leastDistance = distance; // And if it's an exact match, exit the loop if (0 == distance) break; } } } // Now I have the color, pop it into the hashtable for next time _colorMap.Add(colorHash, colorIndex); } return colorIndex; } /// <summary> /// Retrieve the palette for the quantized image /// </summary> /// <param name="palette">Any old palette, this is overrwritten</param> /// <returns>The new color palette</returns> protected override ColorPalette GetPalette(ColorPalette palette) { for (int index = 0; index < _colors.Length; index++) palette.Entries[index] = _colors[index]; return palette; } /// <summary> /// Lookup table for colors /// </summary> private Hashtable _colorMap; /// <summary> /// List of all colors in the palette /// </summary> protected Color[] _colors; } #endregion #region #OctreeQuantizer# /// <summary> /// Quantize using an Octree /// </summary> public class OctreeQuantizer : Quantizer { /// <summary> /// Construct the octree quantizer /// </summary> /// <remarks> /// The Octree quantizer is a two pass algorithm. The initial pass sets up the octree, /// the second pass quantizes a color based on the nodes in the tree /// </remarks> /// <param name="maxColors">The maximum number of colors to return</param> /// <param name="maxColorBits">The number of significant bits</param> public OctreeQuantizer(int maxColors, int maxColorBits) : base(false) { if (maxColors > 255) throw new ArgumentOutOfRangeException("maxColors", maxColors, "The number of colors should be less than 256"); if ((maxColorBits < 1) | (maxColorBits > 8)) throw new ArgumentOutOfRangeException("maxColorBits", maxColorBits, "This should be between 1 and 8"); // Construct the octree _octree = new Octree(maxColorBits); _maxColors = maxColors; } /// <summary> /// Process the pixel in the first pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <remarks> /// This function need only be overridden if your quantize algorithm needs two passes, /// such as an Octree quantizer. 
/// </remarks> protected override void InitialQuantizePixel(Color32 pixel) { // Add the color to the octree _octree.AddColor(pixel); } /// <summary> /// Override this to process the pixel in the second pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <returns>The quantized value</returns> protected override byte QuantizePixel(Color32 pixel) { byte paletteIndex = (byte)_maxColors; // The color at [_maxColors] is set to transparent // Get the palette index if this is non-transparent if (pixel.Alpha > 0) paletteIndex = (byte)_octree.GetPaletteIndex(pixel); return paletteIndex; } /// <summary> /// Retrieve the palette for the quantized image /// </summary> /// <param name="original">Any old palette, this is overwritten</param> /// <returns>The new color palette</returns> protected override ColorPalette GetPalette(ColorPalette original) { // First off convert the octree to _maxColors colors ArrayList palette = _octree.Palletize(_maxColors - 1); // Then convert the palette based on those colors for (int index = 0; index < palette.Count; index++) original.Entries[index] = (Color)palette[index]; // Add the transparent color original.Entries[_maxColors] = Color.FromArgb(0, 0, 0, 0); return original; } /// <summary> /// Stores the tree /// </summary> private Octree _octree; /// <summary> /// Maximum number of colors allowed in the palette /// </summary> private int _maxColors; /// <summary> /// Class which does the actual quantization /// </summary> private class Octree { /// <summary> /// Construct the octree /// </summary> /// <param name="maxColorBits">The maximum number of significant bits in the image</param> public Octree(int maxColorBits) { _maxColorBits = maxColorBits; _leafCount = 0; _reducibleNodes = new OctreeNode[9]; _root = new OctreeNode(0, _maxColorBits, this); _previousColor = 0; _previousNode = null; } /// <summary> /// Add a given color value to the octree /// </summary> /// <param name="pixel"></param> public void AddColor(Color32 pixel) { // Check if this request is for the same color as the last if (_previousColor == pixel.ARGB) { // If so, check if I have a previous node setup. This will only occur if the first color in the image // happens to be black, with an alpha component of zero. if (null == _previousNode) { _previousColor = pixel.ARGB; _root.AddColor(pixel, _maxColorBits, 0, this); } else // Just update the previous node _previousNode.Increment(pixel); } else { _previousColor = pixel.ARGB; _root.AddColor(pixel, _maxColorBits, 0, this); } } /// <summary> /// Reduce the depth of the tree /// </summary> public void Reduce() { int index; // Find the deepest level containing at least one reducible node for (index = _maxColorBits - 1; (index > 0) && (null == _reducibleNodes[index]); index--) ; // Reduce the node most recently added to the list at level 'index' OctreeNode node = _reducibleNodes[index]; _reducibleNodes[index] = node.NextReducible; // Decrement the leaf count after reducing the node _leafCount -= node.Reduce(); // And just in case I've reduced the last color to be added, and the next color to // be added is the same, invalidate the previousNode...
_previousNode = null; } /// <summary> /// Get/Set the number of leaves in the tree /// </summary> public int Leaves { get { return _leafCount; } set { _leafCount = value; } } /// <summary> /// Return the array of reducible nodes /// </summary> protected OctreeNode[] ReducibleNodes { get { return _reducibleNodes; } } /// <summary> /// Keep track of the previous node that was quantized /// </summary> /// <param name="node">The node last quantized</param> protected void TrackPrevious(OctreeNode node) { _previousNode = node; } /// <summary> /// Convert the nodes in the octree to a palette with a maximum of colorCount colors /// </summary> /// <param name="colorCount">The maximum number of colors</param> /// <returns>An arraylist with the palettized colors</returns> public ArrayList Palletize(int colorCount) { while (Leaves > colorCount) Reduce(); // Now palettize the nodes ArrayList palette = new ArrayList(Leaves); int paletteIndex = 0; _root.ConstructPalette(palette, ref paletteIndex); // And return the palette return palette; } /// <summary> /// Get the palette index for the passed color /// </summary> /// <param name="pixel"></param> /// <returns></returns> public int GetPaletteIndex(Color32 pixel) { return _root.GetPaletteIndex(pixel, 0); } /// <summary> /// Mask used when getting the appropriate pixels for a given node /// </summary> private static int[] mask = new int[8] { 0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01 }; /// <summary> /// The root of the octree /// </summary> private OctreeNode _root; /// <summary> /// Number of leaves in the tree /// </summary> private int _leafCount; /// <summary> /// Array of reducible nodes /// </summary> private OctreeNode[] _reducibleNodes; /// <summary> /// Maximum number of significant bits in the image /// </summary> private int _maxColorBits; /// <summary> /// Store the last node quantized /// </summary> private OctreeNode _previousNode; /// <summary> /// Cache the previous color quantized /// </summary> private int _previousColor; /// <summary> /// Class which encapsulates each node in the tree /// </summary> protected class OctreeNode { /// <summary> /// Construct the node /// </summary> /// <param name="level">The level in the tree = 0 - 7</param> /// <param name="colorBits">The number of significant color bits in the image</param> /// <param name="octree">The tree to which this node belongs</param> public OctreeNode(int level, int colorBits, Octree octree) { // Construct the new node _leaf = (level == colorBits); _red = _green = _blue = 0; _pixelCount = 0; // If a leaf, increment the leaf count if (_leaf) { octree.Leaves++; _nextReducible = null; _children = null; } else { // Otherwise add this to the reducible nodes _nextReducible = octree.ReducibleNodes[level]; octree.ReducibleNodes[level] = this; _children = new OctreeNode[8]; } } /// <summary> /// Add a color into the tree /// </summary> /// <param name="pixel">The color</param> /// <param name="colorBits">The number of significant color bits</param> /// <param name="level">The level in the tree</param> /// <param name="octree">The tree to which this node belongs</param> public void AddColor(Color32 pixel, int colorBits, int level, Octree octree) { // Update the color information if this is a leaf if (_leaf) { Increment(pixel); // Setup the previous node octree.TrackPrevious(this); } else { // Go to the next level down in the tree int shift = 7 - level; int index = ((pixel.Red & mask[level]) >> (shift - 2)) | ((pixel.Green & mask[level]) >> (shift - 1)) | ((pixel.Blue & mask[level]) >> 
(shift)); OctreeNode child = _children[index]; if (null == child) { // Create a new child node & store in the array child = new OctreeNode(level + 1, colorBits, octree); _children[index] = child; } // Add the color to the child node child.AddColor(pixel, colorBits, level + 1, octree); } } /// <summary> /// Get/Set the next reducible node /// </summary> public OctreeNode NextReducible { get { return _nextReducible; } set { _nextReducible = value; } } /// <summary> /// Return the child nodes /// </summary> public OctreeNode[] Children { get { return _children; } } /// <summary> /// Reduce this node by removing all of its children /// </summary> /// <returns>The number of leaves removed</returns> public int Reduce() { _red = _green = _blue = 0; int children = 0; // Loop through all children and add their information to this node for (int index = 0; index < 8; index++) { if (null != _children[index]) { _red += _children[index]._red; _green += _children[index]._green; _blue += _children[index]._blue; _pixelCount += _children[index]._pixelCount; ++children; _children[index] = null; } } // Now change this to a leaf node _leaf = true; // Return the number of nodes to decrement the leaf count by return (children - 1); } /// <summary> /// Traverse the tree, building up the color palette /// </summary> /// <param name="palette">The palette</param> /// <param name="paletteIndex">The current palette index</param> public void ConstructPalette(ArrayList palette, ref int paletteIndex) { if (_leaf) { // Consume the next palette index _paletteIndex = paletteIndex++; // And set the color of the palette entry palette.Add(Color.FromArgb(_red / _pixelCount, _green / _pixelCount, _blue / _pixelCount)); } else { // Loop through children looking for leaves for (int index = 0; index < 8; index++) { if (null != _children[index]) _children[index].ConstructPalette(palette, ref paletteIndex); } } } /// <summary> /// Return the palette index for the passed color /// </summary> public int GetPaletteIndex(Color32 pixel, int level) { int paletteIndex = _paletteIndex; if (!_leaf) { int shift = 7 - level; int index = ((pixel.Red & mask[level]) >> (shift - 2)) | ((pixel.Green & mask[level]) >> (shift - 1)) | ((pixel.Blue & mask[level]) >> (shift)); if (null != _children[index]) paletteIndex = _children[index].GetPaletteIndex(pixel, level + 1); else throw new Exception("Didn't expect this!"); } return paletteIndex; } /// <summary> /// Increment the pixel count and add to the color information /// </summary> public void Increment(Color32 pixel) { _pixelCount++; _red += pixel.Red; _green += pixel.Green; _blue += pixel.Blue; } /// <summary> /// Flag indicating that this is a leaf node /// </summary> private bool _leaf; /// <summary> /// Number of pixels in this node /// </summary> private int _pixelCount; /// <summary> /// Red component /// </summary> private int _red; /// <summary> /// Green Component /// </summary> private int _green; /// <summary> /// Blue component /// </summary> private int _blue; /// <summary> /// Pointers to any child nodes /// </summary> private OctreeNode[] _children; /// <summary> /// Pointer to next reducible node /// </summary> private OctreeNode _nextReducible; /// <summary> /// The index of this node in the palette /// </summary> private int _paletteIndex; } } } #endregion #region #GrayscaleQuantizer# /// <summary> /// Grayscale quantizer (255 Indexes Palette) /// </summary> public class GrayscaleQuantizer : PaletteQuantizer { /// <summary> /// Construct the palette quantizer /// </summary> /// 
<remarks> /// Palette quantization only requires a single quantization step /// </remarks> public GrayscaleQuantizer() : base(new ArrayList()) { _colors = new Color[256]; int nColors = 256; // Initialize a new color table with entries that are determined // by some optimal palette-finding algorithm; for demonstration // purposes, use a grayscale. for (uint i = 0; i < nColors; i++) { uint Alpha = 0xFF; // Colors are opaque. uint Intensity = Convert.ToUInt32(i * 0xFF / (nColors - 1)); // Even distribution. // The GIF encoder makes the first entry in the palette // that has a ZERO alpha the transparent color in the GIF. // Pick the first one arbitrarily, for demonstration purposes. // Create a gray scale for demonstration purposes. // Otherwise, use your favorite color reduction algorithm // and an optimum palette for that algorithm generated here. // For example, a color histogram, or a median cut palette. _colors[i] = Color.FromArgb((int)Alpha, (int)Intensity, (int)Intensity, (int)Intensity); } } /// <summary> /// Override this to process the pixel in the second pass of the algorithm /// </summary> /// <param name="pixel">The pixel to quantize</param> /// <returns>The quantized value</returns> protected override byte QuantizePixel(Color32 pixel) { byte colorIndex = 0; double luminance = (pixel.Red * 0.299) + (pixel.Green * 0.587) + (pixel.Blue * 0.114); // Gray scale is an intensity map from black to white. // Compute the index to the grayscale entry that // approximates the luminance, and then round the index. // Also, constrain the index choices by the number of // colors to do, and then set that pixel's index to the // byte value. colorIndex = (byte)(luminance + 0.5); return colorIndex; } } #endregion
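// A minimal usage sketch for the quantizers above, assuming the file-level directives this sample relies on
// ('using System; using System.Collections; using System.Drawing; using System.Drawing.Imaging;
// using System.Runtime.InteropServices;'). The paths and the 'QuantizerSample' class are hypothetical
// and exist only for this illustration.
public static class QuantizerSample
{
    public static void SaveAsIndexedGif(string inputPath, string outputPath)
    {
        using (Image source = Image.FromFile(inputPath))
        {
            // 255 palette colors plus one reserved transparent entry, 8 significant bits per channel.
            Quantizer quantizer = new OctreeQuantizer(255, 8);

            using (Bitmap quantized = quantizer.Quantize(source))
            {
                quantized.Save(outputPath, ImageFormat.Gif);
            }
        }
    }
}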
//----------------------------------------------------------------------- // <copyright file="Folder.cs">(c) http://www.codeplex.com/MSBuildExtensionPack. This source is subject to the Microsoft Permissive License. See http://www.microsoft.com/resources/sharedsource/licensingbasics/sharedsourcelicenses.mspx. All other rights reserved.</copyright> //----------------------------------------------------------------------- namespace MSBuild.ExtensionPack.FileSystem { using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Management; using System.Security.AccessControl; using System.Text.RegularExpressions; using Microsoft.Build.Framework; using Microsoft.Build.Utilities; /// <summary> /// <b>Valid TaskActions are:</b> /// <para><i>AddSecurity</i> (<b>Required: </b> Path, Users <b>Optional: </b>AccessType, Permission)</para> /// <para><i>DeleteAll</i> (<b>Required: </b> Path, Match)</para> /// <para><i>Get</i> (<b>Required: </b> Path <b>Optional:</b> Match, Recursive)</para> /// <para><i>Move</i> (<b>Required: </b> Path, TargetPath)</para> /// <para><i>RemoveContent</i> (<b>Required: </b> Path <b>Optional: </b>Force, RetryCount)</para> /// <para><i>RemoveSecurity</i> (<b>Required: </b> Path, Users <b>Optional: </b>AccessType)</para> /// <para><b>Remote Execution Support:</b> No</para> /// </summary> /// <example> /// <code lang="xml"><![CDATA[ /// <Project ToolsVersion="3.5" DefaultTargets="Default" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> /// <PropertyGroup> /// <TPath>$(MSBuildProjectDirectory)\..\MSBuild.ExtensionPack.tasks</TPath> /// <TPath Condition="Exists('$(MSBuildProjectDirectory)\..\..\Common\MSBuild.ExtensionPack.tasks')">$(MSBuildProjectDirectory)\..\..\Common\MSBuild.ExtensionPack.tasks</TPath> /// </PropertyGroup> /// <Import Project="$(TPath)"/> /// <Target Name="Default"> /// <ItemGroup> /// <Users Include="AReadUser"> /// <Permission>ExecuteFile, Read</Permission> /// </Users> /// <Users Include="AChangeUser"> /// <Permission>FullControl</Permission> /// </Users> /// <FoldersToPermission Include="c:\az"> /// <Account>Performance Log Users</Account> /// <Permission>Read,Write,Modify,Delete</Permission> /// <AccessType>Allow</AccessType> /// </FoldersToPermission> /// <FoldersToPermission Include="c:\az"> /// <Account>AChangeUser</Account> /// <Permission>Read,Write,Modify,Delete</Permission> /// <AccessType>Allow</AccessType> /// </FoldersToPermission> /// <FoldersToRemovePermissions Include="c:\az"> /// <Account>Performance Log Users</Account> /// <Permission>Read,Write,Modify,Delete</Permission> /// </FoldersToRemovePermissions> /// </ItemGroup> /// <Microsoft.Build.Tasks.MakeDir Directories="c:\Demo2;c:\Demo1;c:\ddd"/> /// <Microsoft.Build.Tasks.RemoveDir Directories="C:\adeeeee"/> /// <!-- Add security for users --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="AddSecurity" Path="c:\Demo2" Users="@(Users)"/> /// <!-- Remove security for users --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="RemoveSecurity" Path="c:\Demo2" Users="@(Users)"/> /// <!-- Add Deny security for users --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="AddSecurity" AccessType="Deny" Path="c:\Demo2" Users="@(Users)"/> /// <!-- Remove Deny security for users --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="RemoveSecurity" AccessType="Deny" Path="c:\Demo2" Users="@(Users)"/> /// <!-- Delete all folders matching a given name --> /// 
<MSBuild.ExtensionPack.FileSystem.Folder TaskAction="DeleteAll" Path="c:\Demo2" Match="_svn"/> /// <!-- Remove all content from a folder whilst maintaining the target folder --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="RemoveContent" Path="c:\Demo"/> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="AddSecurity" AccessType="%(FoldersToPermission.AccessType)" Path="%(FoldersToPermission.Identity)" Users="%(FoldersToPermission.Account)" Permission="%(FoldersToPermission.Permission)"/> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="RemoveSecurity" AccessType="%(FoldersToRemovePermissions.AccessType)" Path="%(FoldersToRemovePermissions.Identity)" Users="%(FoldersToRemovePermissions.Account)" Permission="%(FoldersToRemovePermissions.Permission)"/> /// <!-- Move a folder --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="Move" Path="c:\Demo1" TargetPath="C:\adeeeee"/> /// <!-- Lets copy a selection of folders to multiple locations --> /// <MSBuild.ExtensionPack.FileSystem.Folder TaskAction="Get" Path="c:\ddd"> /// <Output TaskParameter="Folders" ItemName="FoundFolders"/> /// </MSBuild.ExtensionPack.FileSystem.Folder> /// <Message Text="%(FoundFolders.Identity)"/> /// <ItemGroup> /// <MyWebService Include="C:\a\Dist\**\*.*"> /// <ToDir>%(FoundFolders.Identity)</ToDir> /// </MyWebService> /// </ItemGroup> /// <!-- Copy using the metadata --> /// <Copy SourceFiles="@(MyWebService)" DestinationFolder="%(ToDir)\%(RecursiveDir)" /> /// </Target> /// </Project> /// ]]></code> /// </example> [HelpUrl("http://www.msbuildextensionpack.com/help/3.5.12.0/html/c0f7dd21-7229-b08d-469c-9e02e66e974b.htm")] public class Folder : BaseTask { private const string AddSecurityTaskAction = "AddSecurity"; private const string DeleteAllTaskAction = "DeleteAll"; private const string GetTaskAction = "Get"; private const string MoveTaskAction = "Move"; private const string RemoveContentTaskAction = "RemoveContent"; private const string RemoveSecurityTaskAction = "RemoveSecurity"; private List<string> foldersFound; private AccessControlType accessType = AccessControlType.Allow; private int retryCount = 5; [DropdownValue(AddSecurityTaskAction)] [DropdownValue(DeleteAllTaskAction)] [DropdownValue(GetTaskAction)] [DropdownValue(MoveTaskAction)] [DropdownValue(RemoveContentTaskAction)] [DropdownValue(RemoveSecurityTaskAction)] public override string TaskAction { get { return base.TaskAction; } set { base.TaskAction = value; } } /// <summary> /// Sets the path to remove content from, or the base path for Delete /// </summary> [Required] [TaskAction(AddSecurityTaskAction, true)] [TaskAction(DeleteAllTaskAction, true)] [TaskAction(GetTaskAction, true)] [TaskAction(MoveTaskAction, true)] [TaskAction(RemoveContentTaskAction, true)] [TaskAction(RemoveSecurityTaskAction, true)] public ITaskItem Path { get; set; } /// <summary> /// Sets the regular expression to match in the name of a folder for Delete. Case is ignored. /// </summary> [TaskAction(DeleteAllTaskAction, true)] [TaskAction(GetTaskAction, false)] public string Match { get; set; } /// <summary> /// Sets the TargetPath for a renamed folder /// </summary> [TaskAction(MoveTaskAction, true)] public ITaskItem TargetPath { get; set; } /// <summary> /// Sets a value indicating whether to delete readonly files when performing RemoveContent /// </summary> [TaskAction(RemoveContentTaskAction, false)] public bool Force { get; set; } /// <summary> /// Sets the users collection. Use the Permission metadata tag to specify permissions. 
Separate pemissions with a comma. /// <para/> <UsersCol Include="AUser"> /// <para/> <Permission>Read,etc</Permission> /// <para/> </UsersCol> /// </summary> [TaskAction(AddSecurityTaskAction, true)] [TaskAction(RemoveSecurityTaskAction, true)] public ITaskItem[] Users { get; set; } /// <summary> /// A comma-separated list of <a href="http://msdn.microsoft.com/en-us/library/942f991b.aspx">FileSystemRights</a>. /// </summary> [TaskAction(AddSecurityTaskAction, false)] [TaskAction(RemoveSecurityTaskAction, false)] public string Permission { get; set; } /// <summary> /// Set the AccessType. Can be Allow or Deny. Default is Allow. /// </summary> [TaskAction(AddSecurityTaskAction, false)] [TaskAction(RemoveSecurityTaskAction, false)] public string AccessType { get { return this.accessType.ToString(); } set { this.accessType = (AccessControlType)Enum.Parse(typeof(AccessControlType), value); } } /// <summary> /// Sets a value indicating how many times to retry removing the content, e.g. if files are temporarily locked. Default is 5. The retry occurs every 5 seconds. /// </summary> [TaskAction(RemoveContentTaskAction, false)] public int RetryCount { get { return this.retryCount; } set { this.retryCount = value; } } /// <summary> /// Set to true to perform a recursive scan. Default is false. /// </summary> public bool Recursive { get; set; } /// <summary> /// Gets the folder list /// </summary> [Output] public ITaskItem[] Folders { get; set; } /// <summary> /// Performs the action of this task. /// </summary> /// <remarks> /// LogError should be thrown in the event of errors /// </remarks> protected override void InternalExecute() { if (!this.TargetingLocalMachine()) { return; } DirectoryInfo dir = new DirectoryInfo(this.Path.GetMetadata("FullPath")); if (!dir.Exists) { this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "The directory does not exist: {0}", this.Path)); return; } switch (this.TaskAction) { case AddSecurityTaskAction: this.SetSecurity("Add"); break; case RemoveSecurityTaskAction: this.SetSecurity("Remove"); break; case RemoveContentTaskAction: this.RemoveContent(dir); break; case MoveTaskAction: this.Move(); break; case DeleteAllTaskAction: this.DeleteAll(); break; case GetTaskAction: this.GetFolders(); break; default: this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Invalid TaskAction passed: {0}", this.TaskAction)); return; } } private void DelTree(DirectoryInfo root) { // Delete all files in current folder. foreach (FileInfo i in root.GetFiles()) { // First make sure the file is writable. FileAttributes fileAttributes = System.IO.File.GetAttributes(i.FullName); // If readonly attribute is set, reset it. if ((fileAttributes & FileAttributes.ReadOnly) == FileAttributes.ReadOnly) { System.IO.File.SetAttributes(i.FullName, fileAttributes ^ FileAttributes.ReadOnly); } try { FileInfo f = new FileInfo(i.FullName); if (f.Exists) { System.IO.File.Delete(i.FullName); } } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. 
Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { FileInfo f = new FileInfo(i.FullName); if (f.Exists) { System.IO.File.Delete(i.FullName); } deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } foreach (DirectoryInfo d in root.GetDirectories()) { this.DelTree(d); try { Directory.Delete(d.FullName); } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { Directory.Delete(d.FullName); deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } } private void GetFolders() { if (string.IsNullOrEmpty(this.Path.GetMetadata("FullPath"))) { Log.LogError("Path must be specified."); return; } this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Getting Folders from: {0}", this.Path)); DirectoryInfo dirInfo = new DirectoryInfo(this.Path.GetMetadata("FullPath")); this.foldersFound = new List<string>(); this.ProcessGetAll(dirInfo); this.Folders = new ITaskItem[this.foldersFound.Count]; int i = 0; this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Folders Found: {0}", this.foldersFound.Count)); foreach (string s in this.foldersFound) { ITaskItem newItem = new TaskItem(s); this.Folders[i] = newItem; i++; } } private void ProcessGetAll(DirectoryInfo dirInfo) { foreach (DirectoryInfo child in dirInfo.GetDirectories()) { if (string.IsNullOrEmpty(this.Match)) { this.foldersFound.Add(child.FullName); } else { // Load the regex to use Regex reg = new Regex(this.Match, RegexOptions.IgnoreCase | RegexOptions.Compiled); // Match the regular expression pattern against a text string. Match m = reg.Match(child.Name); if (m.Success) { this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Getting: {0}", child.FullName)); this.foldersFound.Add(child.FullName); } } if (this.Recursive) { this.ProcessGetAll(child); } } } private void SetSecurity(string action) { DirectoryInfo dirInfo = new DirectoryInfo(this.Path.GetMetadata("FullPath")); DirectorySecurity currentSecurity = dirInfo.GetAccessControl(); if (this.Users != null) { foreach (ITaskItem user in this.Users) { string userName = user.ItemSpec; string[] permissions = string.IsNullOrEmpty(this.Permission) ? 
user.GetMetadata("Permission").Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries) : this.Permission.Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries); FileSystemRights userRights = permissions.Aggregate(new FileSystemRights(), (current, s) => current | (FileSystemRights)Enum.Parse(typeof(FileSystemRights), s)); if (action == "Add") { this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Adding security for user: {0} on {1}", userName, this.Path)); currentSecurity.AddAccessRule(new FileSystemAccessRule(userName, userRights, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, PropagationFlags.None, this.accessType)); } else { this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Removing security for user: {0} on {1}", userName, this.Path)); if (permissions.Length == 0) { currentSecurity.RemoveAccessRuleAll(new FileSystemAccessRule(userName, userRights, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, PropagationFlags.None, this.accessType)); } else { currentSecurity.RemoveAccessRule(new FileSystemAccessRule(userName, userRights, InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, PropagationFlags.None, this.accessType)); } } } } // Set the new access settings. dirInfo.SetAccessControl(currentSecurity); } private void DeleteAll() { this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Removing all Folders from: {0} that match: {1}", this.Path, this.Match)); if (string.IsNullOrEmpty(this.Match)) { Log.LogError("Match must be specified."); return; } DirectoryInfo d = new DirectoryInfo(this.Path.GetMetadata("FullPath")); this.ProcessDeleteAll(d); } private void ProcessDeleteAll(DirectoryInfo dirInfo) { foreach (DirectoryInfo child in dirInfo.GetDirectories()) { // Load the regex to use Regex reg = new Regex(this.Match, RegexOptions.IgnoreCase | RegexOptions.Compiled); // Match the regular expression pattern against a text string. Match m = reg.Match(child.Name); if (m.Success) { this.LogTaskMessage(MessageImportance.Low, string.Format(CultureInfo.CurrentCulture, "Removing: {0}", child.FullName)); this.DelTree(child); try { Directory.Delete(child.FullName); } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { Directory.Delete(child.FullName); deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } else { this.ProcessDeleteAll(child); } } } private void RemoveContent(DirectoryInfo dir) { this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Removing Content from Folder: {0}", dir.FullName)); FileSystemInfo[] infos = dir.GetFileSystemInfos("*"); foreach (FileSystemInfo i in infos) { // Check to see if this is a DirectoryInfo object. 
if (i is DirectoryInfo) { if (this.Force) { // if its a folder path we can use WMI for a quick delete if (i.FullName.Contains(@"\\") == false) { string dirObject = string.Format(CultureInfo.CurrentCulture, "win32_Directory.Name='{0}'", i.FullName); using (ManagementObject mdir = new ManagementObject(dirObject)) { mdir.Get(); ManagementBaseObject outParams = mdir.InvokeMethod("Delete", null, null); // ReturnValue should be 0, else failure if (outParams != null) { if (Convert.ToInt32(outParams.Properties["ReturnValue"].Value, CultureInfo.CurrentCulture) != 0) { this.Log.LogError(string.Format(CultureInfo.CurrentCulture, "Directory deletion error: ReturnValue: {0}", outParams.Properties["ReturnValue"].Value)); return; } } else { this.Log.LogError("The ManagementObject call to invoke Delete returned null."); return; } } } else { // it's a share, so we need to manually check all file attributes and delete this.DelTree((DirectoryInfo)i); try { Directory.Delete(i.FullName, true); } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { if (Directory.Exists(i.FullName)) { Directory.Delete(i.FullName, true); } deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } } else { try { Directory.Delete(i.FullName, true); } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { if (Directory.Exists(i.FullName)) { Directory.Delete(i.FullName, true); } deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } } else if (i is FileInfo) { if (this.Force) { // First make sure the file is writable. FileAttributes fileAttributes = System.IO.File.GetAttributes(i.FullName); // If readonly attribute is set, reset it. if ((fileAttributes & FileAttributes.ReadOnly) == FileAttributes.ReadOnly) { System.IO.File.SetAttributes(i.FullName, fileAttributes ^ FileAttributes.ReadOnly); } } try { if (i.Exists) { System.IO.File.Delete(i.FullName); } } catch (Exception ex) { this.LogTaskWarning(ex.Message); bool deleted = false; int count = 1; while (!deleted && count <= this.RetryCount) { this.LogTaskMessage(MessageImportance.High, string.Format(CultureInfo.InvariantCulture, "Delete failed, trying again in 5 seconds. 
Attempt {0} of {1}", count, this.RetryCount)); System.Threading.Thread.Sleep(5000); count++; try { if (i.Exists) { System.IO.File.Delete(i.FullName); } deleted = true; } catch { this.LogTaskWarning(ex.Message); } } if (deleted != true) { throw; } } } } } private void Move() { if (string.IsNullOrEmpty(this.TargetPath.GetMetadata("FullPath"))) { Log.LogError("TargetPath must be specified."); return; } this.LogTaskMessage(string.Format(CultureInfo.CurrentCulture, "Moving Folder: {0} to: {1}", this.Path, this.TargetPath)); // If the TargetPath has multiple folders, then we need to create the parent DirectoryInfo f = new DirectoryInfo(this.TargetPath.GetMetadata("FullPath")); if (f.Parent != null && !f.Parent.Exists) { Directory.CreateDirectory(f.Parent.FullName); } Directory.Move(this.Path.GetMetadata("FullPath"), this.TargetPath.GetMetadata("FullPath")); } } }
// // System.Web.Compilation.AspParser // // Authors: // Gonzalo Paniagua Javier ([email protected]) // // (C) 2002,2003 Ximian, Inc (http://www.ximian.com) // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections; using System.Globalization; using System.IO; using System.Text; using DodoNet.Http.Util; namespace DodoNet.Http.Compilation { delegate void ParseErrorHandler(ILocation location, string message); delegate void TextParsedHandler(ILocation location, string text); delegate void TagParsedHandler(ILocation location, TagType tagtype, string id, TagAttributes attributes); class AspParser : ILocation { AspTokenizer tokenizer; int beginLine, endLine; int beginColumn, endColumn; int beginPosition, endPosition; string filename; string fileText; string verbatimID; public AspParser(string filename, TextReader input) { this.filename = filename; fileText = input.ReadToEnd(); StringReader reader = new StringReader(fileText); tokenizer = new AspTokenizer(reader); } public int BeginLine { get { return beginLine; } } public int BeginColumn { get { return beginColumn; } } public int EndLine { get { return endLine; } } public int EndColumn { get { return endColumn; } } public string PlainText { get { if (beginPosition >= endPosition) return null; return fileText.Substring(beginPosition, endPosition - beginPosition); } } public string Filename { get { return filename; } } public string VerbatimID { set { tokenizer.Verbatim = true; verbatimID = value; } } bool Eat(int expected_token) { if (tokenizer.get_token() != expected_token) { tokenizer.put_back(); return false; } endLine = tokenizer.EndLine; endColumn = tokenizer.EndColumn; return true; } void BeginElement() { beginLine = tokenizer.BeginLine; beginColumn = tokenizer.BeginColumn; beginPosition = tokenizer.Position - 1; } void EndElement() { endLine = tokenizer.EndLine; endColumn = tokenizer.EndColumn; endPosition = tokenizer.Position; } public void Parse() { int token; string id; TagAttributes attributes; TagType tagtype = TagType.Text; StringBuilder text = new StringBuilder(); while ((token = tokenizer.get_token()) != Token.EOF) { BeginElement(); if (tokenizer.Verbatim) { string end_verbatim = "</" + verbatimID + ">"; string verbatim_text = GetVerbatim(token, end_verbatim); if (verbatim_text == null) OnError("Unexpected EOF processing " + verbatimID); tokenizer.Verbatim = false; EndElement(); endPosition -= end_verbatim.Length; OnTextParsed(verbatim_text); beginPosition = endPosition; endPosition += 
end_verbatim.Length; OnTagParsed(TagType.Close, verbatimID, null); continue; } if (token == '<') { GetTag(out tagtype, out id, out attributes); EndElement(); if (tagtype == TagType.ServerComment) continue; if (tagtype == TagType.Text) OnTextParsed(id); else OnTagParsed(tagtype, id, attributes); continue; } if (tokenizer.Value.Trim() == "" && tagtype == TagType.Directive) { continue; } text.Length = 0; do { text.Append(tokenizer.Value); token = tokenizer.get_token(); } while (token != '<' && token != Token.EOF); tokenizer.put_back(); EndElement(); OnTextParsed(text.ToString()); } } bool GetInclude(string str, out string pathType, out string filename) { pathType = null; filename = null; str = str.Substring(2).Trim(); int len = str.Length; int lastQuote = str.LastIndexOf('"'); if (len < 10 || lastQuote != len - 1) return false; if (!StrUtils.StartsWith(str, "#include ", true)) return false; str = str.Substring(9).Trim(); bool isfile = (StrUtils.StartsWith(str, "file", true)); if (!isfile && !StrUtils.StartsWith(str, "virtual", true)) return false; pathType = (isfile) ? "file" : "virtual"; if (str.Length < pathType.Length + 3) return false; str = str.Substring(pathType.Length).Trim(); if (str.Length < 3 || str[0] != '=') return false; int index = 1; for (; index < str.Length; index++) { if (Char.IsWhiteSpace(str[index])) continue; else if (str[index] == '"') break; } if (index == str.Length || index == lastQuote) return false; str = str.Substring(index); if (str.Length == 2) { // only quotes OnError("Empty file name."); return false; } filename = str.Trim().Substring(index, str.Length - 2); if (filename.LastIndexOf('"') != -1) return false; // file=""" -> no error return true; } void GetTag(out TagType tagtype, out string id, out TagAttributes attributes) { int token = tokenizer.get_token(); tagtype = TagType.ServerComment; id = null; attributes = null; switch (token) { case '%': GetServerTag(out tagtype, out id, out attributes); break; case '/': if (!Eat(Token.IDENTIFIER)) OnError("expecting TAGNAME"); id = tokenizer.Value; if (!Eat('>')) OnError("expecting '>'. Got '" + id + "'"); tagtype = TagType.Close; break; case '!': bool double_dash = Eat(Token.DOUBLEDASH); if (double_dash) tokenizer.put_back(); tokenizer.Verbatim = true; string end = double_dash ? "-->" : ">"; string comment = GetVerbatim(tokenizer.get_token(), end); tokenizer.Verbatim = false; if (comment == null) OnError("Unfinished HTML comment/DTD"); string pathType, filename; if (double_dash && GetInclude(comment, out pathType, out filename)) { tagtype = TagType.Include; attributes = new TagAttributes(); attributes.Add(pathType, filename); } else { tagtype = TagType.Text; id = "<!" + comment + end; } break; case Token.IDENTIFIER: if (this.filename == "@@inner_string@@") { // Actually not tag but "xxx < yyy" stuff in inner_string! 
tagtype = TagType.Text; tokenizer.InTag = false; id = "<" + tokenizer.Odds + tokenizer.Value; } else { id = tokenizer.Value; try { attributes = GetAttributes(); } catch (Exception e) { OnError(e.Message); break; } tagtype = TagType.Tag; if (Eat('/') && Eat('>')) { tagtype = TagType.SelfClosing; } else if (!Eat('>')) { if (attributes.IsRunAtServer()) { OnError("The server tag is not well formed."); break; } tokenizer.Verbatim = true; attributes.Add("", GetVerbatim(tokenizer.get_token(), ">") + ">"); tokenizer.Verbatim = false; } } break; default: tagtype = TagType.Text; tokenizer.InTag = false; id = "<" + tokenizer.Value; break; } } TagAttributes GetAttributes() { int token; TagAttributes attributes; string id; bool wellFormedForServer = true; attributes = new TagAttributes(); while ((token = tokenizer.get_token()) != Token.EOF) { if (token == '<' && Eat('%')) { tokenizer.Verbatim = true; attributes.Add("", "<%" + GetVerbatim(tokenizer.get_token(), "%>") + "%>"); tokenizer.Verbatim = false; tokenizer.InTag = true; continue; } if (token != Token.IDENTIFIER) break; id = tokenizer.Value; if (Eat('=')) { if (Eat(Token.ATTVALUE)) { attributes.Add(id, tokenizer.Value); wellFormedForServer &= tokenizer.AlternatingQuotes; } else if (Eat('<') && Eat('%')) { tokenizer.Verbatim = true; attributes.Add(id, "<%" + GetVerbatim(tokenizer.get_token(), "%>") + "%>"); tokenizer.Verbatim = false; tokenizer.InTag = true; } else { OnError("expected ATTVALUE"); return null; } } else { attributes.Add(id, null); } } tokenizer.put_back(); if (attributes.IsRunAtServer() && !wellFormedForServer) { OnError("The server tag is not well formed."); return null; } return attributes; } string GetVerbatim(int token, string end) { StringBuilder vb_text = new StringBuilder(); int i = 0; if (tokenizer.Value.Length > 1) { // May be we have a put_back token that is not a single character vb_text.Append(tokenizer.Value); token = tokenizer.get_token(); } end = end.ToLower(CultureInfo.InvariantCulture); while (token != Token.EOF) { if (Char.ToLower((char)token, CultureInfo.InvariantCulture) == end[i]) { if (++i >= end.Length) break; token = tokenizer.get_token(); continue; } else if (i > 0) { for (int j = 0; j < i; j++) vb_text.Append(end[j]); i = 0; } vb_text.Append((char)token); token = tokenizer.get_token(); } if (token == Token.EOF) OnError("Expecting " + end + " and got EOF."); return RemoveComments(vb_text.ToString()); } string RemoveComments(string text) { int end; int start = text.IndexOf("<%--"); while (start != -1) { end = text.IndexOf("--%>"); if (end == -1 || end <= start + 1) break; text = text.Remove(start, end - start + 4); start = text.IndexOf("<%--"); } return text; } void GetServerTag(out TagType tagtype, out string id, out TagAttributes attributes) { string inside_tags; bool old = tokenizer.ExpectAttrValue; tokenizer.ExpectAttrValue = false; if (Eat('@')) { tokenizer.ExpectAttrValue = old; tagtype = TagType.Directive; id = ""; if (Eat(Token.DIRECTIVE)) id = tokenizer.Value; attributes = GetAttributes(); if (!Eat('%') || !Eat('>')) OnError("expecting '%>'"); return; } if (Eat(Token.DOUBLEDASH)) { tokenizer.ExpectAttrValue = old; tokenizer.Verbatim = true; inside_tags = GetVerbatim(tokenizer.get_token(), "--%>"); tokenizer.Verbatim = false; id = null; attributes = null; tagtype = TagType.ServerComment; return; } tokenizer.ExpectAttrValue = old; bool varname; bool databinding; varname = Eat('='); databinding = !varname && Eat('#'); tokenizer.Verbatim = true; inside_tags = GetVerbatim(tokenizer.get_token(), "%>"); 
tokenizer.Verbatim = false; id = inside_tags; attributes = null; tagtype = (databinding ? TagType.DataBinding : (varname ? TagType.CodeRenderExpression : TagType.CodeRender)); } public event ParseErrorHandler Error; public event TagParsedHandler TagParsed; public event TextParsedHandler TextParsed; void OnError(string msg) { if (Error != null) Error(this, msg); } void OnTagParsed(TagType tagtype, string id, TagAttributes attributes) { if (TagParsed != null) TagParsed(this, tagtype, id, attributes); } void OnTextParsed(string text) { if (TextParsed != null) TextParsed(this, text); } } }
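// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): wiring the
// parser's events and parsing a small markup fragment. AspParser is internal
// to this assembly, so the sketch assumes it is called from within the same
// assembly; the file name and markup are arbitrary example values.
// ---------------------------------------------------------------------------
using System;
using System.IO;

namespace DodoNet.Http.Compilation
{
    static class AspParserSketch
    {
        static void Run()
        {
            const string markup = "<p>Hello</p><span runat=\"server\" id=\"msg\">World</span>";
            AspParser parser = new AspParser("example.aspx", new StringReader(markup));

            parser.Error += (location, message) => Console.WriteLine("error: " + message);
            parser.TextParsed += (location, text) => Console.WriteLine("text : " + text);
            parser.TagParsed += (location, tagtype, id, attributes) =>
                Console.WriteLine("tag  : " + tagtype + " " + id);

            // Parse() walks the input and raises TagParsed/TextParsed for each element.
            parser.Parse();
        }
    }
}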
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections; using System.Collections.Generic; using System.Globalization; using System.Reflection; using System.Threading; using System.Text; using System.Xml; using System.Security; using System.Linq; namespace System.Runtime.Serialization { #if NET_NATIVE public sealed class EnumDataContract : DataContract #else internal sealed class EnumDataContract : DataContract #endif { private EnumDataContractCriticalHelper _helper; public EnumDataContract() : base(new EnumDataContractCriticalHelper()) { _helper = base.Helper as EnumDataContractCriticalHelper; } public XmlQualifiedName BaseContractName { get; set; } internal EnumDataContract(Type type) : base(new EnumDataContractCriticalHelper(type)) { _helper = base.Helper as EnumDataContractCriticalHelper; } public List<DataMember> Members { get { return _helper.Members; } set { _helper.Members = value; } } public List<long> Values { get { return _helper.Values; } set { _helper.Values = value; } } public bool IsFlags { get { return _helper.IsFlags; } set { _helper.IsFlags = value; } } public bool IsULong { get { return _helper.IsULong; } set { _helper.IsULong = value; } } public XmlDictionaryString[] ChildElementNames { get { return _helper.ChildElementNames; } set { _helper.ChildElementNames = value; } } internal override bool CanContainReferences { get { return false; } } private class EnumDataContractCriticalHelper : DataContract.DataContractCriticalHelper { private static Dictionary<Type, XmlQualifiedName> s_typeToName; private static Dictionary<XmlQualifiedName, Type> s_nameToType; private List<DataMember> _members; private List<long> _values; private bool _isULong; private bool _isFlags; private bool _hasDataContract; private XmlDictionaryString[] _childElementNames; static EnumDataContractCriticalHelper() { s_typeToName = new Dictionary<Type, XmlQualifiedName>(); s_nameToType = new Dictionary<XmlQualifiedName, Type>(); Add(typeof(sbyte), "byte"); Add(typeof(byte), "unsignedByte"); Add(typeof(short), "short"); Add(typeof(ushort), "unsignedShort"); Add(typeof(int), "int"); Add(typeof(uint), "unsignedInt"); Add(typeof(long), "long"); Add(typeof(ulong), "unsignedLong"); } internal static void Add(Type type, string localName) { XmlQualifiedName stableName = CreateQualifiedName(localName, Globals.SchemaNamespace); s_typeToName.Add(type, stableName); s_nameToType.Add(stableName, type); } internal EnumDataContractCriticalHelper() { IsValueType = true; } internal EnumDataContractCriticalHelper(Type type) : base(type) { this.StableName = DataContract.GetStableName(type, out _hasDataContract); Type baseType = Enum.GetUnderlyingType(type); ImportBaseType(baseType); IsFlags = type.IsDefined(Globals.TypeOfFlagsAttribute, false); ImportDataMembers(); XmlDictionary dictionary = new XmlDictionary(2 + Members.Count); Name = dictionary.Add(StableName.Name); Namespace = dictionary.Add(StableName.Namespace); _childElementNames = new XmlDictionaryString[Members.Count]; for (int i = 0; i < Members.Count; i++) _childElementNames[i] = dictionary.Add(Members[i].Name); DataContractAttribute dataContractAttribute; if (TryGetDCAttribute(type, out dataContractAttribute)) { if (dataContractAttribute.IsReference) { DataContract.ThrowInvalidDataContractException( SR.Format(SR.EnumTypeCannotHaveIsReference, DataContract.GetClrTypeFullName(type), 
dataContractAttribute.IsReference, false), type); } } } internal List<DataMember> Members { get { return _members; } set { _members = value; } } internal List<long> Values { get { return _values; } set { _values = value; } } internal bool IsFlags { get { return _isFlags; } set { _isFlags = value; } } internal bool IsULong { get { return _isULong; } set { _isULong = value; } } internal XmlDictionaryString[] ChildElementNames { get { return _childElementNames; } set { _childElementNames = value; } } private void ImportBaseType(Type baseType) { _isULong = (baseType == Globals.TypeOfULong); } private void ImportDataMembers() { Type type = this.UnderlyingType; FieldInfo[] fields = type.GetFields(BindingFlags.Static | BindingFlags.Public); Dictionary<string, DataMember> memberValuesTable = new Dictionary<string, DataMember>(); List<DataMember> tempMembers = new List<DataMember>(fields.Length); List<long> tempValues = new List<long>(fields.Length); for (int i = 0; i < fields.Length; i++) { FieldInfo field = fields[i]; bool enumMemberValid = false; if (_hasDataContract) { object[] memberAttributes = field.GetCustomAttributes(Globals.TypeOfEnumMemberAttribute, false).ToArray(); if (memberAttributes != null && memberAttributes.Length > 0) { if (memberAttributes.Length > 1) ThrowInvalidDataContractException(SR.Format(SR.TooManyEnumMembers, DataContract.GetClrTypeFullName(field.DeclaringType), field.Name)); EnumMemberAttribute memberAttribute = (EnumMemberAttribute)memberAttributes[0]; DataMember memberContract = new DataMember(field); if (memberAttribute.IsValueSetExplicitly) { if (memberAttribute.Value == null || memberAttribute.Value.Length == 0) ThrowInvalidDataContractException(SR.Format(SR.InvalidEnumMemberValue, field.Name, DataContract.GetClrTypeFullName(type))); memberContract.Name = memberAttribute.Value; } else memberContract.Name = field.Name; ClassDataContract.CheckAndAddMember(tempMembers, memberContract, memberValuesTable); enumMemberValid = true; } object[] dataMemberAttributes = field.GetCustomAttributes(Globals.TypeOfDataMemberAttribute, false).ToArray(); if (dataMemberAttributes != null && dataMemberAttributes.Length > 0) ThrowInvalidDataContractException(SR.Format(SR.DataMemberOnEnumField, DataContract.GetClrTypeFullName(field.DeclaringType), field.Name)); } else { if (!field.IsNotSerialized) { DataMember memberContract = new DataMember(field); memberContract.Name = field.Name; ClassDataContract.CheckAndAddMember(tempMembers, memberContract, memberValuesTable); enumMemberValid = true; } } if (enumMemberValid) { object enumValue = field.GetValue(null); if (_isULong) tempValues.Add((long)Convert.ToUInt64(enumValue, null)); else tempValues.Add(Convert.ToInt64(enumValue, null)); } } Interlocked.MemoryBarrier(); _members = tempMembers; _values = tempValues; } } internal void WriteEnumValue(XmlWriterDelegator writer, object value) { long longValue = IsULong ? 
(long)Convert.ToUInt64(value, null) : Convert.ToInt64(value, null); for (int i = 0; i < Values.Count; i++) { if (longValue == Values[i]) { writer.WriteString(ChildElementNames[i].Value); return; } } if (IsFlags) { int zeroIndex = -1; bool noneWritten = true; for (int i = 0; i < Values.Count; i++) { long current = Values[i]; if (current == 0) { zeroIndex = i; continue; } if (longValue == 0) break; if ((current & longValue) == current) { if (noneWritten) noneWritten = false; else writer.WriteString(DictionaryGlobals.Space.Value); writer.WriteString(ChildElementNames[i].Value); longValue &= ~current; } } // enforce that enum value was completely parsed if (longValue != 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.InvalidEnumValueOnWrite, value, DataContract.GetClrTypeFullName(UnderlyingType)))); if (noneWritten && zeroIndex >= 0) writer.WriteString(ChildElementNames[zeroIndex].Value); } else throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.InvalidEnumValueOnWrite, value, DataContract.GetClrTypeFullName(UnderlyingType)))); } internal object ReadEnumValue(XmlReaderDelegator reader) { string stringValue = reader.ReadElementContentAsString(); long longValue = 0; int i = 0; if (IsFlags) { // Skip initial spaces for (; i < stringValue.Length; i++) if (stringValue[i] != ' ') break; // Read space-delimited values int startIndex = i; int count = 0; for (; i < stringValue.Length; i++) { if (stringValue[i] == ' ') { count = i - startIndex; if (count > 0) longValue |= ReadEnumValue(stringValue, startIndex, count); for (++i; i < stringValue.Length; i++) if (stringValue[i] != ' ') break; startIndex = i; if (i == stringValue.Length) break; } } count = i - startIndex; if (count > 0) longValue |= ReadEnumValue(stringValue, startIndex, count); } else { if (stringValue.Length == 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.InvalidEnumValueOnRead, stringValue, DataContract.GetClrTypeFullName(UnderlyingType)))); longValue = ReadEnumValue(stringValue, 0, stringValue.Length); } if (IsULong) return Enum.ToObject(UnderlyingType, (object)(ulong)longValue); return Enum.ToObject(UnderlyingType, (object)longValue); } private long ReadEnumValue(string value, int index, int count) { for (int i = 0; i < Members.Count; i++) { string memberName = Members[i].Name; if (memberName.Length == count && String.CompareOrdinal(value, index, memberName, 0, count) == 0) { return Values[i]; } } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.InvalidEnumValueOnRead, value.Substring(index, count), DataContract.GetClrTypeFullName(UnderlyingType)))); } public override void WriteXmlValue(XmlWriterDelegator xmlWriter, object obj, XmlObjectSerializerWriteContext context) { WriteEnumValue(xmlWriter, obj); } public override object ReadXmlValue(XmlReaderDelegator xmlReader, XmlObjectSerializerReadContext context) { object obj = ReadEnumValue(xmlReader); if (context != null) context.AddNewObject(obj); return obj; } } }
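// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the enum shape the
// contract above handles, exercised through the public DataContractSerializer.
// A [Flags] value is written by WriteEnumValue as the space-delimited
// [EnumMember] names of the set bits. The Permissions enum and its member
// names are arbitrary example values.
// ---------------------------------------------------------------------------
using System;
using System.IO;
using System.Runtime.Serialization;

[Flags]
[DataContract]
public enum Permissions
{
    [EnumMember] None = 0,
    [EnumMember(Value = "read")] Read = 1,
    [EnumMember(Value = "write")] Write = 2,
}

public static class EnumContractSketch
{
    public static void Run()
    {
        var serializer = new DataContractSerializer(typeof(Permissions));
        using (var stream = new MemoryStream())
        {
            serializer.WriteObject(stream, Permissions.Read | Permissions.Write);
            stream.Position = 0;

            // The element content is the space-delimited member names, e.g. "read write".
            Console.WriteLine(new StreamReader(stream).ReadToEnd());
        }
    }
}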
using System; using System.Threading; using System.Net; using System.Net.Sockets; using Microsoft.SPOT; using Toolbox.NETMF; /* * Copyright 2011-2014 Stefan Thoolen (http://www.netmftoolbox.com/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Toolbox.NETMF.NET { /// <summary> /// Simplifies usage of sockets in .NETMF /// </summary> public class IntegratedSocket : SimpleSocket { /// <summary>Contains a reference to the socket</summary> private Socket _Sock; /// <summary>Stores the hostname connected to</summary> private string _Hostname; /// <summary>Stores the TCP port connected to</summary> private ushort _Port; /// <summary>Contains the buffer of the read data</summary> private string _Buffer = ""; /// <summary>Set to true when Close() is called</summary> private bool _Closed = false; /// <summary> /// Creates a new socket based on the integrated .NETMF socket TCP stack /// </summary> /// <param name="Hostname">The hostname to connect to</param> /// <param name="Port">The port to connect to</param> public IntegratedSocket(string Hostname, ushort Port) { // Stores the values to the memory this._Hostname = Hostname; this._Port = Port; // Default line ending values this.LineEnding = ""; // Creates a new socket } /// <summary> /// Listens on the port instead of connecting remotely /// </summary> public override void Listen() { Socket Listener = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); Listener.Bind(new IPEndPoint(IPAddress.Any, this._Port)); Listener.Listen(1); // Accepts the first connection this._Sock = Listener.Accept(); this._Closed = false; this._Hostname = ((IPEndPoint)this._Sock.RemoteEndPoint).Address.ToString(); // Stops further listening Listener.Close(); } /// <summary> /// Requests the amount of bytes available in the buffer /// </summary> public override uint BytesAvailable { get { return (uint)this._Buffer.Length; } } /// <summary> /// Connects to the remote host /// </summary> /// <param name="Protocol">The protocol to be used</param> public override void Connect(SocketProtocol Protocol = SocketProtocol.TcpStream) { // Creates a new socket object if (Protocol == SocketProtocol.TcpStream) this._Sock = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); else this._Sock = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp); // Resolves the hostname to an IP address IPHostEntry address = Dns.GetHostEntry(this._Hostname); // Creates the new IP end point EndPoint Destination = new IPEndPoint(address.AddressList[0], (int)this._Port); // Connects to the socket this._Sock.Connect(Destination); this._Closed = false; } /// <summary> /// Closes the connection /// </summary> public override void Close() { this._Closed = true; this._Sock.Close(); } /// <summary> /// Sends binary data to the socket /// </summary> /// <param name="Data">The binary data to send</param> public override void SendBinary(byte[] Data) { this._Sock.Send(Data); // I implemented this sleep here for an issue I encountered. 
// For more details, read http://forums.netduino.com/index.php?/topic/4555-socket-error-10055-wsaenobufs/ if (Data.Length < 32) Thread.Sleep(Data.Length * 10); } /// <summary> /// Returns true when connected, otherwise false /// </summary> public override bool IsConnected { get { if (this._Closed) return false; // SelectRead returns true if: // - Listen has been called and a connection is pending; -or- // - if data is available for reading; -or- // - if the connection has been closed, reset, or terminated // We're not listening, so we only need to check if data is available for reading. If not, the connection is a goner. if (this._Sock.Poll(1, SelectMode.SelectRead) && this._Sock.Available == 0) return false; else return true; } } /// <summary>Returns the hostname this socket is configured for</summary> public override string Hostname { get { return this._Hostname; } } /// <summary>Returns the port number this socket is configured for</summary> public override ushort Port { get { return this._Port; } } /// <summary> /// Receives data from the socket /// </summary> /// <param name="Block">When true, this function will wait until there is data to return</param> /// <returns>The received data (may be empty)</returns> public override string Receive(bool Block = false) { string RetValue = ""; do { // Do we need to read data? if (this._Sock.Available > 0) { // Do we already have enough data for now? (saves us from early out of memory exceptions) if (this.LineEnding.Length == 0 || this._Buffer.IndexOf(this.LineEnding) < 0) { // There is data, lets read it! byte[] ReadBuffer = new byte[this._Sock.Available]; this._Sock.Receive(ReadBuffer); // Lets add the data to the buffer this._Buffer += new string(Tools.Bytes2Chars(ReadBuffer)); } } if (this.LineEnding.Length > 0) { // We're going to do buffering int Pos = this._Buffer.IndexOf(this.LineEnding); // Appairently there's a line ending found, lets split the data up if (Pos > -1) { RetValue = this._Buffer.Substring(0, Pos + this.LineEnding.Length); this._Buffer = this._Buffer.Substring(Pos + this.LineEnding.Length); } } else { // We don't do buffering at this moment. We just send all data back. RetValue = this._Buffer; this._Buffer = ""; } } while (Block && RetValue == ""); return RetValue; } /// <summary> /// Receives binary data from the socket (line endings aren't used with this method) /// </summary> /// <param name="Length">The amount of bytes to receive</param> /// <returns>The binary data</returns> public override byte[] ReceiveBinary(int Length) { byte[] RetValue = new byte[Length]; this._Sock.Receive(RetValue); return RetValue; } /// <summary> /// Checks if a feature is implemented /// </summary> /// <param name="Feature">The feature to check for</param> /// <returns>True if the feature is implemented</returns> public override bool FeatureImplemented(SocketFeatures Feature) { switch (Feature) { case SocketFeatures.TcpStream: case SocketFeatures.UdpDatagram: case SocketFeatures.TcpListener: return true; default: return false; } } } }
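// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): connecting,
// sending a request, and reading one line back. Intended for .NET Micro
// Framework hardware with a configured network interface, and it assumes the
// LineEnding property on the SimpleSocket base is publicly settable, as its
// use above suggests. The host, port, and request text are example values.
// ---------------------------------------------------------------------------
using System.Text;
using Toolbox.NETMF.NET;

public static class IntegratedSocketSketch
{
    public static string Run()
    {
        IntegratedSocket socket = new IntegratedSocket("example.com", 80);
        socket.Connect(SocketProtocol.TcpStream);

        // The LineEnding property drives the line buffering in Receive() above.
        socket.LineEnding = "\r\n";

        byte[] request = Encoding.UTF8.GetBytes("GET / HTTP/1.0\r\nHost: example.com\r\n\r\n");
        socket.SendBinary(request);

        // Blocks until a complete line (terminated by LineEnding) has been buffered.
        string statusLine = socket.Receive(true);
        socket.Close();
        return statusLine;
    }
}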
/* File: jsonSerializer.cs * Author: Peter Zorbas * Date: March 20, 2010 * * This program is free software: you can redistribute it * and/or modify it under the terms of the jsonSerializer licence. * The full text of the licence can be found at: * http://jsonserializer.codeplex.com/license * * All files are provided as is without warranty or support. * The user must follow the guidelines oulined in the GNU license and * agrees the Author will not be held liable for any damages caused. * * Please leave feedback and comments at: http://jsonserializer.codeplex.com * */ using System; using System.Collections; using System.Reflection; using System.Text; using System.Data; namespace json { /// <summary> /// Summary description for JSON. /// </summary> public class jsonSerializer { /// <summary> /// /// </summary> /// <param name="obj"></param> /// <returns></returns> public static string jsonSerialize(object obj) { System.Text.StringBuilder sb = new System.Text.StringBuilder(); parseObject(obj, sb); return sb.ToString(); } /// <summary> /// Serializes a data table to a json string. /// </summary> /// <param name="table"></param> /// <returns></returns> public static string jsonSerialize(DataTable table) { StringBuilder sb = new StringBuilder(); if(table!=null) { foreach(DataRow row in table.Rows) { if(row.Table!=null && row.Table.Columns!=null && row.Table.Columns.Count>0) { foreach(DataColumn column in row.Table.Columns) { parseMember(row[column], column.ColumnName, sb); } } } } return sb.ToString(); } /// <summary> /// Method appends string to the string builder object. /// We assume that we either are passing primitive object members, /// array of objects or primitive, and arraylists of the former mentioned. /// You'll find recursive calls to the parseObject() function for child classes /// within an object. /// </summary> /// <param name="obj"></param> /// <param name="sb"></param> private static void parseObject(object obj, StringBuilder sb) { if(obj!=null) { int i=0; Type objType = obj.GetType(); PropertyInfo [] properties = objType.GetProperties(); sb.AppendFormat("'{0}':{1}", objType.Name, character.LEFTPARENTHESE); //Loop through each property and call Parse method. foreach(PropertyInfo pi in properties) { bool process = true; object [] attrib = pi.GetCustomAttributes(typeof(json.attributes.jsonSerializable), false); if(attrib!=null && attrib.Length>0) //We assume only the first received decorated member attribute. process = (attrib[0] as json.attributes.jsonSerializable).IsSerializable; if(process) { //Get the method so we can know what ReturnType we need to process. MethodInfo methodInfo = pi.GetGetMethod(false); if(methodInfo.ReturnType.IsPrimitive || isOfReturnType(methodInfo.ReturnType)) parseMember(pi.GetValue(obj, null), pi.Name, sb); //ArrayLists are considered objects, let's be explicit here. else if(methodInfo.ReturnType == typeof(ArrayList)) parseArrayList(pi.GetValue(obj, null) as ArrayList, pi.Name, sb); else if(methodInfo.ReturnType.IsArray) parseArray(pi.GetValue(obj, null), pi.Name, sb); else if(methodInfo.ReturnType.IsClass) parseObject(pi.GetValue(obj, null), sb); if(++i<properties.Length) sb.Append(character.COMMA); } } sb.Append(character.RIGHTPARENTHESE); } } /// <summary> /// Appends object member data, most presumably a string or primitive number type. 
/// </summary> /// <param name="obj"></param> /// <param name="memberName"></param> /// <param name="sb"></param> private static void parseMember(object val, string memberName, StringBuilder sb) { Type t = val.GetType(); if(memberName!=null && memberName.Trim().Length>0) sb.AppendFormat("'{0}':'", memberName); if(typeof(string)==t || typeof(char)==t) sb.AppendFormat("'{0}'", val.ToString()); else sb.AppendFormat("{0}", val.ToString()); } /// <summary> /// Calls parseArray to further process data. /// </summary> /// <param name="array"></param> /// <param name="memberName"></param> /// <param name="sb"></param> private static void parseArrayList(ArrayList array, string memberName, StringBuilder sb) { parseArray(array.ToArray(), memberName, sb); } /// <summary> /// Parses primitive array members, or objects. /// </summary> /// <param name="array"></param> /// <param name="memberName"></param> /// <param name="sb"></param> private static void parseArray(object array, string memberName, StringBuilder sb) { Array tempArray = array as Array; if(tempArray!=null && tempArray.Length>0) { sb.AppendFormat("'{0}': [", memberName); foreach(object item in tempArray) { Type t = item.GetType(); if(t.IsPrimitive || isOfReturnType(item.GetType())) sb.AppendFormat("'{0}'", item.ToString()); else if(t.IsClass) parseObject(item, sb); } sb.AppendFormat("]"); } } /// <summary> /// Method is used to tell us whether the type of of a member type. Reason is that /// strings are really objects in the CLR. We'll need to handle these and any other types /// we assume to be primitive types - such as strings. /// </summary> /// <param name="t"></param> /// <returns></returns> private static bool isOfReturnType(Type t) { if(t == typeof(string)) return true; return false; } } /// <summary> /// Summary description for Char. /// </summary> public class character { public static char NULL = '\x0000';//Null isOfReturnType public static char BELL = '\x0007';//Bell public static char BACKSPACE = '\x0008';//Backspace[t 4][t 5] public static char TAB = '\x0009';//Horizontal Tab public static char LINEFEED = '\x000A';//Line feed public static char VTAB = '\x000B';//Vertical Tab public static char CR = '\x000D';//Carriage return public static char LEFTPARENTHESE = '\x007B';//{ public static char RIGHTPARENTHESE = '\x007D';//} public static char COMMA = '\x002C';//, /*public static char SPACE = '\x0020';//[space] public static char EXCLAMATION = '\x0021';//! public static char DOUBLEQUOTE = '\x0022';//" public static char NUMBER = '\x0023';//# public static char DOLLAR = '\x0024';//$ public static char PERCENT = '\x0025';//% public static char AMPERSAND = '\x0026';//& public static char SINGLEQUOTE = '\x0027';//' public static char LEFTBRACKET = '\x0028';//( public static char RIGHTBRACKET = '\x0029';//) public static char ASTERIK = '\x002A';//* public static char PLUS = '\x002B';//+ public static char SUBTRACT = '\x002D';//- public static char PERIOD = '\x002E';//. public static char FORWARDSLASH = '\x002F';/// public static char LEFTSQUAREBRACKET = '\x005B';//[ public static char BACKSLASH = '\x005C';//\ public static char RIGHTSQUAREBRACKET = '\x005D';//] public static char UNDERSCORE = '\x005F';//_ public static char LEFTPARENTHESES = '\x007B';//{ public static char RIGHTPARENTHESES = '\x007D';//} */ } }
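// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): a small class
// run through jsonSerializer.jsonSerialize above. The Person type and its
// values are arbitrary example names. Note that the serializer emits a
// non-standard single-quoted form (see parseMember/parseObject).
// ---------------------------------------------------------------------------
using System;
using System.Collections;

namespace json.samples
{
    public class Person
    {
        public string Name { get { return "Ada"; } }
        public int Age { get { return 36; } }
        public ArrayList Tags { get { return new ArrayList { "math", "code" }; } }
    }

    public static class JsonSerializerSketch
    {
        public static void Run()
        {
            // Walks Person's public properties: the string and int go through
            // parseMember, the ArrayList through parseArrayList.
            string result = jsonSerializer.jsonSerialize(new Person());
            Console.WriteLine(result);
        }
    }
}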
// Copyright (c) .NET Foundation and contributors. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; using WebApplication.Models; using WebApplication.Models.ManageViewModels; using WebApplication.Services; namespace WebApplication.Controllers { [Authorize] public class ManageController : Controller { private readonly UserManager<ApplicationUser> _userManager; private readonly SignInManager<ApplicationUser> _signInManager; private readonly IEmailSender _emailSender; private readonly ISmsSender _smsSender; private readonly ILogger _logger; public ManageController( UserManager<ApplicationUser> userManager, SignInManager<ApplicationUser> signInManager, IEmailSender emailSender, ISmsSender smsSender, ILoggerFactory loggerFactory) { _userManager = userManager; _signInManager = signInManager; _emailSender = emailSender; _smsSender = smsSender; _logger = loggerFactory.CreateLogger<ManageController>(); } // // GET: /Manage/Index [HttpGet] public async Task<IActionResult> Index(ManageMessageId? message = null) { ViewData["StatusMessage"] = message == ManageMessageId.ChangePasswordSuccess ? "Your password has been changed." : message == ManageMessageId.SetPasswordSuccess ? "Your password has been set." : message == ManageMessageId.SetTwoFactorSuccess ? "Your two-factor authentication provider has been set." : message == ManageMessageId.Error ? "An error has occurred." : message == ManageMessageId.AddPhoneSuccess ? "Your phone number was added." : message == ManageMessageId.RemovePhoneSuccess ? "Your phone number was removed." : ""; var user = await GetCurrentUserAsync(); var model = new IndexViewModel { HasPassword = await _userManager.HasPasswordAsync(user), PhoneNumber = await _userManager.GetPhoneNumberAsync(user), TwoFactor = await _userManager.GetTwoFactorEnabledAsync(user), Logins = await _userManager.GetLoginsAsync(user), BrowserRemembered = await _signInManager.IsTwoFactorClientRememberedAsync(user) }; return View(model); } // // POST: /Manage/RemoveLogin [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> RemoveLogin(RemoveLoginViewModel account) { ManageMessageId? 
message = ManageMessageId.Error; var user = await GetCurrentUserAsync(); if (user != null) { var result = await _userManager.RemoveLoginAsync(user, account.LoginProvider, account.ProviderKey); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); message = ManageMessageId.RemoveLoginSuccess; } } return RedirectToAction(nameof(ManageLogins), new { Message = message }); } // // GET: /Manage/AddPhoneNumber public IActionResult AddPhoneNumber() { return View(); } // // POST: /Manage/AddPhoneNumber [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> AddPhoneNumber(AddPhoneNumberViewModel model) { if (!ModelState.IsValid) { return View(model); } // Generate the token and send it var user = await GetCurrentUserAsync(); var code = await _userManager.GenerateChangePhoneNumberTokenAsync(user, model.PhoneNumber); await _smsSender.SendSmsAsync(model.PhoneNumber, "Your security code is: " + code); return RedirectToAction(nameof(VerifyPhoneNumber), new { PhoneNumber = model.PhoneNumber }); } // // POST: /Manage/EnableTwoFactorAuthentication [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> EnableTwoFactorAuthentication() { var user = await GetCurrentUserAsync(); if (user != null) { await _userManager.SetTwoFactorEnabledAsync(user, true); await _signInManager.SignInAsync(user, isPersistent: false); _logger.LogInformation(1, "User enabled two-factor authentication."); } return RedirectToAction(nameof(Index), "Manage"); } // // POST: /Manage/DisableTwoFactorAuthentication [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> DisableTwoFactorAuthentication() { var user = await GetCurrentUserAsync(); if (user != null) { await _userManager.SetTwoFactorEnabledAsync(user, false); await _signInManager.SignInAsync(user, isPersistent: false); _logger.LogInformation(2, "User disabled two-factor authentication."); } return RedirectToAction(nameof(Index), "Manage"); } // // GET: /Manage/VerifyPhoneNumber [HttpGet] public async Task<IActionResult> VerifyPhoneNumber(string phoneNumber) { var code = await _userManager.GenerateChangePhoneNumberTokenAsync(await GetCurrentUserAsync(), phoneNumber); // Send an SMS to verify the phone number return phoneNumber == null ? 
View("Error") : View(new VerifyPhoneNumberViewModel { PhoneNumber = phoneNumber }); } // // POST: /Manage/VerifyPhoneNumber [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> VerifyPhoneNumber(VerifyPhoneNumberViewModel model) { if (!ModelState.IsValid) { return View(model); } var user = await GetCurrentUserAsync(); if (user != null) { var result = await _userManager.ChangePhoneNumberAsync(user, model.PhoneNumber, model.Code); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); return RedirectToAction(nameof(Index), new { Message = ManageMessageId.AddPhoneSuccess }); } } // If we got this far, something failed, redisplay the form ModelState.AddModelError(string.Empty, "Failed to verify phone number"); return View(model); } // // POST: /Manage/RemovePhoneNumber [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> RemovePhoneNumber() { var user = await GetCurrentUserAsync(); if (user != null) { var result = await _userManager.SetPhoneNumberAsync(user, null); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); return RedirectToAction(nameof(Index), new { Message = ManageMessageId.RemovePhoneSuccess }); } } return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error }); } // // GET: /Manage/ChangePassword [HttpGet] public IActionResult ChangePassword() { return View(); } // // POST: /Manage/ChangePassword [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> ChangePassword(ChangePasswordViewModel model) { if (!ModelState.IsValid) { return View(model); } var user = await GetCurrentUserAsync(); if (user != null) { var result = await _userManager.ChangePasswordAsync(user, model.OldPassword, model.NewPassword); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); _logger.LogInformation(3, "User changed their password successfully."); return RedirectToAction(nameof(Index), new { Message = ManageMessageId.ChangePasswordSuccess }); } AddErrors(result); return View(model); } return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error }); } // // GET: /Manage/SetPassword [HttpGet] public IActionResult SetPassword() { return View(); } // // POST: /Manage/SetPassword [HttpPost] [ValidateAntiForgeryToken] public async Task<IActionResult> SetPassword(SetPasswordViewModel model) { if (!ModelState.IsValid) { return View(model); } var user = await GetCurrentUserAsync(); if (user != null) { var result = await _userManager.AddPasswordAsync(user, model.NewPassword); if (result.Succeeded) { await _signInManager.SignInAsync(user, isPersistent: false); return RedirectToAction(nameof(Index), new { Message = ManageMessageId.SetPasswordSuccess }); } AddErrors(result); return View(model); } return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error }); } //GET: /Manage/ManageLogins [HttpGet] public async Task<IActionResult> ManageLogins(ManageMessageId? message = null) { ViewData["StatusMessage"] = message == ManageMessageId.RemoveLoginSuccess ? "The external login was removed." : message == ManageMessageId.AddLoginSuccess ? "The external login was added." : message == ManageMessageId.Error ? "An error has occurred." 
: ""; var user = await GetCurrentUserAsync(); if (user == null) { return View("Error"); } var userLogins = await _userManager.GetLoginsAsync(user); var otherLogins = _signInManager.GetExternalAuthenticationSchemes().Where(auth => userLogins.All(ul => auth.AuthenticationScheme != ul.LoginProvider)).ToList(); ViewData["ShowRemoveButton"] = user.PasswordHash != null || userLogins.Count > 1; return View(new ManageLoginsViewModel { CurrentLogins = userLogins, OtherLogins = otherLogins }); } // // POST: /Manage/LinkLogin [HttpPost] [ValidateAntiForgeryToken] public IActionResult LinkLogin(string provider) { // Request a redirect to the external login provider to link a login for the current user var redirectUrl = Url.Action("LinkLoginCallback", "Manage"); var properties = _signInManager.ConfigureExternalAuthenticationProperties(provider, redirectUrl, _userManager.GetUserId(User)); return Challenge(properties, provider); } // // GET: /Manage/LinkLoginCallback [HttpGet] public async Task<ActionResult> LinkLoginCallback() { var user = await GetCurrentUserAsync(); if (user == null) { return View("Error"); } var info = await _signInManager.GetExternalLoginInfoAsync(await _userManager.GetUserIdAsync(user)); if (info == null) { return RedirectToAction(nameof(ManageLogins), new { Message = ManageMessageId.Error }); } var result = await _userManager.AddLoginAsync(user, info); var message = result.Succeeded ? ManageMessageId.AddLoginSuccess : ManageMessageId.Error; return RedirectToAction(nameof(ManageLogins), new { Message = message }); } #region Helpers private void AddErrors(IdentityResult result) { foreach (var error in result.Errors) { ModelState.AddModelError(string.Empty, error.Description); } } public enum ManageMessageId { AddPhoneSuccess, AddLoginSuccess, ChangePasswordSuccess, SetTwoFactorSuccess, SetPasswordSuccess, RemoveLoginSuccess, RemovePhoneSuccess, Error } private Task<ApplicationUser> GetCurrentUserAsync() { return _userManager.GetUserAsync(HttpContext.User); } #endregion } }
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) Under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You Under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed Under the License is distributed on an "AS Is" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations Under the License. ==================================================================== */ namespace NPOI.HSSF.Record { using System; using System.Collections; using System.Collections.Generic; using System.Text; using NPOI.Util; using NPOI.Util.Collections; using NPOI.HSSF.Record.Cont; /** * Title: Static String Table Record * * Description: This holds all the strings for LabelSSTRecords. * * REFERENCE: PG 389 Microsoft Excel 97 Developer's Kit (ISBN: * 1-57231-498-2) * * @author Andrew C. Oliver (acoliver at apache dot org) * @author Marc Johnson (mjohnson at apache dot org) * @author Glen Stampoultzis (glens at apache.org) * * @see org.apache.poi.hssf.record.LabelSSTRecord * @see org.apache.poi.hssf.record.ContinueRecord */ internal class SSTRecord : ContinuableRecord { public const short sid = 0x00FC; private static UnicodeString EMPTY_STRING = new UnicodeString(""); /** how big can an SST record be? As big as any record can be: 8228 bytes */ public const int MAX_RECORD_SIZE = 8228; /** standard record overhead: two shorts (record id plus data space size)*/ public const int STD_RECORD_OVERHEAD = 2 * LittleEndianConsts.SHORT_SIZE; /** SST overhead: the standard record overhead, plus the number of strings and the number of Unique strings -- two ints */ public const int SST_RECORD_OVERHEAD = (STD_RECORD_OVERHEAD + (2 * LittleEndianConsts.INT_SIZE)); /** how much data can we stuff into an SST record? That would be _max minus the standard SST record overhead */ public const int MAX_DATA_SPACE = RecordInputStream.MAX_RECORD_DATA_SIZE - 8;//MAX_RECORD_SIZE - SST_RECORD_OVERHEAD; // /** overhead for each string includes the string's Char count (a short) and the flag describing its Charistics (a byte) */ //public const int STRING_MINIMAL_OVERHEAD = LittleEndianConsts.SHORT_SIZE + LittleEndianConsts.BYTE_SIZE; /** Union of strings in the SST and EXTSST */ private int field_1_num_strings; /** according to docs ONLY SST */ private int field_2_num_unique_strings; private IntMapper<UnicodeString> field_3_strings; private SSTDeserializer deserializer; /** Offsets from the beginning of the SST record (even across continuations) */ int[] bucketAbsoluteOffsets; /** Offsets relative the start of the current SST or continue record */ int[] bucketRelativeOffsets; /** * default constructor */ public SSTRecord() { field_1_num_strings = 0; field_2_num_unique_strings = 0; field_3_strings = new IntMapper<UnicodeString>(); deserializer = new SSTDeserializer(field_3_strings); } /** * Constructs an SST record and Sets its fields appropriately. 
* * @param in the RecordInputstream to Read the record from */ public SSTRecord(RecordInputStream in1) { // this method Is ALWAYS called after construction -- using // the nontrivial constructor, of course -- so this Is where // we initialize our fields field_1_num_strings = in1.ReadInt(); field_2_num_unique_strings = in1.ReadInt(); field_3_strings = new IntMapper<UnicodeString>(); deserializer = new SSTDeserializer(field_3_strings); deserializer.ManufactureStrings(field_2_num_unique_strings, in1); } /** * Add a string. * * @param string string to be Added * * @return the index of that string in the table */ public int AddString(UnicodeString str) { field_1_num_strings++; UnicodeString ucs = (str == null) ? EMPTY_STRING : str; int rval; int index = field_3_strings.GetIndex(ucs); if (index != -1) { rval = index; } else { // This is a new string -- we didn't see it among the // strings we've already collected rval = field_3_strings.Size; field_2_num_unique_strings++; SSTDeserializer.AddToStringTable(field_3_strings, ucs); } return rval; } /** * @return number of strings */ public int NumStrings { get { return field_1_num_strings; } set { field_1_num_strings = value; } } /** * @return number of Unique strings */ public int NumUniqueStrings { get { return field_2_num_unique_strings; } set { field_2_num_unique_strings = value; } } /** * Get a particular string by its index * * @param id index into the array of strings * * @return the desired string */ public UnicodeString GetString(int id) { return (UnicodeString)field_3_strings[id]; } public bool IsString16bit(int id) { UnicodeString unicodeString = ((UnicodeString)field_3_strings[id]); return ((unicodeString.OptionFlags & 0x01) == 1); } /** * Return a debugging string representation * * @return string representation */ public override String ToString() { StringBuilder buffer = new StringBuilder(); buffer.Append("[SST]\n"); buffer.Append(" .numstrings = ") .Append(StringUtil.ToHexString(NumStrings)).Append("\n"); buffer.Append(" .uniquestrings = ") .Append(StringUtil.ToHexString(NumUniqueStrings)).Append("\n"); for (int k = 0; k < field_3_strings.Size; k++) { UnicodeString s = (UnicodeString)field_3_strings[k]; buffer.Append(" .string_" + k + " = ") .Append(s.GetDebugInfo()).Append("\n"); } buffer.Append("[/SST]\n"); return buffer.ToString(); } /** * @return sid */ public override short Sid { get { return sid; } } /** * @return hashcode */ public override int GetHashCode() { return field_2_num_unique_strings; } public override bool Equals(Object o) { if ((o == null) || (o.GetType() != this.GetType())) { return false; } SSTRecord other = (SSTRecord)o; return ((field_1_num_strings == other .field_1_num_strings) && (field_2_num_unique_strings == other .field_2_num_unique_strings) && field_3_strings .Equals(other.field_3_strings)); } /** * @return an iterator of the strings we hold. All instances are * UnicodeStrings */ public IEnumerator GetStrings() { return field_3_strings.GetEnumerator(); } /** * @return count of the strings we hold. */ public int CountStrings { get { return field_3_strings.Size; } } /** * called by the class that Is responsible for writing this sucker. * Subclasses should implement this so that their data Is passed back in a * byte array. 
* * @return size */ protected override void Serialize(ContinuableRecordOutput out1) { SSTSerializer serializer = new SSTSerializer(field_3_strings, NumStrings, NumUniqueStrings ); serializer.Serialize(out1); bucketAbsoluteOffsets = serializer.BucketAbsoluteOffsets; bucketRelativeOffsets = serializer.BucketRelativeOffsets; } SSTDeserializer GetDeserializer() { return deserializer; } /** * Creates an extended string record based on the current contents of * the current SST record. The offset within the stream to the SST record * Is required because the extended string record points directly to the * strings in the SST record. * * NOTE: THIS FUNCTION MUST ONLY BE CALLED AFTER THE SST RECORD HAS BEEN * SERIALIZED. * * @param sstOffset The offset in the stream to the start of the * SST record. * @return The new SST record. */ public ExtSSTRecord CreateExtSSTRecord(int sstOffset) { if (bucketAbsoluteOffsets == null || bucketAbsoluteOffsets == null) throw new InvalidOperationException("SST record has not yet been Serialized."); ExtSSTRecord extSST = new ExtSSTRecord(); extSST.NumStringsPerBucket=((short)8); int[] absoluteOffsets = (int[])bucketAbsoluteOffsets.Clone(); int[] relativeOffsets = (int[])bucketRelativeOffsets.Clone(); for (int i = 0; i < absoluteOffsets.Length; i++) absoluteOffsets[i] += sstOffset; extSST.SetBucketOffsets(absoluteOffsets, relativeOffsets); return extSST; } /** * Calculates the size in bytes of the EXTSST record as it would be if the * record was Serialized. * * @return The size of the ExtSST record in bytes. */ public int CalcExtSSTRecordSize() { return ExtSSTRecord.GetRecordSizeForStrings(field_3_strings.Size); } } }
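// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): adding shared
// strings and reading them back. SSTRecord is internal to NPOI.HSSF.Record,
// so this assumes same-assembly access and that UnicodeString resolves from
// the same usings as the record above; the strings are arbitrary examples.
// ---------------------------------------------------------------------------
namespace NPOI.HSSF.Record
{
    using System;
    using NPOI.Util;
    using NPOI.Util.Collections;

    internal static class SstRecordSketch
    {
        internal static void Run()
        {
            SSTRecord sst = new SSTRecord();

            int first = sst.AddString(new UnicodeString("Revenue"));
            int again = sst.AddString(new UnicodeString("Revenue")); // duplicate text reuses the same index

            // NumStrings counts every AddString call; NumUniqueStrings counts distinct entries.
            Console.WriteLine("same index: {0}, total: {1}, unique: {2}",
                first == again, sst.NumStrings, sst.NumUniqueStrings);

            UnicodeString stored = sst.GetString(first);
            Console.WriteLine(stored.GetDebugInfo());
        }
    }
}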
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Text; namespace Apache.Geode.Client.FwkLib { using Apache.Geode.DUnitFramework; using System.Threading; using System.Xml.Serialization; using System.IO; using System.Reflection; public class EventTest : FwkTest { public const string EventCountersBB = "EventCountersBB"; #region Private members private List<CacheableKey> m_keysVec = new List<CacheableKey>(); #endregion #region Private utility methods private int percentDifferent() { string testRegionName = GetStringValue("regionName"); if (testRegionName.Length == 0) { throw new FwkException("Data not provided for regionname failing"); } int bbExpected = 0; int expected = GetUIntValue("expectedKeyCount"); if (expected < 0) { bbExpected = (int)Util.BBGet(EventCountersBB, "CREATE_COUNT"); if (bbExpected <= 0) { throw new FwkException("Data not provided for expectedKeyCount failing"); } expected = bbExpected; } Region testReg = CacheHelper.GetRegion(testRegionName); IGeodeSerializable[] keys = testReg.GetKeys(); int keyCount = keys.Length; double diff = 0; if (keyCount > expected) { diff = keyCount - expected; } else { diff = expected - keyCount; } int retval = (int)((diff / ((double)expected + 1.0)) * 100.0); FwkInfo("Expected to have " + expected + " keys, found " + keyCount + " keys, percent Difference: " + retval); return retval; } private void doEntryTest(string opcode) { FwkInfo("Calling doEntryTest inside getRandomregion"); Region reg = GetRandomRegion(true); FwkInfo("After getrandomregion inside doentrytest"); if (reg == null) { FwkInfo("Check in side doEntryTest ... No region to operate on"); throw new FwkException("No reion to operate on"); } lock (this) { FwkInfo("After getrandomregion inside doentrytest Check 1"); if (reg.Attributes.CachingEnabled == false) { return; } IGeodeSerializable[] keys = reg.GetKeys(); int maxkeys = GetUIntValue("distinctKeys"); int balanceEntries = GetUIntValue("balanceEntries"); FwkInfo("After getrandomregion inside doentrytest Check 2 balance entries = {0}", balanceEntries); if (balanceEntries > 0) { if (keys.Length >= maxkeys) { FwkInfo("Balancing cache content by destroying excess entries."); int dcnt = 0; for (uint i = 100; i > 0; i--) { try { destroyObject(reg, true); } catch (Exception e) { // ignore the exception ... 
TODO print the message FwkSevere(e.Message); } } FwkInfo("Cache balancing complete, did " + dcnt + " destroys."); } else if (keys.Length == 0) { FwkInfo("Balancing cache content by creating entries."); int ccnt = 0; for (int i = 100; i > 0; i--) { try { addObject(reg, true, null, null); ccnt++; } catch (TimeoutException e) { FwkSevere("Caught unexpected timeout exception during entry " + opcode + " operation: " + e.Message + " continuing with test."); } catch (Exception e) { // Ignore other exception ... @TODO FwkInfo("Ignoring exception " + e.Message); } } FwkInfo("Cache balancing complete, did " + ccnt + " creates."); } } else { FwkInfo("Balance entries less than zero"); } } FwkInfo("After getrandomregion inside doentrytest Check 4 before switch opcode = {0}", opcode); //opcode = "read"; FwkInfo("opcode = " + opcode.ToString() + " " + opcode); switch (opcode) { case "add": addObject(reg, true, null, null); break; case "update": updateObject(reg); break; case "invalidate": invalidateObject(reg, false); break; case "destroy": destroyObject(reg, false); break; case "read": readObject(reg); break; case "localInvalidate": invalidateObject(reg, true); break; case "localDestroy": destroyObject(reg, true); break; default: FwkSevere("Invalid operation specified: " + opcode); break; } } private void doRegionTest(string opcode, int iMaxRegions) { Region randomRegion; lock (this) { int iRegionCount = getAllRegionCount(); if (iRegionCount >= iMaxRegions) { while (iRegionCount > iMaxRegions / 2) { try { randomRegion = GetRandomRegion(true); if (randomRegion == null) { FwkException("expected to get a valid random region, get a null region instead"); } else { destroyRegion(randomRegion, false); } iRegionCount = getAllRegionCount(); FwkInfo("Inside doregiontest ... iregioncount = {0}", iRegionCount); } catch (Exception ignore) { FwkInfo(ignore.Message); } } } else if (iRegionCount <= 0) { for (Int32 i = iMaxRegions / 2; i > 0; i--) { try { addRegion(); } catch (Exception ignore) { FwkInfo(ignore.Message); } } } FwkInfo("Inside doregiontest after else"); } FwkInfo("Again GetRandomRegion"); randomRegion = GetRandomRegion(true); if (randomRegion == null) { //need to create a region opcode = "addRegion"; } FwkInfo("Do region test: " + opcode); switch (opcode) { case "addRegion": addRegion(); break; case "clearRegion": clearRegion(randomRegion, false); break; case "invalidateRegion": invalidateRegion(randomRegion, false); break; case "destroyRegion": destroyRegion(randomRegion, false); break; case "localClearRegion": clearRegion(randomRegion, true); break; case "localInvalidateRegion": invalidateRegion(randomRegion, true); break; case "localDestroyRegion": destroyRegion(randomRegion, true); break; default: FwkSevere("Invalid operation specified: " + opcode); break; } } private string getNextRegionName(Region region) { string regionName = null; int count = 0; string path; do { path = GetStringValue("regionPaths"); if (path.Length == 0) { FwkException("No regionPaths defined in the xml file. 
Needed for region event test"); } do { int length = path.Length; try { region = CacheHelper.GetRegion(path); } catch (Exception e) { FwkSevere(e.Message); } if (region == null) { int pos = path.LastIndexOf('/'); regionName = path.Substring(pos + 1, path.Length - pos); path = path.Substring(0, pos); } } while ((region == null) && path.Length == 0); } while ((++count < 5) && regionName.Length != 0); return regionName; } public void measureMemory(string s, double vs, double rs) { } public CacheableKey findKeyNotInCache(Region region) { CacheableKey key; if (m_keysVec.Count == 0) { lock (this) { int numKeys = GetUIntValue("distinctKeys"); for (int i = 0; i < numKeys; i++) { string skey = i.ToString(); key = new CacheableString(skey); //int pos = m_keysVec.Length; m_keysVec.Add(key); } } } key = null; int start = Util.Rand(m_keysVec.Count); bool wrapped = false; int cur = start; while ((cur != start) || !wrapped) { if (cur >= m_keysVec.Count) { cur = 0; wrapped = true; } else { if (!region.ContainsKey(m_keysVec[cur])) { key = m_keysVec[cur]; cur = start; wrapped = true; } else { cur++; } } } return key; } #endregion #region Public methods public void DoEntryOperations() { Util.Log("Calling doeventoperations"); doEventOperations(); } public void doEventOperations() { UInt32 counter = 0; string taskID = "begin"; // Clear up everything from previous test. // Make sure we have one root region. { // TODO: Lock and task id business // ACE_Guard<ACE_Thread_Mutex> guard( *testLock); //if ((taskID != null) && (this.getn != taskID)) { // TODO CacheHelper.DestroyAllRegions(true); //destroyAllRegions(); CreateRootRegion(); if (taskID != null) { //(taskID); } // FWKINFO( "DBG doEventOperations set id" ); //taskID = strdup(getTaskId().c_str()); } // FWKINFO( "DBG doEventOperations release lock" ); } int workTime = GetUIntValue("workTime"); FwkInfo("doEventOperations will work for " + workTime + " seconds. "); int skipCounter = GetUIntValue("skipCount"); skipCounter = (skipCounter > 0) ? skipCounter : 100; int iMaxRegions = GetUIntValue("maxRegions"); // TODO: DEFAULT_MAX_REGION int DEFAULT_MAX_REGION = 10; iMaxRegions = (iMaxRegions > 0) ? iMaxRegions : DEFAULT_MAX_REGION; // TODO: check the correctness. DateTime endTime = DateTime.Now + TimeSpan.FromMilliseconds((double)workTime); int opsSecond = GetUIntValue("opsSecond"); opsSecond = (opsSecond > 0) ? opsSecond : 0; PaceMeter pm = new PaceMeter(opsSecond); int logSize = GetUIntValue("logSize"); int opCount = 0; DateTime now = new DateTime(); string opcode = string.Empty; bool isDone = false; FwkInfo("Entering event loop."); do { FwkInfo("Before getRegionCount"); if (logSize == 1) { int cnt = getRegionCount(); FwkInfo(cnt + ((cnt == 1) ? " region " : " regions ") + opCount); } FwkInfo("After getRegionCount"); int randomOP = GetUIntValue("randomOP"); if (randomOP == 5) { opcode = GetStringValue("regionOps"); } else { opcode = GetStringValue("entryOps"); } FwkInfo("Check 1"); if (opcode.Length != 0) { bool skipTest = false; if (opcode == "abort") { skipTest = true; if (--skipCounter == 0) { // TODO: definitely wrong. what is intended is unclear. 
//char * segv = NULL; //strcpy( segv, "Forcing segv" ); } } else if (opcode == "exit") { skipTest = true; if (--skipCounter == 0) { Environment.Exit(0); } } else if (opcode == "done") { skipTest = true; if (--skipCounter == 0) { isDone = true; } } if (!skipTest) { FwkInfo("Check 2 doRegionTest"); if (randomOP == 5) { doRegionTest(opcode, iMaxRegions); } else { FwkInfo("Check 3 doEntryTest"); doEntryTest(opcode); FwkInfo("Check 4 doentrytest over"); } opCount++; pm.CheckPace(); } counter++; if ((counter % 1000) == 0) { FwkInfo("Performed " + counter + " operations."); } Util.BBIncrement(EventCountersBB, "CURRENT_OPS_COUNT"); } else { FwkSevere("NULL operation specified." + "randomOP: " + randomOP); } now = DateTime.Now; FwkInfo("do while end in doeventoperations"); } while ((now < endTime) && !isDone); FwkInfo("Event loop complete."); FwkInfo("doEventOperations() performed " + counter + " operations."); } public void doIterate() { FwkInfo("doIterate()"); uint ulKeysInRegion = 0; uint ulNoneNullValuesInRegion = 0; string sError = null; Region[] rootRegionArray; Region rootRegion; RegionAttributes attr; rootRegionArray = CacheHelper.DCache.RootRegions(); int ulRegionCount = rootRegionArray.Length; for (int ulIndex = 0; ulIndex < ulRegionCount; ulIndex++) { rootRegion = rootRegionArray[ulIndex]; attr = rootRegion.Attributes; bool bHasInvalidateAction = attr.EntryIdleTimeoutAction == ExpirationAction.Invalidate || (attr.EntryTimeToLiveAction == ExpirationAction.Invalidate); iterateRegion(rootRegion, true, bHasInvalidateAction, ulKeysInRegion, ulNoneNullValuesInRegion, sError); if (sError.Length > 0) { FwkException(sError); } } } public void doMemoryMeasurement() { // TODO Later } public void verifyKeyCount() { int percentDiff = percentDifferent(); if (percentDiff > 10) { FwkSevere("Actual number of keys does not match expected number."); } } public void addEntry() { string testRegionName = GetStringValue("regionName"); if (testRegionName.Length == 0) { FwkException("Data not provided for 'regionName', failing."); } Region region = CacheHelper.DCache.GetRegion(testRegionName); int usePid = GetUIntValue("usePID"); int pid = Util.PID; int opsSecond = GetUIntValue("opsSecond"); if (opsSecond < 0) { opsSecond = 0; // No throttle } PaceMeter pm = new PaceMeter(opsSecond); int entryCount = GetUIntValue("EntryCount"); if (entryCount <= 0) { entryCount = 100; } FwkInfo("addEntry: Adding " + entryCount + " entries to the cache."); for (Int32 count = 0; count < entryCount; count++) { string sKey; Serializable sValue; if (usePid == 1) { sKey = pid.ToString(); } else { sKey = string.Empty; } sKey += count.ToString(); // get value size int vsize = GetUIntValue("valueSizes"); if (vsize < 0) { vsize = 1000; } byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); sValue = CacheableBytes.Create(buffer); // TODO: check CacheableKey key = new CacheableString(sKey); Serializable value = sValue; if (key == null) { FwkSevere("EventTest::addObject null keyPtr generated."); } FwkInfo("created entry with key: " + key.ToString()); region.Put(key as CacheableKey, value); Util.BBIncrement(EventCountersBB, "CREATE_COUNT"); pm.CheckPace(); } FwkInfo("addEntry: Complete."); } public void addOrDestroyEntry() { string testRegionName = GetStringValue("regionName"); if (testRegionName.Length == 0) { FwkException("Data not provided for 'regionName', failing."); } Region region = CacheHelper.DCache.GetRegion(testRegionName); int usePid = GetUIntValue("usePID"); int pid = Util.PID; int entryCount = GetUIntValue("EntryCount"); if 
(entryCount <= 0) { entryCount = 100; } FwkInfo("addOrDestroyEntry: Adding or Destroying ( if present )" + entryCount + " entries to the cache."); for (int count = 0; count < entryCount; count++) { string sKey; if (usePid == 1) { sKey = pid.ToString(); } else { sKey = string.Empty; } sKey += count.ToString(); // get value size int vsize = GetUIntValue("valueSizes"); if (vsize < 0) { vsize = 1000; } byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); CacheableKey key = new CacheableString(sKey); CacheableBytes value = CacheableBytes.Create(buffer); if (key == null) { FwkSevere("EventTest::addObject null keyPtr generated."); } string op = GetStringValue("popOp"); if (op == "put") { region.Put(key as CacheableKey, value); } else { region.Destroy(key as CacheableKey); } Util.BBIncrement(EventCountersBB, "CREATE_COUNT"); } FwkInfo("addOrDestroyEntry: Complete."); } public void validateCacheContent() { FwkInfo("validateCacheContent()"); string testRegionName = GetStringValue("testRegion"); string validateRegionName = GetStringValue("validateRegion"); Region testRegion = CacheHelper.DCache.GetRegion(testRegionName); Region validateRegion = CacheHelper.DCache.GetRegion(validateRegionName); ICacheableKey[] keyVector; keyVector = testRegion.GetKeys(); int ulKeysInRegion = keyVector.Length; if (ulKeysInRegion == 0) { FwkSevere("zero keys in testRegion " + testRegion.Name); } ICacheableKey key; IGeodeSerializable value; int entryPassCnt = 0; int entryFailCnt = 0; for (int ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++) { key = keyVector[ulIndex]; value = testRegion.Get(key); if (TestEntryPropagation(validateRegion, key as CacheableString, value as CacheableBytes)) { entryFailCnt++; } else { entryPassCnt++; } } FwkInfo("entryFailCnt is " + entryFailCnt + " entryPassCnt is " + entryPassCnt); if (entryFailCnt == 0) { FwkInfo("validateCacheContent() - TEST ENDED, RESULT = SUCCESSFUL "); } else { FwkSevere("validateCacheContent() - TEST ENDED, RESULT = FAILED "); } } public void validateRegionContent() { FwkInfo("validateRegionContent()"); string testRegionName = GetStringValue("testRegion"); string validateRegionName = GetStringValue("validateRegion"); string regionName = GetStringValue("regionName"); Region testRegion = CacheHelper.DCache.GetRegion(testRegionName); Region validateRegion = CacheHelper.DCache.GetRegion(validateRegionName); FwkInfo("localDestroyRegion region name is " + testRegion.Name); // destroy the region int iBeforeCounter = (int)Util.BBGet(EventCountersBB, "numAfterRegionDestroyEvents_isNotExp"); testRegion.LocalDestroyRegion(); CreateRootRegion(); Region region = CacheHelper.DCache.GetRegion(regionName); FwkInfo(" Recreated Region name is " + region.Name); ICacheableKey[] keyVector; ICacheableKey[] keyVectorValidateRegion; keyVector = region.GetKeys(); keyVectorValidateRegion = validateRegion.GetKeys(); int ulKeysInRegion = keyVector.Length; int ulKeysInValidateRegion = keyVectorValidateRegion.Length; if (ulKeysInRegion != ulKeysInValidateRegion) { FwkSevere("Region Key count is not equal, Region " + region.Name + " key count is " + ulKeysInRegion + " and Region " + validateRegion.Name + " key count is " + ulKeysInValidateRegion); } ICacheableKey key; IGeodeSerializable value; int entryPassCnt = 0; int entryFailCnt = 0; for (int ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++) { key = keyVector[ulIndex]; value = region.Get(key); if (TestEntryPropagation(validateRegion, key as CacheableString, value as CacheableBytes)) { entryFailCnt++; } else { entryPassCnt++; } } 
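// TestEntryPropagation (defined further below) reports an error when the key or its value is missing from the validate region, so entryFailCnt counts entries that did not propagate.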
FwkInfo("entryFailCnt is " + entryFailCnt + " entryPassCnt is " + entryPassCnt); if (entryFailCnt == 0) { FwkInfo("validateRegionContent() - TEST ENDED, RESULT = SUCCESSFUL "); } else { FwkSevere("validateRegionContent() - TEST ENDED, RESULT = FAILED "); } } public void doCreateObject() { // Not implemented. } public void doIterateOnEntry() { FwkInfo("doIterateOnEntry()"); string testRegionName = GetStringValue("testRegion"); string validateRegionName = GetStringValue("validateRegion"); Region testRegion = CacheHelper.DCache.GetRegion(testRegionName); Region validateRegion = CacheHelper.DCache.GetRegion(validateRegionName); ICacheableKey[] keyVector = null; int keysInRegion = 1; int lastCount = 0; int tryCount = 30; int tries = 0; while ((keysInRegion != lastCount) && (tries++ < tryCount)) { Thread.Sleep(10000); // sleep for 10 seconds. lastCount = keysInRegion; keyVector = testRegion.GetKeys(); keysInRegion = keyVector.Length; } if ((keysInRegion == 0) || (tries >= tryCount)) { FwkException("After " + tries + " tries, counted " + keysInRegion + " keys in the region."); } FwkInfo("After " + tries + " tries, counted " + keysInRegion + " keys in the region."); CacheableKey key; Serializable value; for (int index = 0; index < keysInRegion; index++) { key = keyVector[index] as CacheableKey; value = testRegion.Get(key) as Serializable; validateRegion.Create(key, value); } } public void feedEntries() { string testRegionName = GetStringValue("regionName"); if (testRegionName.Length == 0) { FwkException("Data not provided for 'regionName', failing."); } Region region = CacheHelper.DCache.GetRegion(testRegionName); int opsSecond = GetUIntValue("opsSecond"); if (opsSecond < 0) { opsSecond = 0; // No throttle } PaceMeter pm = new PaceMeter(opsSecond); int secondsToRun = GetUIntValue("workTime"); secondsToRun = (secondsToRun < 1) ? 100 : secondsToRun; FwkInfo("feedEntries: Will add entries for " + secondsToRun + " seconds."); DateTime end = DateTime.Now + TimeSpan.FromSeconds((double)secondsToRun); DateTime now = DateTime.Now; int count = 0; while (now < end) { string key = (++count).ToString(); // get value size int vsize = GetUIntValue("valueSizes"); if (vsize < 0) { vsize = 1000; } byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); CacheableKey skey = new CacheableString(key); Serializable value = CacheableBytes.Create(buffer); if (key == null) { FwkSevere("EventTest::feedEntries null keyPtr generated."); now = end; } region.Put(skey, value); Util.BBIncrement(EventCountersBB, "CREATE_COUNT"); pm.CheckPace(); now = DateTime.Now; } } public void doBasicTest() { Region region = GetRandomRegion(true); int numKeys = GetUIntValue("distinctKeys"); numKeys = numKeys > 0 ? numKeys : 1000; CacheableKey[] keys = new CacheableKey[numKeys]; Serializable[] values = new Serializable[numKeys]; for (int i = 0; i < numKeys; ++i) { int ksize = GetUIntValue("valueSizes"); ksize = ksize > 0 ? ksize : 12; int vsize = GetUIntValue("valueSizes"); vsize = vsize > 0 ? 
vsize : 100; string kStr = "key_"; byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); CacheableKey key = new CacheableString(kStr); Serializable value = CacheableBytes.Create(buffer); keys[i] = key; values[i] = value; region.Create(key, value); } ICacheableKey[] expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after create, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.LocalInvalidate(keys[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after localInvalidate, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { IGeodeSerializable val = region.Get(keys[i]); if (val.ToString() != values[i].ToString()) { FwkSevere("Expect " + values[i].ToString() + ", got " + val.ToString()); } } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after first get, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.LocalDestroy(keys[i]); } expectKeys = region.GetKeys(); if ((expectKeys.Length) != 0) { FwkSevere("Expect 0 keys after localDestroy, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { IGeodeSerializable val = region.Get(keys[i]); // get if (val.ToString() != values[i].ToString()) { FwkSevere("Expect " + values[i].ToString() + ", got " + val.ToString()); } } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after second get, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.Invalidate(keys[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after invalidate, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.Get(keys[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after invalidate all entries in server, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.Put(keys[i], values[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after put, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.Destroy(keys[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != 0) { FwkSevere("Expect 0 keys after destroy, got " + expectKeys.Length + " keys"); } int excepCount = 0; for (int i = 0; i < numKeys; ++i) { try { region.Get(keys[i]); } catch (EntryNotFoundException e) { FwkInfo(e.Message); ++excepCount; } } expectKeys = region.GetKeys(); if (expectKeys.Length != 0) { FwkSevere("Expect 0 keys because all entries are destoyed in server, got " + expectKeys.Length + " keys"); } if (excepCount != numKeys) { FwkSevere("Expect " + numKeys + " exceptions because all entries are destoyed in server, got " + excepCount + " exceptions"); } for (int i = 0; i < numKeys; ++i) { region.Create(keys[i], values[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after second create, got " + expectKeys.Length + " keys"); } for (int i = 0; i < numKeys; ++i) { region.Get(keys[i]); } expectKeys = region.GetKeys(); if (expectKeys.Length != numKeys) { FwkSevere("Expect " + numKeys + " keys after invalidate all entries in server, got " + expectKeys.Length + " keys"); } } public void doTwinkleRegion() { int 
secondsToRun = GetUIntValue("workTime"); secondsToRun = (secondsToRun < 1) ? 10 : secondsToRun; FwkInfo("Seconds to run: " + secondsToRun); DateTime endTime = DateTime.Now + TimeSpan.FromSeconds((double)secondsToRun); bool done = false; bool regionDestroyed = false; int errCnt = 0; while (!done) { int sleepTime = GetUIntValue("sleepTime"); sleepTime = ((sleepTime < 1) || regionDestroyed) ? 10 : sleepTime; FwkInfo("sleepTime is " + sleepTime + " seconds."); DateTime now = DateTime.Now; // Exit when time is up, or when the next sleep would run past the end time (replaces the second-of-minute comparison flagged by the original TODOs). if ((now > endTime) || (now + TimeSpan.FromSeconds((double)sleepTime) > endTime)) { // FWKINFO( "Exiting loop, time is up." ); done = true; continue; } FwkInfo("EventTest::doTwinkleRegion() sleeping for " + sleepTime + " seconds."); Thread.Sleep(sleepTime * 1000); if (regionDestroyed) { FwkInfo("EventTest::doTwinkleRegion() will create a region."); CreateRootRegion(); regionDestroyed = false; FwkInfo("EventTest::doTwinkleRegion() region created."); int percentDiff = percentDifferent(); if (percentDiff > 10) { errCnt++; FwkSevere("Actual number of keys is not within 10% of expected."); } } else { FwkInfo("EventTest::doTwinkleRegion() will destroy a region."); Region region = GetRandomRegion(true); if (region != null) { region.LocalDestroyRegion(); region = null; } regionDestroyed = true; FwkInfo("EventTest::doTwinkleRegion() local region destroy is complete."); } } // while if (regionDestroyed) { CreateRootRegion(); FwkInfo("EventTest::doTwinkleRegion() region created."); } FwkInfo("EventTest::doTwinkleRegion() completed."); if (errCnt > 0) { FwkException("Region key count was out of bounds on " + errCnt + " region creates."); } } // TODO Entire method check. public void checkTest(string taskId) { // TODO: For lock // SpinLockGuard guard( m_lck ); // TODO: setTask(taskId) if (CacheHelper.DCache == null) { Properties pp = new Properties(); //TODO: Initialize? 
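// No cache exists yet for this task, so initialize one lazily with default (empty) properties.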
cacheInitialize( pp ); //string val = getStringValue( "EventBB" ); //if ( !val.empty() ) //{ // m_sEventBB = val; //} } } public void createRootRegion(string regionName) { FwkInfo("In createRootRegion region"); Region rootRegion; if (regionName == null) { rootRegion = CreateRootRegion(); } else { rootRegion = CacheHelper.CreateRegion(regionName, null); } Util.BBIncrement(EventCountersBB, rootRegion.FullPath); Util.BBIncrement(EventCountersBB, "ROOT_REGION_COUNT"); FwkInfo("In createRootRegion, Created root region: " + rootRegion.FullPath); } public bool TestEntryPropagation(Region region, CacheableString szKey, CacheableBytes szValue) { bool bEntryError = false; bool bContainsKey = false; bool bContainsValue = false; bContainsKey = region.ContainsKey(szKey); bContainsValue = region.ContainsValueForKey(szKey); if (!bContainsKey || !bContainsValue) { FwkSevere("Key: " + szKey.Value + " not found in region " + region.FullPath + ", mirroring is enabled"); bEntryError = true; } return bEntryError; } public void addObject(Region region, bool bLogAddition, string pszKey, string pszValue) { CacheableKey key; if (pszKey == null) { key = findKeyNotInCache(region); } else { key = new CacheableString(pszKey); } if (key == null) { FwkInfo("EventTest::addObject null key generated for " + pszKey); return; } Serializable value; if (pszValue == null) { int vsize = GetUIntValue("valueSizes"); if (vsize < 0) { vsize = 1000; } byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); value = CacheableBytes.Create(buffer); } else { value = new CacheableString(pszValue); } if (value == null) { FwkInfo("EventTest::addObject null valuePtr generated."); return; } region.Create(key, value); Util.BBIncrement(EventCountersBB, "CREATE_COUNT"); } public void invalidateObject(Region randomRegion, bool bIsLocalInvalidate) { CacheableKey keyP = getKey(randomRegion, false); if (keyP == null) { Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT"); return; } if (bIsLocalInvalidate) { randomRegion.LocalInvalidate(keyP); Util.BBIncrement(EventCountersBB, "LOCAL_INVALIDATE_COUNT"); } else { randomRegion.Invalidate(keyP); Util.BBIncrement(EventCountersBB, "INVALIDATE_COUNT"); } } public void destroyObject(Region randomRegion, bool bIsLocalDestroy) { FwkInfo("EventTest::destroyObject"); CacheableKey keyP = getKey(randomRegion, true); if (keyP == null) { Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT"); return; } if (bIsLocalDestroy) { randomRegion.LocalDestroy(keyP); Util.BBIncrement(EventCountersBB, "LOCAL_DESTROY_COUNT"); } else { randomRegion.Destroy(keyP); Util.BBIncrement(EventCountersBB, "DESTROY_COUNT"); } } public void updateObject(Region randomRegion) { CacheableKey keyP = getKey(randomRegion, true); if (keyP == null) { FwkInfo("EventTest::updateObject key is null"); Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT"); return; } IGeodeSerializable anObj = randomRegion.Get(keyP); int vsize = GetUIntValue("valueSizes"); if (vsize < 0) { vsize = 1000; } byte[] buffer = new byte[vsize]; Util.RandBytes(buffer); CacheableBytes newObj = CacheableBytes.Create(buffer); randomRegion.Put(keyP, newObj); } public void readObject(Region randomRegion) { FwkInfo("Inside readObject randomregion = {0}", randomRegion.FullPath); CacheableKey keyP = getKey(randomRegion, true); FwkInfo("After getkey"); if (keyP == null) { Util.BBIncrement(EventCountersBB, "OPS_SKIPPED_COUNT"); FwkInfo("skipped and returned"); return; } FwkInfo("skipped and returned before Get"); CacheableBytes anObj = randomRegion.Get(keyP) as CacheableBytes; 
FwkInfo("got anobj"); //byte[] b = anObj.Value; //FwkInfo("byte array = " + b.ToString()); } public void addRegion() { Region parentRegion = null; string sRegionName = getNextRegionName(parentRegion); if (sRegionName.Length == 0) { // nothing to do return; } Region region; FwkInfo("In addRegion, enter create region " + sRegionName); if (parentRegion == null) { // TODO Is this right. region = CacheHelper.CreateRegion(sRegionName, null); } else { string fullName = parentRegion.FullPath; RegionAttributes atts = parentRegion.Attributes; AttributesFactory fact = new AttributesFactory(atts); atts = fact.CreateRegionAttributes(); region = parentRegion.CreateSubRegion(sRegionName, atts); Util.BBSet(EventCountersBB, sRegionName, fullName); } int iInitRegionNumObjects = GetUIntValue("initRegionNumObjects"); // Create objects in the new region for (int iIndex = 0; iIndex < iInitRegionNumObjects; iIndex++) { string skey = iIndex.ToString(); addObject(region, true, skey, null); } FwkInfo("In addRegion, exit create region " + sRegionName); } public void clearRegion(Region randomRegion, bool bIsLocalClear) { int iSubRegionCount = 0; // invalidate the region iSubRegionCount = getSubRegionCount(randomRegion) + 1; //bbGet("EventCountersBB", // TODO // "numAfterRegionInvalidateEvents_isNotExp", &iBeforeCounter); FwkInfo("In clearRegion, enter clear region " + randomRegion.Name); string pszCounterName = "LOCAL_REGION_CLEAR_COUNT"; if (bIsLocalClear) { randomRegion.LocalClear(); } else { pszCounterName = "REGION_CLEAR_COUNT"; randomRegion.Clear(); } Util.BBAdd(EventCountersBB, pszCounterName, iSubRegionCount); FwkInfo("In clearRegion, exit invalidate region " + randomRegion.Name); } public void invalidateRegion(Region randomRegion, bool bIsLocalInvalidate) { int iSubRegionCount = 0; // invalidate the region iSubRegionCount = getSubRegionCount(randomRegion) + 1; //bbGet("EventCountersBB", // TODO // "numAfterRegionInvalidateEvents_isNotExp", &iBeforeCounter); FwkInfo("In invalidateRegion, enter invalidate region " + randomRegion.Name); string pszCounterName = "LOCAL_REGION_INVALIDATE_COUNT"; if (bIsLocalInvalidate) { randomRegion.LocalInvalidateRegion(); } else { pszCounterName = "REGION_INVALIDATE_COUNT"; randomRegion.InvalidateRegion(); } Util.BBAdd(EventCountersBB, pszCounterName, iSubRegionCount); FwkInfo("In invalidateRegion, exit invalidate region " + randomRegion.Name); } public void destroyRegion(Region randomRegion, bool bIsLocalDestroy) { int iSubRegionCount = 0; // destroy the region //int iBeforeCounter = -1; //bbGet( "EventCountersBB", "numAfterRegionDestroyEvents_isNotExp", &iBeforeCounter );//TODO iSubRegionCount = getSubRegionCount(randomRegion) + 1; string pszCounterName = "LOCAL_REGION_DESTROY_COUNT"; FwkInfo("In destroyRegion, enter destroy region " + randomRegion.Name); if (bIsLocalDestroy) { randomRegion.LocalDestroyRegion(); } else { pszCounterName = "REGION_DESTROY_COUNT"; randomRegion.DestroyRegion(); } Util.BBIncrement(EventCountersBB, pszCounterName); FwkInfo("In destroyRegion, exit destroy region " + randomRegion.Name); } public Region GetRandomRegion(bool bAllowRootRegion) { FwkInfo("Inside GetRandomRegion ... Check 1"); Region[] rootRegionVector = CacheHelper.DCache.RootRegions(); int irootSize = rootRegionVector.Length; Region[] subRegionVector; FwkInfo("Inside GetRandomRegion ... Check 2"); int iRootSize = rootRegionVector.Length; if (iRootSize == 0) { // TODO return null; //return RegionPtr(); } FwkInfo("Inside GetRandomRegion ... 
Check 3 and irootSize = {0}", irootSize); // The original fixed-size "new Region[1]" overflowed as soon as more than one candidate region was added; a growable List<Region> (assumes System.Collections.Generic is imported) is used instead. List<Region> choseRegionVector = new List<Region>(); // if roots can be chosen, add them to candidates if (bAllowRootRegion) { for (int iRootIndex = 0; iRootIndex < iRootSize; iRootIndex++) { FwkInfo("Inside GetRandomRegion ... Check 4.{0}", iRootIndex); choseRegionVector.Add(rootRegionVector[iRootIndex]); } } FwkInfo("Inside GetRandomRegion ... Check 4"); // add all subregions for (int iRootIndex = 0; iRootIndex < iRootSize; iRootIndex++) { subRegionVector = rootRegionVector[iRootIndex].SubRegions(true); int iSubSize = subRegionVector.Length; for (int iSubIndex = 0; iSubIndex < iSubSize; iSubIndex++) { choseRegionVector.Add(subRegionVector[iSubIndex]); } } FwkInfo("Inside GetRandomRegion ... Check 5"); int iChoseRegionSize = choseRegionVector.Count; if (iChoseRegionSize == 0) { return null; //return RegionPtr(); } FwkInfo("Inside GetRandomRegion ... Check 6"); int idx = Util.Rand(iChoseRegionSize); //string regionName = choseRegionVector.at(idx)->getFullPath(); FwkInfo("Inside GetRandomRegion ... Check 7"); return choseRegionVector[idx]; } public void handleExpectedException(Exception e) { FwkInfo("Caught and ignored: " + e.Message); } public void verifyObjectInvalidated(Region region, CacheableKey key) { if ((region == null) && (key == null)) { return; } string error = string.Empty; if (!region.ContainsKey(key)) { error = "unexpected contains key"; } if (region.ContainsValueForKey(key)) { error = "Unexpected containsValueForKey "; } RegionEntry entry = region.GetEntry(key); if (entry == null) { error = "getEntry returned null"; } else { if (entry.Key != key) { error = "Keys are different"; } if (entry.Value != null) { error = "Expected value to be null"; } } if (error.Length != 0) { FwkException(error); } } public void verifyObjectDestroyed(Region region, CacheableKey key) { if ((region == null) && (key == null)) { return; } string osError = string.Empty; bool bContainsKey = region.ContainsKey(key); if (bContainsKey) { // TODO key.ToString() osError = "Unexpected containsKey " + bContainsKey + " for key " + key.ToString() + " in region " + region.FullPath + Environment.NewLine; } bool bContainsValueForKey = region.ContainsValueForKey(key); if (bContainsValueForKey) { osError += "Unexpected containsValueForKey " + bContainsValueForKey + " for key " + key.ToString() + " in region " + region.FullPath + Environment.NewLine; } RegionEntry entry = region.GetEntry(key); // TODO ... 
see this section //if (entry != null) //{ // CacheableString entryKey = key.; // CacheableBytes entryValuePtr = entryPtr->getValue(); // osError << "getEntry for key " << CacheableStringPtr( keyPtr )->asChar() << //" in region " << regionPtr->getFullPath() << //" returned was non-null; getKey is " << entryKeyPtr->asChar() << //", value is " << entryValuePtr->bytes() << "\n"; //} //if (sError.size() > 0) //{ // FWKEXCEPTION(sError); //} } public void iterateRegion(Region aRegion, bool bAllowZeroKeys, bool bAllowZeroNonNullValues, uint ulKeysInRegion, uint ulNoneNullValuesInRegion, string sError) { if (aRegion == null) { return; } ulKeysInRegion = 0; ulNoneNullValuesInRegion = 0; ICacheableKey[] keyVector = aRegion.GetKeys(); ulKeysInRegion = (uint)keyVector.Length; if (ulKeysInRegion == 0) { if (!bAllowZeroKeys) { sError = "Region " + aRegion.FullPath + " has " + ulKeysInRegion + " keys" + Environment.NewLine; } } CacheableKey key = null; IGeodeSerializable value = null; for (uint ulIndex = 0; ulIndex < ulKeysInRegion; ulIndex++) { key = keyVector[ulIndex] as CacheableKey; try { value = aRegion.Get(key); } catch (CacheLoaderException e) { FwkException("CacheLoaderException " + e.Message); } catch (TimeoutException e) { FwkException("TimeoutException " + e.Message); } if (value != null) { ulNoneNullValuesInRegion++; } } if (ulNoneNullValuesInRegion == 0) { if (!bAllowZeroNonNullValues) { sError += "Region " + aRegion.FullPath + " has " + ulNoneNullValuesInRegion + " non-null values" + Environment.NewLine; } } } public int getSubRegionCount(Region region) { Region[] subregions = region.SubRegions(true); return subregions.Length; } public int getAllRegionCount() { if (CacheHelper.DCache == null) { FwkSevere("Null cache pointer, no connection established."); return 0; } Region[] rootRegions = CacheHelper.DCache.RootRegions(); int iRootSize = rootRegions.Length; int iTotalRegions = iRootSize; for (int iIndex = 0; iIndex < iRootSize; iIndex++) { // TODO getSubRegionCount implementation iTotalRegions += getSubRegionCount(rootRegions[iIndex]); } return iTotalRegions; } public CacheableKey getKey(Region region, bool bInvalidOK) { FwkInfo("random key check 1"); //int randomKey = int.Parse((string)Util.ReadObject("randomKey")); int randomKey = GetUIntValue("randomKey"); CacheableKey keyP = null; FwkInfo("random key check 2 ... randomkey = {0}", randomKey); if (randomKey > 0) { string sKey = randomKey.ToString(); keyP = new CacheableString(sKey); FwkInfo("random key check 2.1 .. 
keyP.tostring = {0}", keyP.ToString()); return keyP; } FwkInfo("random key check 3"); ICacheableKey[] keys = region.GetKeys(); int iKeySize = keys.Length; if (iKeySize == 0) { return keyP; } FwkInfo("random key check 4"); int iStartAt = Util.Rand(iKeySize); if (bInvalidOK) { return keys[iStartAt] as CacheableKey; } int iKeyIndex = iStartAt; do { FwkInfo("random key check 5"); bool hasValue = region.ContainsValueForKey(keys[iKeyIndex]); if (hasValue) { return keys[iKeyIndex] as CacheableKey; } iKeyIndex++; if (iKeyIndex >= iKeySize) { iKeyIndex = 0; } } while (iKeyIndex != iStartAt); FwkInfo("getKey: All values invalid in region"); return keyP; } public void setEventError(string pszMsg) { Util.BBSet(EventCountersBB, "EventErrorMessage", pszMsg); } public void removeRegion(Region region) { string name = region.FullPath; FwkInfo("In removeRegion, local destroy on " + name); region.LocalDestroyRegion(); Util.BBDecrement(EventCountersBB, name); } public Int32 getRegionCount() { FwkInfo("Check 1.1 Inside getRegionCount"); Region[] roots = CacheHelper.DCache.RootRegions(); FwkInfo("Check 1.1 root region count = {0}", roots.Length); return roots.Length; } #endregion #region Callback create methods public static ICacheWriter CreateETCacheWriter() { return new ETCacheWriter(); } public static ICacheLoader CreateETCacheLoader() { return new ETCacheLoader(); } public static ICacheListener CreateETCacheListener() { return new ETCacheListener(); } #endregion } }
// // Copyright (c) 2004-2011 Jaroslaw Kowalski <[email protected]> // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // #if !SILVERLIGHT namespace NLog.UnitTests.Targets { using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Threading; using Xunit; using Xunit.Extensions; using NLog.Config; using NLog.Layouts; using NLog.Targets; using NLog.Targets.Wrappers; using NLog.Time; using NLog.Internal; using NLog.LayoutRenderers; public class FileTargetTests : NLogTestBase { private readonly ILogger logger = LogManager.GetLogger("NLog.UnitTests.Targets.FileTargetTests"); [Fact] public void SimpleFileTest1() { var tempFile = Path.GetTempFileName(); try { var ft = new FileTarget { FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}", OpenFileCacheTimeout = 0 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); } finally { if (File.Exists(tempFile)) File.Delete(tempFile); } } [Fact] public void CsvHeaderTest() { var tempFile = Path.GetTempFileName(); try { for (var i = 0; i < 2; i++) { var layout = new CsvLayout { Delimiter = CsvColumnDelimiterMode.Semicolon, WithHeader = true, Columns = { new CsvColumn("name", "${logger}"), new CsvColumn("level", "${level}"), new CsvColumn("message", "${message}"), } }; var ft = new FileTarget { FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = layout, OpenFileCacheTimeout = 0, ReplaceFileContentsOnEachWrite = false }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); LogManager.Configuration = null; } AssertFileContents(tempFile, "name;level;message\nNLog.UnitTests.Targets.FileTargetTests;Debug;aaa\nNLog.UnitTests.Targets.FileTargetTests;Debug;aaa\n", Encoding.UTF8); } finally { if (File.Exists(tempFile)) File.Delete(tempFile); } } [Fact] public 
void DeleteFileOnStartTest() { var tempFile = Path.GetTempFileName(); try { var ft = new FileTarget { DeleteOldFileOnStartup = false, FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); // configure again, without // DeleteOldFileOnStartup ft = new FileTarget { DeleteOldFileOnStartup = false, FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\nDebug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); // configure again, this time with // DeleteOldFileOnStartup ft = new FileTarget { FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}", DeleteOldFileOnStartup = true }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); } } [Fact] public void ArchiveFileOnStartTest() { ArchiveFileOnStartTests(enableCompression: false); } #if NET4_5 [Fact] public void ArchiveFileOnStartTest_WithCompression() { ArchiveFileOnStartTests(enableCompression: true); } #endif private void ArchiveFileOnStartTests(bool enableCompression) { var tempFile = Path.GetTempFileName(); var tempArchiveFolder = Path.Combine(Path.GetTempPath(), "Archive"); var archiveExtension = enableCompression ? "zip" : "txt"; try { // Configure first time with ArchiveOldFileOnStartup = false. var ft = new FileTarget { ArchiveOldFileOnStartup = false, FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); // Configure second time with ArchiveOldFileOnStartup = false again. // Expected behavior: Extra content to be appended to the file. ft = new FileTarget { ArchiveOldFileOnStartup = false, FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\nDebug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); // Configure third time with ArchiveOldFileOnStartup = true again. // Expected behavior: Extra content will be stored in a new file; the // old content should be moved into a new location. var archiveTempName = Path.Combine(tempArchiveFolder, "archive." 
+ archiveExtension); ft = new FileTarget { #if NET4_5 EnableArchiveFileCompression = enableCompression, #endif FileName = SimpleLayout.Escape(tempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}", ArchiveOldFileOnStartup = true, ArchiveFileName = archiveTempName, ArchiveNumbering = ArchiveNumberingMode.Sequence, MaxArchiveFiles = 1 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("ddd"); logger.Info("eee"); logger.Warn("fff"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug ddd\nInfo eee\nWarn fff\n", Encoding.UTF8); Assert.True(File.Exists(archiveTempName)); var assertFileContents = #if NET4_5 enableCompression ? new Action<string, string, Encoding>(AssertZipFileContents) : AssertFileContents; #else new Action<string, string, Encoding>(AssertFileContents); #endif assertFileContents(archiveTempName, "Debug aaa\nInfo bbb\nWarn ccc\nDebug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempArchiveFolder)) Directory.Delete(tempArchiveFolder, true); } } [Fact] public void CreateDirsTest() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, LineEnding = LineEndingMode.LF, Layout = "${level} ${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); LogManager.Configuration = null; AssertFileContents(tempFile, "Debug aaa\nInfo bbb\nWarn ccc\n", Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void SequentialArchiveTest1() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}.txt"), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, Layout = "${message}", MaxArchiveFiles = 3, ArchiveNumbering = ArchiveNumberingMode.Sequence }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 *(3 x aaa + \n) bytes // so that we should get a full file + 3 archives Generate1000BytesLog('a'); Generate1000BytesLog('b'); Generate1000BytesLog('c'); Generate1000BytesLog('d'); Generate1000BytesLog('e'); LogManager.Configuration = null; AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0001.txt"), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0002.txt"), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0003.txt"), StringRepeat(250, "ddd\n"), Encoding.UTF8); //0000 should not extists because of MaxArchiveFiles=3 Assert.True(!File.Exists(Path.Combine(tempPath, "archive/0000.txt"))); Assert.True(!File.Exists(Path.Combine(tempPath, "archive/0004.txt"))); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void SequentialArchiveTest1_MaxArchiveFiles_0() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, 
"file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}.txt"), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Sequence, Layout = "${message}", MaxArchiveFiles = 0 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 *(3 x aaa + \n) bytes // so that we should get a full file + 4 archives Generate1000BytesLog('a'); Generate1000BytesLog('b'); Generate1000BytesLog('c'); Generate1000BytesLog('d'); Generate1000BytesLog('e'); LogManager.Configuration = null; AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0000.txt"), StringRepeat(250, "aaa\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0001.txt"), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0002.txt"), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0003.txt"), StringRepeat(250, "ddd\n"), Encoding.UTF8); Assert.True(!File.Exists(Path.Combine(tempPath, "archive/0004.txt"))); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact(Skip = "this is not supported, because we cannot create multiple archive files with ArchiveNumberingMode.Date (for one day)")] public void ArchiveAboveSizeWithArchiveNumberingModeDate_maxfiles_o() { var tempPath = Path.Combine(Path.GetTempPath(), "ArchiveEveryCombinedWithArchiveAboveSize_" + Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}.txt"), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, Layout = "${message}", ArchiveNumbering = ArchiveNumberingMode.Date }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //e.g. 20150804 var archiveFileName = DateTime.Now.ToString("yyyyMMdd"); // we emit 5 * 250 *(3 x aaa + \n) bytes // so that we should get a full file + 3 archives for (var i = 0; i < 250; ++i) { logger.Debug("aaa"); } for (var i = 0; i < 250; ++i) { logger.Debug("bbb"); } for (var i = 0; i < 250; ++i) { logger.Debug("ccc"); } for (var i = 0; i < 250; ++i) { logger.Debug("ddd"); } for (var i = 0; i < 250; ++i) { logger.Debug("eee"); } LogManager.Configuration = null; //we expect only eee and all other in the archive AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); //DUNNO what to expected! 
//try (which fails) AssertFileContents( Path.Combine(tempPath, string.Format("archive/{0}.txt", archiveFileName)), StringRepeat(250, "aaa\n") + StringRepeat(250, "bbb\n") + StringRepeat(250, "ccc\n") + StringRepeat(250, "ddd\n"), Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void DeleteArchiveFilesByDate() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{#}.txt"), ArchiveAboveSize = 50, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Date, ArchiveDateFormat = "yyyyMMddHHmmssfff", //make sure the milliseconds are set in the filename Layout = "${message}", MaxArchiveFiles = 3 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing 19 times 10 bytes (9 char + linefeed) will result in 3 archive files and 1 current file for (var i = 0; i < 19; ++i) { logger.Debug("123456789"); //build in a small sleep to make sure the current time is reflected in the filename //do this every 5 entries if (i % 5 == 0) Thread.Sleep(50); } //Setting the Configuration to [null] will result in a 'Dump' of the current log entries LogManager.Configuration = null; var archivePath = Path.Combine(tempPath, "archive"); var files = Directory.GetFiles(archivePath).OrderBy(s => s); //the amount of archived files may not exceed the set 'MaxArchiveFiles' Assert.Equal(ft.MaxArchiveFiles, files.Count()); SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing just one line of 11 bytes will trigger the cleanup of old archived files //as stated by the MaxArchiveFiles property, but will only delete the oldest file logger.Debug("1234567890"); LogManager.Configuration = null; var files2 = Directory.GetFiles(archivePath).OrderBy(s => s); Assert.Equal(ft.MaxArchiveFiles, files2.Count()); //the oldest file should be deleted Assert.DoesNotContain(files.ElementAt(0), files2); //two files should still be there Assert.Equal(files.ElementAt(1), files2.ElementAt(0)); Assert.Equal(files.ElementAt(2), files2.ElementAt(1)); //one new archive file shoud be created Assert.DoesNotContain(files2.ElementAt(2), files); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void DeleteArchiveFilesByDateWithDateName() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "${date:format=yyyyMMddHHmmssfff}.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "{#}.txt"), ArchiveEvery = FileArchivePeriod.Minute, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Date, ArchiveDateFormat = "yyyyMMddHHmmssfff", //make sure the milliseconds are set in the filename Layout = "${message}", MaxArchiveFiles = 3 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing 4 times 10 bytes (9 char + linefeed) will result in 2 archive files and 1 current file for (var i = 0; i < 4; ++i) { logger.Debug("123456789"); //build in a sleep to make sure the current time is reflected in the filename Thread.Sleep(50); } //Setting the Configuration to [null] will result in a 'Dump' of the current log entries 
LogManager.Configuration = null; var files = Directory.GetFiles(tempPath).OrderBy(s => s); //the amount of archived files may not exceed the set 'MaxArchiveFiles' Assert.Equal(ft.MaxArchiveFiles, files.Count()); SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing 50ms later will trigger the cleanup of old archived files //as stated by the MaxArchiveFiles property, but will only delete the oldest file Thread.Sleep(50); logger.Debug("123456789"); LogManager.Configuration = null; var files2 = Directory.GetFiles(tempPath).OrderBy(s => s); Assert.Equal(ft.MaxArchiveFiles, files2.Count()); //the oldest file should be deleted Assert.DoesNotContain(files.ElementAt(0), files2); //two files should still be there Assert.Equal(files.ElementAt(1), files2.ElementAt(0)); Assert.Equal(files.ElementAt(2), files2.ElementAt(1)); //one new archive file shoud be created Assert.DoesNotContain(files2.ElementAt(2), files); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } public static IEnumerable<object[]> DateArchive_UsesDateFromCurrentTimeSource_TestParameters { get { var booleanValues = new[] { true, false }; var timeKindValues = new[] { DateTimeKind.Utc, DateTimeKind.Local }; return from concurrentWrites in booleanValues from keepFileOpen in booleanValues from networkWrites in booleanValues from timeKind in timeKindValues select new object[] { timeKind, concurrentWrites, keepFileOpen, networkWrites }; } } [Theory] [PropertyData("DateArchive_UsesDateFromCurrentTimeSource_TestParameters")] public void DateArchive_UsesDateFromCurrentTimeSource(DateTimeKind timeKind, bool concurrentWrites, bool keepFileOpen, bool networkWrites) { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); var defaultTimeSource = TimeSource.Current; try { var timeSource = new TimeSourceTests.ShiftedTimeSource(timeKind); TimeSource.Current = timeSource; var archiveFileNameTemplate = Path.Combine(tempPath, "archive/{#}.txt"); var ft = new FileTarget { FileName = tempFile, ArchiveFileName = archiveFileNameTemplate, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Date, ArchiveEvery = FileArchivePeriod.Day, ArchiveDateFormat = "yyyyMMdd", Layout = "${date:format=O}|${message}", MaxArchiveFiles = 3, ConcurrentWrites = concurrentWrites, KeepFileOpen = keepFileOpen, NetworkWrites = networkWrites, }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); logger.Debug("123456789"); DateTime previousWriteTime = timeSource.Time; const int daysToTestLogging = 5; const int intervalsPerDay = 24; var loggingInterval = TimeSpan.FromHours(1); for (var i = 0; i < daysToTestLogging * intervalsPerDay; ++i) { timeSource.AddToLocalTime(loggingInterval); var eventInfo = new LogEventInfo(LogLevel.Debug, logger.Name, "123456789"); logger.Log(eventInfo); var dayIsChanged = eventInfo.TimeStamp.Date != previousWriteTime.Date; // ensure new archive is created only when the day part of time is changed var archiveFileName = archiveFileNameTemplate.Replace("{#}", previousWriteTime.ToString(ft.ArchiveDateFormat)); var archiveExists = File.Exists(archiveFileName); if (dayIsChanged) Assert.True(archiveExists, string.Format("new archive should be created when the day part of {0} time is changed", timeKind)); else Assert.False(archiveExists, string.Format("new archive should not be create when day part of {0} time is unchanged", 
timeKind)); previousWriteTime = eventInfo.TimeStamp.Date; if (dayIsChanged) timeSource.AddToSystemTime(TimeSpan.FromDays(1)); } //Setting the Configuration to [null] will result in a 'Dump' of the current log entries LogManager.Configuration = null; var archivePath = Path.Combine(tempPath, "archive"); var files = Directory.GetFiles(archivePath).OrderBy(s => s).ToList(); //the amount of archived files may not exceed the set 'MaxArchiveFiles' Assert.Equal(ft.MaxArchiveFiles, files.Count); SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing one line on a new day will trigger the cleanup of old archived files //as stated by the MaxArchiveFiles property, but will only delete the oldest file timeSource.AddToLocalTime(TimeSpan.FromDays(1)); logger.Debug("1234567890"); LogManager.Configuration = null; var files2 = Directory.GetFiles(archivePath).OrderBy(s => s).ToList(); Assert.Equal(ft.MaxArchiveFiles, files2.Count); //the oldest file should be deleted Assert.DoesNotContain(files[0], files2); //two files should still be there Assert.Equal(files[1], files2[0]); Assert.Equal(files[2], files2[1]); //one new archive file shoud be created Assert.DoesNotContain(files2[2], files); } finally { TimeSource.Current = defaultTimeSource; // restore default time source LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void DeleteArchiveFilesByDate_MaxArchiveFiles_0() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{#}.txt"), ArchiveAboveSize = 50, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Date, ArchiveDateFormat = "yyyyMMddHHmmssfff", //make sure the milliseconds are set in the filename Layout = "${message}", MaxArchiveFiles = 0 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing 19 times 10 bytes (9 char + linefeed) will result in 3 archive files and 1 current file for (var i = 0; i < 19; ++i) { logger.Debug("123456789"); //build in a small sleep to make sure the current time is reflected in the filename //do this every 5 entries if (i % 5 == 0) { Thread.Sleep(50); } } //Setting the Configuration to [null] will result in a 'Dump' of the current log entries LogManager.Configuration = null; var archivePath = Path.Combine(tempPath, "archive"); var fileCount = Directory.EnumerateFiles(archivePath).Count(); Assert.Equal(3, fileCount); SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //create 1 new file for archive logger.Debug("1234567890"); LogManager.Configuration = null; var fileCount2 = Directory.EnumerateFiles(archivePath).Count(); //there should be 1 more file Assert.Equal(4, fileCount2); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) { File.Delete(tempFile); } if (Directory.Exists(tempPath)) { Directory.Delete(tempPath, true); } } } [Fact] public void DeleteArchiveFilesByDate_AlteredMaxArchive() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{#}.txt"), ArchiveAboveSize = 50, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Date, ArchiveDateFormat = "yyyyMMddHHmmssfff", //make sure the 
milliseconds are set in the filename Layout = "${message}", MaxArchiveFiles = 5 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing 29 times 10 bytes (9 char + linefeed) will result in 3 archive files and 1 current file for (var i = 0; i < 29; ++i) { logger.Debug("123456789"); //build in a small sleep to make sure the current time is reflected in the filename //do this every 5 entries if (i % 5 == 0) Thread.Sleep(50); } //Setting the Configuration to [null] will result in a 'Dump' of the current log entries LogManager.Configuration = null; var archivePath = Path.Combine(tempPath, "archive"); var files = Directory.GetFiles(archivePath).OrderBy(s => s); //the amount of archived files may not exceed the set 'MaxArchiveFiles' Assert.Equal(ft.MaxArchiveFiles, files.Count()); //alter the MaxArchivedFiles ft.MaxArchiveFiles = 2; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //writing just one line of 11 bytes will trigger the cleanup of old archived files //as stated by the MaxArchiveFiles property, but will only delete the oldest files logger.Debug("1234567890"); LogManager.Configuration = null; var files2 = Directory.GetFiles(archivePath).OrderBy(s => s); Assert.Equal(ft.MaxArchiveFiles, files2.Count()); //the oldest files should be deleted Assert.DoesNotContain(files.ElementAt(0), files2); Assert.DoesNotContain(files.ElementAt(1), files2); Assert.DoesNotContain(files.ElementAt(2), files2); Assert.DoesNotContain(files.ElementAt(3), files2); //one files should still be there Assert.Equal(files.ElementAt(4), files2.ElementAt(0)); //one new archive file shoud be created Assert.DoesNotContain(files2.ElementAt(1), files); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void RepeatingHeaderTest() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { const string header = "Headerline"; var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}.txt"), ArchiveAboveSize = 51, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Sequence, Layout = "${message}", Header = header, MaxArchiveFiles = 2, }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); for (var i = 0; i < 16; ++i) { logger.Debug("123456789"); } LogManager.Configuration = null; AssertFileContentsStartsWith(tempFile, header, Encoding.UTF8); AssertFileContentsStartsWith(Path.Combine(tempPath, "archive/0002.txt"), header, Encoding.UTF8); AssertFileContentsStartsWith(Path.Combine(tempPath, "archive/0001.txt"), header, Encoding.UTF8); Assert.True(!File.Exists(Path.Combine(tempPath, "archive/0000.txt"))); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void RollingArchiveTest1() { RollingArchiveTests(enableCompression: false); } #if NET4_5 [Fact] public void RollingArchiveCompressionTest1() { RollingArchiveTests(enableCompression: true); } #endif private void RollingArchiveTests(bool enableCompression) { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); var archiveExtension = enableCompression ? 
"zip" : "txt"; try { var ft = new FileTarget { #if NET4_5 EnableArchiveFileCompression = enableCompression, #endif FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}." + archiveExtension), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Rolling, Layout = "${message}", MaxArchiveFiles = 3 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 * (3 x aaa + \n) bytes // so that we should get a full file + 3 archives Generate1000BytesLog('a'); Generate1000BytesLog('b'); Generate1000BytesLog('c'); Generate1000BytesLog('d'); Generate1000BytesLog('e'); LogManager.Configuration = null; var assertFileContents = #if NET4_5 enableCompression ? new Action<string, string, Encoding>(AssertZipFileContents) : AssertFileContents; #else new Action<string, string, Encoding>(AssertFileContents); #endif AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); assertFileContents( Path.Combine(tempPath, "archive/0000." + archiveExtension), StringRepeat(250, "ddd\n"), Encoding.UTF8); assertFileContents( Path.Combine(tempPath, "archive/0001." + archiveExtension), StringRepeat(250, "ccc\n"), Encoding.UTF8); assertFileContents( Path.Combine(tempPath, "archive/0002." + archiveExtension), StringRepeat(250, "bbb\n"), Encoding.UTF8); Assert.True(!File.Exists(Path.Combine(tempPath, "archive/0003." + archiveExtension))); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void RollingArchiveTest_MaxArchiveFiles_0() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{####}.txt"), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, ArchiveNumbering = ArchiveNumberingMode.Rolling, Layout = "${message}", MaxArchiveFiles = 0 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 * (3 x aaa + \n) bytes // so that we should get a full file + 4 archives Generate1000BytesLog('a'); Generate1000BytesLog('b'); Generate1000BytesLog('c'); Generate1000BytesLog('d'); Generate1000BytesLog('e'); LogManager.Configuration = null; AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0000.txt"), StringRepeat(250, "ddd\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0001.txt"), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0002.txt"), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/0003.txt"), StringRepeat(250, "aaa\n"), Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) { File.Delete(tempFile); } if (Directory.Exists(tempPath)) { Directory.Delete(tempPath, true); } } } [Fact] public void MultiFileWrite() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); try { var ft = new FileTarget { FileName = Path.Combine(tempPath, "${level}.txt"), LineEnding = LineEndingMode.LF, Layout = "${message}" }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); for (var i = 0; i < 250; ++i) { logger.Trace("@@@"); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); logger.Error("ddd"); logger.Fatal("eee"); } 
LogManager.Configuration = null; Assert.False(File.Exists(Path.Combine(tempPath, "Trace.txt"))); AssertFileContents(Path.Combine(tempPath, "Debug.txt"), StringRepeat(250, "aaa\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Info.txt"), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Warn.txt"), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Error.txt"), StringRepeat(250, "ddd\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Fatal.txt"), StringRepeat(250, "eee\n"), Encoding.UTF8); } finally { //if (File.Exists(tempFile)) // File.Delete(tempFile); LogManager.Configuration = null; if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void BufferedMultiFileWrite() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); try { var ft = new FileTarget { FileName = Path.Combine(tempPath, "${level}.txt"), LineEnding = LineEndingMode.LF, Layout = "${message}" }; SimpleConfigurator.ConfigureForTargetLogging(new BufferingTargetWrapper(ft, 10), LogLevel.Debug); for (var i = 0; i < 250; ++i) { logger.Trace("@@@"); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); logger.Error("ddd"); logger.Fatal("eee"); } LogManager.Configuration = null; Assert.False(File.Exists(Path.Combine(tempPath, "Trace.txt"))); AssertFileContents(Path.Combine(tempPath, "Debug.txt"), StringRepeat(250, "aaa\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Info.txt"), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Warn.txt"), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Error.txt"), StringRepeat(250, "ddd\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Fatal.txt"), StringRepeat(250, "eee\n"), Encoding.UTF8); } finally { //if (File.Exists(tempFile)) // File.Delete(tempFile); LogManager.Configuration = null; if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void AsyncMultiFileWrite() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); try { var ft = new FileTarget { FileName = Path.Combine(tempPath, "${level}.txt"), LineEnding = LineEndingMode.LF, Layout = "${message} ${threadid}" }; // this also checks that thread-volatile layouts // such as ${threadid} are properly cached and not recalculated // in logging threads. 
var threadID = Thread.CurrentThread.ManagedThreadId.ToString(); SimpleConfigurator.ConfigureForTargetLogging(new AsyncTargetWrapper(ft, 1000, AsyncTargetWrapperOverflowAction.Grow), LogLevel.Debug); LogManager.ThrowExceptions = true; for (var i = 0; i < 250; ++i) { logger.Trace("@@@"); logger.Debug("aaa"); logger.Info("bbb"); logger.Warn("ccc"); logger.Error("ddd"); logger.Fatal("eee"); } LogManager.Flush(); LogManager.Configuration = null; Assert.False(File.Exists(Path.Combine(tempPath, "Trace.txt"))); AssertFileContents(Path.Combine(tempPath, "Debug.txt"), StringRepeat(250, "aaa " + threadID + "\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Info.txt"), StringRepeat(250, "bbb " + threadID + "\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Warn.txt"), StringRepeat(250, "ccc " + threadID + "\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Error.txt"), StringRepeat(250, "ddd " + threadID + "\n"), Encoding.UTF8); AssertFileContents(Path.Combine(tempPath, "Fatal.txt"), StringRepeat(250, "eee " + threadID + "\n"), Encoding.UTF8); } finally { //if (File.Exists(tempFile)) // File.Delete(tempFile); LogManager.Configuration = null; if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); // Clean up configuration change, breaks onetimeonlyexceptioninhandlertest LogManager.ThrowExceptions = true; } } [Fact] public void BatchErrorHandlingTest() { var fileTarget = new FileTarget { FileName = "${logger}", Layout = "${message}" }; fileTarget.Initialize(null); // make sure that when file names get sorted, the asynchronous continuations are sorted with them as well var exceptions = new List<Exception>(); var events = new[] { new LogEventInfo(LogLevel.Info, "file99.txt", "msg1").WithContinuation(exceptions.Add), new LogEventInfo(LogLevel.Info, "", "msg1").WithContinuation(exceptions.Add), new LogEventInfo(LogLevel.Info, "", "msg2").WithContinuation(exceptions.Add), new LogEventInfo(LogLevel.Info, "", "msg3").WithContinuation(exceptions.Add) }; fileTarget.WriteAsyncLogEvents(events); Assert.Equal(4, exceptions.Count); Assert.Null(exceptions[0]); Assert.NotNull(exceptions[1]); Assert.NotNull(exceptions[2]); Assert.NotNull(exceptions[3]); } [Fact] public void DisposingFileTarget_WhenNotIntialized_ShouldNotThrow() { var exceptionThrown = false; var fileTarget = new FileTarget(); try { fileTarget.Dispose(); } catch { exceptionThrown = true; } Assert.False(exceptionThrown); } [Fact] public void FileTarget_ArchiveNumbering_DateAndSequence() { FileTarget_ArchiveNumbering_DateAndSequenceTests(enableCompression: false); } #if NET4_5 [Fact] public void FileTarget_ArchiveNumbering_DateAndSequence_WithCompression() { FileTarget_ArchiveNumbering_DateAndSequenceTests(enableCompression: true); } #endif private void FileTarget_ArchiveNumbering_DateAndSequenceTests(bool enableCompression) { var tempPath = ArchiveFilenameHelper.GenerateTempPath(); var tempFile = Path.Combine(tempPath, "file.txt"); var archiveExtension = enableCompression ? "zip" : "txt"; try { var ft = new FileTarget { #if NET4_5 EnableArchiveFileCompression = enableCompression, #endif FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{#}." 
+ archiveExtension), ArchiveDateFormat = "yyyy-MM-dd", ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, Layout = "${message}", MaxArchiveFiles = 3, ArchiveNumbering = ArchiveNumberingMode.DateAndSequence, ArchiveEvery = FileArchivePeriod.Day }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 *(3 x aaa + \n) bytes // so that we should get a full file + 3 archives Generate1000BytesLog('a'); Generate1000BytesLog('b'); Generate1000BytesLog('c'); Generate1000BytesLog('d'); Generate1000BytesLog('e'); string archiveFilename = DateTime.Now.ToString(ft.ArchiveDateFormat); LogManager.Configuration = null; #if NET4_5 var assertFileContents = enableCompression ? new Action<string, string, Encoding>(AssertZipFileContents) : AssertFileContents; #else var assertFileContents = new Action<string, string, Encoding>(AssertFileContents); #endif ArchiveFilenameHelper helper = new ArchiveFilenameHelper(Path.Combine(tempPath, "archive"), archiveFilename, archiveExtension); AssertFileContents(tempFile, StringRepeat(250, "eee\n"), Encoding.UTF8); assertFileContents(helper.GetFullPath(1), StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileSize(helper.GetFullPath(1), ft.ArchiveAboveSize); assertFileContents(helper.GetFullPath(2), StringRepeat(250, "ccc\n"), Encoding.UTF8); AssertFileSize(helper.GetFullPath(2), ft.ArchiveAboveSize); assertFileContents(helper.GetFullPath(3), StringRepeat(250, "ddd\n"), Encoding.UTF8); AssertFileSize(helper.GetFullPath(3), ft.ArchiveAboveSize); Assert.True(!helper.Exists(0), "old one removed - max files"); Assert.True(!helper.Exists(4), "stop at 3"); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void FileTarget_WithArchiveFileNameEndingInNumberPlaceholder_ShouldArchiveFile() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/test.log.{####}"), ArchiveAboveSize = 1000 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); for (var i = 0; i < 100; ++i) { logger.Debug("a"); } LogManager.Configuration = null; Assert.True(File.Exists(tempFile)); Assert.True(File.Exists(Path.Combine(tempPath, "archive/test.log.0000"))); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void FileTarget_InvalidFileNameCorrection() { var tempFile = Path.GetTempFileName(); var invalidTempFile = tempFile + Path.GetInvalidFileNameChars()[0]; var expectedCorrectedTempFile = tempFile + "_"; try { var ft = new FileTarget { FileName = SimpleLayout.Escape(invalidTempFile), LineEnding = LineEndingMode.LF, Layout = "${level} ${message}", OpenFileCacheTimeout = 0 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Fatal); logger.Fatal("aaa"); LogManager.Configuration = null; AssertFileContents(expectedCorrectedTempFile, "Fatal aaa\n", Encoding.UTF8); } finally { if (File.Exists(invalidTempFile)) File.Delete(invalidTempFile); if (File.Exists(expectedCorrectedTempFile)) File.Delete(expectedCorrectedTempFile); } } [Fact] public void FileTarget_LogAndArchiveFilesWithSameName_ShouldArchive() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var logFile = Path.Combine(tempPath, 
"Application.log"); var tempDirectory = new DirectoryInfo(tempPath); try { var archiveFile = Path.Combine(tempPath, "Application{#}.log"); var archiveFileMask = "Application*.log"; var ft = new FileTarget { FileName = logFile, ArchiveFileName = archiveFile, ArchiveAboveSize = 1, //Force immediate archival ArchiveNumbering = ArchiveNumberingMode.DateAndSequence, MaxArchiveFiles = 5 }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); //Creates 5 archive files. for (int i = 0; i <= 5; i++) { logger.Debug("a"); } Assert.True(File.Exists(logFile)); //Five archive files, plus the log file itself. Assert.True(tempDirectory.GetFiles(archiveFileMask).Count() == 5 + 1); } finally { LogManager.Configuration = null; if (tempDirectory.Exists) { tempDirectory.Delete(true); } } } [Fact] public void Single_Archive_File_Rolls_Correctly() { var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); var tempFile = Path.Combine(tempPath, "file.txt"); try { var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/file.txt2"), ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, Layout = "${message}", MaxArchiveFiles = 1, }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); // we emit 5 * 250 *(3 x aaa + \n) bytes // so that we should get a full file + 3 archives for (var i = 0; i < 250; ++i) { logger.Debug("aaa"); } for (var i = 0; i < 250; ++i) { logger.Debug("bbb"); } LogManager.Configuration = null; AssertFileContents(tempFile, StringRepeat(250, "bbb\n"), Encoding.UTF8); AssertFileContents( Path.Combine(tempPath, "archive/file.txt2"), StringRepeat(250, "aaa\n"), Encoding.UTF8); } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } /// <summary> /// Remove archived files in correct order /// </summary> [Fact] public void FileTarget_ArchiveNumbering_remove_correct_order() { var tempPath = ArchiveFilenameHelper.GenerateTempPath(); var tempFile = Path.Combine(tempPath, "file.txt"); var archiveExtension = "txt"; try { var maxArchiveFiles = 10; var ft = new FileTarget { FileName = tempFile, ArchiveFileName = Path.Combine(tempPath, "archive/{#}." + archiveExtension), ArchiveDateFormat = "yyyy-MM-dd", ArchiveAboveSize = 1000, LineEnding = LineEndingMode.LF, Layout = "${message}", MaxArchiveFiles = maxArchiveFiles, ArchiveNumbering = ArchiveNumberingMode.DateAndSequence, }; SimpleConfigurator.ConfigureForTargetLogging(ft, LogLevel.Debug); ArchiveFilenameHelper helper = new ArchiveFilenameHelper(Path.Combine(tempPath, "archive"), DateTime.Now.ToString(ft.ArchiveDateFormat), archiveExtension); Generate1000BytesLog('a'); for (int i = 0; i < maxArchiveFiles; i++) { Generate1000BytesLog('a'); Assert.True(helper.Exists(i), string.Format("file {0} is missing", i)); } for (int i = maxArchiveFiles; i < 100; i++) { Generate1000BytesLog('b'); var numberToBeRemoved = i - maxArchiveFiles; // number 11, we need to remove 1 etc Assert.True(!helper.Exists(numberToBeRemoved), string.Format("archive file {0} has not been removed! 
We just created file {1}", numberToBeRemoved, i)); } } finally { LogManager.Configuration = null; if (File.Exists(tempFile)) File.Delete(tempFile); if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } private void Generate1000BytesLog(char c) { for (var i = 0; i < 250; ++i) { //3 chars plus a newline = 4 bytes logger.Debug(new string(c, 3)); } } /// <summary> /// Archive file helper /// </summary> /// <remarks>TODO rewrite older test</remarks> private class ArchiveFilenameHelper { public string FolderName { get; private set; } public string FileName { get; private set; } /// <summary> /// Ext without dot /// </summary> public string Ext { get; set; } /// <summary> /// Initializes a new instance of the <see cref="ArchiveFilenameHelper"/> class. /// </summary> public ArchiveFilenameHelper(string folderName, string fileName, string ext) { Ext = ext; FileName = fileName; FolderName = folderName; } public bool Exists(int number) { return File.Exists(GetFullPath(number)); } public string GetFullPath(int number) { return Path.Combine(String.Format("{0}/{1}.{2}.{3}", FolderName, FileName, number, Ext)); } public static string GenerateTempPath() { return Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); } } } } #endif
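// The tests above exercise NLog's FileTarget size-based archiving (ArchiveAboveSize,
// ArchiveNumbering, MaxArchiveFiles) through SimpleConfigurator. The following is a
// minimal stand-alone sketch of that same API surface, not part of the test suite;
// the class name and paths are illustrative placeholders.
using System.IO;
using NLog;
using NLog.Config;
using NLog.Targets;

internal static class FileTargetArchivingSketch
{
    private static readonly Logger Logger = LogManager.GetCurrentClassLogger();

    internal static void Run()
    {
        var target = new FileTarget
        {
            FileName = Path.Combine(Path.GetTempPath(), "app.log"),
            ArchiveFileName = Path.Combine(Path.GetTempPath(), "archive/{####}.log"),
            ArchiveAboveSize = 1000,                          // roll once the current file exceeds ~1 KB
            ArchiveNumbering = ArchiveNumberingMode.Rolling,  // 0000 is always the most recent archive
            MaxArchiveFiles = 3,                              // older archives are deleted automatically
            LineEnding = LineEndingMode.LF,
            Layout = "${message}"
        };

        SimpleConfigurator.ConfigureForTargetLogging(target, LogLevel.Debug);

        for (var i = 0; i < 2000; ++i)
        {
            Logger.Debug("archiving sketch line {0}", i);
        }

        // Setting the configuration to null flushes and closes the target,
        // which is what the tests above do before asserting on file contents.
        LogManager.Configuration = null;
    }
}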
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Globalization; using System.IO; using System.Text.Encodings.Web; using System.Text.Unicode; using Xunit; namespace Microsoft.Framework.WebEncoders { public class JavaScriptStringEncoderTests { [Fact] public void Ctor_WithTextEncoderSettings() { // Arrange var filter = new TextEncoderSettings(); filter.AllowCharacters('a', 'b'); filter.AllowCharacters('\0', '&', '\uFFFF', 'd'); JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(filter); // Act & assert Assert.Equal("a", encoder.JavaScriptStringEncode("a")); Assert.Equal("b", encoder.JavaScriptStringEncode("b")); Assert.Equal(@"\u0063", encoder.JavaScriptStringEncode("c")); Assert.Equal("d", encoder.JavaScriptStringEncode("d")); Assert.Equal(@"\u0000", encoder.JavaScriptStringEncode("\0")); // we still always encode control chars Assert.Equal(@"\u0026", encoder.JavaScriptStringEncode("&")); // we still always encode HTML-special chars Assert.Equal(@"\uFFFF", encoder.JavaScriptStringEncode("\uFFFF")); // we still always encode non-chars and other forbidden chars } [Fact] public void Ctor_WithUnicodeRanges() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(UnicodeRanges.Latin1Supplement, UnicodeRanges.MiscellaneousSymbols); // Act & assert Assert.Equal(@"\u0061", encoder.JavaScriptStringEncode("a")); Assert.Equal("\u00E9", encoder.JavaScriptStringEncode("\u00E9" /* LATIN SMALL LETTER E WITH ACUTE */)); Assert.Equal("\u2601", encoder.JavaScriptStringEncode("\u2601" /* CLOUD */)); } [Fact] public void Ctor_WithNoParameters_DefaultsToBasicLatin() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); // Act & assert Assert.Equal("a", encoder.JavaScriptStringEncode("a")); Assert.Equal(@"\u00E9", encoder.JavaScriptStringEncode("\u00E9" /* LATIN SMALL LETTER E WITH ACUTE */)); Assert.Equal(@"\u2601", encoder.JavaScriptStringEncode("\u2601" /* CLOUD */)); } [Fact] public void Default_EquivalentToBasicLatin() { // Arrange JavaScriptStringEncoder controlEncoder = new JavaScriptStringEncoder(UnicodeRanges.BasicLatin); JavaScriptStringEncoder testEncoder = JavaScriptStringEncoder.Default; // Act & assert for (int i = 0; i <= Char.MaxValue; i++) { if (!IsSurrogateCodePoint(i)) { string input = new String((char)i, 1); Assert.Equal(controlEncoder.JavaScriptStringEncode(input), testEncoder.JavaScriptStringEncode(input)); } } } [Fact] public void JavaScriptStringEncode_AllRangesAllowed_StillEncodesForbiddenChars_Simple_Escaping() { // The following two calls could be simply InlineData to the Theory below // Unfortunatelly, the xUnit logger fails to escape the inputs when logging the test results, // and so the suite fails despite all tests passing. // TODO: I will try to fix it in xUnit, but for now this is a workaround to enable these tests. 
JavaScriptStringEncode_AllRangesAllowed_StillEncodesForbiddenChars_Simple("\b", @"\b"); JavaScriptStringEncode_AllRangesAllowed_StillEncodesForbiddenChars_Simple("\f", @"\f"); } [Theory] [InlineData("<", @"\u003C")] [InlineData(">", @"\u003E")] [InlineData("&", @"\u0026")] [InlineData("'", @"\u0027")] [InlineData("\"", @"\u0022")] [InlineData("+", @"\u002B")] [InlineData("\\", @"\\")] [InlineData("/", @"\/")] [InlineData("\n", @"\n")] [InlineData("\t", @"\t")] [InlineData("\r", @"\r")] public void JavaScriptStringEncode_AllRangesAllowed_StillEncodesForbiddenChars_Simple(string input, string expected) { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(UnicodeRanges.All); // Act string retVal = encoder.JavaScriptStringEncode(input); // Assert Assert.Equal(expected, retVal); } [Fact] public void JavaScriptStringEncode_AllRangesAllowed_StillEncodesForbiddenChars_Extended() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(UnicodeRanges.All); // Act & assert - BMP chars for (int i = 0; i <= 0xFFFF; i++) { string input = new String((char)i, 1); string expected; if (IsSurrogateCodePoint(i)) { expected = "\uFFFD"; // unpaired surrogate -> Unicode replacement char } else { if (input == "\b") { expected = @"\b"; } else if (input == "\t") { expected = @"\t"; } else if (input == "\n") { expected = @"\n"; } else if (input == "\f") { expected = @"\f"; } else if (input == "\r") { expected = @"\r"; } else if (input == "\\") { expected = @"\\"; } else if (input == "/") { expected = @"\/"; } else { bool mustEncode = false; switch (i) { case '<': case '>': case '&': case '\"': case '\'': case '+': mustEncode = true; break; } if (i <= 0x001F || (0x007F <= i && i <= 0x9F)) { mustEncode = true; // control char } else if (!UnicodeHelpers.IsCharacterDefined((char)i)) { mustEncode = true; // undefined (or otherwise disallowed) char } if (mustEncode) { expected = String.Format(CultureInfo.InvariantCulture, @"\u{0:X4}", i); } else { expected = input; // no encoding } } } string retVal = encoder.JavaScriptStringEncode(input); Assert.Equal(expected, retVal); } // Act & assert - astral chars for (int i = 0x10000; i <= 0x10FFFF; i++) { string input = Char.ConvertFromUtf32(i); string expected = String.Format(CultureInfo.InvariantCulture, @"\u{0:X4}\u{1:X4}", (uint)input[0], (uint)input[1]); string retVal = encoder.JavaScriptStringEncode(input); Assert.Equal(expected, retVal); } } [Fact] public void JavaScriptStringEncode_BadSurrogates_ReturnsUnicodeReplacementChar() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(UnicodeRanges.All); // allow all codepoints // "a<unpaired leading>b<unpaired trailing>c<trailing before leading>d<unpaired trailing><valid>e<high at end of string>" const string input = "a\uD800b\uDFFFc\uDFFF\uD800d\uDFFF\uD800\uDFFFe\uD800"; const string expected = "a\uFFFDb\uFFFDc\uFFFD\uFFFDd\uFFFD\\uD800\\uDFFFe\uFFFD"; // 'D800' 'DFFF' was preserved since it's valid // Act string retVal = encoder.JavaScriptStringEncode(input); // Assert Assert.Equal(expected, retVal); } [Fact] public void JavaScriptStringEncode_EmptyStringInput_ReturnsEmptyString() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); // Act & assert Assert.Equal("", encoder.JavaScriptStringEncode("")); } [Fact] public void JavaScriptStringEncode_InputDoesNotRequireEncoding_ReturnsOriginalStringInstance() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); string input = "Hello, there!"; // Act & assert 
Assert.Same(input, encoder.JavaScriptStringEncode(input)); } [Fact] public void JavaScriptStringEncode_NullInput_Throws() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); Assert.Throws<ArgumentNullException>(() => { encoder.JavaScriptStringEncode(null); }); } [Fact] public void JavaScriptStringEncode_WithCharsRequiringEncodingAtBeginning() { Assert.Equal(@"\u0026Hello, there!", new JavaScriptStringEncoder().JavaScriptStringEncode("&Hello, there!")); } [Fact] public void JavaScriptStringEncode_WithCharsRequiringEncodingAtEnd() { Assert.Equal(@"Hello, there!\u0026", new JavaScriptStringEncoder().JavaScriptStringEncode("Hello, there!&")); } [Fact] public void JavaScriptStringEncode_WithCharsRequiringEncodingInMiddle() { Assert.Equal(@"Hello, \u0026there!", new JavaScriptStringEncoder().JavaScriptStringEncode("Hello, &there!")); } [Fact] public void JavaScriptStringEncode_WithCharsRequiringEncodingInterspersed() { Assert.Equal(@"Hello, \u003Cthere\u003E!", new JavaScriptStringEncoder().JavaScriptStringEncode("Hello, <there>!")); } [Fact] public void JavaScriptStringEncode_CharArray() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); var output = new StringWriter(); // Act encoder.JavaScriptStringEncode("Hello+world!".ToCharArray(), 3, 5, output); // Assert Assert.Equal(@"lo\u002Bwo", output.ToString()); } [Fact] public void JavaScriptStringEncode_StringSubstring() { // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(); var output = new StringWriter(); // Act encoder.JavaScriptStringEncode("Hello+world!", 3, 5, output); // Assert Assert.Equal(@"lo\u002Bwo", output.ToString()); } [Theory] [InlineData("\"", @"\u0022")] [InlineData("'", @"\u0027")] public void JavaScriptStringEncode_Quotes(string input, string expected) { // Per the design document, we provide additional defense-in-depth // against breaking out of HTML attributes by having the encoders // never emit the ' or " characters. This means that we want to // \u-escape these characters instead of using \' and \". // Arrange JavaScriptStringEncoder encoder = new JavaScriptStringEncoder(UnicodeRanges.All); // Act string retVal = encoder.JavaScriptStringEncode(input); // Assert Assert.Equal(expected, retVal); } [Fact] public void JavaScriptStringEncode_DoesNotOutputHtmlSensitiveCharacters() { // Per the design document, we provide additional defense-in-depth // by never emitting HTML-sensitive characters unescaped. // Arrange JavaScriptStringEncoder javaScriptStringEncoder = new JavaScriptStringEncoder(UnicodeRanges.All); HtmlEncoder htmlEncoder = new HtmlEncoder(UnicodeRanges.All); // Act & assert for (int i = 0; i <= 0x10FFFF; i++) { if (IsSurrogateCodePoint(i)) { continue; // surrogates don't matter here } string javaScriptStringEncoded = javaScriptStringEncoder.JavaScriptStringEncode(Char.ConvertFromUtf32(i)); string thenHtmlEncoded = htmlEncoder.HtmlEncode(javaScriptStringEncoded); Assert.Equal(javaScriptStringEncoded, thenHtmlEncoded); // should have contained no HTML-sensitive characters } } private static bool IsSurrogateCodePoint(int codePoint) { return (0xD800 <= codePoint && codePoint <= 0xDFFF); } } }
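// A minimal usage sketch of the JavaScriptStringEncoder API covered by the tests above
// (the UnicodeRanges constructor overload, the Default instance, and JavaScriptStringEncode).
// The sketch class and sample strings are illustrative only and not part of the test suite.
using System;
using System.Text.Encodings.Web;
using System.Text.Unicode;

namespace Microsoft.Framework.WebEncoders
{
    internal static class JavaScriptStringEncoderSketch
    {
        internal static void Run()
        {
            // The default encoder leaves only Basic Latin unescaped; everything else
            // (including HTML-sensitive characters) is emitted as \uXXXX escapes.
            JavaScriptStringEncoder defaultEncoder = JavaScriptStringEncoder.Default;
            Console.WriteLine(defaultEncoder.JavaScriptStringEncode("café & <tea>"));
            // caf\u00E9 \u0026 \u003Ctea\u003E

            // Allowing additional ranges keeps those characters literal, but
            // HTML-sensitive characters such as '&', '<' and '>' are still escaped.
            var latin1 = new JavaScriptStringEncoder(UnicodeRanges.BasicLatin, UnicodeRanges.Latin1Supplement);
            Console.WriteLine(latin1.JavaScriptStringEncode("café & <tea>"));
            // café \u0026 \u003Ctea\u003E
        }
    }
}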
//--------------------------------------------------------------------------- // // File: HtmlFromXamlConverter.cs // // Copyright (C) Microsoft Corporation. All rights reserved. // // Description: Prototype for Xaml - Html conversion // //--------------------------------------------------------------------------- using System; using System.Diagnostics; using System.IO; using System.Text; using System.Xml; namespace ManagementGui.View.Control.XamlToHtmlParser { /// <summary> /// HtmlFromXamlConverter is a static class that takes a XAML string /// and converts it into HTML /// </summary> internal static class HtmlFromXamlConverter { // --------------------------------------------------------------------- // // Internal Methods // // --------------------------------------------------------------------- #region Internal Methods internal static string ConvertXamlToHtml(string xamlString) { return ConvertXamlToHtml(xamlString, true); } /// <summary> /// Main entry point for Xaml-to-Html converter. /// Converts a xaml string into an html string. /// </summary> /// <param name="xamlString"> /// Xaml string to convert. /// </param> /// <returns> /// Html string produced from a source xaml. /// </returns> internal static string ConvertXamlToHtml(string xamlString, bool asFlowDocument) { XmlTextReader xamlReader; StringBuilder htmlStringBuilder; XmlTextWriter htmlWriter; if (!asFlowDocument) { xamlString = "<FlowDocument>" + xamlString + "</FlowDocument>"; } xamlReader = new XmlTextReader(new StringReader(xamlString)); htmlStringBuilder = new StringBuilder(100); htmlWriter = new XmlTextWriter(new StringWriter(htmlStringBuilder)); if (!WriteFlowDocument(xamlReader, htmlWriter)) { return ""; } string htmlString = htmlStringBuilder.ToString(); return htmlString; } #endregion Internal Methods // --------------------------------------------------------------------- // // Private Methods // // --------------------------------------------------------------------- #region Private Methods /// <summary> /// Processes a root level element of XAML (normally it's FlowDocument element). /// </summary> /// <param name="xamlReader"> /// XmlTextReader for a source xaml. /// </param> /// <param name="htmlWriter"> /// XmlTextWriter producing resulting html /// </param> private static bool WriteFlowDocument(XmlTextReader xamlReader, XmlTextWriter htmlWriter) { if (!ReadNextToken(xamlReader)) { // Xaml content is empty - nothing to convert return false; } if (xamlReader.NodeType != XmlNodeType.Element || xamlReader.Name != "FlowDocument") { // Root FlowDocument element is missing return false; } // Create a buffer StringBuilder for collecting css properties for inline STYLE attributes // on every element level (it will be re-initialized on every level). StringBuilder inlineStyle = new StringBuilder(); //htmlWriter.WriteStartElement("HTML"); //htmlWriter.WriteStartElement("BODY"); WriteFormattingProperties(xamlReader, htmlWriter, inlineStyle); WriteElementContent(xamlReader, htmlWriter, inlineStyle); //htmlWriter.WriteEndElement(); //htmlWriter.WriteEndElement(); return true; } /// <summary> /// Reads attributes of the current xaml element and converts /// them into appropriate html attributes or css styles. /// </summary> /// <param name="xamlReader"> /// XmlTextReader which is expected to be at XmlNodeType.Element /// (opening element tag) position. /// The reader will remain at the same level after the function completes.
/// </param> /// <param name="htmlWriter"> /// XmlTextWriter for output html, which is expected to be in /// after WriteStartElement state. /// </param> /// <param name="inlineStyle"> /// String builder for collecting css properties for inline STYLE attribute. /// </param> private static void WriteFormattingProperties(XmlTextReader xamlReader, XmlTextWriter htmlWriter, StringBuilder inlineStyle) { Debug.Assert(xamlReader.NodeType == XmlNodeType.Element); // Clear string builder for the inline style inlineStyle.Remove(0, inlineStyle.Length); if (!xamlReader.HasAttributes) { return; } bool borderSet = false; while (xamlReader.MoveToNextAttribute()) { string css = null; switch (xamlReader.Name) { // Character fomatting properties // ------------------------------ case "Background": css = "background-color:" + ParseXamlColor(xamlReader.Value) + ";"; break; case "FontFamily": css = "font-family:" + xamlReader.Value + ";"; break; case "FontStyle": css = "font-style:" + xamlReader.Value.ToLower() + ";"; break; case "FontWeight": css = "font-weight:" + xamlReader.Value.ToLower() + ";"; break; case "FontStretch": break; case "FontSize": css = "font-size:" + xamlReader.Value + ";"; break; case "Foreground": css = "color:" + ParseXamlColor(xamlReader.Value) + ";"; break; case "TextDecorations": css = "text-decoration:underline;"; break; case "TextEffects": break; case "Emphasis": break; case "StandardLigatures": break; case "Variants": break; case "Capitals": break; case "Fraction": break; // Paragraph formatting properties // ------------------------------- case "Padding": css = "padding:" + ParseXamlThickness(xamlReader.Value) + ";"; break; case "Margin": css = "margin:" + ParseXamlThickness(xamlReader.Value) + ";"; break; case "BorderThickness": css = "border-width:" + ParseXamlThickness(xamlReader.Value) + ";"; borderSet = true; break; case "BorderBrush": css = "border-color:" + ParseXamlColor(xamlReader.Value) + ";"; borderSet = true; break; case "LineHeight": break; case "TextIndent": css = "text-indent:" + xamlReader.Value + ";"; break; case "TextAlignment": css = "text-align:" + xamlReader.Value + ";"; break; case "IsKeptTogether": break; case "IsKeptWithNext": break; case "ColumnBreakBefore": break; case "PageBreakBefore": break; case "FlowDirection": break; // Table attributes // ---------------- case "Width": css = "width:" + xamlReader.Value + ";"; break; case "ColumnSpan": htmlWriter.WriteAttributeString("COLSPAN", xamlReader.Value); break; case "RowSpan": htmlWriter.WriteAttributeString("ROWSPAN", xamlReader.Value); break; } if (css != null) { inlineStyle.Append(css); } } if (borderSet) { inlineStyle.Append("border-style:solid;mso-element:para-border-div;"); } // Return the xamlReader back to element level xamlReader.MoveToElement(); Debug.Assert(xamlReader.NodeType == XmlNodeType.Element); } private static string ParseXamlColor(string color) { if (color.StartsWith("#")) { // Remove transparancy value color = "#" + color.Substring(3); } return color; } private static string ParseXamlThickness(string thickness) { string[] values = thickness.Split(','); for (int i = 0; i < values.Length; i++) { double value; if (double.TryParse(values[i], out value)) { values[i] = Math.Ceiling(value).ToString(); } else { values[i] = "1"; } } string cssThickness; switch (values.Length) { case 1: cssThickness = thickness; break; case 2: cssThickness = values[1] + " " + values[0]; break; case 4: cssThickness = values[1] + " " + values[2] + " " + values[3] + " " + values[0]; break; default: cssThickness = 
values[0]; break; } return cssThickness; } /// <summary> /// Reads the content of the current xaml element and converts it into html. /// </summary> /// <param name="xamlReader"> /// XmlTextReader which is expected to be at XmlNodeType.Element /// (opening element tag) position. /// </param> /// <param name="htmlWriter"> /// May be null, in which case we are skipping the xaml element /// without producing any output to html. /// </param> /// <param name="inlineStyle"> /// StringBuilder used for collecting css properties for inline STYLE attribute. /// </param> private static void WriteElementContent(XmlTextReader xamlReader, XmlTextWriter htmlWriter, StringBuilder inlineStyle) { Debug.Assert(xamlReader.NodeType == XmlNodeType.Element); bool elementContentStarted = false; if (xamlReader.IsEmptyElement) { if (htmlWriter != null && !elementContentStarted && inlineStyle.Length > 0) { // Output STYLE attribute and clear inlineStyle buffer. htmlWriter.WriteAttributeString("STYLE", inlineStyle.ToString()); inlineStyle.Remove(0, inlineStyle.Length); } elementContentStarted = true; } else { while (ReadNextToken(xamlReader) && xamlReader.NodeType != XmlNodeType.EndElement) { switch (xamlReader.NodeType) { case XmlNodeType.Element: if (xamlReader.Name.Contains(".")) { AddComplexProperty(xamlReader, inlineStyle); } else { if (htmlWriter != null && !elementContentStarted && inlineStyle.Length > 0) { // Output STYLE attribute and clear inlineStyle buffer. htmlWriter.WriteAttributeString("STYLE", inlineStyle.ToString()); inlineStyle.Remove(0, inlineStyle.Length); } elementContentStarted = true; WriteElement(xamlReader, htmlWriter, inlineStyle); } Debug.Assert(xamlReader.NodeType == XmlNodeType.EndElement || xamlReader.NodeType == XmlNodeType.Element && xamlReader.IsEmptyElement); break; case XmlNodeType.Comment: if (htmlWriter != null) { if (!elementContentStarted && inlineStyle.Length > 0) { htmlWriter.WriteAttributeString("STYLE", inlineStyle.ToString()); } htmlWriter.WriteComment(xamlReader.Value); } elementContentStarted = true; break; case XmlNodeType.CDATA: case XmlNodeType.Text: case XmlNodeType.SignificantWhitespace: if (htmlWriter != null) { if (!elementContentStarted && inlineStyle.Length > 0) { htmlWriter.WriteAttributeString("STYLE", inlineStyle.ToString()); } htmlWriter.WriteString(xamlReader.Value); } elementContentStarted = true; break; } } Debug.Assert(xamlReader.NodeType == XmlNodeType.EndElement); } } /// <summary> /// Converts an element notation of a complex property into an inline css value. /// </summary> /// <param name="xamlReader"> /// On entry this XmlTextReader must be on Element start tag; /// on exit - on EndElement tag. /// </param> /// <param name="inlineStyle"> /// StringBuilder containing a value for STYLE attribute. /// </param> private static void AddComplexProperty(XmlTextReader xamlReader, StringBuilder inlineStyle) { Debug.Assert(xamlReader.NodeType == XmlNodeType.Element); if (inlineStyle != null && xamlReader.Name.EndsWith(".TextDecorations")) { inlineStyle.Append("text-decoration:underline;"); } // Skip the element representing the complex property WriteElementContent(xamlReader, /*htmlWriter:*/null, /*inlineStyle:*/null); } /// <summary> /// Converts a xaml element into an appropriate html element. /// </summary> /// <param name="xamlReader"> /// On entry this XmlTextReader must be on Element start tag; /// on exit - on EndElement tag.
/// </param> /// <param name="htmlWriter"> /// May be null, in which case we are skipping xaml content /// without producing any html output /// </param> /// <param name="inlineStyle"> /// StringBuilder used for collecting css properties for inline STYLE attributes on every level. /// </param> private static void WriteElement(XmlTextReader xamlReader, XmlTextWriter htmlWriter, StringBuilder inlineStyle) { Debug.Assert(xamlReader.NodeType == XmlNodeType.Element); if (htmlWriter == null) { // Skipping mode; recurse into the xaml element without any output WriteElementContent(xamlReader, /*htmlWriter:*/null, null); } else { string htmlElementName = null; switch (xamlReader.Name) { case "Run" : case "Span": htmlElementName = "SPAN"; break; case "InlineUIContainer": htmlElementName = "SPAN"; break; case "Bold": htmlElementName = "B"; break; case "Italic" : htmlElementName = "I"; break; case "Paragraph" : htmlElementName = "P"; break; case "BlockUIContainer": htmlElementName = "DIV"; break; case "Section": htmlElementName = "DIV"; break; case "Table": htmlElementName = "TABLE"; break; case "TableColumn": htmlElementName = "COL"; break; case "TableRowGroup" : htmlElementName = "TBODY"; break; case "TableRow" : htmlElementName = "TR"; break; case "TableCell" : htmlElementName = "TD"; break; case "List" : string marker = xamlReader.GetAttribute("MarkerStyle"); if (marker == null || marker == "None" || marker == "Disc" || marker == "Circle" || marker == "Square" || marker == "Box") { htmlElementName = "UL"; } else { htmlElementName = "OL"; } break; case "ListItem" : htmlElementName = "LI"; break; default : htmlElementName = null; // Ignore the element break; } if (htmlWriter != null && htmlElementName != null) { htmlWriter.WriteStartElement(htmlElementName); WriteFormattingProperties(xamlReader, htmlWriter, inlineStyle); WriteElementContent(xamlReader, htmlWriter, inlineStyle); htmlWriter.WriteEndElement(); } else { // Skip this unrecognized xaml element WriteElementContent(xamlReader, /*htmlWriter:*/null, null); } } } // Reader advance helpers // ---------------------- /// <summary> /// Reads several items from xamlReader skipping all non-significant stuff. /// </summary> /// <param name="xamlReader"> /// XmlTextReader from tokens are being read. /// </param> /// <returns> /// True if new token is available; false if end of stream reached. 
/// </returns> private static bool ReadNextToken(XmlReader xamlReader) { while (xamlReader.Read()) { Debug.Assert(xamlReader.ReadState == ReadState.Interactive, "Reader is expected to be in Interactive state (" + xamlReader.ReadState + ")"); switch (xamlReader.NodeType) { case XmlNodeType.Element: case XmlNodeType.EndElement: case XmlNodeType.None: case XmlNodeType.CDATA: case XmlNodeType.Text: case XmlNodeType.SignificantWhitespace: return true; case XmlNodeType.Whitespace: if (xamlReader.XmlSpace == XmlSpace.Preserve) { return true; } // ignore insignificant whitespace break; case XmlNodeType.EndEntity: case XmlNodeType.EntityReference: // Implement entity reading //xamlReader.ResolveEntity(); //xamlReader.Read(); //ReadChildNodes( parent, parentBaseUri, xamlReader, positionInfo); break; // for now we ignore entities as insignificant stuff case XmlNodeType.Comment: return true; case XmlNodeType.ProcessingInstruction: case XmlNodeType.DocumentType: case XmlNodeType.XmlDeclaration: default: // Ignorable stuff break; } } return false; } #endregion Private Methods // --------------------------------------------------------------------- // // Private Fields // // --------------------------------------------------------------------- #region Private Fields #endregion Private Fields } }
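// A minimal usage sketch for HtmlFromXamlConverter above: pass a XAML FlowDocument
// fragment and receive the corresponding HTML markup. The sample XAML string and the
// sketch class are illustrative; because the converter is internal, callers live in
// the same assembly.
namespace ManagementGui.View.Control.XamlToHtmlParser
{
    internal static class HtmlFromXamlConverterSketch
    {
        internal static string Run()
        {
            string xaml =
                "<FlowDocument>" +
                "<Paragraph FontWeight=\"Bold\" TextAlignment=\"Center\">Hello, <Italic>world</Italic></Paragraph>" +
                "</FlowDocument>";

            // asFlowDocument is true because the string already has a FlowDocument root;
            // pass false to have the converter wrap a bare fragment in one itself.
            return HtmlFromXamlConverter.ConvertXamlToHtml(xaml, true);
            // Produces markup along the lines of:
            // <P STYLE="font-weight:bold;text-align:Center;">Hello, <I>world</I></P>
        }
    }
}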
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System.Collections.Generic; using System.Diagnostics; using System.Text; using System.Threading; using System.Threading.Tasks; namespace System.IO { // Note: This class has an OS Limitation where the inotify API can miss events if a directory is created and immediately has // changes underneath. This is due to the inotify* APIs not being recursive and needing to call inotify_add_watch on // each subdirectory, causing a race between adding the watch and file system events happening. public partial class FileSystemWatcher { /// <summary>Starts a new watch operation if one is not currently running.</summary> private void StartRaisingEvents() { // If we already have a cancellation object, we're already running. if (_cancellation != null) { return; } // Open an inotify file descriptor. Ideally this would be a constrained execution region, but we don't have access to // PrepareConstrainedRegions. We still use a finally block to house the code that opens the handle and stores it in // hopes of making it as non-interruptable as possible. Ideally the SafeFileHandle would be allocated before the block, // but SetHandle is protected and SafeFileHandle is sealed, so we're stuck doing the allocation here. SafeFileHandle handle; try { } finally { handle = Interop.Sys.INotifyInit(); if (handle.IsInvalid) { Interop.ErrorInfo error = Interop.Sys.GetLastErrorInfo(); switch (error.Error) { case Interop.Error.EMFILE: string maxValue = ReadMaxUserLimit(MaxUserInstancesPath); string message = !string.IsNullOrEmpty(maxValue) ? SR.Format(SR.IOException_INotifyInstanceUserLimitExceeded_Value, maxValue) : SR.IOException_INotifyInstanceUserLimitExceeded; throw new IOException(message, error.RawErrno); case Interop.Error.ENFILE: throw new IOException(SR.IOException_INotifyInstanceSystemLimitExceeded, error.RawErrno); default: throw Interop.GetExceptionForIoErrno(error); } } } try { // Create the cancellation object that will be used by this FileSystemWatcher to cancel the new watch operation CancellationTokenSource cancellation = new CancellationTokenSource(); // Start running. All state associated with the watch operation is stored in a separate object; this is done // to avoid race conditions that could result if the users quickly starts/stops/starts/stops/etc. causing multiple // active operations to all be outstanding at the same time. var runner = new RunningInstance( this, handle, _directory, IncludeSubdirectories, TranslateFilters(NotifyFilter), cancellation.Token); // Now that we've created the runner, store the cancellation object and mark the instance // as running. We wait to do this so that if there was a failure, StartRaisingEvents // may be called to try again without first having to call StopRaisingEvents. _cancellation = cancellation; _enabled = true; // Start the runner runner.Start(); } catch { // If we fail to actually start the watching even though we've opened the // inotify handle, close the inotify handle proactively rather than waiting for it // to be finalized. handle.Dispose(); throw; } } /// <summary>Cancels the currently running watch operation if there is one.</summary> private void StopRaisingEvents() { _enabled = false; // If there's an active cancellation token, cancel and release it. 
// The cancellation token and the processing task respond to cancellation // to handle all other cleanup. var cts = _cancellation; if (cts != null) { _cancellation = null; cts.Cancel(); } } /// <summary>Called when FileSystemWatcher is finalized.</summary> private void FinalizeDispose() { // The RunningInstance remains rooted and holds open the SafeFileHandle until it's explicitly // torn down. FileSystemWatcher.Dispose will call StopRaisingEvents, but not on finalization; // thus we need to explicitly call it here. StopRaisingEvents(); } // ----------------------------- // ---- PAL layer ends here ---- // ----------------------------- /// <summary>Path to the procfs file that contains the maximum number of inotify instances an individual user may create.</summary> private const string MaxUserInstancesPath = "/proc/sys/fs/inotify/max_user_instances"; /// <summary>Path to the procfs file that contains the maximum number of inotify watches an individual user may create.</summary> private const string MaxUserWatchesPath = "/proc/sys/fs/inotify/max_user_watches"; /// <summary> /// Cancellation for the currently running watch operation. /// This is non-null if an operation has been started and null if stopped. /// </summary> private CancellationTokenSource _cancellation; /// <summary>Reads the value of a max user limit path from procfs.</summary> /// <param name="path">The path to read.</param> /// <returns>The value read, or "0" if a failure occurred.</returns> private static string ReadMaxUserLimit(string path) { try { return File.ReadAllText(path).Trim(); } catch { return null; } } /// <summary> /// Maps the FileSystemWatcher's NotifyFilters enumeration to the /// corresponding Interop.Sys.NotifyEvents values. /// </summary> /// <param name="filters">The filters provided the by user.</param> /// <returns>The corresponding NotifyEvents values to use with inotify.</returns> private static Interop.Sys.NotifyEvents TranslateFilters(NotifyFilters filters) { Interop.Sys.NotifyEvents result = 0; // We always include a few special inotify watch values that configure // the watch's behavior. result |= Interop.Sys.NotifyEvents.IN_ONLYDIR | // we only allow watches on directories Interop.Sys.NotifyEvents.IN_EXCL_UNLINK; // we want to stop monitoring unlinked files // For the Created and Deleted events, we need to always // register for the created/deleted inotify events, regardless // of the supplied filters values. We explicitly don't include IN_DELETE_SELF. // The Windows implementation doesn't include notifications for the root directory, // and having this for subdirectories results in duplicate notifications, one from // the parent and one from self. result |= Interop.Sys.NotifyEvents.IN_CREATE | Interop.Sys.NotifyEvents.IN_DELETE; // For the Changed event, which inotify events we subscribe to // are based on the NotifyFilters supplied. 
const NotifyFilters filtersForAccess = NotifyFilters.LastAccess; const NotifyFilters filtersForModify = NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.Security | NotifyFilters.Size; const NotifyFilters filtersForAttrib = NotifyFilters.Attributes | NotifyFilters.CreationTime | NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.Security | NotifyFilters.Size; if ((filters & filtersForAccess) != 0) { result |= Interop.Sys.NotifyEvents.IN_ACCESS; } if ((filters & filtersForModify) != 0) { result |= Interop.Sys.NotifyEvents.IN_MODIFY; } if ((filters & filtersForAttrib) != 0) { result |= Interop.Sys.NotifyEvents.IN_ATTRIB; } // For the Rename event, we'll register for the corresponding move inotify events if the // caller's NotifyFilters asks for notications related to names. const NotifyFilters filtersForMoved = NotifyFilters.FileName | NotifyFilters.DirectoryName; if ((filters & filtersForMoved) != 0) { result |= Interop.Sys.NotifyEvents.IN_MOVED_FROM | Interop.Sys.NotifyEvents.IN_MOVED_TO; } return result; } /// <summary> /// State and processing associated with an active watch operation. This state is kept separate from FileSystemWatcher to avoid /// race conditions when a user starts/stops/starts/stops/etc. in quick succession, resulting in the potential for multiple /// active operations. It also helps with avoiding rooted cycles and enabling proper finalization. /// </summary> private sealed class RunningInstance { /// <summary> /// The size of the native struct inotify_event. 4 32-bit integer values, the last of which is a length /// that indicates how many bytes follow to form the string name. /// </summary> const int c_INotifyEventSize = 16; /// <summary> /// Weak reference to the associated watcher. A weak reference is used so that the FileSystemWatcher may be collected and finalized, /// causing an active operation to be torn down. With a strong reference, a blocking read on the inotify handle will keep alive this /// instance which will keep alive the FileSystemWatcher which will not be finalizable and thus which will never signal to the blocking /// read to wake up in the event that the user neglects to stop raising events. /// </summary> private readonly WeakReference<FileSystemWatcher> _weakWatcher; /// <summary> /// The path for the primary watched directory. /// </summary> private readonly string _directoryPath; /// <summary> /// The inotify handle / file descriptor /// </summary> private readonly SafeFileHandle _inotifyHandle; /// <summary> /// Buffer used to store raw bytes read from the inotify handle. /// </summary> private readonly byte[] _buffer; /// <summary> /// The number of bytes read into the _buffer. /// </summary> private int _bufferAvailable; /// <summary> /// The next position in _buffer from which an event should be read. /// </summary> private int _bufferPos; /// <summary> /// Filters to use when adding a watch on directories. /// </summary> private readonly Interop.Sys.NotifyEvents _notifyFilters; /// <summary> /// Whether to monitor subdirectories. Unlike Win32, inotify does not implicitly monitor subdirectories; /// watches must be explicitly added for those subdirectories. /// </summary> private readonly bool _includeSubdirectories; /// <summary> /// Token to monitor for cancellation requests, upon which processing is stopped and all /// state is cleaned up. 
/// </summary> private readonly CancellationToken _cancellationToken; /// <summary> /// Mapping from watch descriptor (as returned by inotify_add_watch) to state for /// the associated directory being watched. Events from inotify include only relative /// names, so the watch descriptor in an event must be used to look up the associated /// directory path in order to conver the relative filename into a full path. /// </summary> private readonly Dictionary<int, WatchedDirectory> _wdToPathMap = new Dictionary<int, WatchedDirectory>(); /// <summary> /// Maximum length of a name returned from inotify event. /// </summary> private const int NAME_MAX = 255; // from limits.h /// <summary>Initializes the instance with all state necessary to operate a watch.</summary> internal RunningInstance( FileSystemWatcher watcher, SafeFileHandle inotifyHandle, string directoryPath, bool includeSubdirectories, Interop.Sys.NotifyEvents notifyFilters, CancellationToken cancellationToken) { Debug.Assert(watcher != null); Debug.Assert(inotifyHandle != null && !inotifyHandle.IsInvalid && !inotifyHandle.IsClosed); Debug.Assert(directoryPath != null); _weakWatcher = new WeakReference<FileSystemWatcher>(watcher); _inotifyHandle = inotifyHandle; _directoryPath = directoryPath; _buffer = watcher.AllocateBuffer(); Debug.Assert(_buffer != null && _buffer.Length > (c_INotifyEventSize + NAME_MAX + 1)); _includeSubdirectories = includeSubdirectories; _notifyFilters = notifyFilters; _cancellationToken = cancellationToken; // Add a watch for this starting directory. We keep track of the watch descriptor => directory information // mapping in a dictionary; this is needed in order to be able to determine the containing directory // for all notifications so that we can reconstruct the full path. AddDirectoryWatchUnlocked(null, directoryPath); } internal void Start() { // Schedule a task to read from the inotify queue and process the events. Task.Factory.StartNew(obj => ((RunningInstance)obj).ProcessEvents(), this, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default); // PERF: As needed, we can look into making this use async I/O rather than burning // a thread that blocks in the read syscall. } /// <summary>Object to use for synchronizing access to state when necessary.</summary> private object SyncObj { get { return _wdToPathMap; } } /// <summary>Adds a watch on a directory to the existing inotify handle.</summary> /// <param name="parent">The parent directory entry.</param> /// <param name="directoryName">The new directory path to monitor, relative to the root.</param> private void AddDirectoryWatch(WatchedDirectory parent, string directoryName) { lock (SyncObj) { // The read syscall on the file descriptor will block until either close is called or until // all previously added watches are removed. We don't want to rely on close, as a) that could // lead to race conditions where we inadvertently read from a recycled file descriptor, and b) // the SafeFileHandle that wraps the file descriptor can't be disposed (thus closing // the underlying file descriptor and allowing read to wake up) while there's an active ref count // against the handle, so we'd deadlock if we relied on that approach. Instead, we want to follow // the approach of removing all watches when we're done, which means we also don't want to // add any new watches once the count hits zero. 
if (parent == null || _wdToPathMap.Count > 0) { Debug.Assert(parent != null || _wdToPathMap.Count == 0); AddDirectoryWatchUnlocked(parent, directoryName); } } } /// <summary>Adds a watch on a directory to the existing inotify handle.</summary> /// <param name="parent">The parent directory entry.</param> /// <param name="directoryName">The new directory path to monitor, relative to the root.</param> private void AddDirectoryWatchUnlocked(WatchedDirectory parent, string directoryName) { string fullPath = parent != null ? parent.GetPath(false, directoryName) : directoryName; // inotify_add_watch will fail if this is a symlink, so check that we didn't get a symlink Interop.Sys.FileStatus status = default(Interop.Sys.FileStatus); if ((Interop.Sys.LStat(fullPath, out status) == 0) && ((status.Mode & (uint)Interop.Sys.FileTypes.S_IFMT) == Interop.Sys.FileTypes.S_IFLNK)) { return; } // Add a watch for the full path. If the path is already being watched, this will return // the existing descriptor. This works even in the case of a rename. We also add the DONT_FOLLOW // and EXCL_UNLINK flags to keep parity with Windows where we don't pickup symlinks or unlinked // files (which don't exist in Windows) int wd = Interop.Sys.INotifyAddWatch(_inotifyHandle, fullPath, (uint)(this._notifyFilters | Interop.Sys.NotifyEvents.IN_DONT_FOLLOW | Interop.Sys.NotifyEvents.IN_EXCL_UNLINK)); if (wd == -1) { // If we get an error when trying to add the watch, don't let that tear down processing. Instead, // raise the Error event with the exception and let the user decide how to handle it. Interop.ErrorInfo error = Interop.Sys.GetLastErrorInfo(); Exception exc; if (error.Error == Interop.Error.ENOSPC) { string maxValue = ReadMaxUserLimit(MaxUserWatchesPath); string message = !string.IsNullOrEmpty(maxValue) ? SR.Format(SR.IOException_INotifyWatchesUserLimitExceeded_Value, maxValue) : SR.IOException_INotifyWatchesUserLimitExceeded; exc = new IOException(message, error.RawErrno); } else { exc = Interop.GetExceptionForIoErrno(error, fullPath); } FileSystemWatcher watcher; if (_weakWatcher.TryGetTarget(out watcher)) { watcher.OnError(new ErrorEventArgs(exc)); } return; } // Then store the path information into our map. WatchedDirectory directoryEntry; bool isNewDirectory = false; if (_wdToPathMap.TryGetValue(wd, out directoryEntry)) { // The watch descriptor was already in the map. Hard links on directories // aren't possible, and symlinks aren't annotated as IN_ISDIR, // so this is a rename. (In extremely remote cases, this could be // a recycled watch descriptor if many, many events were lost // such that our dictionary got very inconsistent with the state // of the world, but there's little that can be done about that.) if (directoryEntry.Parent != parent) { if (directoryEntry.Parent != null) { directoryEntry.Parent.Children.Remove (directoryEntry); } directoryEntry.Parent = parent; if (parent != null) { parent.InitializedChildren.Add (directoryEntry); } } directoryEntry.Name = directoryName; } else { // The watch descriptor wasn't in the map. This is a creation. directoryEntry = new WatchedDirectory { Parent = parent, WatchDescriptor = wd, Name = directoryName }; if (parent != null) { parent.InitializedChildren.Add (directoryEntry); } _wdToPathMap.Add(wd, directoryEntry); isNewDirectory = true; } // Since inotify doesn't handle nesting implicitly, explicitly // add a watch for each child directory if the developer has // asked for subdirectories to be included. 
if (isNewDirectory && _includeSubdirectories) { // This method is recursive. If we expect to see hierarchies // so deep that it would cause us to overflow the stack, we could // consider using an explicit stack object rather than recursion. // This is unlikely, however, given typical directory names // and max path limits. foreach (string subDir in Directory.EnumerateDirectories(fullPath)) { AddDirectoryWatchUnlocked(directoryEntry, System.IO.Path.GetFileName(subDir)); // AddDirectoryWatchUnlocked will add the new directory to // this.Children, so we don't have to / shouldn't also do it here. } } } /// <summary>Removes the watched directory from our state, and optionally removes the inotify watch itself.</summary> /// <param name="directoryEntry">The directory entry to remove.</param> /// <param name="removeInotify">true to remove the inotify watch; otherwise, false. The default is true.</param> private void RemoveWatchedDirectory(WatchedDirectory directoryEntry, bool removeInotify = true) { Debug.Assert (_includeSubdirectories); lock (SyncObj) { if (directoryEntry.Parent != null) { directoryEntry.Parent.Children.Remove (directoryEntry); } RemoveWatchedDirectoryUnlocked (directoryEntry, removeInotify); } } /// <summary>Removes the watched directory from our state, and optionally removes the inotify watch itself.</summary> /// <param name="directoryEntry">The directory entry to remove.</param> /// <param name="removeInotify">true to remove the inotify watch; otherwise, false. The default is true.</param> private void RemoveWatchedDirectoryUnlocked(WatchedDirectory directoryEntry, bool removeInotify) { // If the directory has children, recursively remove them (see comments on recursion in AddDirectoryWatch). if (directoryEntry.Children != null) { foreach (WatchedDirectory child in directoryEntry.Children) { RemoveWatchedDirectoryUnlocked (child, removeInotify); } directoryEntry.Children = null; } // Then remove the directory itself. _wdToPathMap.Remove(directoryEntry.WatchDescriptor); // And if the caller has requested, remove the associated inotify watch. if (removeInotify) { // Remove the inotify watch. This could fail if our state has become inconsistent // with the state of the world (e.g. due to lost events). So we don't want failures // to throw exceptions, but we do assert to detect coding problems during debugging. int result = Interop.Sys.INotifyRemoveWatch(_inotifyHandle, directoryEntry.WatchDescriptor); Debug.Assert(result >= 0); } } /// <summary> /// Callback invoked when cancellation is requested. Removes all watches, /// which will cause the active processing loop to shutdown. /// </summary> private void CancellationCallback() { lock (SyncObj) { // Remove all watches (inotiy_rm_watch) and clear out the map. // No additional watches will be added after this point. foreach (int wd in this._wdToPathMap.Keys) { int result = Interop.Sys.INotifyRemoveWatch(_inotifyHandle, wd); Debug.Assert(result >= 0); // ignore errors; they're non-fatal, but they also shouldn't happen } _wdToPathMap.Clear(); } } /// <summary> /// Main processing loop. This is currently implemented as a synchronous operation that continually /// reads events and processes them... in the future, this could be changed to use asynchronous processing /// if the impact of using a thread-per-FileSystemWatcher is too high. /// </summary> private void ProcessEvents() { // When cancellation is requested, clear out all watches. 
This should force any active or future reads // on the inotify handle to return 0 bytes read immediately, allowing us to wake up from the blocking call // and exit the processing loop and clean up. var ctr = _cancellationToken.Register(obj => ((RunningInstance)obj).CancellationCallback(), this); try { // Previous event information string previousEventName = null; WatchedDirectory previousEventParent = null; uint previousEventCookie = 0; // Process events as long as we're not canceled and there are more to read... NotifyEvent nextEvent; while (!_cancellationToken.IsCancellationRequested && TryReadEvent(out nextEvent)) { // Try to get the actual watcher from our weak reference. We maintain a weak reference most of the time // so as to avoid a rooted cycle that would prevent our processing loop from ever ending // if the watcher is dropped by the user without being disposed. If we can't get the watcher, // there's nothing more to do (we can't raise events), so bail. FileSystemWatcher watcher; if (!_weakWatcher.TryGetTarget(out watcher)) { break; } uint mask = nextEvent.mask; string expandedName = null; WatchedDirectory associatedDirectoryEntry = null; // An overflow event means that we can't trust our state without restarting since we missed events and // some of those events could be a directory create, meaning we wouldn't have added the directory to the // watch and would not provide correct data to the caller. if ((mask & (uint)Interop.Sys.NotifyEvents.IN_Q_OVERFLOW) != 0) { // Notify the caller of the error and, if the includeSubdirectories flag is set, restart to pick up any // potential directories we missed due to the overflow. watcher.NotifyInternalBufferOverflowEvent(); if (_includeSubdirectories) { watcher.Restart(); } break; } else { // Look up the directory information for the supplied wd lock (SyncObj) { if (!_wdToPathMap.TryGetValue(nextEvent.wd, out associatedDirectoryEntry)) { // The watch descriptor could be missing from our dictionary if it was removed // due to cancellation, or if we already removed it and this is a related event // like IN_IGNORED. In any case, just ignore it... even if for some reason we // should have the value, there's little we can do about it at this point, // and there's no more processing of this event we can do without it. continue; } } expandedName = associatedDirectoryEntry.GetPath(true, nextEvent.name); } // Determine whether the affected object is a directory (rather than a file). // If it is, we may need to do special processing, such as adding a watch for new // directories if IncludeSubdirectories is enabled. Since we're only watching // directories, any IN_IGNORED event is also for a directory. bool isDir = (mask & (uint)(Interop.Sys.NotifyEvents.IN_ISDIR | Interop.Sys.NotifyEvents.IN_IGNORED)) != 0; // Renames come in the form of two events: IN_MOVED_FROM and IN_MOVED_TO. // In general, these should come as a sequence, one immediately after the other. // So, we delay raising an event for IN_MOVED_FROM until we see what comes next. if (previousEventName != null && ((mask & (uint)Interop.Sys.NotifyEvents.IN_MOVED_TO) == 0 || previousEventCookie != nextEvent.cookie)) { // IN_MOVED_FROM without an immediately-following corresponding IN_MOVED_TO. // We have to assume that it was moved outside of our root watch path, which // should be considered a deletion to match Win32 behavior. // But since we explicitly added watches on directories, if it's a directory it'll // still be watched, so we need to explicitly remove the watch. 
if (previousEventParent != null && previousEventParent.Children != null) { // previousEventParent will be non-null iff the IN_MOVED_FROM // was for a directory, in which case previousEventParent is that directory's // parent and previousEventName is the name of the directory to be removed. foreach (WatchedDirectory child in previousEventParent.Children) { if (child.Name == previousEventName) { RemoveWatchedDirectory(child); break; } } } // Then fire the deletion event, even though the event was IN_MOVED_FROM. watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Deleted, previousEventName); previousEventName = null; previousEventParent = null; previousEventCookie = 0; } // If the event signaled that there's a new subdirectory and if we're monitoring subdirectories, // add a watch for it. const Interop.Sys.NotifyEvents AddMaskFilters = Interop.Sys.NotifyEvents.IN_CREATE | Interop.Sys.NotifyEvents.IN_MOVED_TO; bool addWatch = ((mask & (uint)AddMaskFilters) != 0); if (addWatch && isDir && _includeSubdirectories) { AddDirectoryWatch(associatedDirectoryEntry, nextEvent.name); } const Interop.Sys.NotifyEvents switchMask = Interop.Sys.NotifyEvents.IN_IGNORED | Interop.Sys.NotifyEvents.IN_CREATE | Interop.Sys.NotifyEvents.IN_DELETE | Interop.Sys.NotifyEvents.IN_ACCESS | Interop.Sys.NotifyEvents.IN_MODIFY | Interop.Sys.NotifyEvents.IN_ATTRIB | Interop.Sys.NotifyEvents.IN_MOVED_FROM | Interop.Sys.NotifyEvents.IN_MOVED_TO; switch ((Interop.Sys.NotifyEvents)(mask & (uint)switchMask)) { case Interop.Sys.NotifyEvents.IN_CREATE: watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Created, expandedName); break; case Interop.Sys.NotifyEvents.IN_IGNORED: // We're getting an IN_IGNORED because a directory watch was removed, // and we're getting this far in our code because we still have an entry for it // in our dictionary. So we want to clean up the relevant state, but not // attempt to call back to inotify to remove the watches. RemoveWatchedDirectory(associatedDirectoryEntry, removeInotify: false); break; case Interop.Sys.NotifyEvents.IN_DELETE: watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Deleted, expandedName); // We don't explicitly RemoveWatchedDirectory here, as that'll be handled // by IN_IGNORED processing if this is a directory. break; case Interop.Sys.NotifyEvents.IN_ACCESS: case Interop.Sys.NotifyEvents.IN_MODIFY: case Interop.Sys.NotifyEvents.IN_ATTRIB: watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Changed, expandedName); break; case Interop.Sys.NotifyEvents.IN_MOVED_FROM: // We need to check if this MOVED_FROM event is standalone - meaning the item was moved out // of scope. We do this by checking if we are at the end of our buffer (meaning no more events) // and if there is data to be read by polling the fd. If there aren't any more events, fire the // deleted event; if there are more events, handle it via next pass. This adds an additional // edge case where we get the MOVED_FROM event and the MOVED_TO event hasn't been generated yet // so we will send a DELETE for this event and a CREATE when the MOVED_TO is eventually processed. if (_bufferPos == _bufferAvailable) { // Do the poll with a small timeout value. Community research showed that a few milliseconds // was enough to allow the vast majority of MOVED_TO events that were going to show // up to actually arrive. This doesn't need to be perfect; there's always the chance // that a MOVED_TO could show up after whatever timeout is specified, in which case // it'll just result in a delete + create instead of a rename.
We need the value to be // small so that we don't significantly delay the delivery of the deleted event in case // that's actually what's needed (otherwise it'd be fine to block indefinitely waiting // for the next event to arrive). const int MillisecondsTimeout = 2; Interop.Sys.PollEvents events; Interop.Sys.Poll(_inotifyHandle, Interop.Sys.PollEvents.POLLIN, MillisecondsTimeout, out events); // If we error or don't have any signaled handles, send the deleted event if (events == Interop.Sys.PollEvents.POLLNONE) { // There isn't any more data in the queue so this is a deleted event watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Deleted, expandedName); break; } } // We will set these values if the buffer has more data OR if the poll call tells us that more data is available. previousEventName = expandedName; previousEventParent = isDir ? associatedDirectoryEntry : null; previousEventCookie = nextEvent.cookie; break; case Interop.Sys.NotifyEvents.IN_MOVED_TO: if (previousEventName != null) { // If the previous name from IN_MOVED_FROM is non-null, then this is a rename. watcher.NotifyRenameEventArgs(WatcherChangeTypes.Renamed, expandedName, previousEventName); } else { // If it is null, then we didn't get an IN_MOVED_FROM (or we got it a long time // ago and treated it as a deletion), in which case this is considered a creation. watcher.NotifyFileSystemEventArgs(WatcherChangeTypes.Created, expandedName); } previousEventName = null; previousEventParent = null; previousEventCookie = 0; break; } // Drop our strong reference to the watcher now that we're potentially going to block again for another read watcher = null; } } catch (Exception exc) { FileSystemWatcher watcher; if (_weakWatcher.TryGetTarget(out watcher)) { watcher.OnError(new ErrorEventArgs(exc)); } } finally { ctr.Dispose(); _inotifyHandle.Dispose(); } } /// <summary>Reads the next event from the inotify handle, refilling the internal buffer as needed.</summary> /// <param name="notifyEvent">The event that was read.</param> /// <returns>true if an event was read; false if no more data could be read because all watches were removed.</returns> private bool TryReadEvent(out NotifyEvent notifyEvent) { Debug.Assert(_buffer != null); Debug.Assert(_bufferAvailable >= 0 && _bufferAvailable <= _buffer.Length); Debug.Assert(_bufferPos >= 0 && _bufferPos <= _bufferAvailable); // Read more data into our buffer if we need it if (_bufferAvailable == 0 || _bufferPos == _bufferAvailable) { // Read from the handle. This will block until either data is available // or all watches have been removed, in which case zero bytes are read.
unsafe { try { fixed (byte* buf = this._buffer) { _bufferAvailable = Interop.CheckIo(Interop.Sys.Read(_inotifyHandle, buf, this._buffer.Length), isDirectory: true); Debug.Assert(_bufferAvailable <= this._buffer.Length); } } catch (ArgumentException) { _bufferAvailable = 0; Debug.Fail("Buffer provided to read was too small"); } Debug.Assert(_bufferAvailable >= 0); } if (_bufferAvailable == 0) { notifyEvent = default(NotifyEvent); return false; } Debug.Assert(_bufferAvailable >= c_INotifyEventSize); _bufferPos = 0; } // Parse each event: // struct inotify_event { // int wd; // uint32_t mask; // uint32_t cookie; // uint32_t len; // char name[]; // length determined by len; at least 1 for required null termination // }; Debug.Assert(_bufferPos + c_INotifyEventSize <= _bufferAvailable); NotifyEvent readEvent; readEvent.wd = BitConverter.ToInt32(_buffer, _bufferPos); readEvent.mask = BitConverter.ToUInt32(_buffer, _bufferPos + 4); // +4 to get past wd readEvent.cookie = BitConverter.ToUInt32(_buffer, _bufferPos + 8); // +8 to get past wd, mask int nameLength = (int)BitConverter.ToUInt32(_buffer, _bufferPos + 12); // +12 to get past wd, mask, cookie readEvent.name = ReadName(_bufferPos + c_INotifyEventSize, nameLength); // +16 to get past wd, mask, cookie, len _bufferPos += c_INotifyEventSize + nameLength; notifyEvent = readEvent; return true; } /// <summary> /// Reads a UTF8 string from _buffer starting at the specified position and up to /// the specified length. Null termination is trimmed off (the length may include /// many null bytes, not just one, or it may include none). /// </summary> /// <param name="position">The position in _buffer at which the name starts.</param> /// <param name="nameLength">The maximum number of bytes the name occupies, including any null padding.</param> /// <returns>The decoded name, with trailing null characters trimmed.</returns> private string ReadName(int position, int nameLength) { Debug.Assert(position > 0); Debug.Assert(nameLength >= 0 && (position + nameLength) <= _buffer.Length); int lengthWithoutNullTerm = nameLength; for (int i = 0; i < nameLength; i++) { if (_buffer[position + i] == '\0') { lengthWithoutNullTerm = i; break; } } Debug.Assert(lengthWithoutNullTerm <= nameLength); // should be null terminated or empty return lengthWithoutNullTerm > 0 ? Encoding.UTF8.GetString(_buffer, position, lengthWithoutNullTerm) : string.Empty; } /// <summary>An event read and translated from the inotify handle.</summary> /// <remarks> /// Unlike its native counterpart, this struct stores a string name rather than /// an integer length and a char[]. It is not directly marshalable. /// </remarks> private struct NotifyEvent { internal int wd; internal uint mask; internal uint cookie; internal string name; } /// <summary>State associated with a watched directory.</summary> private sealed class WatchedDirectory { /// <summary>A StringBuilder cached on the current thread to avoid allocations when possible.</summary> [ThreadStatic] private static StringBuilder t_builder; /// <summary>The parent directory.</summary> internal WatchedDirectory Parent; /// <summary>The watch descriptor associated with this directory.</summary> internal int WatchDescriptor; /// <summary>The filename of this directory.</summary> internal string Name; /// <summary>Child directories of this directory for which we added explicit watches.</summary> internal List<WatchedDirectory> Children; /// <summary>Child directories of this directory for which we added explicit watches.
This is the same as Children, but ensured to be initialized as non-null.</summary> internal List<WatchedDirectory> InitializedChildren { get { if (Children == null) { Children = new List<WatchedDirectory> (); } return Children; } } // PERF: Work is being done here proportionate to depth of watch directories. // If this becomes a bottleneck, we'll need to come up with another mechanism // for obtaining and keeping paths up to date, for example storing the full path // in each WatchedDirectory node and recursively updating all children on a move, // which we can do given that we store all children. For now we're not doing that // because it's not a clear win: either you update all children recursively when // a directory moves / is added, or you compute each name when an event occurs. // The former is better if there are going to be lots of events causing lots // of traversals to compute names, and the latter is better for big directory // structures that incur fewer file events. /// <summary>Gets the path of this directory.</summary> /// <param name="relativeToRoot">Whether to get a path relative to the root directory being watched, or a full path.</param> /// <param name="additionalName">An additional name to include in the path, relative to this directory.</param> /// <returns>The computed path.</returns> internal string GetPath(bool relativeToRoot, string additionalName = null) { // Use our cached builder StringBuilder builder = t_builder; if (builder == null) { t_builder = builder = new StringBuilder(); } builder.Clear(); // Write the directory's path. Then if an additional filename was supplied, append it Write(builder, relativeToRoot); if (additionalName != null) { AppendSeparatorIfNeeded(builder); builder.Append(additionalName); } return builder.ToString(); } /// <summary>Writes this directory's path to the builder.</summary> /// <param name="builder">The builder to which to write.</param> /// <param name="relativeToRoot"> /// true if the path should be relative to the root directory being watched. /// false if the path should be a full file system path, including that of /// the root directory being watched. /// </param> private void Write(StringBuilder builder, bool relativeToRoot) { // This method is recursive. If we expect to see hierarchies // so deep that it would cause us to overflow the stack, we could // consider using an explicit stack object rather than recursion. // This is unlikely, however, given typical directory names // and max path limits. // First append the parent's path if (Parent != null) { Parent.Write(builder, relativeToRoot); AppendSeparatorIfNeeded(builder); } // Then append ours. In the case of the root directory // being watched, we only append its name if the caller // has asked for a full path. if (Parent != null || !relativeToRoot) { builder.Append(Name); } } /// <summary>Adds a directory path separator to the end of the builder if one isn't there.</summary> /// <param name="builder">The builder.</param> private static void AppendSeparatorIfNeeded(StringBuilder builder) { if (builder.Length > 0) { char c = builder[builder.Length - 1]; if (c != System.IO.Path.DirectorySeparatorChar && c != System.IO.Path.AltDirectorySeparatorChar) { builder.Append(System.IO.Path.DirectorySeparatorChar); } } } } } } }
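// Illustrative usage sketch (not part of the implementation above). It shows how the behavior
// documented in the comments surfaces through the public FileSystemWatcher API on Linux: a rename
// inside the watched tree is raised as Renamed once the IN_MOVED_FROM/IN_MOVED_TO pair is
// coalesced, a move out of the tree is reported as Deleted, and watch-limit failures surface
// through the Error event. The path "/tmp/watched" is an arbitrary example, not something the
// code above refers to.
using System;
using System.IO;

internal static class FileSystemWatcherUsageSketch
{
    private static void Main()
    {
        using (var watcher = new FileSystemWatcher("/tmp/watched")
        {
            IncludeSubdirectories = true, // triggers the explicit per-subdirectory watches added above
            NotifyFilter = NotifyFilters.FileName | NotifyFilters.DirectoryName | NotifyFilters.LastWrite
        })
        {
            watcher.Created += (s, e) => Console.WriteLine($"Created: {e.FullPath}");
            watcher.Deleted += (s, e) => Console.WriteLine($"Deleted: {e.FullPath}");
            watcher.Renamed += (s, e) => Console.WriteLine($"Renamed: {e.OldFullPath} -> {e.FullPath}");
            watcher.Changed += (s, e) => Console.WriteLine($"Changed: {e.FullPath}");
            // Raised, for example, when inotify watches cannot be added because the
            // max_user_watches limit was exceeded (surfaced above as an IOException).
            watcher.Error += (s, e) => Console.WriteLine($"Error: {e.GetException().Message}");

            watcher.EnableRaisingEvents = true;
            Console.ReadLine(); // keep the process alive while events are delivered
        }
    }
}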
// Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace ApplicationGateway { using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using System.Collections; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; /// <summary> /// ExpressRouteCircuitsOperations operations. /// </summary> public partial interface IExpressRouteCircuitsOperations { /// <summary> /// Deletes the specified express route circuit. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string circuitName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets information about the specified express route circuit. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of express route circuit. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuit>> GetWithHttpMessagesAsync(string resourceGroupName, string circuitName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Creates or updates an express route circuit. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the circuit. /// </param> /// <param name='parameters'> /// Parameters supplied to the create or update express route circuit /// operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuit>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string circuitName, ExpressRouteCircuit parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised ARP table associated with the express /// route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsArpTableListResult>> ListArpTableWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised routes table associated with the /// express route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsRoutesTableListResult>> ListRoutesTableWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised routes table summary associated with /// the express route circuit in a resource group. 
/// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsRoutesTableSummaryListResult>> ListRoutesTableSummaryWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all the stats from an express route circuit in a resource /// group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitStats>> GetStatsWithHttpMessagesAsync(string resourceGroupName, string circuitName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all stats from an express route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitStats>> GetPeeringStatsWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all the express route circuits in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<ExpressRouteCircuit>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all the express route circuits in a subscription. /// </summary> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<ExpressRouteCircuit>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Deletes the specified express route circuit. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string circuitName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Creates or updates an express route circuit. 
/// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the circuit. /// </param> /// <param name='parameters'> /// Parameters supplied to the create or update express route circuit /// operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuit>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string circuitName, ExpressRouteCircuit parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised ARP table associated with the express /// route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsArpTableListResult>> BeginListArpTableWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised routes table associated with the /// express route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsRoutesTableListResult>> BeginListRoutesTableWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets the currently advertised routes table summary associated with /// the express route circuit in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='circuitName'> /// The name of the express route circuit. /// </param> /// <param name='peeringName'> /// The name of the peering. /// </param> /// <param name='devicePath'> /// The path of the device. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<ExpressRouteCircuitsRoutesTableSummaryListResult>> BeginListRoutesTableSummaryWithHttpMessagesAsync(string resourceGroupName, string circuitName, string peeringName, string devicePath, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all the express route circuits in a resource group. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<ExpressRouteCircuit>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); /// <summary> /// Gets all the express route circuits in a subscription. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="Microsoft.Rest.Azure.CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="Microsoft.Rest.SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="Microsoft.Rest.ValidationException"> /// Thrown when a required parameter is null /// </exception> Task<AzureOperationResponse<IPage<ExpressRouteCircuit>>> ListAllNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)); } }
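// Illustrative sketch (not part of the generated interface above): one way a caller might page
// through every circuit in a resource group using the List/ListNext pair declared above. How an
// IExpressRouteCircuitsOperations instance is obtained (the "operations" parameter here) is an
// assumption; it would normally come from the generated network management client.
namespace ApplicationGateway
{
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using Microsoft.Rest.Azure;
    using Models;

    internal static class ExpressRouteCircuitsPagingSketch
    {
        internal static async Task<List<ExpressRouteCircuit>> ListAllInGroupAsync(
            IExpressRouteCircuitsOperations operations, string resourceGroupName)
        {
            var circuits = new List<ExpressRouteCircuit>();

            // First page.
            AzureOperationResponse<IPage<ExpressRouteCircuit>> response =
                await operations.ListWithHttpMessagesAsync(resourceGroupName);
            circuits.AddRange(response.Body);

            // Follow NextPageLink until the service reports no further pages.
            string nextPageLink = response.Body.NextPageLink;
            while (!string.IsNullOrEmpty(nextPageLink))
            {
                response = await operations.ListNextWithHttpMessagesAsync(nextPageLink);
                circuits.AddRange(response.Body);
                nextPageLink = response.Body.NextPageLink;
            }

            return circuits;
        }
    }
}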
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.Runtime.CompilerServices; // Do not remove. This is necessary for netstandard, since this file is mirrored into corefx using System.Numerics; #if !netstandard using Internal.Runtime.CompilerServices; #endif namespace System { internal static partial class SpanHelpers // .T { public static int IndexOf<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. T valueHead = value; ref T valueTail = ref Unsafe.Add(ref value, 1); int valueTailLength = valueLength - 1; int index = 0; for (; ; ) { Debug.Assert(0 <= index && index <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength". int remainingSearchSpaceLength = searchSpaceLength - index - valueTailLength; if (remainingSearchSpaceLength <= 0) break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there. // Do a quick search for the first element of "value". int relativeIndex = IndexOf(ref Unsafe.Add(ref searchSpace, index), valueHead, remainingSearchSpaceLength); if (relativeIndex == -1) break; index += relativeIndex; // Found the first element of "value". See if the tail matches. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, index + 1), ref valueTail, valueTailLength)) return index; // The tail matched. Return a successful find. index++; } return -1; } // Adapted from IndexOf(...) 
public static bool Contains<T>(ref T searchSpace, T value, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); IntPtr index = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations while (length >= 8) { length -= 8; if (value.Equals(Unsafe.Add(ref searchSpace, index + 0)) || value.Equals(Unsafe.Add(ref searchSpace, index + 1)) || value.Equals(Unsafe.Add(ref searchSpace, index + 2)) || value.Equals(Unsafe.Add(ref searchSpace, index + 3)) || value.Equals(Unsafe.Add(ref searchSpace, index + 4)) || value.Equals(Unsafe.Add(ref searchSpace, index + 5)) || value.Equals(Unsafe.Add(ref searchSpace, index + 6)) || value.Equals(Unsafe.Add(ref searchSpace, index + 7))) { goto Found; } index += 8; } if (length >= 4) { length -= 4; if (value.Equals(Unsafe.Add(ref searchSpace, index + 0)) || value.Equals(Unsafe.Add(ref searchSpace, index + 1)) || value.Equals(Unsafe.Add(ref searchSpace, index + 2)) || value.Equals(Unsafe.Add(ref searchSpace, index + 3))) { goto Found; } index += 4; } while (length > 0) { length -= 1; if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; index += 1; } return false; Found: return true; } public static unsafe int IndexOf<T>(ref T searchSpace, T value, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); IntPtr index = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations while (length >= 8) { length -= 8; if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; if (value.Equals(Unsafe.Add(ref searchSpace, index + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, index + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, index + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, index + 4))) goto Found4; if (value.Equals(Unsafe.Add(ref searchSpace, index + 5))) goto Found5; if (value.Equals(Unsafe.Add(ref searchSpace, index + 6))) goto Found6; if (value.Equals(Unsafe.Add(ref searchSpace, index + 7))) goto Found7; index += 8; } if (length >= 4) { length -= 4; if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; if (value.Equals(Unsafe.Add(ref searchSpace, index + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, index + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, index + 3))) goto Found3; index += 4; } while (length > 0) { if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; index += 1; length--; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return (int)(byte*)index; Found1: return (int)(byte*)(index + 1); Found2: return (int)(byte*)(index + 2); Found3: return (int)(byte*)(index + 3); Found4: return (int)(byte*)(index + 4); Found5: return (int)(byte*)(index + 5); Found6: return (int)(byte*)(index + 6); Found7: return (int)(byte*)(index + 7); } public static int IndexOfAny<T>(ref T searchSpace, T value0, T value1, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; int index = 0; while ((length - index) >= 8) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, index + 4); if 
(value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, index + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, index + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, index + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found7; index += 8; } if ((length - index) >= 4) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; index += 4; } while (index < length) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; index++; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return index; Found1: return index + 1; Found2: return index + 2; Found3: return index + 3; Found4: return index + 4; Found5: return index + 5; Found6: return index + 6; Found7: return index + 7; } public static int IndexOfAny<T>(ref T searchSpace, T value0, T value1, T value2, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; int index = 0; while ((length - index) >= 8) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, index + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, index + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, index + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, index + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found7; index += 8; } if ((length - index) >= 4) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; index += 4; } while (index < length) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; index++; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return index; Found1: return index + 1; 
Found2: return index + 2; Found3: return index + 3; Found4: return index + 4; Found5: return index + 5; Found6: return index + 6; Found7: return index + 7; } public static int IndexOfAny<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. int index = -1; for (int i = 0; i < valueLength; i++) { var tempIndex = IndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength); if ((uint)tempIndex < (uint)index) { index = tempIndex; // Reduce space for search, cause we don't care if we find the search value after the index of a previously found value searchSpaceLength = tempIndex; if (index == 0) break; } } return index; } public static int LastIndexOf<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. T valueHead = value; ref T valueTail = ref Unsafe.Add(ref value, 1); int valueTailLength = valueLength - 1; int index = 0; for (; ; ) { Debug.Assert(0 <= index && index <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength". int remainingSearchSpaceLength = searchSpaceLength - index - valueTailLength; if (remainingSearchSpaceLength <= 0) break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there. // Do a quick search for the first element of "value". int relativeIndex = LastIndexOf(ref searchSpace, valueHead, remainingSearchSpaceLength); if (relativeIndex == -1) break; // Found the first element of "value". See if the tail matches. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, relativeIndex + 1), ref valueTail, valueTailLength)) return relativeIndex; // The tail matched. Return a successful find. 
index += remainingSearchSpaceLength - relativeIndex; } return -1; } public static int LastIndexOf<T>(ref T searchSpace, T value, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); while (length >= 8) { length -= 8; if (value.Equals(Unsafe.Add(ref searchSpace, length + 7))) goto Found7; if (value.Equals(Unsafe.Add(ref searchSpace, length + 6))) goto Found6; if (value.Equals(Unsafe.Add(ref searchSpace, length + 5))) goto Found5; if (value.Equals(Unsafe.Add(ref searchSpace, length + 4))) goto Found4; if (value.Equals(Unsafe.Add(ref searchSpace, length + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, length + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, length + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } if (length >= 4) { length -= 4; if (value.Equals(Unsafe.Add(ref searchSpace, length + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, length + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, length + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } while (length > 0) { length--; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int LastIndexOfAny<T>(ref T searchSpace, T value0, T value1, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; while (length >= 8) { length -= 8; lookUp = Unsafe.Add(ref searchSpace, length + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found7; lookUp = Unsafe.Add(ref searchSpace, length + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, length + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, length + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } if (length >= 4) { length -= 4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } while (length > 0) { length--; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int 
LastIndexOfAny<T>(ref T searchSpace, T value0, T value1, T value2, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; while (length >= 8) { length -= 8; lookUp = Unsafe.Add(ref searchSpace, length + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found7; lookUp = Unsafe.Add(ref searchSpace, length + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, length + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, length + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } if (length >= 4) { length -= 4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } while (length > 0) { length--; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int LastIndexOfAny<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. 
int index = -1; for (int i = 0; i < valueLength; i++) { var tempIndex = LastIndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength); if (tempIndex > index) index = tempIndex; } return index; } public static bool SequenceEqual<T>(ref T first, ref T second, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); if (Unsafe.AreSame(ref first, ref second)) goto Equal; IntPtr index = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations while (length >= 8) { length -= 8; if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; if (!Unsafe.Add(ref first, index + 1).Equals(Unsafe.Add(ref second, index + 1))) goto NotEqual; if (!Unsafe.Add(ref first, index + 2).Equals(Unsafe.Add(ref second, index + 2))) goto NotEqual; if (!Unsafe.Add(ref first, index + 3).Equals(Unsafe.Add(ref second, index + 3))) goto NotEqual; if (!Unsafe.Add(ref first, index + 4).Equals(Unsafe.Add(ref second, index + 4))) goto NotEqual; if (!Unsafe.Add(ref first, index + 5).Equals(Unsafe.Add(ref second, index + 5))) goto NotEqual; if (!Unsafe.Add(ref first, index + 6).Equals(Unsafe.Add(ref second, index + 6))) goto NotEqual; if (!Unsafe.Add(ref first, index + 7).Equals(Unsafe.Add(ref second, index + 7))) goto NotEqual; index += 8; } if (length >= 4) { length -= 4; if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; if (!Unsafe.Add(ref first, index + 1).Equals(Unsafe.Add(ref second, index + 1))) goto NotEqual; if (!Unsafe.Add(ref first, index + 2).Equals(Unsafe.Add(ref second, index + 2))) goto NotEqual; if (!Unsafe.Add(ref first, index + 3).Equals(Unsafe.Add(ref second, index + 3))) goto NotEqual; index += 4; } while (length > 0) { if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; index += 1; length--; } Equal: return true; NotEqual: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return false; } public static int SequenceCompareTo<T>(ref T first, int firstLength, ref T second, int secondLength) where T : IComparable<T> { Debug.Assert(firstLength >= 0); Debug.Assert(secondLength >= 0); var minLength = firstLength; if (minLength > secondLength) minLength = secondLength; for (int i = 0; i < minLength; i++) { int result = Unsafe.Add(ref first, i).CompareTo(Unsafe.Add(ref second, i)); if (result != 0) return result; } return firstLength.CompareTo(secondLength); } } }
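// Illustrative sketch (separate from the implementation above): these internal SpanHelpers back
// the public span search methods on System.MemoryExtensions in this codebase, so their semantics
// can be observed through public APIs. The calls below demonstrate the rules documented in the
// comments above, e.g. that an empty value is treated as "found" at index 0 and that IndexOfAny
// returns the earliest match among the candidate values.
using System;

internal static class SpanHelpersUsageSketch
{
    private static void Main()
    {
        ReadOnlySpan<char> text = "banana".AsSpan();

        Console.WriteLine(text.IndexOf("nan".AsSpan()));                // 2: first occurrence of the sequence
        Console.WriteLine(text.IndexOf(ReadOnlySpan<char>.Empty));      // 0: an empty value matches at the start
        Console.WriteLine(text.IndexOfAny('x', 'n'));                   // 2: earliest hit among the candidates
        Console.WriteLine(text.LastIndexOf('a'));                       // 5: searches from the end of the span
        Console.WriteLine(text.SequenceCompareTo("bananas".AsSpan()));  // negative: equal prefix, shorter length
    }
}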
using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Reflection; namespace ISLA2.Areas.HelpPage { /// <summary> /// This class will create an object of a given type and populate it with sample data. /// </summary> public class ObjectGenerator { internal const int DefaultCollectionSize = 2; private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator(); /// <summary> /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types: /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc. /// Complex types: POCO types. /// Nullables: <see cref="Nullable{T}"/>. /// Arrays: arrays of simple types or complex types. /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/> /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>. /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>. /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>An object of the given type.</returns> public object GenerateObject(Type type) { return GenerateObject(type, new Dictionary<Type, object>()); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")] private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences) { try { if (SimpleTypeObjectGenerator.CanGenerateObject(type)) { return SimpleObjectGenerator.GenerateObject(type); } if (type.IsArray) { return GenerateArray(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsGenericType) { return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IDictionary)) { return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences); } if (typeof(IDictionary).IsAssignableFrom(type)) { return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection)) { return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences); } if (typeof(IList).IsAssignableFrom(type)) { return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences); } if (type == typeof(IQueryable)) { return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences); } if (type.IsEnum) { return GenerateEnum(type); } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } } catch { // Returns null if anything fails return null; } return null; } private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences) { Type genericTypeDefinition = type.GetGenericTypeDefinition(); if (genericTypeDefinition == typeof(Nullable<>)) { return GenerateNullable(type, createdObjectReferences); } if 
(genericTypeDefinition == typeof(KeyValuePair<,>)) { return GenerateKeyValuePair(type, createdObjectReferences); } if (IsTuple(genericTypeDefinition)) { return GenerateTuple(type, createdObjectReferences); } Type[] genericArguments = type.GetGenericArguments(); if (genericArguments.Length == 1) { if (genericTypeDefinition == typeof(IList<>) || genericTypeDefinition == typeof(IEnumerable<>) || genericTypeDefinition == typeof(ICollection<>)) { Type collectionType = typeof(List<>).MakeGenericType(genericArguments); return GenerateCollection(collectionType, collectionSize, createdObjectReferences); } if (genericTypeDefinition == typeof(IQueryable<>)) { return GenerateQueryable(type, collectionSize, createdObjectReferences); } Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]); if (closedCollectionType.IsAssignableFrom(type)) { return GenerateCollection(type, collectionSize, createdObjectReferences); } } if (genericArguments.Length == 2) { if (genericTypeDefinition == typeof(IDictionary<,>)) { Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments); return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences); } Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]); if (closedDictionaryType.IsAssignableFrom(type)) { return GenerateDictionary(type, collectionSize, createdObjectReferences); } } if (type.IsPublic || type.IsNestedPublic) { return GenerateComplexObject(type, createdObjectReferences); } return null; } private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = type.GetGenericArguments(); object[] parameterValues = new object[genericArgs.Length]; bool failedToCreateTuple = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < genericArgs.Length; i++) { parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences); failedToCreateTuple &= parameterValues[i] == null; } if (failedToCreateTuple) { return null; } object result = Activator.CreateInstance(type, parameterValues); return result; } private static bool IsTuple(Type genericTypeDefinition) { return genericTypeDefinition == typeof(Tuple<>) || genericTypeDefinition == typeof(Tuple<,>) || genericTypeDefinition == typeof(Tuple<,,>) || genericTypeDefinition == typeof(Tuple<,,,>) || genericTypeDefinition == typeof(Tuple<,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,>) || genericTypeDefinition == typeof(Tuple<,,,,,,,>); } private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences) { Type[] genericArgs = keyValuePairType.GetGenericArguments(); Type typeK = genericArgs[0]; Type typeV = genericArgs[1]; ObjectGenerator objectGenerator = new ObjectGenerator(); object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences); object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences); if (keyObject == null && valueObject == null) { // Failed to create key and values return null; } object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject); return result; } private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = arrayType.GetElementType(); Array result = Array.CreateInstance(type, size); bool areAllElementsNull = true; ObjectGenerator objectGenerator 
= new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); result.SetValue(element, i); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences) { Type typeK = typeof(object); Type typeV = typeof(object); if (dictionaryType.IsGenericType) { Type[] genericArgs = dictionaryType.GetGenericArguments(); typeK = genericArgs[0]; typeV = genericArgs[1]; } object result = Activator.CreateInstance(dictionaryType); MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd"); MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey"); ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences); if (newKey == null) { // Cannot generate a valid key return null; } bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey }); if (!containsKey) { object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences); addMethod.Invoke(result, new object[] { newKey, newValue }); } } return result; } private static object GenerateEnum(Type enumType) { Array possibleValues = Enum.GetValues(enumType); if (possibleValues.Length > 0) { return possibleValues.GetValue(0); } return null; } private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences) { bool isGeneric = queryableType.IsGenericType; object list; if (isGeneric) { Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()); list = GenerateCollection(listType, size, createdObjectReferences); } else { list = GenerateArray(typeof(object[]), size, createdObjectReferences); } if (list == null) { return null; } if (isGeneric) { Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments()); MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType }); return asQueryableMethod.Invoke(null, new[] { list }); } return Queryable.AsQueryable((IEnumerable)list); } private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences) { Type type = collectionType.IsGenericType ? 
collectionType.GetGenericArguments()[0] : typeof(object); object result = Activator.CreateInstance(collectionType); MethodInfo addMethod = collectionType.GetMethod("Add"); bool areAllElementsNull = true; ObjectGenerator objectGenerator = new ObjectGenerator(); for (int i = 0; i < size; i++) { object element = objectGenerator.GenerateObject(type, createdObjectReferences); addMethod.Invoke(result, new object[] { element }); areAllElementsNull &= element == null; } if (areAllElementsNull) { return null; } return result; } private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences) { Type type = nullableType.GetGenericArguments()[0]; ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type, createdObjectReferences); } private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences) { object result = null; if (createdObjectReferences.TryGetValue(type, out result)) { // The object has been created already, just return it. This will handle the circular reference case. return result; } if (type.IsValueType) { result = Activator.CreateInstance(type); } else { ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes); if (defaultCtor == null) { // Cannot instantiate the type because it doesn't have a default constructor return null; } result = defaultCtor.Invoke(new object[0]); } createdObjectReferences.Add(type, result); SetPublicProperties(type, result, createdObjectReferences); SetPublicFields(type, result, createdObjectReferences); return result; } private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (PropertyInfo property in properties) { if (property.CanWrite) { object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences); property.SetValue(obj, propertyValue, null); } } } private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences) { FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance); ObjectGenerator objectGenerator = new ObjectGenerator(); foreach (FieldInfo field in fields) { object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences); field.SetValue(obj, fieldValue); } } private class SimpleTypeObjectGenerator { private long _index = 0; private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators(); [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")] private static Dictionary<Type, Func<long, object>> InitializeGenerators() { return new Dictionary<Type, Func<long, object>> { { typeof(Boolean), index => true }, { typeof(Byte), index => (Byte)64 }, { typeof(Char), index => (Char)65 }, { typeof(DateTime), index => DateTime.Now }, { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) }, { typeof(DBNull), index => DBNull.Value }, { typeof(Decimal), index => (Decimal)index }, { typeof(Double), index => (Double)(index + 0.1) }, { typeof(Guid), index => Guid.NewGuid() }, { typeof(Int16), index => (Int16)(index % Int16.MaxValue) }, { typeof(Int32), index => (Int32)(index % Int32.MaxValue) }, { typeof(Int64), index => (Int64)index }, { 
typeof(Object), index => new object() }, { typeof(SByte), index => (SByte)64 }, { typeof(Single), index => (Single)(index + 0.1) }, { typeof(String), index => { return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index); } }, { typeof(TimeSpan), index => { return TimeSpan.FromTicks(1234567); } }, { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) }, { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) }, { typeof(UInt64), index => (UInt64)index }, { typeof(Uri), index => { return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)); } }, }; } public static bool CanGenerateObject(Type type) { return DefaultGenerators.ContainsKey(type); } public object GenerateObject(Type type) { return DefaultGenerators[type](++_index); } } } }
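// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source above): exercises the public
// ObjectGenerator.GenerateObject(Type) entry point defined in ISLA2.Areas.HelpPage. The
// SampleDto type below is an assumption introduced purely for demonstration.
// ---------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using ISLA2.Areas.HelpPage;

public class SampleDto
{
    public int Id { get; set; }
    public string Name { get; set; }
    public List<DateTime> Dates { get; set; }
}

public static class ObjectGeneratorUsageExample
{
    public static void Main()
    {
        var generator = new ObjectGenerator();

        // Simple types, generic collections and complex (POCO) types are all supported;
        // types the generator cannot handle simply come back as null.
        object simple = generator.GenerateObject(typeof(Guid));
        object pairs = generator.GenerateObject(typeof(IDictionary<string, int>));
        object poco = generator.GenerateObject(typeof(SampleDto));

        Console.WriteLine(simple);                  // a freshly generated Guid
        Console.WriteLine(pairs != null);           // True - a Dictionary<string, int> with sample entries
        Console.WriteLine(((SampleDto)poco).Name);  // a generated value such as "sample string 2"
    }
}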
using System; using System.Collections; using System.IO; using NUnit.Framework; using Org.BouncyCastle.Asn1.X509; using Org.BouncyCastle.Utilities; using Org.BouncyCastle.Utilities.Encoders; using Org.BouncyCastle.Utilities.Test; namespace Org.BouncyCastle.Asn1.Tests { [TestFixture] public class X509NameTest : SimpleTest { private static readonly string[] subjects = { "C=AU,ST=Victoria,L=South Melbourne,O=Connect 4 Pty Ltd,OU=Webserver Team,CN=www2.connect4.com.au,[email protected]", "C=AU,ST=Victoria,L=South Melbourne,O=Connect 4 Pty Ltd,OU=Certificate Authority,CN=Connect 4 CA,[email protected]", "C=AU,ST=QLD,CN=SSLeay/rsa test cert", "C=US,O=National Aeronautics and Space Administration,SERIALNUMBER=16+CN=Steve Schoch", "[email protected],C=US,OU=Hewlett Packard Company (ISSL),CN=Paul A. Cooke", "O=Sun Microsystems Inc,CN=store.sun.com", "unstructuredAddress=192.168.1.33,unstructuredName=pixfirewall.ciscopix.com,CN=pixfirewall.ciscopix.com" }; public override string Name { get { return "X509Name"; } } private static X509Name FromBytes( byte[] bytes) { return X509Name.GetInstance(Asn1Object.FromByteArray(bytes)); } private IAsn1Convertible createEntryValue( DerObjectIdentifier oid, string value) { IDictionary attrs = new Hashtable(); attrs.Add(oid, value); IList ord = new ArrayList(); ord.Add(oid); X509Name name = new X509Name(ord, attrs); Asn1Sequence seq = (Asn1Sequence)name.ToAsn1Object(); Asn1Set set = (Asn1Set)seq[0]; seq = (Asn1Sequence)set[0]; return seq[1]; } private IAsn1Convertible createEntryValueFromString( DerObjectIdentifier oid, string val) { IDictionary attrs = new Hashtable(); attrs.Add(oid, val); IList ord = new ArrayList(attrs.Keys); X509Name name = new X509Name(new X509Name(ord, attrs).ToString()); Asn1Sequence seq = (Asn1Sequence) name.ToAsn1Object(); Asn1Set asn1Set = (Asn1Set) seq[0]; seq = (Asn1Sequence) asn1Set[0]; return seq[1]; } private void doTestEncodingPrintableString( DerObjectIdentifier oid, string value) { IAsn1Convertible converted = createEntryValue(oid, value); if (!(converted is DerPrintableString)) { Fail("encoding for " + oid + " not printable string"); } } private void doTestEncodingIA5String( DerObjectIdentifier oid, string value) { IAsn1Convertible converted = createEntryValue(oid, value); if (!(converted is DerIA5String)) { Fail("encoding for " + oid + " not IA5String"); } } private void doTestEncodingGeneralizedTime( DerObjectIdentifier oid, string val) { IAsn1Convertible converted = createEntryValue(oid, val); if (!(converted is DerGeneralizedTime)) { Fail("encoding for " + oid + " not GeneralizedTime"); } converted = createEntryValueFromString(oid, val); if (!(converted is DerGeneralizedTime)) { Fail("encoding for " + oid + " not GeneralizedTime"); } } public override void PerformTest() { doTestEncodingPrintableString(X509Name.C, "AU"); doTestEncodingPrintableString(X509Name.SerialNumber, "123456"); doTestEncodingPrintableString(X509Name.DnQualifier, "123456"); doTestEncodingIA5String(X509Name.EmailAddress, "[email protected]"); doTestEncodingIA5String(X509Name.DC, "test"); // correct encoding doTestEncodingGeneralizedTime(X509Name.DateOfBirth, "#180F32303032303132323132323232305A"); // compatability encoding doTestEncodingGeneralizedTime(X509Name.DateOfBirth, "20020122122220Z"); // // composite // IDictionary attrs = new Hashtable(); attrs.Add(X509Name.C, "AU"); attrs.Add(X509Name.O, "The Legion of the Bouncy Castle"); attrs.Add(X509Name.L, "Melbourne"); attrs.Add(X509Name.ST, "Victoria"); attrs.Add(X509Name.E, "[email protected]"); IList 
order = new ArrayList(); order.Add(X509Name.C); order.Add(X509Name.O); order.Add(X509Name.L); order.Add(X509Name.ST); order.Add(X509Name.E); X509Name name1 = new X509Name(order, attrs); if (!name1.Equivalent(name1)) { Fail("Failed same object test"); } if (!name1.Equivalent(name1, true)) { Fail("Failed same object test - in Order"); } X509Name name2 = new X509Name(order, attrs); if (!name1.Equivalent(name2)) { Fail("Failed same name test"); } if (!name1.Equivalent(name2, true)) { Fail("Failed same name test - in Order"); } if (name1.GetHashCode() != name2.GetHashCode()) { Fail("Failed same name test - in Order"); } IList ord1 = new ArrayList(); ord1.Add(X509Name.C); ord1.Add(X509Name.O); ord1.Add(X509Name.L); ord1.Add(X509Name.ST); ord1.Add(X509Name.E); IList ord2 = new ArrayList(); ord2.Add(X509Name.E); ord2.Add(X509Name.ST); ord2.Add(X509Name.L); ord2.Add(X509Name.O); ord2.Add(X509Name.C); name1 = new X509Name(ord1, attrs); name2 = new X509Name(ord2, attrs); if (!name1.Equivalent(name2)) { Fail("Failed reverse name test"); } // FIXME Sort out X509Name hashcode problem // if (name1.GetHashCode() != name2.GetHashCode()) // { // Fail("Failed reverse name test GetHashCode"); // } if (name1.Equivalent(name2, true)) { Fail("Failed reverse name test - in Order"); } if (!name1.Equivalent(name2, false)) { Fail("Failed reverse name test - in Order false"); } IList oids = name1.GetOidList(); if (!CompareVectors(oids, ord1)) { Fail("oid comparison test"); } IList val1 = new ArrayList(); val1.Add("AU"); val1.Add("The Legion of the Bouncy Castle"); val1.Add("Melbourne"); val1.Add("Victoria"); val1.Add("[email protected]"); name1 = new X509Name(ord1, val1); IList values = name1.GetValueList(); if (!CompareVectors(values, val1)) { Fail("value comparison test"); } ord2 = new ArrayList(); ord2.Add(X509Name.ST); ord2.Add(X509Name.ST); ord2.Add(X509Name.L); ord2.Add(X509Name.O); ord2.Add(X509Name.C); name1 = new X509Name(ord1, attrs); name2 = new X509Name(ord2, attrs); if (name1.Equivalent(name2)) { Fail("Failed different name test"); } ord2 = new ArrayList(); ord2.Add(X509Name.ST); ord2.Add(X509Name.L); ord2.Add(X509Name.O); ord2.Add(X509Name.C); name1 = new X509Name(ord1, attrs); name2 = new X509Name(ord2, attrs); if (name1.Equivalent(name2)) { Fail("Failed subset name test"); } compositeTest(); // // getValues test // ArrayList v1 = name1.GetValues(X509Name.O); if (v1.Count != 1 || !v1[0].Equals("The Legion of the Bouncy Castle")) { Fail("O test failed"); } ArrayList v2 = name1.GetValues(X509Name.L); if (v2.Count != 1 || !v2[0].Equals("Melbourne")) { Fail("L test failed"); } // // general subjects test // for (int i = 0; i != subjects.Length; i++) { X509Name name = new X509Name(subjects[i]); byte[] encodedName = name.GetEncoded(); name = X509Name.GetInstance(Asn1Object.FromByteArray(encodedName)); if (!name.ToString().Equals(subjects[i])) { Fail("Failed regeneration test " + i); } } // // sort test // X509Name unsorted = new X509Name("SERIALNUMBER=BBB + CN=AA"); if (!FromBytes(unsorted.GetEncoded()).ToString().Equals("CN=AA+SERIALNUMBER=BBB")) { Fail("Failed sort test 1"); } unsorted = new X509Name("CN=AA + SERIALNUMBER=BBB"); if (!FromBytes(unsorted.GetEncoded()).ToString().Equals("CN=AA+SERIALNUMBER=BBB")) { Fail("Failed sort test 2"); } unsorted = new X509Name("SERIALNUMBER=B + CN=AA"); if (!FromBytes(unsorted.GetEncoded()).ToString().Equals("SERIALNUMBER=B+CN=AA")) { Fail("Failed sort test 3"); } unsorted = new X509Name("CN=AA + SERIALNUMBER=B"); if 
(!FromBytes(unsorted.GetEncoded()).ToString().Equals("SERIALNUMBER=B+CN=AA")) { Fail("Failed sort test 4"); } // // equality tests // equalityTest(new X509Name("CN=The Legion"), new X509Name("CN=The Legion")); equalityTest(new X509Name("CN= The Legion"), new X509Name("CN=The Legion")); equalityTest(new X509Name("CN=The Legion "), new X509Name("CN=The Legion")); equalityTest(new X509Name("CN= The Legion "), new X509Name("CN=The Legion")); equalityTest(new X509Name("CN= the legion "), new X509Name("CN=The Legion")); // # test X509Name n1 = new X509Name("SERIALNUMBER=8,O=ABC,CN=ABC Class 3 CA,C=LT"); X509Name n2 = new X509Name("2.5.4.5=8,O=ABC,CN=ABC Class 3 CA,C=LT"); X509Name n3 = new X509Name("2.5.4.5=#130138,O=ABC,CN=ABC Class 3 CA,C=LT"); equalityTest(n1, n2); equalityTest(n2, n3); equalityTest(n3, n1); n1 = new X509Name(true, "2.5.4.5=#130138,CN=SSC Class 3 CA,O=UAB Skaitmeninio sertifikavimo centras,C=LT"); n2 = new X509Name(true, "SERIALNUMBER=#130138,CN=SSC Class 3 CA,O=UAB Skaitmeninio sertifikavimo centras,C=LT"); n3 = X509Name.GetInstance(Asn1Object.FromByteArray(Hex.Decode("3063310b3009060355040613024c54312f302d060355040a1326" + "55414220536b6169746d656e696e696f20736572746966696b6176696d6f2063656e74726173311730150603550403130e53534320436c6173732033204341310a30080603550405130138"))); equalityTest(n1, n2); equalityTest(n2, n3); equalityTest(n3, n1); n1 = new X509Name("SERIALNUMBER=8,O=XX,CN=ABC Class 3 CA,C=LT"); n2 = new X509Name("2.5.4.5=8,O=,CN=ABC Class 3 CA,C=LT"); if (n1.Equivalent(n2)) { Fail("empty inequality check failed"); } n1 = new X509Name("SERIALNUMBER=8,O=,CN=ABC Class 3 CA,C=LT"); n2 = new X509Name("2.5.4.5=8,O=,CN=ABC Class 3 CA,C=LT"); equalityTest(n1, n2); // // inequality to sequences // name1 = new X509Name("CN=The Legion"); if (name1.Equals(DerSequence.Empty)) { Fail("inequality test with sequence"); } if (name1.Equals(new DerSequence(DerSet.Empty))) { Fail("inequality test with sequence and set"); } Asn1EncodableVector v = new Asn1EncodableVector( new DerObjectIdentifier("1.1"), new DerObjectIdentifier("1.1")); if (name1.Equals(new DerSequence(new DerSet(new DerSet(v))))) { Fail("inequality test with sequence and bad set"); } // if (name1.Equals(new DerSequence(new DerSet(new DerSet(v))), true)) // { // Fail("inequality test with sequence and bad set"); // } try { X509Name.GetInstance(new DerSequence(new DerSet(new DerSet(v)))); Fail("GetInstance should reject bad sequence"); } catch (ArgumentException) { //expected } if (name1.Equals(new DerSequence(new DerSet(DerSequence.Empty)))) { Fail("inequality test with sequence and short sequence"); } // if (name1.Equals(new DerSequence(new DerSet(DerSequence.Empty)), true)) // { // Fail("inequality test with sequence and short sequence"); // } try { X509Name.GetInstance(new DerSequence(new DerSet(DerSequence.Empty))); Fail("GetInstance should reject short sequence"); } catch (ArgumentException) { //expected } v = new Asn1EncodableVector( new DerObjectIdentifier("1.1"), DerSequence.Empty); if (name1.Equals(new DerSequence(new DerSet(new DerSequence(v))))) { Fail("inequality test with sequence and bad sequence"); } if (name1.Equivalent(null)) { Fail("inequality test with null"); } if (name1.Equivalent(null, true)) { Fail("inequality test with null"); } // // this is contrived but it checks sorting of sets with equal elements // unsorted = new X509Name("CN=AA + CN=AA + CN=AA"); // // tagging test - only works if CHOICE implemented // /* ASN1TaggedObject tag = new DERTaggedObject(false, 1, new X509Name("CN=AA")); if 
(!tag.isExplicit()) { Fail("failed to explicitly tag CHOICE object"); } X509Name name = X509Name.getInstance(tag, false); if (!name.equals(new X509Name("CN=AA"))) { Fail("failed to recover tagged name"); } */ DerUtf8String testString = new DerUtf8String("The Legion of the Bouncy Castle"); byte[] encodedBytes = testString.GetEncoded(); string hexEncodedString = "#" + Hex.ToHexString(encodedBytes); DerUtf8String converted = (DerUtf8String) new X509DefaultEntryConverter().GetConvertedValue( X509Name.L , hexEncodedString); if (!converted.Equals(testString)) { Fail("Failed X509DefaultEntryConverter test"); } // // try escaped. // converted = (DerUtf8String) new X509DefaultEntryConverter().GetConvertedValue( X509Name.L , "\\" + hexEncodedString); if (!converted.Equals(new DerUtf8String(hexEncodedString))) { Fail("Failed X509DefaultEntryConverter test got " + converted + " expected: " + hexEncodedString); } // // try a weird value // X509Name n = new X509Name("CN=\\#nothex#string"); if (!n.ToString().Equals("CN=\\#nothex#string")) { Fail("# string not properly escaped."); } ArrayList vls = n.GetValues(X509Name.CN); if (vls.Count != 1 || !vls[0].Equals("#nothex#string")) { Fail("Escaped # not reduced properly"); } n = new X509Name("CN=\"a+b\""); vls = n.GetValues(X509Name.CN); if (vls.Count != 1 || !vls[0].Equals("a+b")) { Fail("Escaped + not reduced properly"); } n = new X509Name("CN=a\\+b"); vls = n.GetValues(X509Name.CN); if (vls.Count != 1 || !vls[0].Equals("a+b")) { Fail("Escaped + not reduced properly"); } if (!n.ToString().Equals("CN=a\\+b")) { Fail("+ in string not properly escaped."); } n = new X509Name("CN=a\\=b"); vls = n.GetValues(X509Name.CN); if (vls.Count != 1 || !vls[0].Equals("a=b")) { Fail("Escaped = not reduced properly"); } if (!n.ToString().Equals("CN=a\\=b")) { Fail("= in string not properly escaped."); } n = new X509Name("TELEPHONENUMBER=\"+61999999999\""); vls = n.GetValues(X509Name.TelephoneNumber); if (vls.Count != 1 || !vls[0].Equals("+61999999999")) { Fail("telephonenumber escaped + not reduced properly"); } n = new X509Name("TELEPHONENUMBER=\\+61999999999"); vls = n.GetValues(X509Name.TelephoneNumber); if (vls.Count != 1 || !vls[0].Equals("+61999999999")) { Fail("telephonenumber escaped + not reduced properly"); } n = new X509Name(@"TELEPHONENUMBER=\+61999999999"); vls = n.GetValues(X509Name.TelephoneNumber); if (vls.Count != 1 || !vls[0].Equals("+61999999999")) { Fail("telephonenumber escaped + not reduced properly"); } } private void compositeTest() { // // composite test // byte[] enc = Hex.Decode("305e310b300906035504061302415531283026060355040a0c1f546865204c6567696f6e206f662074686520426f756e637920436173746c653125301006035504070c094d656c626f75726e653011060355040b0c0a4173636f742056616c65"); X509Name n = X509Name.GetInstance(Asn1Object.FromByteArray(enc)); if (!n.ToString().Equals("C=AU,O=The Legion of the Bouncy Castle,L=Melbourne+OU=Ascot Vale")) { Fail("Failed composite to string test got: " + n.ToString()); } IDictionary symbols = X509Name.DefaultSymbols; if (!n.ToString(true, symbols).Equals("L=Melbourne+OU=Ascot Vale,O=The Legion of the Bouncy Castle,C=AU")) { Fail("Failed composite to string test got: " + n.ToString(true, symbols)); } n = new X509Name(true, "L=Melbourne+OU=Ascot Vale,O=The Legion of the Bouncy Castle,C=AU"); if (!n.ToString().Equals("C=AU,O=The Legion of the Bouncy Castle,L=Melbourne+OU=Ascot Vale")) { Fail("Failed composite to string reversal test got: " + n.ToString()); } n = new X509Name("C=AU, O=The Legion of the Bouncy Castle, L=Melbourne 
+ OU=Ascot Vale"); MemoryStream bOut = new MemoryStream(); Asn1OutputStream aOut = new Asn1OutputStream(bOut); aOut.WriteObject(n); byte[] enc2 = bOut.ToArray(); if (!Arrays.AreEqual(enc, enc2)) { Fail("Failed composite string to encoding test"); } // // dud name test - handle empty DN without barfing. // n = new X509Name("C=CH,O=,OU=dummy,[email protected]"); n = X509Name.GetInstance(Asn1Object.FromByteArray(n.GetEncoded())); } private void equalityTest( X509Name x509Name, X509Name x509Name1) { if (!x509Name.Equivalent(x509Name1)) { Fail("equality test failed for " + x509Name + " : " + x509Name1); } // FIXME Sort out X509Name hashcode problem // if (x509Name.GetHashCode() != x509Name1.GetHashCode()) // { // Fail("GetHashCode test failed for " + x509Name + " : " + x509Name1); // } if (!x509Name.Equivalent(x509Name1, true)) { Fail("equality test failed for " + x509Name + " : " + x509Name1); } } private bool CompareVectors( IList one, IList two) { if (one.Count != two.Count) return false; for (int i = 0; i < one.Count; ++i) { if (!one[i].Equals(two[i])) return false; } return true; } public static void Main( string[] args) { ITest test = new X509NameTest(); ITestResult result = test.Perform(); Console.WriteLine(result); } [Test] public void TestFunction() { string resultText = Perform().ToString(); Assert.AreEqual(Name + ": Okay", resultText); } } }
#if UNITY_WINRT && !UNITY_EDITOR && !UNITY_WP8 #region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections.Generic; using System.Numerics; using System.Reflection; using System.Collections; using System.Globalization; using System.Runtime.Serialization; using System.Runtime.Serialization.Formatters; using System.Text; using System.Linq; using Newtonsoft.Json.Serialization; namespace Newtonsoft.Json.Utilities { internal enum MemberTypes { Property, Field, Event, Method, Other } [Flags] internal enum BindingFlags { Default = 0, IgnoreCase = 1, DeclaredOnly = 2, Instance = 4, Static = 8, Public = 16, NonPublic = 32, FlattenHierarchy = 64, InvokeMethod = 256, CreateInstance = 512, GetField = 1024, SetField = 2048, GetProperty = 4096, SetProperty = 8192, PutDispProperty = 16384, ExactBinding = 65536, PutRefDispProperty = 32768, SuppressChangeType = 131072, OptionalParamBinding = 262144, IgnoreReturn = 16777216 } internal static class ReflectionUtils { public static readonly Type[] EmptyTypes; static ReflectionUtils() { EmptyTypes = new Type[0]; } public static bool IsVirtual(this PropertyInfo propertyInfo) { ValidationUtils.ArgumentNotNull(propertyInfo, "propertyInfo"); MethodInfo m = propertyInfo.GetGetMethod(); if (m != null && m.IsVirtual) return true; m = propertyInfo.GetSetMethod(); if (m != null && m.IsVirtual) return true; return false; } public static MethodInfo GetBaseDefinition(this PropertyInfo propertyInfo) { ValidationUtils.ArgumentNotNull(propertyInfo, "propertyInfo"); MethodInfo m = propertyInfo.GetGetMethod(); if (m != null) return m.GetBaseDefinition(); m = propertyInfo.GetSetMethod(); if (m != null) return m.GetBaseDefinition(); return null; } public static bool IsPublic(PropertyInfo property) { if (property.GetGetMethod() != null && property.GetGetMethod().IsPublic) return true; if (property.GetSetMethod() != null && property.GetSetMethod().IsPublic) return true; return false; } public static Type GetObjectType(object v) { return (v != null) ? v.GetType() : null; } public static string GetTypeName(Type t, FormatterAssemblyStyle assemblyFormat, SerializationBinder binder) { string fullyQualifiedTypeName; if (binder != null) { string assemblyName, typeName; binder.BindToName(t, out assemblyName, out typeName); fullyQualifiedTypeName = typeName + (assemblyName == null ? 
"" : ", " + assemblyName); } else { fullyQualifiedTypeName = t.AssemblyQualifiedName; } switch (assemblyFormat) { case FormatterAssemblyStyle.Simple: return RemoveAssemblyDetails(fullyQualifiedTypeName); case FormatterAssemblyStyle.Full: return fullyQualifiedTypeName; default: throw new ArgumentOutOfRangeException(); } } private static string RemoveAssemblyDetails(string fullyQualifiedTypeName) { StringBuilder builder = new StringBuilder(); // loop through the type name and filter out qualified assembly details from nested type names bool writingAssemblyName = false; bool skippingAssemblyDetails = false; for (int i = 0; i < fullyQualifiedTypeName.Length; i++) { char current = fullyQualifiedTypeName[i]; switch (current) { case '[': writingAssemblyName = false; skippingAssemblyDetails = false; builder.Append(current); break; case ']': writingAssemblyName = false; skippingAssemblyDetails = false; builder.Append(current); break; case ',': if (!writingAssemblyName) { writingAssemblyName = true; builder.Append(current); } else { skippingAssemblyDetails = true; } break; default: if (!skippingAssemblyDetails) builder.Append(current); break; } } return builder.ToString(); } public static bool HasDefaultConstructor(Type t, bool nonPublic) { ValidationUtils.ArgumentNotNull(t, "t"); if (t.IsValueType()) return true; return (GetDefaultConstructor(t, nonPublic) != null); } public static ConstructorInfo GetDefaultConstructor(Type t) { return GetDefaultConstructor(t, false); } public static ConstructorInfo GetDefaultConstructor(Type t, bool nonPublic) { BindingFlags bindingFlags = BindingFlags.Instance | BindingFlags.Public; if (nonPublic) bindingFlags = bindingFlags | BindingFlags.NonPublic; return t.GetConstructors(bindingFlags).SingleOrDefault(c => !c.GetParameters().Any()); } public static bool IsNullable(Type t) { ValidationUtils.ArgumentNotNull(t, "t"); if (t.IsValueType()) return IsNullableType(t); return true; } public static bool IsNullableType(Type t) { ValidationUtils.ArgumentNotNull(t, "t"); return (t.IsGenericType() && t.GetGenericTypeDefinition() == typeof(Nullable<>)); } public static Type EnsureNotNullableType(Type t) { return (IsNullableType(t)) ? 
Nullable.GetUnderlyingType(t) : t; } public static bool IsGenericDefinition(Type type, Type genericInterfaceDefinition) { if (!type.IsGenericType()) return false; Type t = type.GetGenericTypeDefinition(); return (t == genericInterfaceDefinition); } public static bool ImplementsGenericDefinition(Type type, Type genericInterfaceDefinition) { Type implementingType; return ImplementsGenericDefinition(type, genericInterfaceDefinition, out implementingType); } public static bool ImplementsGenericDefinition(Type type, Type genericInterfaceDefinition, out Type implementingType) { ValidationUtils.ArgumentNotNull(type, "type"); ValidationUtils.ArgumentNotNull(genericInterfaceDefinition, "genericInterfaceDefinition"); if (!genericInterfaceDefinition.IsInterface() || !genericInterfaceDefinition.IsGenericTypeDefinition()) throw new ArgumentNullException("'{0}' is not a generic interface definition.".FormatWith(CultureInfo.InvariantCulture, genericInterfaceDefinition)); if (type.IsInterface()) { if (type.IsGenericType()) { Type interfaceDefinition = type.GetGenericTypeDefinition(); if (genericInterfaceDefinition == interfaceDefinition) { implementingType = type; return true; } } } foreach (Type i in type.GetInterfaces()) { if (i.IsGenericType()) { Type interfaceDefinition = i.GetGenericTypeDefinition(); if (genericInterfaceDefinition == interfaceDefinition) { implementingType = i; return true; } } } implementingType = null; return false; } public static bool InheritsGenericDefinition(Type type, Type genericClassDefinition) { Type implementingType; return InheritsGenericDefinition(type, genericClassDefinition, out implementingType); } public static bool InheritsGenericDefinition(Type type, Type genericClassDefinition, out Type implementingType) { ValidationUtils.ArgumentNotNull(type, "type"); ValidationUtils.ArgumentNotNull(genericClassDefinition, "genericClassDefinition"); if (!genericClassDefinition.IsClass() || !genericClassDefinition.IsGenericTypeDefinition()) throw new ArgumentNullException("'{0}' is not a generic class definition.".FormatWith(CultureInfo.InvariantCulture, genericClassDefinition)); return InheritsGenericDefinitionInternal(type, genericClassDefinition, out implementingType); } private static bool InheritsGenericDefinitionInternal(Type currentType, Type genericClassDefinition, out Type implementingType) { if (currentType.IsGenericType()) { Type currentGenericClassDefinition = currentType.GetGenericTypeDefinition(); if (genericClassDefinition == currentGenericClassDefinition) { implementingType = currentType; return true; } } if (currentType.BaseType() == null) { implementingType = null; return false; } return InheritsGenericDefinitionInternal(currentType.BaseType(), genericClassDefinition, out implementingType); } /// <summary> /// Gets the type of the typed collection's items. 
/// </summary> /// <param name="type">The type.</param> /// <returns>The type of the typed collection's items.</returns> public static Type GetCollectionItemType(Type type) { ValidationUtils.ArgumentNotNull(type, "type"); Type genericListType; if (type.IsArray) { return type.GetElementType(); } else if (ImplementsGenericDefinition(type, typeof(IEnumerable<>), out genericListType)) { if (genericListType.IsGenericTypeDefinition()) throw new Exception("Type {0} is not a collection.".FormatWith(CultureInfo.InvariantCulture, type)); return genericListType.GetGenericArguments()[0]; } else if (typeof(IEnumerable).IsAssignableFrom(type)) { return null; } else { throw new Exception("Type {0} is not a collection.".FormatWith(CultureInfo.InvariantCulture, type)); } } public static void GetDictionaryKeyValueTypes(Type dictionaryType, out Type keyType, out Type valueType) { ValidationUtils.ArgumentNotNull(dictionaryType, "type"); Type genericDictionaryType; if (ImplementsGenericDefinition(dictionaryType, typeof(IDictionary<,>), out genericDictionaryType)) { if (genericDictionaryType.IsGenericTypeDefinition()) throw new Exception("Type {0} is not a dictionary.".FormatWith(CultureInfo.InvariantCulture, dictionaryType)); Type[] dictionaryGenericArguments = genericDictionaryType.GetGenericArguments(); keyType = dictionaryGenericArguments[0]; valueType = dictionaryGenericArguments[1]; return; } else if (typeof(IDictionary).IsAssignableFrom(dictionaryType)) { keyType = null; valueType = null; return; } else { throw new Exception("Type {0} is not a dictionary.".FormatWith(CultureInfo.InvariantCulture, dictionaryType)); } } /// <summary> /// Gets the member's underlying type. /// </summary> /// <param name="member">The member.</param> /// <returns>The underlying type of the member.</returns> public static Type GetMemberUnderlyingType(MemberInfo member) { ValidationUtils.ArgumentNotNull(member, "member"); switch (member.MemberType()) { case MemberTypes.Field: return ((FieldInfo)member).FieldType; case MemberTypes.Property: return ((PropertyInfo)member).PropertyType; case MemberTypes.Event: return ((EventInfo)member).EventHandlerType; default: throw new ArgumentException("MemberInfo must be of type FieldInfo, PropertyInfo or EventInfo", "member"); } } /// <summary> /// Determines whether the member is an indexed property. /// </summary> /// <param name="member">The member.</param> /// <returns> /// <c>true</c> if the member is an indexed property; otherwise, <c>false</c>. /// </returns> public static bool IsIndexedProperty(MemberInfo member) { ValidationUtils.ArgumentNotNull(member, "member"); PropertyInfo propertyInfo = member as PropertyInfo; if (propertyInfo != null) return IsIndexedProperty(propertyInfo); else return false; } /// <summary> /// Determines whether the property is an indexed property. /// </summary> /// <param name="property">The property.</param> /// <returns> /// <c>true</c> if the property is an indexed property; otherwise, <c>false</c>. /// </returns> public static bool IsIndexedProperty(PropertyInfo property) { ValidationUtils.ArgumentNotNull(property, "property"); return (property.GetIndexParameters().Length > 0); } /// <summary> /// Gets the member's value on the object. 
/// </summary> /// <param name="member">The member.</param> /// <param name="target">The target object.</param> /// <returns>The member's value on the object.</returns> public static object GetMemberValue(MemberInfo member, object target) { ValidationUtils.ArgumentNotNull(member, "member"); ValidationUtils.ArgumentNotNull(target, "target"); switch (member.MemberType()) { case MemberTypes.Field: return ((FieldInfo)member).GetValue(target); case MemberTypes.Property: try { return ((PropertyInfo)member).GetValue(target, null); } catch (TargetParameterCountException e) { throw new ArgumentException("MemberInfo '{0}' has index parameters".FormatWith(CultureInfo.InvariantCulture, member.Name), e); } default: throw new ArgumentException("MemberInfo '{0}' is not of type FieldInfo or PropertyInfo".FormatWith(CultureInfo.InvariantCulture, CultureInfo.InvariantCulture, member.Name), "member"); } } /// <summary> /// Sets the member's value on the target object. /// </summary> /// <param name="member">The member.</param> /// <param name="target">The target.</param> /// <param name="value">The value.</param> public static void SetMemberValue(MemberInfo member, object target, object value) { ValidationUtils.ArgumentNotNull(member, "member"); ValidationUtils.ArgumentNotNull(target, "target"); switch (member.MemberType()) { case MemberTypes.Field: ((FieldInfo)member).SetValue(target, value); break; case MemberTypes.Property: ((PropertyInfo)member).SetValue(target, value, null); break; default: throw new ArgumentException("MemberInfo '{0}' must be of type FieldInfo or PropertyInfo".FormatWith(CultureInfo.InvariantCulture, member.Name), "member"); } } /// <summary> /// Determines whether the specified MemberInfo can be read. /// </summary> /// <param name="member">The MemberInfo to determine whether can be read.</param> /// /// <param name="nonPublic">if set to <c>true</c> then allow the member to be gotten non-publicly.</param> /// <returns> /// <c>true</c> if the specified MemberInfo can be read; otherwise, <c>false</c>. /// </returns> public static bool CanReadMemberValue(MemberInfo member, bool nonPublic) { switch (member.MemberType()) { case MemberTypes.Field: FieldInfo fieldInfo = (FieldInfo)member; if (nonPublic) return true; else if (fieldInfo.IsPublic) return true; return false; case MemberTypes.Property: PropertyInfo propertyInfo = (PropertyInfo)member; if (!propertyInfo.CanRead) return false; if (nonPublic) return true; return (propertyInfo.GetGetMethod(nonPublic) != null); default: return false; } } /// <summary> /// Determines whether the specified MemberInfo can be set. /// </summary> /// <param name="member">The MemberInfo to determine whether can be set.</param> /// <param name="nonPublic">if set to <c>true</c> then allow the member to be set non-publicly.</param> /// <param name="canSetReadOnly">if set to <c>true</c> then allow the member to be set if read-only.</param> /// <returns> /// <c>true</c> if the specified MemberInfo can be set; otherwise, <c>false</c>. 
/// </returns> public static bool CanSetMemberValue(MemberInfo member, bool nonPublic, bool canSetReadOnly) { switch (member.MemberType()) { case MemberTypes.Field: FieldInfo fieldInfo = (FieldInfo)member; if (fieldInfo.IsInitOnly && !canSetReadOnly) return false; if (nonPublic) return true; else if (fieldInfo.IsPublic) return true; return false; case MemberTypes.Property: PropertyInfo propertyInfo = (PropertyInfo)member; if (!propertyInfo.CanWrite) return false; if (nonPublic) return true; return (propertyInfo.GetSetMethod(nonPublic) != null); default: return false; } } public static List<MemberInfo> GetFieldsAndProperties(Type type, BindingFlags bindingAttr) { List<MemberInfo> targetMembers = new List<MemberInfo>(); targetMembers.AddRange(GetFields(type, bindingAttr)); targetMembers.AddRange(GetProperties(type, bindingAttr)); // for some reason .NET returns multiple members when overriding a generic member on a base class // http://forums.msdn.microsoft.com/en-US/netfxbcl/thread/b5abbfee-e292-4a64-8907-4e3f0fb90cd9/ // filter members to only return the override on the topmost class // update: I think this is fixed in .NET 3.5 SP1 - leave this in for now... List<MemberInfo> distinctMembers = new List<MemberInfo>(targetMembers.Count); foreach (var groupedMember in targetMembers.GroupBy(m => m.Name)) { int count = groupedMember.Count(); IList<MemberInfo> members = groupedMember.ToList(); if (count == 1) { distinctMembers.Add(members.First()); } else { var resolvedMembers = members.Where(m => !IsOverridenGenericMember(m, bindingAttr) || m.Name == "Item"); distinctMembers.AddRange(resolvedMembers); } } return distinctMembers; } private static bool IsOverridenGenericMember(MemberInfo memberInfo, BindingFlags bindingAttr) { MemberTypes memberType = memberInfo.MemberType(); if (memberType != MemberTypes.Field && memberType != MemberTypes.Property) throw new ArgumentException("Member must be a field or property."); Type declaringType = memberInfo.DeclaringType; if (!declaringType.IsGenericType()) return false; Type genericTypeDefinition = declaringType.GetGenericTypeDefinition(); if (genericTypeDefinition == null) return false; MemberInfo[] members = genericTypeDefinition.GetMember(memberInfo.Name, bindingAttr); if (members.Length == 0) return false; Type memberUnderlyingType = GetMemberUnderlyingType(members[0]); if (!memberUnderlyingType.IsGenericParameter) return false; return true; } public static T GetAttribute<T>(object attributeProvider) where T : Attribute { return GetAttribute<T>(attributeProvider, true); } public static T GetAttribute<T>(object attributeProvider, bool inherit) where T : Attribute { T[] attributes = GetAttributes<T>(attributeProvider, inherit); return (attributes != null) ? 
attributes.SingleOrDefault() : null; } public static T[] GetAttributes<T>(object provider, bool inherit) where T : Attribute { if (provider is Type) return ((Type)provider).GetTypeInfo().GetCustomAttributes<T>(inherit).ToArray(); if (provider is Assembly) return ((Assembly)provider).GetCustomAttributes<T>().ToArray(); if (provider is MemberInfo) return ((MemberInfo)provider).GetCustomAttributes<T>(inherit).ToArray(); if (provider is Module) return ((Module)provider).GetCustomAttributes<T>().ToArray(); if (provider is ParameterInfo) return ((ParameterInfo)provider).GetCustomAttributes<T>(inherit).ToArray(); throw new Exception("Cannot get attributes from '{0}'.".FormatWith(CultureInfo.InvariantCulture, provider)); } public static void SplitFullyQualifiedTypeName(string fullyQualifiedTypeName, out string typeName, out string assemblyName) { int? assemblyDelimiterIndex = GetAssemblyDelimiterIndex(fullyQualifiedTypeName); if (assemblyDelimiterIndex != null) { typeName = fullyQualifiedTypeName.Substring(0, assemblyDelimiterIndex.Value).Trim(); assemblyName = fullyQualifiedTypeName.Substring(assemblyDelimiterIndex.Value + 1, fullyQualifiedTypeName.Length - assemblyDelimiterIndex.Value - 1).Trim(); } else { typeName = fullyQualifiedTypeName; assemblyName = null; } } private static int? GetAssemblyDelimiterIndex(string fullyQualifiedTypeName) { // we need to get the first comma following all surrounded in brackets because of generic types // e.g. System.Collections.Generic.Dictionary`2[[System.String, mscorlib,Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089],[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]], mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 int scope = 0; for (int i = 0; i < fullyQualifiedTypeName.Length; i++) { char current = fullyQualifiedTypeName[i]; switch (current) { case '[': scope++; break; case ']': scope--; break; case ',': if (scope == 0) return i; break; } } return null; } public static MemberInfo GetMemberInfoFromType(Type targetType, MemberInfo memberInfo) { const BindingFlags bindingAttr = BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic; switch (memberInfo.MemberType()) { case MemberTypes.Property: PropertyInfo propertyInfo = (PropertyInfo)memberInfo; Type[] types = propertyInfo.GetIndexParameters().Select(p => p.ParameterType).ToArray(); return targetType.GetProperty(propertyInfo.Name, bindingAttr, null, propertyInfo.PropertyType, types, null); default: return targetType.GetMember(memberInfo.Name, memberInfo.MemberType(), bindingAttr).SingleOrDefault(); } } public static IEnumerable<FieldInfo> GetFields(Type targetType, BindingFlags bindingAttr) { ValidationUtils.ArgumentNotNull(targetType, "targetType"); List<MemberInfo> fieldInfos = new List<MemberInfo>(targetType.GetFields(bindingAttr)); return fieldInfos.Cast<FieldInfo>(); } private static void GetChildPrivateFields(IList<MemberInfo> initialFields, Type targetType, BindingFlags bindingAttr) { // fix weirdness with private FieldInfos only being returned for the current Type // find base type fields and add them to result if ((bindingAttr & BindingFlags.NonPublic) != 0) { // modify flags to not search for public fields BindingFlags nonPublicBindingAttr = bindingAttr.RemoveFlag(BindingFlags.Public); while ((targetType = targetType.BaseType()) != null) { // filter out protected fields IEnumerable<MemberInfo> childPrivateFields = targetType.GetFields(nonPublicBindingAttr).Where(f => 
f.IsPrivate).Cast<MemberInfo>(); initialFields.AddRange(childPrivateFields); } } } public static IEnumerable<PropertyInfo> GetProperties(Type targetType, BindingFlags bindingAttr) { ValidationUtils.ArgumentNotNull(targetType, "targetType"); List<PropertyInfo> propertyInfos = new List<PropertyInfo>(targetType.GetProperties(bindingAttr)); GetChildPrivateProperties(propertyInfos, targetType, bindingAttr); // a base class private getter/setter will be inaccessable unless the property was gotten from the base class for (int i = 0; i < propertyInfos.Count; i++) { PropertyInfo member = propertyInfos[i]; if (member.DeclaringType != targetType) { PropertyInfo declaredMember = (PropertyInfo)GetMemberInfoFromType(member.DeclaringType, member); propertyInfos[i] = declaredMember; } } return propertyInfos; } public static BindingFlags RemoveFlag(this BindingFlags bindingAttr, BindingFlags flag) { return ((bindingAttr & flag) == flag) ? bindingAttr ^ flag : bindingAttr; } private static void GetChildPrivateProperties(IList<PropertyInfo> initialProperties, Type targetType, BindingFlags bindingAttr) { // fix weirdness with private PropertyInfos only being returned for the current Type // find base type properties and add them to result // also find base properties that have been hidden by subtype properties with the same name while ((targetType = targetType.BaseType()) != null) { foreach (PropertyInfo propertyInfo in targetType.GetProperties(bindingAttr)) { PropertyInfo subTypeProperty = propertyInfo; if (!IsPublic(subTypeProperty)) { // have to test on name rather than reference because instances are different // depending on the type that GetProperties was called on int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name); if (index == -1) { initialProperties.Add(subTypeProperty); } else { // replace nonpublic properties for a child, but gotten from // the parent with the one from the child // the property gotten from the child will have access to private getter/setter initialProperties[index] = subTypeProperty; } } else { if (!subTypeProperty.IsVirtual()) { int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name && p.DeclaringType == subTypeProperty.DeclaringType); if (index == -1) initialProperties.Add(subTypeProperty); } else { int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name && p.IsVirtual() && p.GetBaseDefinition() != null && p.GetBaseDefinition().DeclaringType.IsAssignableFrom(subTypeProperty.DeclaringType)); if (index == -1) initialProperties.Add(subTypeProperty); } } } } } public static bool IsMethodOverridden(Type currentType, Type methodDeclaringType, string method) { bool isMethodOverriden = currentType.GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) .Any(info => info.Name == method && // check that the method overrides the original on DynamicObjectProxy info.DeclaringType != methodDeclaringType && info.GetBaseDefinition().DeclaringType == methodDeclaringType ); return isMethodOverriden; } public static object GetDefaultValue(Type type) { if (!type.IsValueType()) return null; switch (ConvertUtils.GetTypeCode(type)) { case PrimitiveTypeCode.Boolean: return false; case PrimitiveTypeCode.Char: case PrimitiveTypeCode.SByte: case PrimitiveTypeCode.Byte: case PrimitiveTypeCode.Int16: case PrimitiveTypeCode.UInt16: case PrimitiveTypeCode.Int32: case PrimitiveTypeCode.UInt32: return 0; case PrimitiveTypeCode.Int64: case PrimitiveTypeCode.UInt64: return 0L; case PrimitiveTypeCode.Single: return 0f; case 
PrimitiveTypeCode.Double: return 0.0; case PrimitiveTypeCode.Decimal: return 0m; case PrimitiveTypeCode.DateTime: return new DateTime(); case PrimitiveTypeCode.BigInteger: return new BigInteger(); case PrimitiveTypeCode.Guid: return new Guid(); case PrimitiveTypeCode.DateTimeOffset: return new DateTimeOffset(); } if (IsNullable(type)) return null; // possibly use IL initobj for perf here? return Activator.CreateInstance(type); } } } #endif
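// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source above): ReflectionUtils is internal and
// compiled only for the UNITY_WINRT target, so this standalone re-implementation shows the
// bracket-aware scan behind GetAssemblyDelimiterIndex / SplitFullyQualifiedTypeName: split
// "TypeName, AssemblyName" on the first comma that is not nested inside generic-argument brackets.
// ---------------------------------------------------------------------------------------------
using System;

public static class TypeNameSplitExample
{
    public static void Split(string fullyQualifiedTypeName, out string typeName, out string assemblyName)
    {
        int scope = 0;
        for (int i = 0; i < fullyQualifiedTypeName.Length; i++)
        {
            switch (fullyQualifiedTypeName[i])
            {
                case '[': scope++; break;
                case ']': scope--; break;
                case ',':
                    if (scope == 0)
                    {
                        // The first top-level comma separates the type name from the assembly details.
                        typeName = fullyQualifiedTypeName.Substring(0, i).Trim();
                        assemblyName = fullyQualifiedTypeName.Substring(i + 1).Trim();
                        return;
                    }
                    break;
            }
        }

        typeName = fullyQualifiedTypeName;
        assemblyName = null;
    }

    public static void Main()
    {
        Split(typeof(System.Collections.Generic.List<int>).AssemblyQualifiedName, out string type, out string assembly);
        Console.WriteLine(type);     // System.Collections.Generic.List`1[[System.Int32, ...]] - nested generic commas are kept with the type
        Console.WriteLine(assembly); // the assembly details that followed the first top-level comma
    }
}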
using UnityEngine; namespace VolumetricLines { /// <summary> /// Render a line strip of volumetric lines /// /// Based on the Volumetric lines algorithm by Sebastien Hillaire /// http://sebastien.hillaire.free.fr/index.php?option=com_content&view=article&id=57&Itemid=74 /// /// Thread in the Unity3D Forum: /// http://forum.unity3d.com/threads/181618-Volumetric-lines /// /// Unity3D port by Johannes Unterguggenberger /// [email protected] /// /// Thanks to Michael Probst for support during development. /// /// Thanks for bugfixes and improvements to Unity Forum User "Mistale" /// http://forum.unity3d.com/members/102350-Mistale /// /// /// Shader code optimization and cleanup by Lex Darlog (aka DRL) /// http://forum.unity3d.com/members/lex-drl.67487/ /// /// </summary> [RequireComponent(typeof(MeshFilter))] [RequireComponent(typeof(MeshRenderer))] [ExecuteInEditMode] public class VolumetricLineStripBehavior : MonoBehaviour { // Used to compute the average value of all the Vector3's components: static readonly Vector3 Average = new Vector3(1f/3f, 1f/3f, 1f/3f); #region private variables /// <summary> /// Template material to be used /// </summary> [SerializeField] public Material m_templateMaterial; /// <summary> /// Set to false in order to change the material's properties as specified in this script. /// Set to true in order to *initially* leave the material's properties as they are in the template material. /// </summary> [SerializeField] private bool m_doNotOverwriteTemplateMaterialProperties; /// <summary> /// Line Color /// </summary> [SerializeField] private Color m_lineColor; /// <summary> /// The width of the line /// </summary> [SerializeField] private float m_lineWidth; /// <summary> /// Light saber factor /// </summary> [SerializeField] [Range(0.0f, 1.0f)] private float m_lightSaberFactor; /// <summary> /// This GameObject's specific material /// </summary> private Material m_material; /// <summary> /// This GameObject's mesh filter /// </summary> private MeshFilter m_meshFilter; /// <summary> /// The vertices of the line /// </summary> [SerializeField] private Vector3[] m_lineVertices; #endregion #region properties /// <summary> /// Gets or sets the tmplate material. /// Setting this will only have an impact once. /// Subsequent changes will be ignored. /// </summary> public Material TemplateMaterial { get { return m_templateMaterial; } set { m_templateMaterial = value; } } /// <summary> /// Gets or sets whether or not the template material properties /// should be used (false) or if the properties of this MonoBehavior /// instance should be used (true, default). /// Setting this will only have an impact once, and then only if it /// is set before TemplateMaterial has been assigned. 
/// </summary> public bool DoNotOverwriteTemplateMaterialProperties { get { return m_doNotOverwriteTemplateMaterialProperties; } set { m_doNotOverwriteTemplateMaterialProperties = value; } } /// <summary> /// Get or set the line color of this volumetric line's material /// </summary> public Color LineColor { get { return m_lineColor; } set { CreateMaterial(); if (null != m_material) { m_lineColor = value; m_material.color = m_lineColor; } } } /// <summary> /// Get or set the line width of this volumetric line's material /// </summary> public float LineWidth { get { return m_lineWidth; } set { CreateMaterial(); if (null != m_material) { m_lineWidth = value; m_material.SetFloat("_LineWidth", m_lineWidth); } UpdateBounds(); } } /// <summary> /// Get or set the light saber factor of this volumetric line's material /// </summary> public float LightSaberFactor { get { return m_lightSaberFactor; } set { CreateMaterial(); if (null != m_material) { m_lightSaberFactor = value; m_material.SetFloat("_LightSaberFactor", m_lightSaberFactor); } } } /// <summary> /// Gets the vertices of this line strip /// </summary> public Vector3[] LineVertices { get { return m_lineVertices; } } #endregion #region methods /// <summary> /// Creates a copy of the template material for this instance /// </summary> private void CreateMaterial() { if (null == m_material || null == GetComponent<MeshRenderer>().sharedMaterial) { if (null != m_templateMaterial) { m_material = Material.Instantiate(m_templateMaterial); GetComponent<MeshRenderer>().sharedMaterial = m_material; SetAllMaterialProperties(); } else { m_material = GetComponent<MeshRenderer>().sharedMaterial; } } } /// <summary> /// Destroys the copy of the template material which was used for this instance /// </summary> private void DestroyMaterial() { if (null != m_material) { DestroyImmediate(m_material); m_material = null; } } /// <summary> /// Calculates the (approximated) _LineScale factor based on the object's scale. /// </summary> private float CalculateLineScale() { return Vector3.Dot(transform.lossyScale, Average); } /// <summary> /// Updates the line scaling of this volumetric line based on the current object scaling. /// </summary> public void UpdateLineScale() { if (null != m_material) { m_material.SetFloat("_LineScale", CalculateLineScale()); } } /// <summary> /// Sets all material properties (color, width, start-, endpos) /// </summary> private void SetAllMaterialProperties() { UpdateLineVertices(m_lineVertices); if (null != m_material) { if (!m_doNotOverwriteTemplateMaterialProperties) { m_material.color = m_lineColor; m_material.SetFloat("_LineWidth", m_lineWidth); m_material.SetFloat("_LightSaberFactor", m_lightSaberFactor); } UpdateLineScale(); } } /// <summary> /// Calculate the bounds of this line based on the coordinates of the line vertices, /// the line width, and the scaling of the object. 
/// </summary> private Bounds CalculateBounds() { var maxWidth = Mathf.Max(transform.lossyScale.x, transform.lossyScale.y, transform.lossyScale.z); var scaledLineWidth = maxWidth * LineWidth * 0.5f; var scaledLineWidthVec = new Vector3(scaledLineWidth, scaledLineWidth, scaledLineWidth); Debug.Assert(m_lineVertices.Length > 0); if (m_lineVertices.Length == 0) { return new Bounds(); } var min = m_lineVertices[0]; var max = m_lineVertices[0]; for (int i = 1; i < m_lineVertices.Length; ++i) { min = new Vector3( Mathf.Min(min.x, m_lineVertices[i].x), Mathf.Min(min.y, m_lineVertices[i].y), Mathf.Min(min.z, m_lineVertices[i].z) ); max = new Vector3( Mathf.Max(max.x, m_lineVertices[i].x), Mathf.Max(max.y, m_lineVertices[i].y), Mathf.Max(max.z, m_lineVertices[i].z) ); } return new Bounds { min = min - scaledLineWidthVec, max = max + scaledLineWidthVec }; } /// <summary> /// Updates the bounds of this line according to the current properties, /// which there are: coordinates of the line vertices, line width, scaling of the object. /// </summary> public void UpdateBounds() { if (null != m_meshFilter) { var mesh = m_meshFilter.sharedMesh; Debug.Assert(null != mesh); if (null != mesh) { mesh.bounds = CalculateBounds(); } } } /// <summary> /// Updates the vertices of this VolumetricLineStrip. /// This is an expensive operation. /// </summary> /// <param name="newSetOfVertices">new set of vertices for the line strip.</param> public void UpdateLineVertices(Vector3[] newSetOfVertices) { if (null == newSetOfVertices) { return; } if (newSetOfVertices.Length < 3) { Debug.LogError("Add at least 3 vertices to the VolumetricLineStrip"); return; } m_lineVertices = newSetOfVertices; // fill vertex positions, and indices // 2 for each position, + 2 for the start, + 2 for the end Vector3[] vertexPositions = new Vector3[m_lineVertices.Length * 2 + 4]; // there are #vertices - 2 faces, and 3 indices each int[] indices = new int[(m_lineVertices.Length * 2 + 2) * 3]; int v = 0; int x = 0; vertexPositions[v++] = m_lineVertices[0]; vertexPositions[v++] = m_lineVertices[0]; for (int i = 0; i < m_lineVertices.Length; ++i) { vertexPositions[v++] = m_lineVertices[i]; vertexPositions[v++] = m_lineVertices[i]; indices[x++] = v - 2; indices[x++] = v - 3; indices[x++] = v - 4; indices[x++] = v - 1; indices[x++] = v - 2; indices[x++] = v - 3; } vertexPositions[v++] = m_lineVertices[m_lineVertices.Length - 1]; vertexPositions[v++] = m_lineVertices[m_lineVertices.Length - 1]; indices[x++] = v - 2; indices[x++] = v - 3; indices[x++] = v - 4; indices[x++] = v - 1; indices[x++] = v - 2; indices[x++] = v - 3; // fill texture coordinates and vertex offsets Vector2[] texCoords = new Vector2[vertexPositions.Length]; Vector2[] vertexOffsets = new Vector2[vertexPositions.Length]; int t = 0; int o = 0; texCoords[t++] = new Vector2(1.0f, 0.0f); texCoords[t++] = new Vector2(1.0f, 1.0f); texCoords[t++] = new Vector2(0.5f, 0.0f); texCoords[t++] = new Vector2(0.5f, 1.0f); vertexOffsets[o++] = new Vector2(1.0f, -1.0f); vertexOffsets[o++] = new Vector2(1.0f, 1.0f); vertexOffsets[o++] = new Vector2(0.0f, -1.0f); vertexOffsets[o++] = new Vector2(0.0f, 1.0f); for (int i = 1; i < m_lineVertices.Length - 1; ++i) { if ((i & 0x1) == 0x1) { texCoords[t++] = new Vector2(0.5f, 0.0f); texCoords[t++] = new Vector2(0.5f, 1.0f); } else { texCoords[t++] = new Vector2(0.5f, 0.0f); texCoords[t++] = new Vector2(0.5f, 1.0f); } vertexOffsets[o++] = new Vector2(0.0f, 1.0f); vertexOffsets[o++] = new Vector2(0.0f, -1.0f); } texCoords[t++] = new Vector2(0.5f, 0.0f); 
texCoords[t++] = new Vector2(0.5f, 1.0f); texCoords[t++] = new Vector2(0.0f, 0.0f); texCoords[t++] = new Vector2(0.0f, 1.0f); vertexOffsets[o++] = new Vector2(0.0f, 1.0f); vertexOffsets[o++] = new Vector2(0.0f, -1.0f); vertexOffsets[o++] = new Vector2(1.0f, 1.0f); vertexOffsets[o++] = new Vector2(1.0f, -1.0f); // fill previous and next positions Vector3[] prevPositions = new Vector3[vertexPositions.Length]; Vector4[] nextPositions = new Vector4[vertexPositions.Length]; int p = 0; int n = 0; prevPositions[p++] = m_lineVertices[1]; prevPositions[p++] = m_lineVertices[1]; prevPositions[p++] = m_lineVertices[1]; prevPositions[p++] = m_lineVertices[1]; nextPositions[n++] = m_lineVertices[1]; nextPositions[n++] = m_lineVertices[1]; nextPositions[n++] = m_lineVertices[1]; nextPositions[n++] = m_lineVertices[1]; for (int i = 1; i < m_lineVertices.Length - 1; ++i) { prevPositions[p++] = m_lineVertices[i - 1]; prevPositions[p++] = m_lineVertices[i - 1]; nextPositions[n++] = m_lineVertices[i + 1]; nextPositions[n++] = m_lineVertices[i + 1]; } prevPositions[p++] = m_lineVertices[m_lineVertices.Length - 2]; prevPositions[p++] = m_lineVertices[m_lineVertices.Length - 2]; prevPositions[p++] = m_lineVertices[m_lineVertices.Length - 2]; prevPositions[p++] = m_lineVertices[m_lineVertices.Length - 2]; nextPositions[n++] = m_lineVertices[m_lineVertices.Length - 2]; nextPositions[n++] = m_lineVertices[m_lineVertices.Length - 2]; nextPositions[n++] = m_lineVertices[m_lineVertices.Length - 2]; nextPositions[n++] = m_lineVertices[m_lineVertices.Length - 2]; if (null != m_meshFilter) { var mesh = m_meshFilter.sharedMesh; Debug.Assert(null != mesh); if (null != mesh) { mesh.SetIndices(null, MeshTopology.Triangles, 0); // Reset before setting again to prevent a unity error message. mesh.vertices = vertexPositions; mesh.normals = prevPositions; mesh.tangents = nextPositions; mesh.uv = texCoords; mesh.uv2 = vertexOffsets; mesh.SetIndices(indices, MeshTopology.Triangles, 0); UpdateBounds(); } } } #endregion #region event functions void Start () { Mesh mesh = new Mesh(); m_meshFilter = GetComponent<MeshFilter>(); m_meshFilter.mesh = mesh; UpdateLineVertices(m_lineVertices); CreateMaterial(); } void OnDestroy() { if (null != m_meshFilter) { if (Application.isPlaying) { Mesh.Destroy(m_meshFilter.sharedMesh); } else // avoid "may not be called from edit mode" error { Mesh.DestroyImmediate(m_meshFilter.sharedMesh); } m_meshFilter.sharedMesh = null; } DestroyMaterial(); } void Update() { if (transform.hasChanged) { UpdateLineScale(); UpdateBounds(); } } void OnValidate() { // This function is called when the script is loaded or a value is changed in the inspector (Called in the editor only). // => make sure, everything stays up-to-date if(string.IsNullOrEmpty(gameObject.scene.name) || string.IsNullOrEmpty(gameObject.scene.path)) { return; // ...but not if a Prefab is selected! (Only if we're using it within a scene.) } CreateMaterial(); SetAllMaterialProperties(); UpdateBounds(); } void OnDrawGizmos() { Gizmos.color = Color.green; if (null == m_lineVertices) { return; } for (int i=0; i < m_lineVertices.Length - 1; ++i) { Gizmos.DrawLine(gameObject.transform.TransformPoint(m_lineVertices[i]), gameObject.transform.TransformPoint(m_lineVertices[i+1])); } } #endregion } }
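// --- Usage sketch (added illustration, not part of the original source) ---
// A minimal example of driving the volumetric line strip component above from another script.
// The component type name "VolumetricLineStripBehavior" is an assumption (the class declaration
// is not visible in this excerpt); the template material is normally assigned in the inspector.
using UnityEngine;

public class LineStripExample : MonoBehaviour
{
    void Start()
    {
        // The component expects a MeshFilter and MeshRenderer on the same GameObject.
        var strip = GetComponent<VolumetricLineStripBehavior>(); // assumed type name

        // UpdateLineVertices rebuilds the mesh and requires at least 3 vertices (see the guard above).
        strip.UpdateLineVertices(new[]
        {
            new Vector3(0f, 0f, 0f),
            new Vector3(1f, 1f, 0f),
            new Vector3(2f, 0f, 0f),
        });

        strip.LineColor = Color.cyan;   // instantiates the template material (if set) and updates its color
        strip.LineWidth = 0.3f;         // updates "_LineWidth" and recalculates the mesh bounds
        strip.LightSaberFactor = 0.9f;  // updates "_LightSaberFactor"
    }
}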
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net.Mail; using System.Net.Mime; using System.Text; using System.Text.RegularExpressions; namespace Kato { /// <summary> /// Stores an incoming SMTP Message. /// </summary> public class SmtpMessageData { private static readonly string DoubleNewline = Environment.NewLine + Environment.NewLine; private const string SubjectHeader = "subject"; private const string SenderHeader = "sender"; private const string ReplyToHeader = "reply-to"; private const string ContentDispositionHeader = "content-disposition"; private const string AttachmentDisposition = "attachment"; private const string ContentTypeHeader = "content-type"; private const string ContentDispositionFileName = "filename"; private const string ContentTypeBoundry = "boundary"; private const string ContentTypeMultiPartMixed = "multipart/mixed"; private const string ContentTypeMultiPartAlternative = "multipart/alternative"; private const string ContentTypeHtml = "text/html"; private const string ContentTypeName = "name"; private const string ContentTransferEncodingHeader = "content-transfer-encoding"; private static readonly string[] StandardHeaders = { SubjectHeader, SenderHeader, ReplyToHeader }; private readonly List<MailAddress> _recipientAddresses; private readonly StringBuilder _data; /// <summary> /// Creates a new message. /// </summary> public SmtpMessageData() { _recipientAddresses = new List<MailAddress>(); _data = new StringBuilder(); } public void AddData(string data) { _data.Append(data); } /// <summary> /// The email address of the person /// that sent this email. /// </summary> public MailAddress FromAddress { get; set; } /// <summary>Addes an address to the recipient list.</summary> public void AddToAddress(MailAddress address) { _recipientAddresses.Add(address); } public MailMessage ParseMessage() { var headers = ParseHeaders(_data.ToString()); var message = new MailMessage { From = headers.ContainsKey("from") ? new MailAddress(headers["from"].Value) : FromAddress }; var to = headers.ContainsKey("to") ? ParseAddresses(headers["to"].Value) : Enumerable.Empty<MailAddress>().ToList(); var cc = headers.ContainsKey("cc") ? ParseAddresses(headers["cc"].Value) : Enumerable.Empty<MailAddress>().ToList(); if (to.Any() || cc.Any()) { to.ForEach(x => message.To.Add(x)); cc.ForEach(x => message.CC.Add(x)); _recipientAddresses.Select(x => x.Address) .Except(to.Select(x => x.Address) .Concat(cc.Select(x => x.Address))) .ToList().ForEach(x => message.Bcc.Add(x)); } else _recipientAddresses.ForEach(x => message.To.Add(x)); if (headers.ContainsKey(SubjectHeader)) message.Subject = headers[SubjectHeader].Value; if (headers.ContainsKey(SenderHeader)) message.Sender = new MailAddress(headers[SenderHeader].Value); if (headers.ContainsKey(ReplyToHeader)) message.ReplyToList.Add(new MailAddress(headers[ReplyToHeader].Value)); foreach (var header in headers.Where(x => StandardHeaders.All(y => x.Key != y))) message.Headers[header.Key] = header.Value.RawValue; var parts = ParseMessageParts(_data.ToString(), headers); if (parts.Any()) { var bodies = parts.Where(x => !x.Headers.ContainsKey(ContentDispositionHeader) || x.Headers[ContentDispositionHeader].Value != AttachmentDisposition); if (bodies.Any()) { var body = bodies.First(); message.Body = !body.Headers.ContainsKey(ContentTransferEncodingHeader) ? 
body.Data : Encoding.UTF8.GetString(DecodeData(body.Data, body.Headers[ContentTransferEncodingHeader].Value)); message.IsBodyHtml = (body.Headers.ContainsKey(ContentTypeHeader) && body.Headers[ContentTypeHeader].Value == ContentTypeHtml) || (headers.ContainsKey(ContentTypeHeader) && headers[ContentTypeHeader].Value == ContentTypeHtml); } if (headers.ContainsKey(ContentTypeHeader) && headers[ContentTypeHeader].Value == ContentTypeMultiPartAlternative && bodies.Count() > 1) { bodies.Skip(1).ToList().ForEach(x => message.AlternateViews.Add( AlternateView.CreateAlternateViewFromString(x.Data, new ContentType(x.Headers[ContentTypeHeader].RawValue)))); } parts.Where(x => x.Headers.ContainsKey(ContentDispositionHeader) && x.Headers[ContentDispositionHeader].Value == AttachmentDisposition) .Select(x => new { Data = new MemoryStream(DecodeData(x.Data, x.Headers[ContentTransferEncodingHeader].Value)), Filename = x.Headers[ContentDispositionHeader].SubValues.ContainsKey(ContentDispositionFileName) ? x.Headers[ContentDispositionHeader].SubValues[ContentDispositionFileName] : null, Name = x.Headers[ContentTypeHeader].SubValues.ContainsKey(ContentTypeName) ? x.Headers[ContentTypeHeader].SubValues[ContentTypeName] : null, MediaType = x.Headers[ContentTypeHeader].RawValue }) .ToList().ForEach(x => message.Attachments.Add(new Attachment(x.Data, x.Filename ?? x.Name, x.MediaType))); } return message; } private static byte[] DecodeData(string data, string encoding) { switch (encoding) { case "base64": return Convert.FromBase64String(data); case "7bit": case "8bit": case "binary": return Encoding.ASCII.GetBytes(data); case "quoted-printable": return Encoding.UTF8.GetBytes(Attachment.CreateAttachmentFromString("", data).Name); default: throw new Exception($"Content transfer encoding of type {encoding} not supported."); } } private static IDictionary<string, Header> ParseHeaders(string data) { var headers = new Dictionary<string, Header>(); var parts = Regex.Split(data, DoubleNewline); var headerString = parts[0] + DoubleNewline; var headerKeyCollectionMatch = Regex.Matches(headerString, @"^(?<key>\S*):", RegexOptions.Multiline); foreach(Match headerKeyMatch in headerKeyCollectionMatch) { var key = headerKeyMatch.Result("${key}"); var valueMatch = Regex.Match(headerString, key + @":(?<value>.*?)\r\n[\S\r]", RegexOptions.Singleline); if (!valueMatch.Success) continue; var value = valueMatch.Result("${value}").Trim(); value = Regex.Replace(value, "\r\n", ""); value = Regex.Replace(value, @"\s+", " "); var subValues = value.Contains(";") ? 
value.Split(';').Skip(1).Select(x => x.Split(new [] {'='}, 2)).ToDictionary(x => x[0].Trim().ToLower(), x => x[1].Trim(new [] {'\"'}).Trim()) : new Dictionary<string, string>(); key = key.ToLower().Trim(); headers[key] = new Header { Name = key, Value = value.Split(';').First(), RawValue = value.Trim(), SubValues = subValues }; } return headers; } private static List<MessagePart> ParseMessageParts(string data, IDictionary<string, Header> headers) { var messageParts = new List<MessagePart>(); if (headers.ContainsKey(ContentTypeHeader) && (headers[ContentTypeHeader].Value == ContentTypeMultiPartMixed || headers[ContentTypeHeader].Value == ContentTypeMultiPartAlternative) && headers[ContentTypeHeader].SubValues.ContainsKey(ContentTypeBoundry)) { var partRegex = new Regex(string.Format("--{0}(?<part>.*?)--{0}", Regex.Escape(headers[ContentTypeHeader].SubValues[ContentTypeBoundry])), RegexOptions.Singleline); Match match = partRegex.Match(data); while (match.Success) { var parts = Regex.Split(match.Result("${part}").Trim(), DoubleNewline); messageParts.Add(new MessagePart { Headers = ParseHeaders(parts[0]), Data = parts.Length > 1 ? parts[1].Trim() : null }); match = partRegex.Match(data, match.Index + 1); } } else { var parts = Regex.Split(data, DoubleNewline); if (parts.Length >= 2) messageParts.Add(new MessagePart { Headers = headers, Data = parts[1].Trim() }); } return messageParts; } private static readonly Regex AddressRegex = new Regex("(.*?<.+?@.+?>)(,)?", RegexOptions.IgnoreCase); private static List<MailAddress> ParseAddresses(string addresses) { return AddressRegex.Matches(addresses).Cast<Match>() .Select(x => new MailAddress(x.Groups[1].Value)).ToList(); } private class Header { public string Name { get; set; } public string Value { get; set; } public string RawValue { get; set; } public Dictionary<string, string> SubValues { get; set; } } private class MessagePart { public IDictionary<string, Header> Headers { get; set; } public string Data { get; set; } } } }
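// --- Usage sketch (added illustration, not part of the original source) ---
// Shows how an SMTP session handler might feed the raw DATA payload into SmtpMessageData
// and obtain a System.Net.Mail.MailMessage. The addresses and message text are illustrative only.
using System;
using System.Net.Mail;
using Kato;

static class SmtpMessageDataExample
{
    static void Main()
    {
        var messageData = new SmtpMessageData
        {
            // Envelope sender (MAIL FROM); used when the message carries no "From:" header.
            FromAddress = new MailAddress("sender@example.com")
        };

        // Envelope recipients (RCPT TO); recipients not named in To/Cc end up in Bcc.
        messageData.AddToAddress(new MailAddress("alice@example.com"));

        // Raw message as received after the DATA command (SMTP uses CRLF line endings;
        // the parser above also splits on Environment.NewLine, so this sketch assumes CRLF).
        messageData.AddData(
            "From: sender@example.com\r\n" +
            "To: Alice <alice@example.com>\r\n" +
            "Subject: Hello\r\n" +
            "\r\n" +
            "This is the message body.\r\n");

        MailMessage message = messageData.ParseMessage();
        Console.WriteLine($"{message.Subject}: {message.Body}");
    }
}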
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // ------------------------------------------------------------------------------ // Changes to this file must follow the http://aka.ms/api-review process. // ------------------------------------------------------------------------------ namespace System { public static partial class MemoryExtensions { public static System.ReadOnlyMemory<char> AsMemory(this string text) { throw null; } public static System.ReadOnlyMemory<char> AsMemory(this string text, int start) { throw null; } public static System.ReadOnlyMemory<char> AsMemory(this string text, int start, int length) { throw null; } public static System.Memory<T> AsMemory<T>(this System.ArraySegment<T> segment) { throw null; } public static System.Memory<T> AsMemory<T>(this System.ArraySegment<T> segment, int start) { throw null; } public static System.Memory<T> AsMemory<T>(this System.ArraySegment<T> segment, int start, int length) { throw null; } public static System.Memory<T> AsMemory<T>(this T[] array) { throw null; } public static System.Memory<T> AsMemory<T>(this T[] array, int start) { throw null; } public static System.Memory<T> AsMemory<T>(this T[] array, int start, int length) { throw null; } public static System.ReadOnlySpan<char> AsSpan(this string text) { throw null; } public static System.ReadOnlySpan<char> AsSpan(this string text, int start) { throw null; } public static System.ReadOnlySpan<char> AsSpan(this string text, int start, int length) { throw null; } public static System.Span<T> AsSpan<T>(this System.ArraySegment<T> segment) { throw null; } public static System.Span<T> AsSpan<T>(this System.ArraySegment<T> segment, int start) { throw null; } public static System.Span<T> AsSpan<T>(this System.ArraySegment<T> segment, int start, int length) { throw null; } public static System.Span<T> AsSpan<T>(this T[] array) { throw null; } public static System.Span<T> AsSpan<T>(this T[] array, int start) { throw null; } public static System.Span<T> AsSpan<T>(this T[] array, int start, int length) { throw null; } public static int BinarySearch<T>(this System.ReadOnlySpan<T> span, System.IComparable<T> comparable) { throw null; } public static int BinarySearch<T>(this System.Span<T> span, System.IComparable<T> comparable) { throw null; } public static int BinarySearch<T, TComparer>(this System.ReadOnlySpan<T> span, T value, TComparer comparer) where TComparer : System.Collections.Generic.IComparer<T> { throw null; } public static int BinarySearch<T, TComparable>(this System.ReadOnlySpan<T> span, TComparable comparable) where TComparable : System.IComparable<T> { throw null; } public static int BinarySearch<T, TComparer>(this System.Span<T> span, T value, TComparer comparer) where TComparer : System.Collections.Generic.IComparer<T> { throw null; } public static int BinarySearch<T, TComparable>(this System.Span<T> span, TComparable comparable) where TComparable : System.IComparable<T> { throw null; } public static int CompareTo(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> other, System.StringComparison comparisonType) { throw null; } public static bool Contains(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> value, System.StringComparison comparisonType) { throw null; } public static bool Contains<T>(this System.ReadOnlySpan<T> span, T value) where T : System.IEquatable<T> { throw null; } public 
static bool Contains<T>(this System.Span<T> span, T value) where T : System.IEquatable<T> { throw null; } public static void CopyTo<T>(this T[] source, System.Memory<T> destination) { } public static void CopyTo<T>(this T[] source, System.Span<T> destination) { } public static bool EndsWith(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> value, System.StringComparison comparisonType) { throw null; } public static bool EndsWith<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static bool EndsWith<T>(this System.Span<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static bool Equals(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> other, System.StringComparison comparisonType) { throw null; } public static int IndexOf(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> value, System.StringComparison comparisonType) { throw null; } public static int IndexOfAny<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> values) where T : System.IEquatable<T> { throw null; } public static int IndexOfAny<T>(this System.ReadOnlySpan<T> span, T value0, T value1) where T : System.IEquatable<T> { throw null; } public static int IndexOfAny<T>(this System.ReadOnlySpan<T> span, T value0, T value1, T value2) where T : System.IEquatable<T> { throw null; } public static int IndexOfAny<T>(this System.Span<T> span, System.ReadOnlySpan<T> values) where T : System.IEquatable<T> { throw null; } public static int IndexOfAny<T>(this System.Span<T> span, T value0, T value1) where T : System.IEquatable<T> { throw null; } public static int IndexOfAny<T>(this System.Span<T> span, T value0, T value1, T value2) where T : System.IEquatable<T> { throw null; } public static int IndexOf<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static int IndexOf<T>(this System.ReadOnlySpan<T> span, T value) where T : System.IEquatable<T> { throw null; } public static int IndexOf<T>(this System.Span<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static int IndexOf<T>(this System.Span<T> span, T value) where T : System.IEquatable<T> { throw null; } public static bool IsWhiteSpace(this System.ReadOnlySpan<char> span) { throw null; } public static int LastIndexOf(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> value, System.StringComparison comparisonType) { throw null; } public static int LastIndexOfAny<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> values) where T : System.IEquatable<T> { throw null; } public static int LastIndexOfAny<T>(this System.ReadOnlySpan<T> span, T value0, T value1) where T : System.IEquatable<T> { throw null; } public static int LastIndexOfAny<T>(this System.ReadOnlySpan<T> span, T value0, T value1, T value2) where T : System.IEquatable<T> { throw null; } public static int LastIndexOfAny<T>(this System.Span<T> span, System.ReadOnlySpan<T> values) where T : System.IEquatable<T> { throw null; } public static int LastIndexOfAny<T>(this System.Span<T> span, T value0, T value1) where T : System.IEquatable<T> { throw null; } public static int LastIndexOfAny<T>(this System.Span<T> span, T value0, T value1, T value2) where T : System.IEquatable<T> { throw null; } public static int LastIndexOf<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static 
int LastIndexOf<T>(this System.ReadOnlySpan<T> span, T value) where T : System.IEquatable<T> { throw null; } public static int LastIndexOf<T>(this System.Span<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static int LastIndexOf<T>(this System.Span<T> span, T value) where T : System.IEquatable<T> { throw null; } public static bool Overlaps<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> other) { throw null; } public static bool Overlaps<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> other, out int elementOffset) { throw null; } public static bool Overlaps<T>(this System.Span<T> span, System.ReadOnlySpan<T> other) { throw null; } public static bool Overlaps<T>(this System.Span<T> span, System.ReadOnlySpan<T> other, out int elementOffset) { throw null; } public static void Reverse<T>(this System.Span<T> span) { } public static int SequenceCompareTo<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> other) where T : System.IComparable<T> { throw null; } public static int SequenceCompareTo<T>(this System.Span<T> span, System.ReadOnlySpan<T> other) where T : System.IComparable<T> { throw null; } public static bool SequenceEqual<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> other) where T : System.IEquatable<T> { throw null; } public static bool SequenceEqual<T>(this System.Span<T> span, System.ReadOnlySpan<T> other) where T : System.IEquatable<T> { throw null; } public static bool StartsWith(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> value, System.StringComparison comparisonType) { throw null; } public static bool StartsWith<T>(this System.ReadOnlySpan<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static bool StartsWith<T>(this System.Span<T> span, System.ReadOnlySpan<T> value) where T : System.IEquatable<T> { throw null; } public static int ToLower(this System.ReadOnlySpan<char> source, System.Span<char> destination, System.Globalization.CultureInfo culture) { throw null; } public static int ToLowerInvariant(this System.ReadOnlySpan<char> source, System.Span<char> destination) { throw null; } public static int ToUpper(this System.ReadOnlySpan<char> source, System.Span<char> destination, System.Globalization.CultureInfo culture) { throw null; } public static int ToUpperInvariant(this System.ReadOnlySpan<char> source, System.Span<char> destination) { throw null; } public static System.ReadOnlySpan<char> Trim(this System.ReadOnlySpan<char> span) { throw null; } public static System.ReadOnlySpan<char> Trim(this System.ReadOnlySpan<char> span, char trimChar) { throw null; } public static System.ReadOnlySpan<char> Trim(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> trimChars) { throw null; } public static System.ReadOnlySpan<char> TrimEnd(this System.ReadOnlySpan<char> span) { throw null; } public static System.ReadOnlySpan<char> TrimEnd(this System.ReadOnlySpan<char> span, char trimChar) { throw null; } public static System.ReadOnlySpan<char> TrimEnd(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> trimChars) { throw null; } public static System.ReadOnlySpan<char> TrimStart(this System.ReadOnlySpan<char> span) { throw null; } public static System.ReadOnlySpan<char> TrimStart(this System.ReadOnlySpan<char> span, char trimChar) { throw null; } public static System.ReadOnlySpan<char> TrimStart(this System.ReadOnlySpan<char> span, System.ReadOnlySpan<char> trimChars) { throw null; } } public readonly partial struct 
SequencePosition : System.IEquatable<System.SequencePosition> { private readonly object _dummy; private readonly int _dummyPrimitive; public SequencePosition(object @object, int integer) { throw null; } [System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))] public override bool Equals(object obj) { throw null; } public bool Equals(System.SequencePosition other) { throw null; } [System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))] public override int GetHashCode() { throw null; } [System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))] public int GetInteger() { throw null; } [System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))] public object GetObject() { throw null; } } } namespace System.Buffers { public static partial class BuffersExtensions { public static void CopyTo<T>(this in System.Buffers.ReadOnlySequence<T> source, System.Span<T> destination) { } public static System.Nullable<System.SequencePosition> PositionOf<T>(this in System.Buffers.ReadOnlySequence<T> source, T value) where T : System.IEquatable<T> { throw null; } public static T[] ToArray<T>(this in System.Buffers.ReadOnlySequence<T> sequence) { throw null; } public static void Write<T>(this System.Buffers.IBufferWriter<T> writer, System.ReadOnlySpan<T> value) { } } public partial interface IBufferWriter<T> { void Advance(int count); System.Memory<T> GetMemory(int sizeHint = 0); System.Span<T> GetSpan(int sizeHint = 0); } public abstract partial class MemoryPool<T> : System.IDisposable { protected MemoryPool() { } public abstract int MaxBufferSize { get; } public static System.Buffers.MemoryPool<T> Shared { get { throw null; } } public void Dispose() { } protected abstract void Dispose(bool disposing); public abstract System.Buffers.IMemoryOwner<T> Rent(int minBufferSize = -1); } public enum OperationStatus { DestinationTooSmall = 1, Done = 0, InvalidData = 3, NeedMoreData = 2, } public abstract partial class ReadOnlySequenceSegment<T> { protected ReadOnlySequenceSegment() { } public System.ReadOnlyMemory<T> Memory { get { throw null; } protected set { } } public System.Buffers.ReadOnlySequenceSegment<T> Next { get { throw null; } protected set { } } public long RunningIndex { get { throw null; } protected set { } } } public readonly partial struct ReadOnlySequence<T> { private readonly object _dummy; private readonly int _dummyPrimitive; public static readonly System.Buffers.ReadOnlySequence<T> Empty; public ReadOnlySequence(System.Buffers.ReadOnlySequenceSegment<T> startSegment, int startIndex, System.Buffers.ReadOnlySequenceSegment<T> endSegment, int endIndex) { throw null; } public ReadOnlySequence(System.ReadOnlyMemory<T> memory) { throw null; } public ReadOnlySequence(T[] array) { throw null; } public ReadOnlySequence(T[] array, int start, int length) { throw null; } public System.SequencePosition End { get { throw null; } } public System.ReadOnlyMemory<T> First { get { throw null; } } public bool IsEmpty { get { throw null; } } public bool IsSingleSegment { get { throw null; } } public long Length { get { throw null; } } public System.SequencePosition Start { get { throw null; } } public System.Buffers.ReadOnlySequence<T>.Enumerator GetEnumerator() { throw null; } public System.SequencePosition GetPosition(long offset) { throw null; } public System.SequencePosition GetPosition(long offset, System.SequencePosition origin) { throw null; } public 
System.Buffers.ReadOnlySequence<T> Slice(int start, int length) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(int start, System.SequencePosition end) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(long start) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(long start, long length) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(long start, System.SequencePosition end) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(System.SequencePosition start) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(System.SequencePosition start, int length) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(System.SequencePosition start, long length) { throw null; } public System.Buffers.ReadOnlySequence<T> Slice(System.SequencePosition start, System.SequencePosition end) { throw null; } public override string ToString() { throw null; } public bool TryGet(ref System.SequencePosition position, out System.ReadOnlyMemory<T> memory, bool advance = true) { throw null; } public partial struct Enumerator { private object _dummy; private int _dummyPrimitive; public Enumerator(in System.Buffers.ReadOnlySequence<T> sequence) { throw null; } public System.ReadOnlyMemory<T> Current { get { throw null; } } public bool MoveNext() { throw null; } } } public readonly partial struct StandardFormat : System.IEquatable<System.Buffers.StandardFormat> { private readonly int _dummyPrimitive; public const byte MaxPrecision = (byte)99; public const byte NoPrecision = (byte)255; public StandardFormat(char symbol, byte precision = (byte)255) { throw null; } public bool HasPrecision { get { throw null; } } public bool IsDefault { get { throw null; } } public byte Precision { get { throw null; } } public char Symbol { get { throw null; } } public bool Equals(System.Buffers.StandardFormat other) { throw null; } public override bool Equals(object obj) { throw null; } public override int GetHashCode() { throw null; } public static bool operator ==(System.Buffers.StandardFormat left, System.Buffers.StandardFormat right) { throw null; } public static implicit operator System.Buffers.StandardFormat (char symbol) { throw null; } public static bool operator !=(System.Buffers.StandardFormat left, System.Buffers.StandardFormat right) { throw null; } public static System.Buffers.StandardFormat Parse(System.ReadOnlySpan<char> format) { throw null; } public static System.Buffers.StandardFormat Parse(string format) { throw null; } public override string ToString() { throw null; } } } namespace System.Buffers.Binary { public static partial class BinaryPrimitives { public static short ReadInt16BigEndian(System.ReadOnlySpan<byte> source) { throw null; } public static short ReadInt16LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } public static int ReadInt32BigEndian(System.ReadOnlySpan<byte> source) { throw null; } public static int ReadInt32LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } public static long ReadInt64BigEndian(System.ReadOnlySpan<byte> source) { throw null; } public static long ReadInt64LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static ushort ReadUInt16BigEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static ushort ReadUInt16LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static uint 
ReadUInt32BigEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static uint ReadUInt32LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static ulong ReadUInt64BigEndian(System.ReadOnlySpan<byte> source) { throw null; } [System.CLSCompliantAttribute(false)] public static ulong ReadUInt64LittleEndian(System.ReadOnlySpan<byte> source) { throw null; } public static byte ReverseEndianness(byte value) { throw null; } public static short ReverseEndianness(short value) { throw null; } public static int ReverseEndianness(int value) { throw null; } public static long ReverseEndianness(long value) { throw null; } [System.CLSCompliantAttribute(false)] public static sbyte ReverseEndianness(sbyte value) { throw null; } [System.CLSCompliantAttribute(false)] public static ushort ReverseEndianness(ushort value) { throw null; } [System.CLSCompliantAttribute(false)] public static uint ReverseEndianness(uint value) { throw null; } [System.CLSCompliantAttribute(false)] public static ulong ReverseEndianness(ulong value) { throw null; } public static bool TryReadInt16BigEndian(System.ReadOnlySpan<byte> source, out short value) { throw null; } public static bool TryReadInt16LittleEndian(System.ReadOnlySpan<byte> source, out short value) { throw null; } public static bool TryReadInt32BigEndian(System.ReadOnlySpan<byte> source, out int value) { throw null; } public static bool TryReadInt32LittleEndian(System.ReadOnlySpan<byte> source, out int value) { throw null; } public static bool TryReadInt64BigEndian(System.ReadOnlySpan<byte> source, out long value) { throw null; } public static bool TryReadInt64LittleEndian(System.ReadOnlySpan<byte> source, out long value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt16BigEndian(System.ReadOnlySpan<byte> source, out ushort value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt16LittleEndian(System.ReadOnlySpan<byte> source, out ushort value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt32BigEndian(System.ReadOnlySpan<byte> source, out uint value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt32LittleEndian(System.ReadOnlySpan<byte> source, out uint value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt64BigEndian(System.ReadOnlySpan<byte> source, out ulong value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryReadUInt64LittleEndian(System.ReadOnlySpan<byte> source, out ulong value) { throw null; } public static bool TryWriteInt16BigEndian(System.Span<byte> destination, short value) { throw null; } public static bool TryWriteInt16LittleEndian(System.Span<byte> destination, short value) { throw null; } public static bool TryWriteInt32BigEndian(System.Span<byte> destination, int value) { throw null; } public static bool TryWriteInt32LittleEndian(System.Span<byte> destination, int value) { throw null; } public static bool TryWriteInt64BigEndian(System.Span<byte> destination, long value) { throw null; } public static bool TryWriteInt64LittleEndian(System.Span<byte> destination, long value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt16BigEndian(System.Span<byte> destination, ushort value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt16LittleEndian(System.Span<byte> destination, 
ushort value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt32BigEndian(System.Span<byte> destination, uint value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt32LittleEndian(System.Span<byte> destination, uint value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt64BigEndian(System.Span<byte> destination, ulong value) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryWriteUInt64LittleEndian(System.Span<byte> destination, ulong value) { throw null; } public static void WriteInt16BigEndian(System.Span<byte> destination, short value) { } public static void WriteInt16LittleEndian(System.Span<byte> destination, short value) { } public static void WriteInt32BigEndian(System.Span<byte> destination, int value) { } public static void WriteInt32LittleEndian(System.Span<byte> destination, int value) { } public static void WriteInt64BigEndian(System.Span<byte> destination, long value) { } public static void WriteInt64LittleEndian(System.Span<byte> destination, long value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt16BigEndian(System.Span<byte> destination, ushort value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt16LittleEndian(System.Span<byte> destination, ushort value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt32BigEndian(System.Span<byte> destination, uint value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt32LittleEndian(System.Span<byte> destination, uint value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt64BigEndian(System.Span<byte> destination, ulong value) { } [System.CLSCompliantAttribute(false)] public static void WriteUInt64LittleEndian(System.Span<byte> destination, ulong value) { } } } namespace System.Buffers.Text { public static partial class Base64 { public static System.Buffers.OperationStatus DecodeFromUtf8(System.ReadOnlySpan<byte> utf8, System.Span<byte> bytes, out int bytesConsumed, out int bytesWritten, bool isFinalBlock = true) { throw null; } public static System.Buffers.OperationStatus DecodeFromUtf8InPlace(System.Span<byte> buffer, out int bytesWritten) { throw null; } public static System.Buffers.OperationStatus EncodeToUtf8(System.ReadOnlySpan<byte> bytes, System.Span<byte> utf8, out int bytesConsumed, out int bytesWritten, bool isFinalBlock = true) { throw null; } public static System.Buffers.OperationStatus EncodeToUtf8InPlace(System.Span<byte> buffer, int dataLength, out int bytesWritten) { throw null; } public static int GetMaxDecodedFromUtf8Length(int length) { throw null; } public static int GetMaxEncodedToUtf8Length(int length) { throw null; } } public static partial class Utf8Formatter { public static bool TryFormat(bool value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(byte value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(System.DateTime value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(System.DateTimeOffset value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat 
format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(decimal value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(double value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(System.Guid value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(short value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(int value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(long value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryFormat(sbyte value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(float value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } public static bool TryFormat(System.TimeSpan value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryFormat(ushort value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryFormat(uint value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryFormat(ulong value, System.Span<byte> destination, out int bytesWritten, System.Buffers.StandardFormat format = default(System.Buffers.StandardFormat)) { throw null; } } public static partial class Utf8Parser { public static bool TryParse(System.ReadOnlySpan<byte> source, out bool value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out byte value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out System.DateTime value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out System.DateTimeOffset value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out decimal value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out double value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out System.Guid value, out int 
bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out short value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out int value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out long value, out int bytesConsumed, char standardFormat = '\0') { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryParse(System.ReadOnlySpan<byte> source, out sbyte value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out float value, out int bytesConsumed, char standardFormat = '\0') { throw null; } public static bool TryParse(System.ReadOnlySpan<byte> source, out System.TimeSpan value, out int bytesConsumed, char standardFormat = '\0') { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryParse(System.ReadOnlySpan<byte> source, out ushort value, out int bytesConsumed, char standardFormat = '\0') { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryParse(System.ReadOnlySpan<byte> source, out uint value, out int bytesConsumed, char standardFormat = '\0') { throw null; } [System.CLSCompliantAttribute(false)] public static bool TryParse(System.ReadOnlySpan<byte> source, out ulong value, out int bytesConsumed, char standardFormat = '\0') { throw null; } } } namespace System.Runtime.InteropServices { public static partial class MemoryMarshal { public static System.ReadOnlySpan<byte> AsBytes<T>(System.ReadOnlySpan<T> span) where T : struct { throw null; } public static System.Span<byte> AsBytes<T>(System.Span<T> span) where T : struct { throw null; } public static System.Memory<T> AsMemory<T>(System.ReadOnlyMemory<T> memory) { throw null; } public static ref readonly T AsRef<T>(System.ReadOnlySpan<byte> span) where T : struct { throw null; } public static ref T AsRef<T>(System.Span<byte> span) where T : struct { throw null; } public static System.ReadOnlySpan<TTo> Cast<TFrom, TTo>(System.ReadOnlySpan<TFrom> span) where TFrom : struct where TTo : struct { throw null; } public static System.Span<TTo> Cast<TFrom, TTo>(System.Span<TFrom> span) where TFrom : struct where TTo : struct { throw null; } public static System.Memory<T> CreateFromPinnedArray<T>(T[] array, int start, int length) { throw null; } public static System.ReadOnlySpan<T> CreateReadOnlySpan<T>(ref T reference, int length) { throw null; } public static System.Span<T> CreateSpan<T>(ref T reference, int length) { throw null; } public static ref T GetReference<T>(System.ReadOnlySpan<T> span) { throw null; } public static ref T GetReference<T>(System.Span<T> span) { throw null; } public static T Read<T>(System.ReadOnlySpan<byte> source) where T : struct { throw null; } public static System.Collections.Generic.IEnumerable<T> ToEnumerable<T>(System.ReadOnlyMemory<T> memory) { throw null; } public static bool TryGetArray<T>(System.ReadOnlyMemory<T> memory, out System.ArraySegment<T> segment) { throw null; } public static bool TryGetMemoryManager<T, TManager>(System.ReadOnlyMemory<T> memory, out TManager manager) where TManager : System.Buffers.MemoryManager<T> { throw null; } public static bool TryGetMemoryManager<T, TManager>(System.ReadOnlyMemory<T> memory, out TManager manager, out int start, out int length) where TManager : System.Buffers.MemoryManager<T> { throw null; } public 
static bool TryGetString(System.ReadOnlyMemory<char> memory, out string text, out int start, out int length) { throw null; } public static bool TryRead<T>(System.ReadOnlySpan<byte> source, out T value) where T : struct { throw null; } public static bool TryWrite<T>(System.Span<byte> destination, ref T value) where T : struct { throw null; } public static void Write<T>(System.Span<byte> destination, ref T value) where T : struct { } } public static partial class SequenceMarshal { public static bool TryGetArray<T>(System.Buffers.ReadOnlySequence<T> sequence, out System.ArraySegment<T> segment) { throw null; } public static bool TryGetReadOnlyMemory<T>(System.Buffers.ReadOnlySequence<T> sequence, out System.ReadOnlyMemory<T> memory) { throw null; } public static bool TryGetReadOnlySequenceSegment<T>(System.Buffers.ReadOnlySequence<T> sequence, out System.Buffers.ReadOnlySequenceSegment<T> startSegment, out int startIndex, out System.Buffers.ReadOnlySequenceSegment<T> endSegment, out int endIndex) { throw null; } } }
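// --- Usage sketch (added illustration, not part of the original source) ---
// The declarations above form a reference-assembly surface (all bodies are "throw null"),
// so they only describe the API shape. The snippet below illustrates calling a few of the
// declared members against the real System.Memory implementation.
using System;
using System.Buffers.Binary;
using System.Buffers.Text;

static class SpanApiExample
{
    static void Main()
    {
        // MemoryExtensions.AsSpan + Trim: slice a string without allocating substrings.
        ReadOnlySpan<char> trimmed = "  hello  ".AsSpan().Trim();
        Console.WriteLine(trimmed.ToString()); // "hello"

        // BinaryPrimitives: explicit-endianness integer encoding into a stack buffer.
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32BigEndian(buffer, 0x01020304);
        Console.WriteLine(BinaryPrimitives.ReadInt32BigEndian(buffer) == 0x01020304); // True

        // Utf8Parser: parse a value directly from UTF-8 bytes without creating a string.
        ReadOnlySpan<byte> utf8 = new byte[] { (byte)'4', (byte)'2' };
        if (Utf8Parser.TryParse(utf8, out int parsed, out int bytesConsumed))
            Console.WriteLine($"{parsed} ({bytesConsumed} bytes)"); // 42 (2 bytes)
    }
}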
using System; using System.Linq; using Microsoft.CodeAnalysis.Diagnostics; using System.Collections.Generic; using Microsoft.CodeAnalysis; using System.Threading; using Microsoft.CodeAnalysis.CodeFixes; using Microsoft.CodeAnalysis.Text; using System.Text; using Microsoft.CodeAnalysis.Host; using Microsoft.CodeAnalysis.CodeActions; using RefactoringEssentials.Tests.Common; using Xunit; namespace RefactoringEssentials.Tests { public abstract class DiagnosticTestBase { static MetadataReference mscorlib; static MetadataReference systemAssembly; static MetadataReference systemXmlLinq; static MetadataReference systemCore; private static MetadataReference visualBasic; internal static MetadataReference[] DefaultMetadataReferences; static Dictionary<string, CodeFixProvider> providers = new Dictionary<string, CodeFixProvider>(); static DiagnosticTestBase() { try { mscorlib = MetadataReference.CreateFromFile(typeof(Console).Assembly.Location); systemAssembly = MetadataReference.CreateFromFile(typeof(System.ComponentModel.BrowsableAttribute).Assembly.Location); systemXmlLinq = MetadataReference.CreateFromFile(typeof(System.Xml.Linq.XElement).Assembly.Location); systemCore = MetadataReference.CreateFromFile(typeof(Enumerable).Assembly.Location); visualBasic = MetadataReference.CreateFromFile(typeof(Microsoft.VisualBasic.Constants).Assembly.Location); DefaultMetadataReferences = new[] { mscorlib, systemAssembly, systemCore, systemXmlLinq, visualBasic }; foreach (var provider in typeof(DiagnosticAnalyzerCategories).Assembly.GetTypes().Where(t => t.GetCustomAttributes(typeof(ExportCodeFixProviderAttribute), false).Length > 0)) { //var attr = (ExportCodeFixProviderAttribute)provider.GetCustomAttributes(typeof(ExportCodeFixProviderAttribute), false) [0]; var codeFixProvider = (CodeFixProvider)Activator.CreateInstance(provider); foreach (var id in codeFixProvider.FixableDiagnosticIds) { if (providers.ContainsKey(id)) { Console.WriteLine("Provider " + id + " already added."); continue; } providers.Add(id, codeFixProvider); } } } catch (Exception e) { Console.WriteLine(e); } } public static string GetUniqueName() { return Guid.NewGuid().ToString("D"); } internal class TestWorkspace : Workspace { readonly static HostServices services = Microsoft.CodeAnalysis.Host.Mef.MefHostServices.DefaultHost;/* MefHostServices.Create(new [] { typeof(MefHostServices).Assembly, typeof(Microsoft.CodeAnalysis.CSharp.Formatting.CSharpFormattingOptions).Assembly });*/ public TestWorkspace(string workspaceKind = "Test") : base(services, workspaceKind) { /* foreach (var a in MefHostServices.DefaultAssemblies) { Console.WriteLine(a.FullName); }*/ } public void ChangeDocument(DocumentId id, SourceText text) { ApplyDocumentTextChanged(id, text); } protected override void ApplyDocumentTextChanged(DocumentId id, SourceText text) { base.ApplyDocumentTextChanged(id, text); var document = CurrentSolution.GetDocument(id); if (document != null) OnDocumentTextChanged(id, text, PreservationMode.PreserveValue); } public override bool CanApplyChange(ApplyChangesKind feature) { return true; } public void Open(ProjectInfo projectInfo) { var sInfo = SolutionInfo.Create( SolutionId.CreateNewId(), VersionStamp.Create(), null, new[] { projectInfo } ); OnSolutionAdded(sInfo); } } protected static void RunFix(Workspace workspace, ProjectId projectId, DocumentId documentId, Diagnostic diagnostic, int index = 0) { CodeFixProvider provider; if (providers.TryGetValue(diagnostic.Id, out provider)) { Assert.True(provider != null, "null provider for : " + 
diagnostic.Id); var document = workspace.CurrentSolution.GetProject(projectId).GetDocument(documentId); var actions = new List<CodeAction>(); var context = new CodeFixContext(document, diagnostic, (fix, diags) => actions.Add(fix), default(CancellationToken)); provider.RegisterCodeFixesAsync(context).GetAwaiter().GetResult(); if (!actions.Any()) { Assert.True(false, "Provider has no fix for " + diagnostic.Id + " at " + diagnostic.Location.SourceSpan); return; } foreach (var op in actions[index].GetOperationsAsync(default(CancellationToken)).GetAwaiter().GetResult()) { op.Apply(workspace, default(CancellationToken)); } } else { Assert.True(false, "No code fix provider found for :" + diagnostic.Id); } } protected static void Test<T>(string input, int expectedDiagnostics = 1, string output = null, int issueToFix = -1, int actionToRun = 0) where T : DiagnosticAnalyzer, new() { Assert.True(false, "Use Analyze"); } protected static void Test<T>(string input, string output, int fixIndex = 0) where T : DiagnosticAnalyzer, new() { Assert.True(false, "Use Analyze"); } protected static void TestIssue<T>(string input, int issueCount = 1) where T : DiagnosticAnalyzer, new() { Assert.True(false, "Use Analyze"); } protected static void TestWrongContextWithSubIssue<T>(string input, string id) where T : DiagnosticAnalyzer, new() { Assert.True(false, "Use AnalyzeWithRule"); } protected static void TestWithSubIssue<T>(string input, string output, string subIssue, int fixIndex = 0) where T : DiagnosticAnalyzer, new() { Assert.True(false, "Use AnalyzeWithRule"); } class TestDiagnosticAnalyzer<T> : DiagnosticAnalyzer { readonly DiagnosticAnalyzer t; public TestDiagnosticAnalyzer(DiagnosticAnalyzer t) { this.t = t; } #region IDiagnosticAnalyzer implementation public override void Initialize(AnalysisContext context) { t.Initialize(context); } public override System.Collections.Immutable.ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return t.SupportedDiagnostics; } } #endregion } protected static TextSpan GetWholeSpan(Diagnostic d) { int start = d.Location.SourceSpan.Start; int end = d.Location.SourceSpan.End; foreach (var a in d.AdditionalLocations) { start = Math.Min(start, a.SourceSpan.Start); end = Math.Max(start, a.SourceSpan.End); } return TextSpan.FromBounds(start, end); } protected static void Analyze<T>(Func<string, SyntaxTree> parseTextFunc, Func<SyntaxTree[], Compilation> createCompilationFunc, string language, string input, string output = null, int issueToFix = -1, int actionToRun = 0, Action<int, Diagnostic> diagnosticCheck = null) where T : DiagnosticAnalyzer, new() { var text = new StringBuilder(); var expectedDiagnosics = new List<TextSpan>(); int start = -1; for (int i = 0; i < input.Length; i++) { char ch = input[i]; if (ch == '$' && ((i > 0) && (input[i - 1] == '$'))) { // Ignore 2nd "$" in "$$" } else if (ch == '$' && (i + 1 >= input.Length || input[i + 1] != '$')) { if (start < 0) { start = text.Length; continue; } expectedDiagnosics.Add(TextSpan.FromBounds(start, text.Length)); start = -1; } else { text.Append(ch); } } var syntaxTree = parseTextFunc(text.ToString()); Compilation compilation = createCompilationFunc(new[] { syntaxTree }); var diagnostics = new List<Diagnostic>(); var compilationWithAnalyzers = compilation.WithAnalyzers(System.Collections.Immutable.ImmutableArray<DiagnosticAnalyzer>.Empty.Add(new T())); var result = compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync().GetAwaiter().GetResult(); diagnostics.AddRange(result); diagnostics.Sort((d1, d2) => 
d1.Location.SourceSpan.Start.CompareTo(d2.Location.SourceSpan.Start)); expectedDiagnosics.Sort((d1, d2) => d1.Start.CompareTo(d2.Start)); if (expectedDiagnosics.Count != diagnostics.Count) { foreach (var diag in diagnostics) { Console.WriteLine(diag.Id + "/" + diag.GetMessage() + "/" + diag.Location.SourceSpan); } Assert.True(false, "Diagnostic count mismatch expected: " + expectedDiagnosics.Count + " was " + diagnostics.Count); } for (int i = 0; i < expectedDiagnosics.Count; i++) { var d = diagnostics[i]; var wholeSpan = GetWholeSpan(d); if (wholeSpan != expectedDiagnosics[i]) { Assert.True(false, "Diagnostic " + i + " span mismatch expected: " + expectedDiagnosics[i] + " but was " + wholeSpan); } if (diagnosticCheck != null) diagnosticCheck(i, d); } if (output == null) return; var workspace = new TestWorkspace(); var projectId = ProjectId.CreateNewId(); var documentId = DocumentId.CreateNewId(projectId); workspace.Open(ProjectInfo.Create( projectId, VersionStamp.Create(), "a", "a.exe", language, null, null, null, null, new[] { DocumentInfo.Create( documentId, "a.cs", null, SourceCodeKind.Regular, TextLoader.From(TextAndVersion.Create(SourceText.From(text.ToString()), VersionStamp.Create()))) } )); if (issueToFix < 0) { diagnostics.Reverse(); foreach (var v in diagnostics) { RunFix(workspace, projectId, documentId, v); } } else { RunFix(workspace, projectId, documentId, diagnostics.ElementAt(issueToFix), actionToRun); } var txt = workspace.CurrentSolution.GetProject(projectId).GetDocument(documentId).GetTextAsync().GetAwaiter().GetResult().ToString(); output = Utils.HomogenizeEol(output); txt = Utils.HomogenizeEol(txt); if (output != txt) { StringBuilder sb = new StringBuilder(); sb.AppendLine("expected:"); sb.AppendLine(output); sb.AppendLine("got:"); sb.AppendLine(txt); sb.AppendLine("-----Mismatch:"); for (int i = 0; i < txt.Length; i++) { if (i >= output.Length) { sb.Append("#"); continue; } if (txt[i] != output[i]) { sb.Append("#"); continue; } sb.Append(txt[i]); } Assert.True(false, sb.ToString()); } } protected static void AnalyzeWithRule<T>(Func<string, SyntaxTree> parseTextFunc, Func<SyntaxTree[], Compilation> createCompilationFunc, string language, string input, string ruleId, string output = null, int issueToFix = -1, int actionToRun = 0, Action<int, Diagnostic> diagnosticCheck = null) where T : DiagnosticAnalyzer, new() { var text = new StringBuilder(); var expectedDiagnosics = new List<TextSpan>(); int start = -1; for (int i = 0; i < input.Length; i++) { char ch = input[i]; if (ch == '$') { if (start < 0) { start = text.Length; continue; } expectedDiagnosics.Add(TextSpan.FromBounds(start, text.Length)); start = -1; } else { text.Append(ch); } } var syntaxTree = parseTextFunc(text.ToString()); Compilation compilation = createCompilationFunc(new[] { syntaxTree }); var diagnostics = new List<Diagnostic>(); var compilationWithAnalyzers = compilation.WithAnalyzers(System.Collections.Immutable.ImmutableArray<DiagnosticAnalyzer>.Empty.Add(new T())); diagnostics.AddRange(compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync().GetAwaiter().GetResult()); if (expectedDiagnosics.Count != diagnostics.Count) { Console.WriteLine("Diagnostics: " + diagnostics.Count); foreach (var diag in diagnostics) { Console.WriteLine(diag.Id + "/" + diag.GetMessage()); } Assert.True(false, "Diagnostic count mismatch expected: " + expectedDiagnosics.Count + " but was:" + diagnostics.Count); } for (int i = 0; i < expectedDiagnosics.Count; i++) { var d = diagnostics[i]; var wholeSpan = GetWholeSpan(d); if 
(wholeSpan != expectedDiagnosics[i]) { Assert.True(false, "Diagnostic " + i + " span mismatch expected: " + expectedDiagnosics[i] + " but was " + wholeSpan); } if (diagnosticCheck != null) diagnosticCheck(i, d); } if (output == null) return; var workspace = new TestWorkspace(); var projectId = ProjectId.CreateNewId(); var documentId = DocumentId.CreateNewId(projectId); workspace.Open(ProjectInfo.Create( projectId, VersionStamp.Create(), "", "", language, null, null, null, null, new[] { DocumentInfo.Create( documentId, "a.cs", null, SourceCodeKind.Regular, TextLoader.From(TextAndVersion.Create(SourceText.From(text.ToString()), VersionStamp.Create()))) } )); if (issueToFix < 0) { diagnostics.Reverse(); foreach (var v in diagnostics) { RunFix(workspace, projectId, documentId, v); } } else { RunFix(workspace, projectId, documentId, diagnostics.ElementAt(issueToFix), actionToRun); } var txt = workspace.CurrentSolution.GetProject(projectId).GetDocument(documentId).GetTextAsync().GetAwaiter().GetResult().ToString(); txt = Utils.HomogenizeEol(txt); output = Utils.HomogenizeEol(output); if (output != txt) { StringBuilder sb = new StringBuilder(); sb.AppendLine("expected:"); sb.AppendLine(output); sb.AppendLine("got:"); sb.AppendLine(txt); Assert.True(false, sb.ToString()); } } } }
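// --- Usage sketch (added illustration, not part of the original source) ---
// Illustrates the '$...$' marker convention consumed by Analyze<T> above: text between single
// '$' characters marks the span where a diagnostic is expected, and "$$" escapes a literal '$'.
// "MyAnalyzer" is a hypothetical DiagnosticAnalyzer; in the real test suite the parse/compile
// delegates are normally supplied by a language-specific base class rather than written inline.
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Xunit;

namespace RefactoringEssentials.Tests
{
    public class ExampleAnalyzerTests : DiagnosticTestBase
    {
        [Fact]
        public void ReportsAndFixesIssue()
        {
            const string input = @"class C { void M() { int $unused$ = 0; } }";
            const string output = @"class C { void M() { } }";

            Analyze<MyAnalyzer>( // hypothetical analyzer under test
                code => CSharpSyntaxTree.ParseText(code),
                trees => CSharpCompilation.Create("TestProject", trees, DefaultMetadataReferences,
                    new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)),
                LanguageNames.CSharp,
                input,
                output); // output is compared after the registered code fix has been applied
        }
    }
}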