// Copyright (c) 2007-2014 SIL International // Licensed under the MIT license: opensource.org/licenses/MIT using System; using System.Collections.Generic; using System.ComponentModel; using System.IO; using System.Linq; using System.Text; using System.Text.RegularExpressions; using System.Windows.Forms; using SIL.Extensions; using SIL.Progress; using SIL.Reporting; using SIL.Windows.Forms.Progress; using SolidGui.Engine; using SolidGui.Filter; using SolidGui.Processes; namespace SolidGui.Model { public struct DictionaryOpenArguments { public SolidSettings SolidSettings; public RecordFilterSet FilterSet; } public class SfmDictionary : RecordManager { public class DataShape { public string Shape = ""; public IEnumerable<string> ShapeMarkers() { var ret = Shape.Split(' '); return ret.AsEnumerable(); } public string FocusedMarker = ""; public int Occurs = 0; public override string ToString() { return Shape; // this currently affects a ListView column -JMC } } private List<Record> _recordList; private string _filePath; private Dictionary<string, int> _markerFrequencies; private Dictionary<string, int> _markerErrors; private int _currentIndex; public SfmDictionary() { _currentIndex = 0; _recordList = new List<Record>(); _filePath = Path.Combine(Path.GetTempPath(), Path.GetTempFileName()); // File.GetLastWriteTime(_filePath); // Not sure where this was heading. Disabled for now. -JMC Reset(); } /// <summary> /// Call this during construction [JMC: and maybe after cancelling a File Open. Is that really needed? If so, do a Recheck too.] /// </summary> public void Reset() { _markerFrequencies = new Dictionary<string, int>(); _markerErrors = new Dictionary<string, int>(); // _currentIndex = 0; //add? } public List<Record> Records { get { return _recordList; } } public string ShortLabel() { if (_filePath.Length > 12) { return System.IO.Path.GetFileName(_filePath); } return _filePath; } public override string ToString() { return string.Format("{{{0} {1}}}", ShortLabel(), GetHashCode()); } public override int Count { get { return _recordList.Count; } } public override Record Current { get{ if(_recordList.Count > 0) return _recordList[_currentIndex]; return null; } } public override int CurrentIndex { get { return _currentIndex; } set { MoveTo(value); } } public override bool HasPrevious() { return _currentIndex > 0; } public override bool HasNext() { return _currentIndex < _recordList.Count - 1; } public override bool MoveToNext() { bool retval = HasNext(); if (retval) { _currentIndex++; } return retval; } public override bool MoveToPrevious() { bool retval = HasPrevious(); if (retval) { _currentIndex--; } return retval; } public override bool MoveTo(int index) { bool retval = false; if (index >= 0 && index < Count) { _currentIndex = index; retval = true; } return retval; } public string GetDirectoryPath() { return Path.GetDirectoryName(_filePath); //was return _filePath.Substring(0, _filePath.LastIndexOf(@"\")); -JMC } public void Clear() { _recordList.Clear(); _markerFrequencies.Clear(); _markerErrors.Clear(); } public void AddRecord(SfmLexEntry entry, SolidReport report) { // _recordList.Add(new Record(entry, report)); var record = new Record(entry, report); UpdateMarkerStatistics(record); _recordList.Add(record); } public void AddRecord(Record record) { _recordList.Add(record); } /* not used ??? 
public void AddRecord(List<string> fieldValues) { _recordList.Add(new Record(fieldValues)); } */ private void UpdateMarkerStatistics(Record record) { foreach (SfmFieldModel field in record.Fields) { if (!_markerFrequencies.ContainsKey(field.Marker)) { _markerFrequencies.Add(field.Marker, 0); _markerErrors.Add(field.Marker, 0); //can give a "key already exists" error if the code isn't perfect -JMC } _markerFrequencies[field.Marker] += 1; if (field.HasReportEntry) { _markerErrors[field.Marker] += 1; } } } private void OnDoOpenWork(Object sender, DoWorkEventArgs args) { var progressState = (ProgressState)args.Argument; var openArguments = (DictionaryOpenArguments)progressState.Arguments; openArguments.FilterSet.BeginBuild(this); // var sfmDataSet = new SfmDictionary();//SfmDataSet(); progressState.TotalNumberOfSteps = 0; // = sfmDataSet.Count; progressState.NumberOfStepsCompleted = 1; try { ReadDictionary(progressState, openArguments); } catch (FileNotFoundException e) { ErrorReport.NotifyUserOfProblem( "The specified file was not found. The error was\r\n" + e.Message); return; } catch (DataMisalignedException e) { ErrorReport.NotifyUserOfProblem( "Unable to finish opening the file. The error was\r\n" + e.Message); return; } openArguments.FilterSet.EndBuild(); } private void ReadDictionary(ProgressState progressState, DictionaryOpenArguments openArguments) { //TODO! Merge some of this code with ProcessEncoding.Process ("hacked fonts") . See esp FilterSet.AddRecord and CreateSolidErrorRecordFilter -JMC SfmLexEntry.ResetCounter(); var processes = new List<IProcess>(); processes.Add(new ProcessEncoding(openArguments.SolidSettings)); processes.Add(new ProcessStructure(openArguments.SolidSettings)); using (var reader = SfmRecordReader.CreateFromFilePath(_filePath)) { progressState.TotalNumberOfSteps = reader.SizeEstimate; // added -JMC 2013-09 while (reader.ReadRecord()) { // TODO: Fix the progress to use file size and progress through the file from SfmRecordReader CP 2010-08 // Partly done. But maybe should do this: divide elapsed time by what "should" have elapsed, then make a new (separate) estimate using that multiplier. -JMC 2013-09 progressState.NumberOfStepsCompleted += 1; SfmLexEntry lexEntry = SfmLexEntry.CreateFromReaderFields(reader.Fields); var recordReport = new SolidReport(); foreach (IProcess process in processes) { lexEntry = process.Process(lexEntry, recordReport); } AddRecord(lexEntry, recordReport); if (openArguments.FilterSet != null) { openArguments.FilterSet.AddRecord(Count - 1, recordReport); } } SfmHeader = reader.HeaderLinux; } } public string SfmHeader { get; set; } public bool Open(string path, SolidSettings solidSettings, RecordFilterSet filterSet) { SIL.Reporting.Logger.WriteEvent("Opening {0}",path); _filePath = path; // File.GetLastWriteTime(_filePath); // Not sure where this was heading. Disabled for now. -JMC Clear(); /* //TODO: #1291 Show zero-count markers in the UI list too... -JMC foreach (string s in solidSettings.Markers) { try { _markerFrequencies.Add(s, 0); } catch (ArgumentException e) { SIL.Reporting.ErrorReport.ReportFatalException(new ArgumentException("The .solid configuration file appears to have multiple settings for this marker: \\" + s + " \r\n" + e.Message, e)); } } */ using (var dlg = new ProgressDialog()) // JMC:! Move this UI stuff elsewhere? E.g. unit tests that call this are popping up progress dialogs. 
{ dlg.Overview = "Loading and checking data..."; var worker = new BackgroundWorker(); worker.DoWork += OnDoOpenWork; dlg.BackgroundWorker = worker; dlg.CanCancel = true; var openArguments = new DictionaryOpenArguments(); openArguments.FilterSet = filterSet; openArguments.SolidSettings = solidSettings; dlg.ProgressState.Arguments = openArguments; dlg.ShowDialog(); if (dlg.ProgressStateResult != null && dlg.ProgressStateResult.ExceptionThatWasEncountered != null) { SIL.Reporting.ErrorReport.ReportNonFatalException(dlg.ProgressStateResult.ExceptionThatWasEncountered); //I suppose this is non-fatal because we'll just fall back to whatever was already open, same as with Cancel. -JMC return false; } if (dlg.ProgressState.Cancel == true) return false; } if (_currentIndex > _recordList.Count - 1) { _currentIndex = 0; } SIL.Reporting.Logger.WriteEvent("Done Opening."); return true; } public bool Save() { SaveAs(_filePath, null); // JMC: resurrect or delete the following old code? (First, search for other calls to GetLastWriteTime //if (_lastWrittenTo == File.GetLastWriteTime(_filePath) || // !File.Exists(_filePath)) //{ // if (_recordList != null) // { // StringBuilder builder = new System.Text.StringBuilder(); // for (int i = 0; i < _recordList.Count; i++) // { // builder.Append(_recordList[i].ToStringWithoutInferred()); // } // File.WriteAllText(_filePath, builder.ToString()); // _lastWrittenTo = File.GetLastWriteTime(_filePath); // return true; // } //} //Reporting.ErrorReporter.ReportNonFatalMessage("The file has been altered outside of Solid"); return false; } public bool SaveAs(string path, SolidSettings ss) { var rf = new RecordFormatter(); rf.SetDefaultsDisk(); return SaveAs(path, ss, rf); } // This may or may not be a real save that the UI would reflect to the user and file system. // It does cause _filePath to be set, but the calling code might also keep track of a "real" dictionary path. -JMC public bool SaveAs(string path, SolidSettings ss, RecordFormatter rf) { Logger.WriteEvent("Saving {0}", path); _filePath = path; // side effect try { using (var writer = new StreamWriter(new FileStream(_filePath, FileMode.Create, FileAccess.Write), SolidSettings.LegacyEncoding)) { writer.Write(SfmRecordReader.HeaderToWrite(SfmHeader, rf.NewLine)); foreach (Record record in _recordList) { writer.Write(rf.FormatPlain(record, ss)); /* foreach (var field in record.Fields) { if (!field.Inferred) { writer.Write("\\"); writer.Write(field.Marker); // .TrimStart('_')); //I think this was being trimmed off because the old parser detected the header based on leading underscores. 
-JMC 2013-10 if (field.HasValue) { writer.Write(" "); writer.Write(field.Value); } writer.Write(field.Trailing); } }*/ } writer.Close(); } } catch (Exception exception) { MessageBox.Show(null, exception.Message + "\r\n\r\nYou might try saving to a different location.", "Error on saving data file."); return false; } SIL.Reporting.Logger.WriteEvent("Done saving data file."); return true; } public IEnumerable<string> AllMarkers { get { return _markerFrequencies.Keys; } } public Dictionary<string,int> MarkerFrequencies { get { return _markerFrequencies; } } public Dictionary<string, int> MarkerErrors { get { return _markerErrors; } } public List<Record> AllRecords { get { return _recordList; } set { _recordList = value; } } public string FilePath { get { return _filePath; } set { _filePath = value; } } public override Record GetRecord(int index) { return _recordList[index]; } internal bool DeleteRecord(Record rec) { //int i = FindRecord(id); return _recordList.Remove(rec); } public IEnumerable<string> GetAllDataValues(string marker, int max, SolidSettings settings) { var dict = new SortedDictionary<string, int>(); foreach (Record rec in _recordList) { foreach (SfmFieldModel f in rec.Fields) { if (marker == f.Marker) { string s = f.ValueForceUtf8(settings); int tally = dict.GetOrDefault(s, 0); dict[s] = tally+1; } } } var ss = new List<string>(); foreach (var k in dict.Keys) { ss.Add(string.Format("[{0}] x{1}", k, dict[k])); } return ss; } public IEnumerable<DataShape> GetAllDataShapes(int linesAbove, int linesBelow, ISet<string> limitTo) { if (limitTo == null) limitTo = new HashSet<string>(); bool all = (limitTo.Count < 1); var re = new Regex(@"\b(\w+) (\1\b[ ]?)+", RegexOptions.CultureInvariant|RegexOptions.Compiled); string rw = "$1+ "; var shapes = new Dictionary<string, DataShape>(); // SortedDictionary<string, int>(); //var slidingWindow = new List<Record>(80); //var contextBefore = new List<Record>(); //var contextAfter = new List<Record>(); foreach (Record rec in _recordList) { int i = -1; foreach (var f in rec.Fields) { i++; if (!all && !limitTo.Contains(f.Marker)) continue; string sh = MarkersBefore(rec.Fields, i, linesAbove) + f.Marker + MarkersAfter(rec.Fields, i, linesBelow); sh = re.Replace(sh, rw); // collapse multiple-together, adding + sh = sh.Trim(); string key = f.Marker + " : " + sh; if (shapes.ContainsKey(key)) { shapes[key].Occurs++; } else { var shape = new DataShape(); shape.Shape = sh; shape.Occurs = 1; shape.FocusedMarker = f.Marker; shapes[key] = shape; } } } return shapes.Values; } public static readonly string StartOfRecord = @"^"; private static string MarkersBefore(List<SfmFieldModel> list, int i, int radius) { if (radius == 0) return ""; if (i <= 0) return StartOfRecord + " "; int stopAt = Math.Max(i-radius, 0); var sb = new StringBuilder(); for (int j = i-1; j >= stopAt; j--) { sb.Insert(0, list[j].Marker + " "); //prepend } if (stopAt > i-radius) { sb.Insert(0, StartOfRecord + " "); } return sb.ToString(); } public static readonly string EndOfRecord = @"$"; private static string MarkersAfter(List<SfmFieldModel> list, int i, int radius) { if (radius == 0) return ""; int max = list.Count - 1; if (i >= max) { return " " + EndOfRecord; } int stopAt = Math.Min(i + radius, max); var sb = new StringBuilder(); for (int j = i+1; j <= stopAt; j++) { sb.Append(" " + list[j].Marker); //append } if (stopAt < i + radius) { sb.Append(" " + EndOfRecord); } return sb.ToString(); } } }
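// ---------------------------------------------------------------------------
// Editor's note (addition): a minimal usage sketch for the SfmDictionary class
// above. Open() parses the SFM file through ProcessEncoding/ProcessStructure,
// filling Records and the marker statistics; SaveAs() writes the records back
// out through a RecordFormatter. How SolidSettings and RecordFilterSet are
// constructed is not shown in this file, so they are taken as parameters here.
// ---------------------------------------------------------------------------
using System;
using SolidGui.Engine;
using SolidGui.Filter;
using SolidGui.Model;

internal static class SfmDictionaryUsageSketch
{
    internal static void Run(SolidSettings settings, RecordFilterSet filters)
    {
        var dictionary = new SfmDictionary();

        // Open() shows a ProgressDialog and returns false on cancel or error.
        if (!dictionary.Open(@"C:\data\lexicon.db", settings, filters))
            return;

        // Marker statistics are accumulated record by record during Open().
        foreach (var pair in dictionary.MarkerFrequencies)
            Console.WriteLine(@"\{0} occurs {1} time(s)", pair.Key, pair.Value);

        // SaveAs(path, settings) rewrites the file with the default on-disk formatter.
        dictionary.SaveAs(@"C:\data\lexicon-checked.db", settings);
    }
}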
// Copyright 2016 Michael Mairegger // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. namespace Mairegger.Printing.Tests.Definition { using System; using System.Threading; using System.Windows; using System.Windows.Controls; using Mairegger.Printing.Definition; using Mairegger.Printing.PrintProcessor; using Moq; using NUnit.Framework; [TestFixture] public class PrintingDimensionsTests { [Test] [Apartment(ApartmentState.STA)] public void GetHeightFor() { Mock<IPrintProcessor> mock = new Mock<IPrintProcessor>(); mock.Setup(i => i.GetHeader()).Returns(new Grid { Height = 1 }); mock.Setup(i => i.GetFooter()).Returns(new Grid { Height = 2 }); mock.Setup(i => i.GetHeaderDescription()).Returns(new Grid { Height = 3 }); PrintDimension pd = new PrintDimension { PrintProcessor = mock.Object, InternalPrintDefinition = new PrintDefinition() }; pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.Header | PrintAppendixes.Footer | PrintAppendixes.Summary)); Assert.That(pd.GetHeightFor(PrintAppendixes.Header, 1, false), Is.EqualTo(1)); Assert.That(pd.GetHeightFor(PrintAppendixes.Footer, 1, false), Is.EqualTo(2)); Assert.That(pd.GetHeightFor(PrintAppendixes.HeaderDescription, 1, false), Is.EqualTo(0)); Assert.That(() => pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Throws.ArgumentNullException); pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.HeaderDescription)); Assert.That(pd.GetHeightFor(PrintAppendixes.HeaderDescription, 1, false), Is.EqualTo(3)); } [Test] public void GetHeightForBody_IsTotalPageHeight_IfNoAdditionalPrintParts() { PrintDimension pd = new PrintDimension { InternalPrintDefinition = new PrintDefinition() }; Assert.That(pd.GetHeightForBodyGrid(1, false), Is.EqualTo(0)); pd.PageSize = new Size(100, 300); Assert.That(pd.GetHeightForBodyGrid(1, false), Is.EqualTo(300)); pd.Margin = new Thickness(10); Assert.That(pd.GetHeightForBodyGrid(1, false), Is.EqualTo(280)); } [Test] [Apartment(ApartmentState.STA)] public void GetHeightForBody_IsTotalPageHeightMinusPrintParts() { Mock<IPrintProcessor> mock = new Mock<IPrintProcessor>(); mock.Setup(i => i.GetHeader()).Returns(new Grid { Height = 10 }); mock.Setup(i => i.GetFooter()).Returns(new Grid { Height = 20 }); mock.Setup(i => i.GetHeaderDescription()).Returns(new Grid { Height = 30 }); mock.Setup(i => i.GetSummary()).Returns(new Grid { Height = 40 }); mock.Setup(i => i.GetPageNumbers(It.IsAny<int>(), It.IsAny<int>())).Returns(new Grid { Height = 25 }); PrintDimension pd = new PrintDimension { PrintProcessor = mock.Object, InternalPrintDefinition = new PrintDefinition() }; pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.All)); pd.PageSize = new Size(100, 300); // 300 - Header - Footer - HeaderDescritpion - Summary - PageNumbers Assert.That(pd.GetHeightForBodyGrid(1, false), Is.EqualTo(175)); } [Test] [Apartment(ApartmentState.STA)] public void GetRangeFor() { Mock<IPrintProcessor> mock = new Mock<IPrintProcessor>(); mock.Setup(i 
=> i.GetHeader()).Returns(new Grid { Height = 10 }); mock.Setup(i => i.GetFooter()).Returns(new Grid { Height = 20 }); mock.Setup(i => i.GetHeaderDescription()).Returns(new Grid { Height = 30 }); mock.Setup(i => i.GetSummary()).Returns(new Grid { Height = 40 }); mock.Setup(i => i.GetPageNumbers(It.IsAny<int>(), It.IsAny<int>())).Returns(new Grid { Height = 25 }); PrintDimension pd = new PrintDimension { PrintProcessor = mock.Object, Margin = new Thickness(10), PageSize = new Size(100, 1000), InternalPrintDefinition = new PrintDefinition() }; pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.All)); Assert.That(pd.GetRangeFor(PrintAppendixes.Header, 1, false), Is.EqualTo(new Printing.Definition.PageRange(10, 20))); Assert.That(pd.GetRangeFor(PrintAppendixes.HeaderDescription, 1, false), Is.EqualTo(new Printing.Definition.PageRange(20, 50))); Assert.That(pd.GetRangeForBodyGrid(1, false), Is.EqualTo(new Printing.Definition.PageRange(50, 905))); Assert.That(pd.GetRangeFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(new Printing.Definition.PageRange(905, 945))); Assert.That(pd.GetRangeFor(PrintAppendixes.Footer, 1, false), Is.EqualTo(new Printing.Definition.PageRange(945, 965))); Assert.That(pd.GetRangeFor(PrintAppendixes.PageNumbers, 1, false), Is.EqualTo(new Printing.Definition.PageRange(965, 990))); } [Test] public void GetRangeFor_InvalidPrintAppendix() { PrintDimension pd = new PrintDimension(); Assert.That(() => pd.GetRangeFor(PrintAppendixes.All, 1, false), Throws.ArgumentException); } [Test] public void PageSize_Test() { Thickness margin = new Thickness(10, 20, 30, 40); var printingDimensions = new PrintDimension(margin); Size pageSize = new Size(500, 1000); SetPageSizeToPrintDimension(printingDimensions, pageSize); Assert.AreEqual(pageSize, printingDimensions.PageSize); } [Test] public void PrintablePageSize_Test( [Values(1, 2)] double left, [Values(4, 5)] double top, [Values(7, 8)] double right, [Values(10, 11)] double bottom) { Thickness margin = new Thickness(left, top, right, bottom); var printingDimensions = new PrintDimension(margin); Size pageSize = new Size(500, 1000); SetPageSizeToPrintDimension(printingDimensions, pageSize); Size expected = new Size(pageSize.Width - margin.Left - margin.Right, pageSize.Height - margin.Top - margin.Bottom); Assert.AreEqual(expected, printingDimensions.PrintablePageSize); } [Test] public void PrintDimensionTest() { var thickness = new Thickness(10, 10, 10, 10); TestPrintDimension tpd = new TestPrintDimension(thickness); Size pageSize = new Size(500, 1000); SetPageSizeToPrintDimension(tpd, pageSize); double totalWidth = pageSize.Width - thickness.Left - thickness.Right; Assert.That(tpd.PrintablePageSize.Width, Is.EqualTo(totalWidth)); double pieces = 5; // sum of TestPrintDimensions double widthPerPiece = (totalWidth - 100) / pieces; Assert.That(tpd.Column1, Is.EqualTo(1 * widthPerPiece), $"{nameof(tpd.Column1)}"); Assert.That(tpd.Column2, Is.EqualTo(3 * widthPerPiece), $"{nameof(tpd.Column2)}"); Assert.That(tpd.Column3, Is.EqualTo(100), $"{nameof(tpd.Column3)}"); Assert.That(tpd.Column4, Is.EqualTo(1 * widthPerPiece), $"{nameof(tpd.Column4)}"); } [Test] public void SetColumnDimensionToPropertyWithWrongType_ThrowsException() { var invalidPrintDimension = new InvalidPrintDimension(); invalidPrintDimension.Column1 = new InvalidPrintDimension().Column1; Assert.That(() => SetPageSizeToPrintDimension(invalidPrintDimension, new Size(500, 1000)), Throws.InstanceOf<InvalidOperationException>()); } [Test] public 
void SetColumnDimensionToReadOnlyProperty() { var cannotWritePrintDimension = new CannotWritePrintDimension(); Assert.That(cannotWritePrintDimension.Column1, Is.EqualTo(1)); SetPageSizeToPrintDimension(cannotWritePrintDimension, new Size(500, 1000)); Assert.That(cannotWritePrintDimension.Column1, Is.EqualTo(500)); } [Test] public void SetHeightValue() { PrintDimension pd = new PrintDimension { InternalPrintDefinition = new PrintDefinition() }; pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.All)); pd.SetHeightValue(PrintAppendixes.Summary, 5); Assert.That(pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(5)); pd.SetHeightValue(PrintAppendixes.Summary, 6); Assert.That(pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(6)); Assert.That(()=> pd.SetHeightValue(PrintAppendixes.Summary, -1), Throws.TypeOf<ArgumentOutOfRangeException>()); } [Test] [Apartment(ApartmentState.STA)] public void RecalculateHeightValueWhen([Random(10, 100, 1)] int initialHeight) { PrintDimension pd = new PrintDimension { InternalPrintDefinition = new PrintDefinition() }; pd.InternalPrintDefinition.SetPrintAttribute(new PrintOnAllPagesAttribute(PrintAppendixes.All)); Mock<IPrintProcessor> printProcessor = new Mock<IPrintProcessor>(); printProcessor.Setup(i => i.GetSummary()).Returns(() => new Grid() { Height = initialHeight }); pd.PrintProcessor = printProcessor.Object; Assert.That(pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(initialHeight)); pd.SetHeightValue(PrintAppendixes.Summary, 5); pd.RecalculateHeightValueWhen(() => false, PrintAppendixes.Summary); Assert.That(pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(5)); pd.RecalculateHeightValueWhen(() => true, PrintAppendixes.Summary); Assert.That(pd.GetHeightFor(PrintAppendixes.Summary, 1, false), Is.EqualTo(initialHeight)); Assert.That(() => pd.RecalculateHeightValueWhen(null, PrintAppendixes.Summary), Throws.ArgumentNullException); } private static void SetPageSizeToPrintDimension(PrintDimension printingDimension, Size pageSize) { printingDimension.PageSize = pageSize; printingDimension.PositionRelative(); } private class InvalidPrintDimension : PrintDimension { public InvalidPrintDimension() { UseRelativeColumnPosition = true; } [ColumnDimension("1*")] public int Column1 { get; set; } } private class CannotWritePrintDimension : PrintDimension { public CannotWritePrintDimension() { UseRelativeColumnPosition = true; } [ColumnDimension(".1*")] public double Column1 { get; } = 1; } private class TestPrintDimension : PrintDimension { public TestPrintDimension(Thickness margin) : base(margin) { UseRelativeColumnPosition = true; } [ColumnDimension("1*")] public double Column1 { get; private set; } [ColumnDimension("3*")] public double Column2 { get; private set; } [ColumnDimension("100px")] public double Column3 { get; private set; } [ColumnDimension("1*")] public double Column4 { get; private set; } } } }
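// ---------------------------------------------------------------------------
// Editor's note (addition): the arithmetic the height/range asserts above rely on.
// GetHeightForBodyGrid = page height - margins - every appendix printed on the page:
//     300 - (10 header + 20 footer + 30 header description + 40 summary + 25 page numbers) = 175
// GetRangeFor stacks the parts top to bottom inside the 10px margin of a 1000px page:
//     header 10..20, header description 20..50, body 50..905,
//     summary 905..945, footer 945..965, page numbers 965..990,
//     where the body height is 855 = (1000 - 2*10) - 10 - 30 - 40 - 20 - 25.
// ---------------------------------------------------------------------------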
using CrystalDecisions.CrystalReports.Engine; using CrystalDecisions.Windows.Forms; using DpSdkEngLib; using DPSDKOPSLib; using Microsoft.VisualBasic; using System; using System.Collections; using System.Collections.Generic; using System.Drawing; using System.Diagnostics; using System.Windows.Forms; using System.Linq; using System.Xml.Linq; namespace _4PosBackOffice.NET { [Microsoft.VisualBasic.CompilerServices.DesignerGenerated()] partial class frmStockSalesShrink { #region "Windows Form Designer generated code " [System.Diagnostics.DebuggerNonUserCode()] public frmStockSalesShrink() : base() { Load += frmStockSalesShrink_Load; KeyPress += frmStockSalesShrink_KeyPress; //This call is required by the Windows Form Designer. InitializeComponent(); } //Form overrides dispose to clean up the component list. [System.Diagnostics.DebuggerNonUserCode()] protected override void Dispose(bool Disposing) { if (Disposing) { if ((components != null)) { components.Dispose(); } } base.Dispose(Disposing); } //Required by the Windows Form Designer private System.ComponentModel.IContainer components; public System.Windows.Forms.ToolTip ToolTip1; private System.Windows.Forms.Button withEventsField_cmdLoad; public System.Windows.Forms.Button cmdLoad { get { return withEventsField_cmdLoad; } set { if (withEventsField_cmdLoad != null) { withEventsField_cmdLoad.Click -= cmdLoad_Click; } withEventsField_cmdLoad = value; if (withEventsField_cmdLoad != null) { withEventsField_cmdLoad.Click += cmdLoad_Click; } } } private System.Windows.Forms.Button withEventsField_cmdExit; public System.Windows.Forms.Button cmdExit { get { return withEventsField_cmdExit; } set { if (withEventsField_cmdExit != null) { withEventsField_cmdExit.Click -= cmdExit_Click; } withEventsField_cmdExit = value; if (withEventsField_cmdExit != null) { withEventsField_cmdExit.Click += cmdExit_Click; } } } private System.Windows.Forms.Button withEventsField_cmdGroup; public System.Windows.Forms.Button cmdGroup { get { return withEventsField_cmdGroup; } set { if (withEventsField_cmdGroup != null) { withEventsField_cmdGroup.Click -= cmdGroup_Click; } withEventsField_cmdGroup = value; if (withEventsField_cmdGroup != null) { withEventsField_cmdGroup.Click += cmdGroup_Click; } } } public System.Windows.Forms.Label _lbl_1; public System.Windows.Forms.Label lblGroup; public Microsoft.VisualBasic.PowerPacks.RectangleShape _Shape1_1; //Public WithEvents lbl As Microsoft.VisualBasic.Compatibility.VB6.LabelArray public RectangleShapeArray Shape1; public Microsoft.VisualBasic.PowerPacks.ShapeContainer ShapeContainer1; //NOTE: The following procedure is required by the Windows Form Designer //It can be modified using the Windows Form Designer. //Do not modify it using the code editor. 
[System.Diagnostics.DebuggerStepThrough()] private void InitializeComponent() { System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(frmStockSalesShrink)); this.components = new System.ComponentModel.Container(); this.ToolTip1 = new System.Windows.Forms.ToolTip(components); this.ShapeContainer1 = new Microsoft.VisualBasic.PowerPacks.ShapeContainer(); this.cmdLoad = new System.Windows.Forms.Button(); this.cmdExit = new System.Windows.Forms.Button(); this.cmdGroup = new System.Windows.Forms.Button(); this._lbl_1 = new System.Windows.Forms.Label(); this.lblGroup = new System.Windows.Forms.Label(); this._Shape1_1 = new Microsoft.VisualBasic.PowerPacks.RectangleShape(); //Me.lbl = New Microsoft.VisualBasic.Compatibility.VB6.LabelArray(components) this.Shape1 = new RectangleShapeArray(components); this.SuspendLayout(); this.ToolTip1.Active = true; //CType(Me.lbl, System.ComponentModel.ISupportInitialize).BeginInit() ((System.ComponentModel.ISupportInitialize)this.Shape1).BeginInit(); this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog; this.Text = "Stock Item Sales Shrink Analysis"; this.ClientSize = new System.Drawing.Size(264, 192); this.Location = new System.Drawing.Point(3, 22); this.ControlBox = false; this.KeyPreview = true; this.MaximizeBox = false; this.MinimizeBox = false; this.ShowInTaskbar = false; this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.BackColor = System.Drawing.SystemColors.Control; this.Enabled = true; this.Cursor = System.Windows.Forms.Cursors.Default; this.RightToLeft = System.Windows.Forms.RightToLeft.No; this.HelpButton = false; this.WindowState = System.Windows.Forms.FormWindowState.Normal; this.Name = "frmStockSalesShrink"; this.cmdLoad.TextAlign = System.Drawing.ContentAlignment.MiddleCenter; this.cmdLoad.Text = "&Load Report"; this.cmdLoad.Size = new System.Drawing.Size(79, 31); this.cmdLoad.Location = new System.Drawing.Point(177, 147); this.cmdLoad.TabIndex = 4; this.cmdLoad.BackColor = System.Drawing.SystemColors.Control; this.cmdLoad.CausesValidation = true; this.cmdLoad.Enabled = true; this.cmdLoad.ForeColor = System.Drawing.SystemColors.ControlText; this.cmdLoad.Cursor = System.Windows.Forms.Cursors.Default; this.cmdLoad.RightToLeft = System.Windows.Forms.RightToLeft.No; this.cmdLoad.TabStop = true; this.cmdLoad.Name = "cmdLoad"; this.cmdExit.TextAlign = System.Drawing.ContentAlignment.MiddleCenter; this.cmdExit.Text = "E&xit"; this.cmdExit.Size = new System.Drawing.Size(79, 31); this.cmdExit.Location = new System.Drawing.Point(9, 147); this.cmdExit.TabIndex = 3; this.cmdExit.BackColor = System.Drawing.SystemColors.Control; this.cmdExit.CausesValidation = true; this.cmdExit.Enabled = true; this.cmdExit.ForeColor = System.Drawing.SystemColors.ControlText; this.cmdExit.Cursor = System.Windows.Forms.Cursors.Default; this.cmdExit.RightToLeft = System.Windows.Forms.RightToLeft.No; this.cmdExit.TabStop = true; this.cmdExit.Name = "cmdExit"; this.cmdGroup.TextAlign = System.Drawing.ContentAlignment.MiddleCenter; this.cmdGroup.Text = "&Filter"; this.cmdGroup.Size = new System.Drawing.Size(97, 31); this.cmdGroup.Location = new System.Drawing.Point(153, 93); this.cmdGroup.TabIndex = 2; this.cmdGroup.TabStop = false; this.cmdGroup.BackColor = System.Drawing.SystemColors.Control; this.cmdGroup.CausesValidation = true; this.cmdGroup.Enabled = true; this.cmdGroup.ForeColor = System.Drawing.SystemColors.ControlText; 
this.cmdGroup.Cursor = System.Windows.Forms.Cursors.Default; this.cmdGroup.RightToLeft = System.Windows.Forms.RightToLeft.No; this.cmdGroup.Name = "cmdGroup"; this._lbl_1.Text = "Filter"; this._lbl_1.Size = new System.Drawing.Size(29, 13); this._lbl_1.Location = new System.Drawing.Point(9, 6); this._lbl_1.TabIndex = 0; this._lbl_1.TextAlign = System.Drawing.ContentAlignment.TopLeft; this._lbl_1.BackColor = System.Drawing.Color.Transparent; this._lbl_1.Enabled = true; this._lbl_1.ForeColor = System.Drawing.SystemColors.ControlText; this._lbl_1.Cursor = System.Windows.Forms.Cursors.Default; this._lbl_1.RightToLeft = System.Windows.Forms.RightToLeft.No; this._lbl_1.UseMnemonic = true; this._lbl_1.Visible = true; this._lbl_1.AutoSize = true; this._lbl_1.BorderStyle = System.Windows.Forms.BorderStyle.None; this._lbl_1.Name = "_lbl_1"; this.lblGroup.Text = "lblGroup"; this.lblGroup.Size = new System.Drawing.Size(232, 58); this.lblGroup.Location = new System.Drawing.Point(15, 27); this.lblGroup.TabIndex = 1; this.lblGroup.TextAlign = System.Drawing.ContentAlignment.TopLeft; this.lblGroup.BackColor = System.Drawing.SystemColors.Control; this.lblGroup.Enabled = true; this.lblGroup.ForeColor = System.Drawing.SystemColors.ControlText; this.lblGroup.Cursor = System.Windows.Forms.Cursors.Default; this.lblGroup.RightToLeft = System.Windows.Forms.RightToLeft.No; this.lblGroup.UseMnemonic = true; this.lblGroup.Visible = true; this.lblGroup.AutoSize = false; this.lblGroup.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D; this.lblGroup.Name = "lblGroup"; this._Shape1_1.BackColor = System.Drawing.Color.FromArgb(192, 192, 255); this._Shape1_1.BackStyle = Microsoft.VisualBasic.PowerPacks.BackStyle.Opaque; this._Shape1_1.Size = new System.Drawing.Size(247, 112); this._Shape1_1.Location = new System.Drawing.Point(9, 21); this._Shape1_1.BorderColor = System.Drawing.SystemColors.WindowText; this._Shape1_1.BorderStyle = System.Drawing.Drawing2D.DashStyle.Solid; this._Shape1_1.BorderWidth = 1; this._Shape1_1.FillColor = System.Drawing.Color.Black; this._Shape1_1.FillStyle = Microsoft.VisualBasic.PowerPacks.FillStyle.Transparent; this._Shape1_1.Visible = true; this._Shape1_1.Name = "_Shape1_1"; this.Controls.Add(cmdLoad); this.Controls.Add(cmdExit); this.Controls.Add(cmdGroup); this.Controls.Add(_lbl_1); this.Controls.Add(lblGroup); this.ShapeContainer1.Shapes.Add(_Shape1_1); this.Controls.Add(ShapeContainer1); //Me.lbl.SetIndex(_lbl_1, CType(1, Short)) this.Shape1.SetIndex(_Shape1_1, Convert.ToInt16(1)); ((System.ComponentModel.ISupportInitialize)this.Shape1).EndInit(); //CType(Me.lbl, System.ComponentModel.ISupportInitialize).EndInit() this.ResumeLayout(false); this.PerformLayout(); } #endregion } }
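// ---------------------------------------------------------------------------
// Editor's note (addition): the cmdLoad/cmdExit/cmdGroup properties in the
// designer file above follow the pattern that VB.NET-to-C# conversion emits for
// "WithEvents" fields: the setter detaches the handler from the old control and
// attaches it to the new one, so the event stays wired if the designer swaps
// the control instance. A minimal, generic version of that pattern (names here
// are illustrative only):
// ---------------------------------------------------------------------------
using System.Windows.Forms;

class WithEventsPatternSketch
{
    private Button _withEventsButton;

    public Button SomeButton
    {
        get { return _withEventsButton; }
        set
        {
            if (_withEventsButton != null)
                _withEventsButton.Click -= SomeButton_Click; // detach from old instance
            _withEventsButton = value;
            if (_withEventsButton != null)
                _withEventsButton.Click += SomeButton_Click; // attach to new instance
        }
    }

    private void SomeButton_Click(object sender, System.EventArgs e) { }
}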
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Net.Http; using System.Net.Http.Headers; using System.Web.Http; using System.Web.Http.Controllers; using System.Web.Http.Description; using HTTPOptions.Areas.HelpPage.ModelDescriptions; using HTTPOptions.Areas.HelpPage.Models; namespace HTTPOptions.Areas.HelpPage { public static class HelpPageConfigurationExtensions { private const string ApiModelPrefix = "MS_HelpPageApiModel_"; /// <summary> /// Sets the documentation provider for help page. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="documentationProvider">The documentation provider.</param> public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider) { config.Services.Replace(typeof(IDocumentationProvider), documentationProvider); } /// <summary> /// Sets the objects that will be used by the formatters to produce sample requests/responses. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleObjects">The sample objects.</param> public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects) { config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects; } /// <summary> /// Sets the sample request directly for the specified media type and action. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample request directly for the specified media type and action with parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample request directly for the specified media type of the action. 
/// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample response directly for the specified media type of the action with specific parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample directly for all actions with the specified media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample); } /// <summary> /// Sets the sample directly for all actions with the specified type and media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> /// <param name="type">The parameter type or return type of an action.</param> public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. 
/// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type); } /// <summary> /// Gets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <returns>The help page sample generator.</returns> public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config) { return (HelpPageSampleGenerator)config.Properties.GetOrAdd( typeof(HelpPageSampleGenerator), k => new HelpPageSampleGenerator()); } /// <summary> /// Sets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleGenerator">The help page sample generator.</param> public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator) { config.Properties.AddOrUpdate( typeof(HelpPageSampleGenerator), k => sampleGenerator, (k, o) => sampleGenerator); } /// <summary> /// Gets the model description generator. 
/// </summary> /// <param name="config">The configuration.</param> /// <returns>The <see cref="ModelDescriptionGenerator"/></returns> public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config) { return (ModelDescriptionGenerator)config.Properties.GetOrAdd( typeof(ModelDescriptionGenerator), k => InitializeModelDescriptionGenerator(config)); } /// <summary> /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param> /// <returns> /// An <see cref="HelpPageApiModel"/> /// </returns> public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId) { object model; string modelId = ApiModelPrefix + apiDescriptionId; if (!config.Properties.TryGetValue(modelId, out model)) { Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions; ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase)); if (apiDescription != null) { model = GenerateApiModel(apiDescription, config); config.Properties.TryAdd(modelId, model); } } return (HelpPageApiModel)model; } private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config) { HelpPageApiModel apiModel = new HelpPageApiModel() { ApiDescription = apiDescription, }; ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator(); HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); GenerateUriParameters(apiModel, modelGenerator); GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator); GenerateResourceDescription(apiModel, modelGenerator); GenerateSamples(apiModel, sampleGenerator); return apiModel; } private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromUri) { HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor; Type parameterType = null; ModelDescription typeDescription = null; ComplexTypeModelDescription complexTypeDescription = null; if (parameterDescriptor != null) { parameterType = parameterDescriptor.ParameterType; typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType); complexTypeDescription = typeDescription as ComplexTypeModelDescription; } if (complexTypeDescription != null) { foreach (ParameterDescription uriParameter in complexTypeDescription.Properties) { apiModel.UriParameters.Add(uriParameter); } } else if (parameterDescriptor != null) { ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription); if (!parameterDescriptor.IsOptional) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" }); } object defaultValue = parameterDescriptor.DefaultValue; if (defaultValue != null) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) }); } } else { Debug.Assert(parameterDescriptor == null); // If parameterDescriptor is 
null, this is an undeclared route parameter which only occurs // when source is FromUri. Ignored in request model and among resource parameters but listed // as a simple string here. ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string)); AddParameterDescription(apiModel, apiParameter, modelDescription); } } } } private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription) { ParameterDescription parameterDescription = new ParameterDescription { Name = apiParameter.Name, Documentation = apiParameter.Documentation, TypeDescription = typeDescription, }; apiModel.UriParameters.Add(parameterDescription); return parameterDescription; } private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromBody) { Type parameterType = apiParameter.ParameterDescriptor.ParameterType; apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); apiModel.RequestDocumentation = apiParameter.Documentation; } else if (apiParameter.ParameterDescriptor != null && apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)) { Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); if (parameterType != null) { apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); } } } } private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ResponseDescription response = apiModel.ApiDescription.ResponseDescription; Type responseType = response.ResponseType ?? response.DeclaredType; if (responseType != null && responseType != typeof(void)) { apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType); } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")] private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator) { try { foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription)) { apiModel.SampleRequests.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription)) { apiModel.SampleResponses.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } } catch (Exception e) { apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. 
Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message)); } } private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType) { parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault( p => p.Source == ApiParameterSource.FromBody || (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))); if (parameterDescription == null) { resourceType = null; return false; } resourceType = parameterDescription.ParameterDescriptor.ParameterType; if (resourceType == typeof(HttpRequestMessage)) { HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); } if (resourceType == null) { parameterDescription = null; return false; } return true; } private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config) { ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config); Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions; foreach (ApiDescription api in apis) { ApiParameterDescription parameterDescription; Type parameterType; if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType)) { modelGenerator.GetOrCreateModelDescription(parameterType); } } return modelGenerator; } private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample) { InvalidSample invalidSample = sample as InvalidSample; if (invalidSample != null) { apiModel.ErrorMessages.Add(invalidSample.ErrorMessage); } } } }
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Net.Http; using System.Net.Http.Headers; using System.Web.Http; using System.Web.Http.Controllers; using System.Web.Http.Description; using Docker.Sample.Areas.HelpPage.ModelDescriptions; using Docker.Sample.Areas.HelpPage.Models; namespace Docker.Sample.Areas.HelpPage { public static class HelpPageConfigurationExtensions { private const string ApiModelPrefix = "MS_HelpPageApiModel_"; /// <summary> /// Sets the documentation provider for help page. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="documentationProvider">The documentation provider.</param> public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider) { config.Services.Replace(typeof(IDocumentationProvider), documentationProvider); } /// <summary> /// Sets the objects that will be used by the formatters to produce sample requests/responses. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleObjects">The sample objects.</param> public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects) { config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects; } /// <summary> /// Sets the sample request directly for the specified media type and action. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample request directly for the specified media type and action with parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample request directly for the specified media type of the action. 
/// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample response directly for the specified media type of the action with specific parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample directly for all actions with the specified media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample); } /// <summary> /// Sets the sample directly for all actions with the specified type and media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> /// <param name="type">The parameter type or return type of an action.</param> public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. 
/// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type); } /// <summary> /// Gets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <returns>The help page sample generator.</returns> public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config) { return (HelpPageSampleGenerator)config.Properties.GetOrAdd( typeof(HelpPageSampleGenerator), k => new HelpPageSampleGenerator()); } /// <summary> /// Sets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleGenerator">The help page sample generator.</param> public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator) { config.Properties.AddOrUpdate( typeof(HelpPageSampleGenerator), k => sampleGenerator, (k, o) => sampleGenerator); } /// <summary> /// Gets the model description generator. 
/// </summary> /// <param name="config">The configuration.</param> /// <returns>The <see cref="ModelDescriptionGenerator"/></returns> public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config) { return (ModelDescriptionGenerator)config.Properties.GetOrAdd( typeof(ModelDescriptionGenerator), k => InitializeModelDescriptionGenerator(config)); } /// <summary> /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param> /// <returns> /// An <see cref="HelpPageApiModel"/> /// </returns> public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId) { object model; string modelId = ApiModelPrefix + apiDescriptionId; if (!config.Properties.TryGetValue(modelId, out model)) { Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions; ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase)); if (apiDescription != null) { model = GenerateApiModel(apiDescription, config); config.Properties.TryAdd(modelId, model); } } return (HelpPageApiModel)model; } private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config) { HelpPageApiModel apiModel = new HelpPageApiModel() { ApiDescription = apiDescription, }; ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator(); HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); GenerateUriParameters(apiModel, modelGenerator); GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator); GenerateResourceDescription(apiModel, modelGenerator); GenerateSamples(apiModel, sampleGenerator); return apiModel; } private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromUri) { HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor; Type parameterType = null; ModelDescription typeDescription = null; ComplexTypeModelDescription complexTypeDescription = null; if (parameterDescriptor != null) { parameterType = parameterDescriptor.ParameterType; typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType); complexTypeDescription = typeDescription as ComplexTypeModelDescription; } // Example: // [TypeConverter(typeof(PointConverter))] // public class Point // { // public Point(int x, int y) // { // X = x; // Y = y; // } // public int X { get; set; } // public int Y { get; set; } // } // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection. // // public class Point // { // public int X { get; set; } // public int Y { get; set; } // } // Regular complex class Point will have properties X and Y added to UriParameters collection. 
if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType)) { foreach (ParameterDescription uriParameter in complexTypeDescription.Properties) { apiModel.UriParameters.Add(uriParameter); } } else if (parameterDescriptor != null) { ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription); if (!parameterDescriptor.IsOptional) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" }); } object defaultValue = parameterDescriptor.DefaultValue; if (defaultValue != null) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) }); } } else { Debug.Assert(parameterDescriptor == null); // If parameterDescriptor is null, this is an undeclared route parameter which only occurs // when source is FromUri. Ignored in request model and among resource parameters but listed // as a simple string here. ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string)); AddParameterDescription(apiModel, apiParameter, modelDescription); } } } } private static bool IsBindableWithTypeConverter(Type parameterType) { if (parameterType == null) { return false; } return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string)); } private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription) { ParameterDescription parameterDescription = new ParameterDescription { Name = apiParameter.Name, Documentation = apiParameter.Documentation, TypeDescription = typeDescription, }; apiModel.UriParameters.Add(parameterDescription); return parameterDescription; } private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromBody) { Type parameterType = apiParameter.ParameterDescriptor.ParameterType; apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); apiModel.RequestDocumentation = apiParameter.Documentation; } else if (apiParameter.ParameterDescriptor != null && apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)) { Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); if (parameterType != null) { apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); } } } } private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ResponseDescription response = apiModel.ApiDescription.ResponseDescription; Type responseType = response.ResponseType ?? 
response.DeclaredType; if (responseType != null && responseType != typeof(void)) { apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType); } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")] private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator) { try { foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription)) { apiModel.SampleRequests.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription)) { apiModel.SampleResponses.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } } catch (Exception e) { apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message)); } } private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType) { parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault( p => p.Source == ApiParameterSource.FromBody || (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))); if (parameterDescription == null) { resourceType = null; return false; } resourceType = parameterDescription.ParameterDescriptor.ParameterType; if (resourceType == typeof(HttpRequestMessage)) { HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); } if (resourceType == null) { parameterDescription = null; return false; } return true; } private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config) { ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config); Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions; foreach (ApiDescription api in apis) { ApiParameterDescription parameterDescription; Type parameterType; if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType)) { modelGenerator.GetOrCreateModelDescription(parameterType); } } return modelGenerator; } private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample) { InvalidSample invalidSample = sample as InvalidSample; if (invalidSample != null) { apiModel.ErrorMessages.Add(invalidSample.ErrorMessage); } } } }
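// Usage sketch for the extension methods above (a minimal example, assuming the standard
// ASP.NET Web API help page template: XmlDocumentationProvider ships with that template, and
// the XmlDocument.xml path, the "Values" controller and the "Get" action are illustrative only).
using System;
using System.Collections.Generic;
using System.Net.Http.Headers;
using System.Web.Http;

namespace Docker.Sample.Areas.HelpPage
{
    public static class HelpPageUsageExample
    {
        public static void Register(HttpConfiguration config)
        {
            // Pull action documentation from the XML comments file produced at build time.
            config.SetDocumentationProvider(
                new XmlDocumentationProvider(System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/XmlDocument.xml")));

            // Canned sample values used when the formatters generate request/response bodies.
            config.SetSampleObjects(new Dictionary<Type, object>
            {
                { typeof(string), "sample string" },
                { typeof(int), 42 }
            });

            // Override the generated JSON response sample for one specific action.
            config.SetSampleResponse("{ \"id\": 1, \"name\": \"sample\" }",
                new MediaTypeHeaderValue("application/json"), "Values", "Get");
        }
    }
}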
// Copyright (c) 2010-2013 SharpDX - Alexandre Mutel // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using System.IO; using SharpDX.Direct3D11; using SharpDX.IO; namespace SharpDX.Toolkit.Graphics { /// <summary> /// A TextureCube front end to <see cref="SharpDX.Direct3D11.Texture2D"/>. /// </summary> public class TextureCube : Texture2DBase { internal TextureCube(GraphicsDevice device, Texture2DDescription description2D, params DataBox[] dataBoxes) : base(device, description2D, dataBoxes) { Initialize(Resource); } internal TextureCube(GraphicsDevice device, Direct3D11.Texture2D texture) : base(device, texture) { Initialize(Resource); } internal override TextureView GetRenderTargetView(ViewType viewType, int arrayOrDepthSlice, int mipMapSlice) { throw new System.NotSupportedException(); } /// <summary> /// Makes a copy of this texture. /// </summary> /// <remarks> /// This method doesn't copy the content of the texture. /// </remarks> /// <returns> /// A copy of this texture. /// </returns> public override Texture Clone() { return new TextureCube(GraphicsDevice, this.Description); } /// <summary> /// Creates a new texture from a <see cref="Texture2DDescription"/>. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="description">The description.</param> /// <returns> /// A new instance of <see cref="TextureCube"/> class. /// </returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> public static TextureCube New(GraphicsDevice device, Texture2DDescription description) { return new TextureCube(device, description); } /// <summary> /// Creates a new texture from a <see cref="Direct3D11.Texture2D"/>. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="texture">The native texture <see cref="Direct3D11.Texture2D"/>.</param> /// <returns> /// A new instance of <see cref="TextureCube"/> class. 
/// </returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> public static TextureCube New(GraphicsDevice device, Direct3D11.Texture2D texture) { return new TextureCube(device, texture); } /// <summary> /// Creates a new <see cref="TextureCube"/>. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="size">The size (in pixels) of the top-level faces of the cube texture.</param> /// <param name="format">Describes the format to use.</param> /// <param name="usage">The usage.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <returns> /// A new instance of <see cref="Texture2D"/> class. /// </returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> public static TextureCube New(GraphicsDevice device, int size, PixelFormat format, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Default) { return New(device, size, false, format, flags, usage); } /// <summary> /// Creates a new <see cref="TextureCube"/>. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="size">The size (in pixels) of the top-level faces of the cube texture.</param> /// <param name="mipCount">Number of mipmaps, set to true to have all mipmaps, set to an int >=1 for a particular mipmap count.</param> /// <param name="format">Describes the format to use.</param> /// <param name="usage">The usage.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <returns> /// A new instance of <see cref="Texture2D"/> class. /// </returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> public static TextureCube New(GraphicsDevice device, int size, MipMapCount mipCount, PixelFormat format, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Default) { return new TextureCube(device, NewTextureCubeDescription(size, format, flags | TextureFlags.ShaderResource, mipCount, usage)); } /// <summary> /// Creates a new <see cref="TextureCube" /> from a initial data.. /// </summary> /// <typeparam name="T">Type of a pixel data</typeparam> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="size">The size (in pixels) of the top-level faces of the cube texture.</param> /// <param name="format">Describes the format to use.</param> /// <param name="usage">The usage.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <param name="textureData">an array of 6 textures. 
See remarks</param> /// <returns>A new instance of <see cref="TextureCube" /> class.</returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> /// <remarks> /// The first dimension of mipMapTextures describes the number of array (TextureCube Array), the second is the texture data for a particular cube face. /// </remarks> public unsafe static TextureCube New<T>(GraphicsDevice device, int size, PixelFormat format, T[][] textureData, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Immutable) where T : struct { if (textureData.Length != 6) throw new ArgumentException("Invalid texture data. First dimension must be equal to 6", "textureData"); var dataBox1 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[0])); var dataBox2 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[1])); var dataBox3 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[2])); var dataBox4 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[3])); var dataBox5 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[4])); var dataBox6 = GetDataBox(format, size, size, 1, textureData[0], (IntPtr)Interop.Fixed(textureData[5])); return new TextureCube(device, NewTextureCubeDescription(size, format, flags | TextureFlags.ShaderResource, 1, usage), dataBox1, dataBox2, dataBox3, dataBox4, dataBox5, dataBox6); } /// <summary> /// Creates a new <see cref="TextureCube" /> from a initial data.. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="size">The size (in pixels) of the top-level faces of the cube texture.</param> /// <param name="format">Describes the format to use.</param> /// <param name="usage">The usage.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <param name="textureData">an array of 6 textures. See remarks</param> /// <returns>A new instance of <see cref="TextureCube" /> class.</returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> /// <remarks> /// The first dimension of mipMapTextures describes the number of array (TextureCube Array), the second is the texture data for a particular cube face. /// </remarks> public static TextureCube New(GraphicsDevice device, int size, PixelFormat format, DataBox[] textureData, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Immutable) { if (textureData.Length != 6) throw new ArgumentException("Invalid texture data. First dimension must be equal to 6", "textureData"); return new TextureCube(device, NewTextureCubeDescription(size, format, flags | TextureFlags.ShaderResource, 1, usage), textureData); } /// <summary> /// Creates a new <see cref="TextureCube" /> directly from an <see cref="Image"/>. 
/// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="image">An image in CPU memory.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <param name="usage">The usage.</param> /// <returns>A new instance of <see cref="TextureCube" /> class.</returns> /// <msdn-id>ff476521</msdn-id> /// <unmanaged>HRESULT ID3D11Device::CreateTexture2D([In] const D3D11_TEXTURE2D_DESC* pDesc,[In, Buffer, Optional] const D3D11_SUBRESOURCE_DATA* pInitialData,[Out, Fast] ID3D11Texture2D** ppTexture2D)</unmanaged> /// <unmanaged-short>ID3D11Device::CreateTexture2D</unmanaged-short> public static TextureCube New(GraphicsDevice device, Image image, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Immutable) { if (image == null) throw new ArgumentNullException("image"); if (image.Description.Dimension != TextureDimension.TextureCube) throw new ArgumentException("Invalid image. Must be Cube", "image"); return new TextureCube(device, CreateTextureDescriptionFromImage(image, flags | TextureFlags.ShaderResource, usage), image.ToDataBox()); } /// <summary> /// Loads a Cube texture from a stream. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="stream">The stream to load the texture from.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <param name="usage">Usage of the resource. Default is <see cref="ResourceUsage.Immutable"/> </param> /// <exception cref="ArgumentException">If the texture is not of type Cube</exception> /// <returns>A texture</returns> public static new TextureCube Load(GraphicsDevice device, Stream stream, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Immutable) { var texture = Texture.Load(device, stream, flags | TextureFlags.ShaderResource, usage); if (!(texture is TextureCube)) throw new ArgumentException(string.Format("Texture is not type of [TextureCube] but [{0}]", texture.GetType().Name)); return (TextureCube)texture; } /// <summary> /// Loads a Cube texture from a stream. /// </summary> /// <param name="device">The <see cref="GraphicsDevice"/>.</param> /// <param name="filePath">The file to load the texture from.</param> /// <param name="flags">Sets the texture flags (for unordered access...etc.)</param> /// <param name="usage">Usage of the resource. Default is <see cref="ResourceUsage.Immutable"/> </param> /// <exception cref="ArgumentException">If the texture is not of type Cube</exception> /// <returns>A texture</returns> public static new TextureCube Load(GraphicsDevice device, string filePath, TextureFlags flags = TextureFlags.ShaderResource, ResourceUsage usage = ResourceUsage.Immutable) { using (var stream = new NativeFileStream(filePath, NativeFileMode.Open, NativeFileAccess.Read)) return Load(device, stream, flags | TextureFlags.ShaderResource, usage); } protected static Texture2DDescription NewTextureCubeDescription(int size, PixelFormat format, TextureFlags flags, int mipCount, ResourceUsage usage) { var desc = NewDescription(size, size, format, flags, mipCount, 6, usage); desc.OptionFlags = ResourceOptionFlags.TextureCube; return desc; } } }
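// Usage sketch for the New<T> overload above (a minimal example, assuming an existing
// GraphicsDevice; the face content and 256x256 size are illustrative). Exactly six faces
// are required, otherwise the overload throws ArgumentException.
using SharpDX;
using SharpDX.Toolkit.Graphics;

public static class TextureCubeUsageExample
{
    public static TextureCube CreateSolidColorCube(GraphicsDevice device)
    {
        const int size = 256;
        var faces = new Color[6][];
        for (int face = 0; face < 6; face++)
        {
            faces[face] = new Color[size * size];
            for (int i = 0; i < faces[face].Length; i++)
                faces[face][i] = Color.CornflowerBlue; // one flat color per face, for illustration
        }

        // Immutable is the default usage for textures created from initial data.
        return TextureCube.New(device, size, PixelFormat.R8G8B8A8.UNorm, faces);
    }
}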
<?cs # A link to a package ?><?cs def:package_link(pkg) ?> <a href="<?cs var:toroot ?><?cs var:pkg.link ?>"><?cs var:pkg.name ?></a><?cs /def ?> <?cs # A link to a type, or not if it's a primitive type link: whether to create a link at the top level, always creates links in recursive invocations. Expects the following fields: .name .link .isPrimitive .superBounds.N.(more links) (... super ... & ...) .extendsBounds.N.(more links) (... extends ... & ...) .typeArguments.N.(more links) (< ... >) ?><?cs def:type_link_impl(type, link) ?><?cs if:type.link && link=="true" ?><?cs if:type.federated ?><a href="<?cs var:type.link ?>"><?cs var:type.label ?></a><?cs else ?><a href="<?cs var:toroot ?><?cs var:type.link ?>"><?cs var:type.label ?></a><?cs /if ?><?cs else ?><?cs var:type.label ?><?cs /if ?><?cs if:subcount(type.extendsBounds) ?><?cs each:t=type.extendsBounds ?><?cs if:first(t) ?>&nbsp;extends&nbsp;<?cs else ?>&nbsp;&amp;&nbsp;<?cs /if ?><?cs call:type_link_impl(t, "true") ?><?cs /each ?><?cs /if ?><?cs if:subcount(type.superBounds) ?><?cs each:t=type.superBounds ?><?cs if:first(t) ?>&nbsp;super&nbsp;<?cs else ?>&nbsp;&amp;&nbsp;<?cs /if ?><?cs call:type_link_impl(t, "true") ?><?cs /each ?><?cs /if ?><?cs if:subcount(type.typeArguments) ?>&lt;<?cs each:t=type.typeArguments ?><?cs call:type_link_impl(t, "true") ?><?cs if:!last(t) ?>,&nbsp;<?cs /if ?><?cs /each ?>&gt;<?cs /if ?><?cs /def ?> <?cs def:class_name(type) ?><?cs call:type_link_impl(type, "false") ?><?cs /def ?> <?cs def:type_link(type) ?><?cs call:type_link_impl(type, "true") ?><?cs /def ?> <?cs # a conditional link. if the "condition" parameter evals to true then the link is displayed otherwise only the text is displayed ?><?cs def:cond_link(text, root, path, condition) ?><?cs if:condition ?><a href="<?cs var:root ?><?cs var:path ?>"><?cs /if ?><?cs var:text ?><?cs if:condition ?></a><?cs /if ?><?cs /def ?> <?cs # A comma separated parameter list ?><?cs def:parameter_list(params) ?><?cs each:param = params ?><?cs call:type_link(param.type)?> <?cs var:param.name ?><?cs if: name(param)!=subcount(params)-1?>, <?cs /if ?><?cs /each ?><?cs /def ?> <?cs # Print a list of tags (e.g. 
description text ?><?cs def:tag_list(tags) ?><?cs each:tag = tags ?><?cs if:tag.name == "Text" ?><?cs var:tag.text?><?cs elif:tag.kind == "@more" ?><p><?cs elif:tag.kind == "@see" ?><code><a href="<?cs if:tag.isLocal?><?cs var:toroot ?><?cs /if ?><?cs var:tag.href ?>"><?cs var:tag.label ?></a></code><?cs elif:tag.kind == "@seeHref" ?><a href="<?cs var:tag.href ?>"><?cs var:tag.label ?></a><?cs elif:tag.kind == "@seeJustLabel" ?><?cs var:tag.label ?><?cs elif:tag.kind == "@code" ?><code><?cs var:tag.text ?></code><?cs elif:tag.kind == "@samplecode" ?><pre><?cs var:tag.text ?></pre><?cs elif:tag.name == "@sample" ?><pre><?cs var:tag.text ?></pre><?cs elif:tag.name == "@include" ?><?cs var:tag.text ?><?cs elif:tag.kind == "@docRoot" ?><?cs var:toroot ?><?cs elif:tag.kind == "@sdkCurrent" ?><?cs var:sdk.current ?><?cs elif:tag.kind == "@sdkCurrentVersion" ?><?cs var:sdk.version ?><?cs elif:tag.kind == "@sdkCurrentRelId" ?><?cs var:sdk.rel.id ?><?cs elif:tag.kind == "@sdkPlatformVersion" ?><?cs var:sdk.platform.version ?><?cs elif:tag.kind == "@sdkPlatformApiLevel" ?><?cs var:sdk.platform.apiLevel ?><?cs elif:tag.kind == "@sdkPlatformMajorMinor" ?><?cs var:sdk.platform.majorMinor ?><?cs elif:tag.kind == "@sdkPlatformReleaseDate" ?><?cs var:sdk.platform.releaseDate ?><?cs elif:tag.kind == "@sdkPlatformDeployableDate" ?><?cs var:sdk.platform.deployableDate ?><?cs elif:tag.kind == "@adtZipVersion" ?><?cs var:adt.zip.version ?><?cs elif:tag.kind == "@adtZipDownload" ?><?cs var:adt.zip.download ?><?cs elif:tag.kind == "@adtZipBytes" ?><?cs var:adt.zip.bytes ?><?cs elif:tag.kind == "@adtZipChecksum" ?><?cs var:adt.zip.checksum ?><?cs elif:tag.kind == "@inheritDoc" ?><?cs # This is the case when @inheritDoc is in something that doesn't inherit from anything?><?cs elif:tag.kind == "@attr" ?><?cs else ?>{<?cs var:tag.name?> <?cs var:tag.text ?>}<?cs /if ?><?cs /each ?><?cs /def ?> <?cs # The message about This xxx is deprecated. ?><?cs def:deprecated_text(kind) ?> This <?cs var:kind ?> is deprecated.<?cs /def ?> <?cs # Show the short-form description of something. 
These come from shortDescr and deprecated ?><?cs def:short_descr(obj) ?><?cs if:subcount(obj.deprecated) ?> <em><?cs call:deprecated_text(obj.kind) ?> <?cs call:tag_list(obj.deprecated) ?></em><?cs else ?><?cs call:tag_list(obj.shortDescr) ?><?cs /if ?><?cs /def ?> <?cs # Show the red box with the deprecated warning ?><?cs def:deprecated_warning(obj) ?><?cs if:subcount(obj.deprecated) ?><p> <p class="caution"> <strong><?cs call:deprecated_text(obj.kind) ?></strong><br/> <?cs call:tag_list(obj.deprecated) ?> </p><?cs /if ?><?cs /def ?> <?cs # print the See Also: section ?><?cs def:see_also_tags(also) ?><?cs if:subcount(also) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">See Also</h5> <ul class="nolist"><?cs each:tag=also ?><li><?cs if:tag.kind == "@see" ?><code><a href="<?cs var:toroot ?><?cs var:tag.href ?>"><?cs var:tag.label ?></a></code><?cs elif:tag.kind == "@seeHref" ?><a href="<?cs var:tag.href ?>"><?cs var:tag.label ?></a><?cs elif:tag.kind == "@seeJustLabel" ?><?cs var:tag.label ?><?cs else ?>[ERROR: Unknown @see kind]<?cs /if ?></li><?cs /each ?> </ul> </div><?cs /if ?> <?cs /def ?> <?cs # print the API Level ?><?cs def:since_tags(obj) ?> <?cs if:reference.apilevels ?> Since: <a href="<?cs var:toroot ?>guide/appendix/api-levels.html#level<?cs var:obj.since.key ?>">API Level <?cs var:obj.since.name ?></a> <?cs /if ?> <?cs /def ?> <?cs def:federated_refs(obj) ?> <?cs if:subcount(obj.federated) ?> <div> Also: <?cs each:federated=obj.federated ?> <a href="<?cs var:federated.url ?>"><?cs var:federated.name ?></a><?cs if:!last(federated) ?>,<?cs /if ?> <?cs /each ?> </div> <?cs /if ?> <?cs /def ?> <?cs # Print the long-form description for something. Uses the following fields: deprecated descr seeAlso since ?><?cs def:description(obj) ?><?cs call:deprecated_warning(obj) ?> <div class="jd-tagdata jd-tagdescr"><p><?cs call:tag_list(obj.descr) ?></p></div><?cs if:subcount(obj.attrRefs) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">Related XML Attributes</h5> <ul class="nolist"><?cs each:attr=obj.attrRefs ?> <li><a href="<?cs var:toroot ?><?cs var:attr.href ?>"><?cs var:attr.name ?></a></li><?cs /each ?> </ul> </div><?cs /if ?><?cs if:subcount(obj.paramTags) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">Parameters</h5> <table class="jd-tagtable"><?cs each:tag=obj.paramTags ?> <tr> <th><?cs if:tag.isTypeParameter ?>&lt;<?cs /if ?><?cs var:tag.name ?><?cs if:tag.isTypeParameter ?>&gt;<?cs /if ?></th> <td><?cs call:tag_list(tag.comment) ?></td> </tr><?cs /each ?> </table> </div><?cs /if ?><?cs if:subcount(obj.returns) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">Returns</h5> <ul class="nolist"><li><?cs call:tag_list(obj.returns) ?></li></ul> </div><?cs /if ?><?cs if:subcount(obj.throws) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">Throws</h5> <table class="jd-tagtable"><?cs each:tag=obj.throws ?> <tr> <th><?cs call:type_link(tag.type) ?></td> <td><?cs call:tag_list(tag.comment) ?></td> </tr><?cs /each ?> </table> </div><?cs /if ?><?cs call:see_also_tags(obj.seeAlso) ?><?cs /def ?> <?cs # A table of links to classes with descriptions, as in a package file or the nested classes ?><?cs def:class_link_table(classes) ?><?cs set:count = #1 ?> <table class="jd-sumtable-expando"><?cs each:cl=classes ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:cl.type.since.key ?>" > <td class="jd-linkcol"><?cs call:type_link(cl.type) ?></td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(cl) ?>&nbsp;</td> </tr><?cs set:count = count + #1 
?><?cs /each ?> </table><?cs /def ?> <?cs # A list of links to classes, for use in the side navigation of classes when viewing a package (panel nav) ?><?cs def:class_link_list(label, classes) ?><?cs if:subcount(classes) ?> <li><h2><?cs var:label ?></h2> <ul><?cs each:cl=classes ?> <li class="api apilevel-<?cs var:cl.type.since.key ?>"><?cs call:type_link(cl.type) ?></li><?cs /each ?> </ul> </li><?cs /if ?><?cs /def ?> <?cs # A list of links to classes, for use in the side navigation of classes when viewing a class (panel nav) ?><?cs def:list(label, classes) ?><?cs if:subcount(classes) ?> <li><h2><?cs var:label ?></h2> <ul><?cs each:cl=classes ?> <li class="<?cs if:class.name == cl.label?>selected <?cs /if ?>api apilevel-<?cs var:cl.since.key ?>"><?cs call:type_link(cl) ?></li><?cs /each ?> </ul> </li><?cs /if ?><?cs /def ?> <?cs # A list of links to packages, for use in the side navigation of packages (panel nav) ?><?cs def:package_link_list(packages) ?><?cs each:pkg=packages ?> <li class="<?cs if:(class.package.name == pkg.name) || (package.name == pkg.name)?>selected <?cs /if ?>api apilevel-<?cs var:pkg.since.key ?>"><?cs call:package_link(pkg) ?></li><?cs /each ?><?cs /def ?> <?cs # An expando trigger ?><?cs def:expando_trigger(id, default) ?> <a href="#" onclick="return toggleInherited(this, null)" id="<?cs var:id ?>" class="jd-expando-trigger closed" ><img id="<?cs var:id ?>-trigger" src="<?cs var:toassets ?>images/triangle-<?cs var:default ?>.png" class="jd-expando-trigger-img" /></a><?cs /def ?> <?cs # An expandable list of classes ?><?cs def:expandable_class_list(id, classes, default) ?> <div id="<?cs var:id ?>"> <div id="<?cs var:id ?>-list" class="jd-inheritedlinks" <?cs if:default != "list" ?>style="display: none;"<?cs /if ?> > <?cs if:subcount(classes) <= #20 ?> <?cs each:cl=classes ?> <?cs call:type_link(cl.type) ?><?cs if:!last(cl) ?>,<?cs /if ?> <?cs /each ?> <?cs else ?> <?cs set:leftovers = subcount(classes) - #15 ?> <?cs loop:i = #0, #14, #1 ?> <?cs with:cl=classes[i] ?> <?cs call:type_link(cl.type) ?>, <?cs /with ?> <?cs if:(#i == #14) ?>and <a href="#" onclick="return toggleInherited(document.getElementById('<?cs var:id ?>', null))"><?cs var:leftovers ?> others.</a> <?cs /if ?> <?cs /loop ?> <?cs /if ?> </div> <div id="<?cs var:id ?>-summary" <?cs if:default != "summary" ?>style="display: none;"<?cs /if ?> ><?cs call:class_link_table(classes) ?> </div> </div><?cs /def ?> <?cs include:"components.cs" ?>
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Specialized; using System.Reflection; using log4net; using Mono.Addins; using Nini.Config; using OpenMetaverse; using OpenMetaverse.StructuredData; using OpenSim.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Interfaces; namespace OpenSim.Services.Connectors.SimianGrid { /// <summary> /// Connects authentication/authorization to the SimianGrid backend /// </summary> [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule")] public class SimianAuthenticationServiceConnector : IAuthenticationService, ISharedRegionModule { private static readonly ILog m_log = LogManager.GetLogger( MethodBase.GetCurrentMethod().DeclaringType); private string m_serverUrl = String.Empty; private bool m_Enabled = false; #region ISharedRegionModule public Type ReplaceableInterface { get { return null; } } public void RegionLoaded(Scene scene) { } public void PostInitialise() { } public void Close() { } public SimianAuthenticationServiceConnector() { } public string Name { get { return "SimianAuthenticationServiceConnector"; } } public void AddRegion(Scene scene) { if (m_Enabled) { scene.RegisterModuleInterface<IAuthenticationService>(this); } } public void RemoveRegion(Scene scene) { if (m_Enabled) { scene.UnregisterModuleInterface<IAuthenticationService>(this); } } #endregion ISharedRegionModule public SimianAuthenticationServiceConnector(IConfigSource source) { CommonInit(source); } public void Initialise(IConfigSource source) { IConfig moduleConfig = source.Configs["Modules"]; if (moduleConfig != null) { string name = moduleConfig.GetString("AuthenticationServices", ""); if (name == Name) CommonInit(source); } } private void CommonInit(IConfigSource source) { IConfig gridConfig = source.Configs["AuthenticationService"]; if (gridConfig != null) { string serviceUrl = gridConfig.GetString("AuthenticationServerURI"); if (!String.IsNullOrEmpty(serviceUrl)) { if 
(!serviceUrl.EndsWith("/") && !serviceUrl.EndsWith("=")) serviceUrl = serviceUrl + '/'; m_serverUrl = serviceUrl; m_Enabled = true; } } if (String.IsNullOrEmpty(m_serverUrl)) m_log.Info("[SIMIAN AUTH CONNECTOR]: No AuthenticationServerURI specified, disabling connector"); } public string Authenticate(UUID principalID, string password, int lifetime) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetIdentities" }, { "UserID", principalID.ToString() } }; OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs); if (response["Success"].AsBoolean() && response["Identities"] is OSDArray) { bool md5hashFound = false; OSDArray identities = (OSDArray)response["Identities"]; for (int i = 0; i < identities.Count; i++) { OSDMap identity = identities[i] as OSDMap; if (identity != null) { if (identity["Type"].AsString() == "md5hash") { string authorizeResult; if (CheckPassword(principalID, password, identity["Credential"].AsString(), out authorizeResult)) return authorizeResult; md5hashFound = true; break; } } } if (!md5hashFound) m_log.Warn("[SIMIAN AUTH CONNECTOR]: Authentication failed for " + principalID + ", no md5hash identity found"); } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Failed to retrieve identities for " + principalID + ": " + response["Message"].AsString()); } return String.Empty; } public bool Verify(UUID principalID, string token, int lifetime) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetSession" }, { "SessionID", token } }; OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs); if (response["Success"].AsBoolean()) { return true; } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Could not verify session for " + principalID + ": " + response["Message"].AsString()); } return false; } public bool Release(UUID principalID, string token) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "RemoveSession" }, { "UserID", principalID.ToString() } }; OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs); if (response["Success"].AsBoolean()) { return true; } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Failed to remove session for " + principalID + ": " + response["Message"].AsString()); } return false; } public bool SetPassword(UUID principalID, string passwd) { // Fetch the user name first NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetUser" }, { "UserID", principalID.ToString() } }; OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs); if (response["Success"].AsBoolean() && response["User"] is OSDMap) { OSDMap userMap = (OSDMap)response["User"]; string identifier = userMap["Name"].AsString(); if (!String.IsNullOrEmpty(identifier)) { // Add/update the md5hash identity // TODO: Support salts when AddIdentity does // TODO: Create an a1hash too for WebDAV logins requestArgs = new NameValueCollection { { "RequestMethod", "AddIdentity" }, { "Identifier", identifier }, { "Credential", "$1$" + Utils.MD5String(passwd) }, { "Type", "md5hash" }, { "UserID", principalID.ToString() } }; response = WebUtil.PostToService(m_serverUrl, requestArgs); bool success = response["Success"].AsBoolean(); if (!success) m_log.WarnFormat("[SIMIAN AUTH CONNECTOR]: Failed to set password for {0} ({1})", identifier, principalID); return success; } } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Failed to retrieve identities for " + principalID + ": " + response["Message"].AsString()); } return false; } private bool CheckPassword(UUID userID, 
string password, string simianGridCredential, out string authorizeResult) { if (simianGridCredential.Contains(":")) { // Salted version int idx = simianGridCredential.IndexOf(':'); string finalhash = simianGridCredential.Substring(0, idx); string salt = simianGridCredential.Substring(idx + 1); if (finalhash == Utils.MD5String(password + ":" + salt)) { authorizeResult = Authorize(userID); return true; } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Authentication failed for " + userID + " using md5hash " + Utils.MD5String(password) + ":" + salt); } } else { // Unsalted version if (password == simianGridCredential || "$1$" + password == simianGridCredential || "$1$" + Utils.MD5String(password) == simianGridCredential || Utils.MD5String(password) == simianGridCredential || "$1$" + Utils.MD5String(password + ":") == simianGridCredential) { authorizeResult = Authorize(userID); return true; } else { m_log.Warn("[SIMIAN AUTH CONNECTOR]: Authentication failed for " + userID + " using md5hash $1$" + Utils.MD5String(password)); } } authorizeResult = null; return false; } private string Authorize(UUID userID) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "AddSession" }, { "UserID", userID.ToString() } }; OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs); if (response["Success"].AsBoolean()) return response["SessionID"].AsUUID().ToString(); else return String.Empty; } } }
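// Illustrative sketch of the two credential layouts that CheckPassword above accepts
// (the helper names here are hypothetical; Utils.MD5String is the OpenMetaverse helper
// already used in this connector). SetPassword stores the unsalted "$1$" form via AddIdentity.
using OpenMetaverse;

public static class SimianCredentialExample
{
    // Unsalted: credential is "$1$" + MD5(password), as written by SetPassword.
    public static string MakeUnsaltedCredential(string password)
    {
        return "$1$" + Utils.MD5String(password);
    }

    // Salted: credential is "<MD5(password + \":\" + salt)>:<salt>"; CheckPassword splits on
    // ':' and compares the left part against MD5(password + ":" + salt).
    public static string MakeSaltedCredential(string password, string salt)
    {
        return Utils.MD5String(password + ":" + salt) + ":" + salt;
    }
}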
using System; using System.Runtime.CompilerServices; using LLILUM = Microsoft.Llilum.Devices; namespace Microsoft.SPOT.Hardware { public class AnalogOutput : IDisposable { static object s_syncRoot = new Object(); //--// private readonly Cpu.Pin m_pin; private readonly Cpu.AnalogOutputChannel m_channel; private readonly int m_precision; private double m_scale; private double m_offset; //--// private readonly LLILUM.Adc.AdcPin m_adc; //--// /// <summary> /// Builds an instance of AnalogOutput type for the specified channel /// </summary> /// <param name="channel">The channel for the AnalogOutput</param> /// <param name="scale">A multiplicative factor to apply to the value written to the sensor</param> /// <param name="offset">A constant factor to add to the value written to the sensor</param> /// <param name="precisionInBits">The desired bit precision for the D/A conversion. A value of -1 indicates default precision.</param> public AnalogOutput( Cpu.AnalogOutputChannel channel, double scale, double offset, int precisionInBits ) { m_channel = channel; HardwareProvider hwProvider = HardwareProvider.HwProvider; if(hwProvider == null) throw new InvalidOperationException( ); m_pin = hwProvider.GetAnalogOutputPinForChannel( channel ); m_scale = scale; m_offset = offset; int[] availablePrecisions = hwProvider.GetAvailableAnalogOutputPrecisionInBitsForChannel(channel); if(precisionInBits == -1) { if(availablePrecisions.Length == 0) throw new InvalidOperationException( ); m_precision = availablePrecisions[ 0 ]; } else { bool found = false; foreach(int precision in availablePrecisions) { if(precisionInBits == precision) { m_precision = precision; found = true; break; } } if(!found) { throw new ArgumentException( ); } } m_adc = new LLILUM.Adc.AdcPin( (int)m_pin ); bool fReserved = false; try { lock (s_syncRoot) { fReserved = Port.ReservePin( m_pin, true ); Initialize( channel, m_precision ); } } catch { m_adc.Dispose( ); if(fReserved) { Port.ReservePin( m_pin, false ); } throw; } } /// <summary> /// Builds an instance of AnalogOutput type for the specified channel /// </summary> /// <param name="channel">The channel for the AnalogOutput</param> /// <param name="precisionInBits">The desired bit precision for the D/A conversion.</param> public AnalogOutput(Cpu.AnalogOutputChannel channel, int precisionInBits) : this(channel, 1.0, 0.0, precisionInBits) { } /// <summary> /// Builds an instance of AnalogOutput type for the specified channel. /// </summary> /// <param name="channel">The channel for the AnalogOutput</param> public AnalogOutput(Cpu.AnalogOutputChannel channel) : this(channel, 1.0, 0.0, -1) { } /// <summary> /// Destructs the instance of the AnalogOutput /// </summary> ~AnalogOutput() { Dispose(false); } /// <summary> /// Disposes the instance of the AnalogOutput /// </summary> public void Dispose() { Dispose(true); } [MethodImplAttribute(MethodImplOptions.Synchronized)] private void Dispose(bool fDisposing) { Port.ReservePin(m_pin, false); Uninitialize(m_channel); } /// <summary> /// Gets and sets a multiplicative factor that will be applied to the value before it is written to the sensor. /// </summary> public double Scale { get { return m_scale; } set { m_scale = value; } } /// <summary> /// Gets and sets a constant offset that will be applied to the value before it is written to the sensor. 
/// </summary> public double Offset { get { return m_offset; } set { m_offset = value; } } /// <summary> /// Gets the precision in bits for this channel /// </summary> public int Precision { get { return m_precision; } } /// <summary> /// The GPIO pin chosen for the selected channel /// </summary> public Cpu.Pin Pin { get { return m_pin; } } /// <summary> /// Writes a level to the AnalogOutput. The value is first adjusted to value * Scale + Offset, clamped to the range [0, 1], and then converted to a raw D/A code of level * (2^Precision - 1). /// </summary> /// <param name="level">The value to be written, before scaling and offset are applied.</param> public void Write(double level) { if (m_scale != 1.0) level *= m_scale; if (m_offset != 0.0) level += m_offset; if (level < 0) level = 0; // avoid overflow if (level > 1) level = 1; WriteRaw((int)(level * ((1 << m_precision) - 1))); } /// <summary> /// Writes a raw level to the AnalogOutput. /// </summary> /// <param name="level">The raw D/A value</param> //////[MethodImplAttribute(MethodImplOptions.InternalCall)] //////public extern void WriteRaw(int level); public void WriteRaw(int level) { throw new NotImplementedException( ); } //--// [MethodImplAttribute(MethodImplOptions.InternalCall)] protected static extern void Initialize(Cpu.AnalogOutputChannel channel, int precisionInBits); [MethodImplAttribute(MethodImplOptions.InternalCall)] protected static extern void Uninitialize(Cpu.AnalogOutputChannel channel); } }
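// Usage sketch for the AnalogOutput API above (a minimal example; the ANALOG_OUTPUT_0
// channel is illustrative and depends on the board's hardware provider). Note that in this
// Llilum port WriteRaw currently throws NotImplementedException, so Write will as well.
using Microsoft.SPOT.Hardware;

public static class AnalogOutputUsageExample
{
    public static void EmitHalfScale()
    {
        // A precisionInBits of -1 selects the first precision reported by the hardware provider.
        using (var dac = new AnalogOutput(Cpu.AnalogOutputChannel.ANALOG_OUTPUT_0))
        {
            // 0.5 is scaled/offset (Scale = 1.0, Offset = 0.0 here), clamped to [0, 1],
            // then converted to the raw code 0.5 * (2^precision - 1).
            dac.Write(0.5);
        }
    }
}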
using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Threading.Tasks; using Abp.Dependency; using Abp.Domain.Entities; using Abp.MultiTenancy; using Abp.Reflection.Extensions; namespace Abp.Domain.Repositories { /// <summary> /// Base class to implement <see cref="IRepository{TEntity,TPrimaryKey}"/>. /// It implements some methods in most simple way. /// </summary> /// <typeparam name="TEntity">Type of the Entity for this repository</typeparam> /// <typeparam name="TPrimaryKey">Primary key of the entity</typeparam> public abstract class AbpRepositoryBase<TEntity, TPrimaryKey> : IRepository<TEntity, TPrimaryKey> where TEntity : class, IEntity<TPrimaryKey> { /// <summary> /// The multi tenancy side /// </summary> public static MultiTenancySides? MultiTenancySide { get; private set; } public IIocResolver IocResolver { get; set; } static AbpRepositoryBase() { var attr = typeof (TEntity).GetSingleAttributeOfTypeOrBaseTypesOrNull<MultiTenancySideAttribute>(); if (attr != null) { MultiTenancySide = attr.Side; } } public abstract IQueryable<TEntity> GetAll(); public virtual IQueryable<TEntity> GetAllIncluding(params Expression<Func<TEntity, object>>[] propertySelectors) { return GetAll(); } public virtual List<TEntity> GetAllList() { return GetAll().ToList(); } public virtual Task<List<TEntity>> GetAllListAsync() { return Task.FromResult(GetAllList()); } public virtual List<TEntity> GetAllList(Expression<Func<TEntity, bool>> predicate) { return GetAll().Where(predicate).ToList(); } public virtual Task<List<TEntity>> GetAllListAsync(Expression<Func<TEntity, bool>> predicate) { return Task.FromResult(GetAllList(predicate)); } public virtual T Query<T>(Func<IQueryable<TEntity>, T> queryMethod) { return queryMethod(GetAll()); } public virtual TEntity Get(TPrimaryKey id) { var entity = FirstOrDefault(id); if (entity == null) { throw new EntityNotFoundException(typeof(TEntity), id); } return entity; } public virtual async Task<TEntity> GetAsync(TPrimaryKey id) { var entity = await FirstOrDefaultAsync(id); if (entity == null) { throw new EntityNotFoundException(typeof(TEntity), id); } return entity; } public virtual TEntity Single(Expression<Func<TEntity, bool>> predicate) { return GetAll().Single(predicate); } public virtual Task<TEntity> SingleAsync(Expression<Func<TEntity, bool>> predicate) { return Task.FromResult(Single(predicate)); } public virtual TEntity FirstOrDefault(TPrimaryKey id) { return GetAll().FirstOrDefault(CreateEqualityExpressionForId(id)); } public virtual Task<TEntity> FirstOrDefaultAsync(TPrimaryKey id) { return Task.FromResult(FirstOrDefault(id)); } public virtual TEntity FirstOrDefault(Expression<Func<TEntity, bool>> predicate) { return GetAll().FirstOrDefault(predicate); } public virtual Task<TEntity> FirstOrDefaultAsync(Expression<Func<TEntity, bool>> predicate) { return Task.FromResult(FirstOrDefault(predicate)); } public virtual TEntity Load(TPrimaryKey id) { return Get(id); } public abstract TEntity Insert(TEntity entity); public virtual Task<TEntity> InsertAsync(TEntity entity) { return Task.FromResult(Insert(entity)); } public virtual TPrimaryKey InsertAndGetId(TEntity entity) { return Insert(entity).Id; } public virtual Task<TPrimaryKey> InsertAndGetIdAsync(TEntity entity) { return Task.FromResult(InsertAndGetId(entity)); } public virtual TEntity InsertOrUpdate(TEntity entity) { return entity.IsTransient() ? 
Insert(entity) : Update(entity); } public virtual async Task<TEntity> InsertOrUpdateAsync(TEntity entity) { return entity.IsTransient() ? await InsertAsync(entity) : await UpdateAsync(entity); } public virtual TPrimaryKey InsertOrUpdateAndGetId(TEntity entity) { return InsertOrUpdate(entity).Id; } public virtual Task<TPrimaryKey> InsertOrUpdateAndGetIdAsync(TEntity entity) { return Task.FromResult(InsertOrUpdateAndGetId(entity)); } public abstract TEntity Update(TEntity entity); public virtual Task<TEntity> UpdateAsync(TEntity entity) { return Task.FromResult(Update(entity)); } public virtual TEntity Update(TPrimaryKey id, Action<TEntity> updateAction) { var entity = Get(id); updateAction(entity); return entity; } public virtual async Task<TEntity> UpdateAsync(TPrimaryKey id, Func<TEntity, Task> updateAction) { var entity = await GetAsync(id); await updateAction(entity); return entity; } public abstract void Delete(TEntity entity); public virtual Task DeleteAsync(TEntity entity) { Delete(entity); return Task.FromResult(0); } public abstract void Delete(TPrimaryKey id); public virtual Task DeleteAsync(TPrimaryKey id) { Delete(id); return Task.FromResult(0); } public virtual void Delete(Expression<Func<TEntity, bool>> predicate) { foreach (var entity in GetAll().Where(predicate).ToList()) { Delete(entity); } } public virtual Task DeleteAsync(Expression<Func<TEntity, bool>> predicate) { Delete(predicate); return Task.FromResult(0); } public virtual int Count() { return GetAll().Count(); } public virtual Task<int> CountAsync() { return Task.FromResult(Count()); } public virtual int Count(Expression<Func<TEntity, bool>> predicate) { return GetAll().Where(predicate).Count(); } public virtual Task<int> CountAsync(Expression<Func<TEntity, bool>> predicate) { return Task.FromResult(Count(predicate)); } public virtual long LongCount() { return GetAll().LongCount(); } public virtual Task<long> LongCountAsync() { return Task.FromResult(LongCount()); } public virtual long LongCount(Expression<Func<TEntity, bool>> predicate) { return GetAll().Where(predicate).LongCount(); } public virtual Task<long> LongCountAsync(Expression<Func<TEntity, bool>> predicate) { return Task.FromResult(LongCount(predicate)); } protected static Expression<Func<TEntity, bool>> CreateEqualityExpressionForId(TPrimaryKey id) { var lambdaParam = Expression.Parameter(typeof(TEntity)); var lambdaBody = Expression.Equal( Expression.PropertyOrField(lambdaParam, "Id"), Expression.Constant(id, typeof(TPrimaryKey)) ); return Expression.Lambda<Func<TEntity, bool>>(lambdaBody, lambdaParam); } } }
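// Minimal sketch of a concrete repository built on AbpRepositoryBase above (the in-memory
// store and the int primary key are illustrative). Only GetAll, Insert, Update and the two
// Delete overloads are abstract; Get, FirstOrDefault, Count, the async wrappers and the
// predicate-based queries all come from the base class via GetAll().
using System.Collections.Generic;
using System.Linq;
using Abp.Domain.Entities;

namespace Abp.Domain.Repositories
{
    public class InMemoryRepository<TEntity> : AbpRepositoryBase<TEntity, int>
        where TEntity : class, IEntity<int>
    {
        private readonly List<TEntity> _store = new List<TEntity>();
        private int _nextId = 1;

        public override IQueryable<TEntity> GetAll()
        {
            return _store.AsQueryable();
        }

        public override TEntity Insert(TEntity entity)
        {
            entity.Id = _nextId++;
            _store.Add(entity);
            return entity;
        }

        public override TEntity Update(TEntity entity)
        {
            return entity; // entities are held by reference, so in-place edits are already visible
        }

        public override void Delete(TEntity entity)
        {
            _store.Remove(entity);
        }

        public override void Delete(int id)
        {
            _store.RemoveAll(e => e.Id == id);
        }
    }
}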
namespace ContosoUniversity.Domain.Core.Repository { using ContosoUniversity.Domain.Core.Repository.Entities; using Models; using System; using System.Collections.Generic; using System.Data.Entity; using System.Data.Entity.Migrations; using System.Linq; public sealed class ContosoDbInitializer : DropCreateDatabaseIfModelChanges<ContosoDbContext> { // set to false when running selenium tests public static bool AllowDatabaseSeed { get; set; } = true; protected override void Seed(ContosoDbContext context) { if (!AllowDatabaseSeed) return; var students = new List<Student> { new Student { FirstMidName = "Carson", LastName = "Alexander", IsDeleted = false, EnrollmentDate = DateTime.Parse("2010-09-01") }, new Student { FirstMidName = "Meredith", LastName = "Alonso", IsDeleted = false, EnrollmentDate = DateTime.Parse("2012-09-01") }, new Student { FirstMidName = "Arturo", LastName = "Anand", IsDeleted = false, EnrollmentDate = DateTime.Parse("2013-09-01") }, new Student { FirstMidName = "Gytis", LastName = "Barzdukas", IsDeleted = false, EnrollmentDate = DateTime.Parse("2012-09-01") }, new Student { FirstMidName = "Yan", LastName = "Li", IsDeleted = false, EnrollmentDate = DateTime.Parse("2012-09-01") }, new Student { FirstMidName = "Peggy", LastName = "Justice", IsDeleted = false, EnrollmentDate = DateTime.Parse("2011-09-01") }, new Student { FirstMidName = "Laura", LastName = "Norman", IsDeleted = false, EnrollmentDate = DateTime.Parse("2013-09-01") }, new Student { FirstMidName = "Nino", LastName = "Olivetto", IsDeleted = false, EnrollmentDate = DateTime.Parse("2005-09-01") } }; students.ForEach(s => context.Students.AddOrUpdate(p => p.LastName, s)); context.SaveChanges(); var instructors = new List<Instructor> { new Instructor { FirstMidName = "Kim", LastName = "Abercrombie", HireDate = DateTime.Parse("1995-03-11") }, new Instructor { FirstMidName = "Fadi", LastName = "Fakhouri", HireDate = DateTime.Parse("2002-07-06") }, new Instructor { FirstMidName = "Roger", LastName = "Harui", HireDate = DateTime.Parse("1998-07-01") }, new Instructor { FirstMidName = "Candace", LastName = "Kapoor", HireDate = DateTime.Parse("2001-01-15") }, new Instructor { FirstMidName = "Roger", LastName = "Zheng", HireDate = DateTime.Parse("2004-02-12") } }; instructors.ForEach(s => context.Instructors.AddOrUpdate(p => p.LastName, s)); context.SaveChanges(); var departments = new List<Department> { new Department { Name = "English", Budget = 350000, CreatedBy = "seed", CreatedOn = DateTime.Now,ModifiedBy = "seed", ModifiedOn = DateTime.Now, StartDate = DateTime.Parse("2007-09-01"), InstructorID = instructors.Single( i => i.LastName == "Abercrombie").ID }, new Department { Name = "Mathematics", Budget = 100000, CreatedBy = "seed", CreatedOn = DateTime.Now,ModifiedBy = "seed", ModifiedOn = DateTime.Now, StartDate = DateTime.Parse("2007-09-01"), InstructorID = instructors.Single( i => i.LastName == "Fakhouri").ID }, new Department { Name = "Engineering", Budget = 350000, CreatedBy = "seed", CreatedOn = DateTime.Now,ModifiedBy = "seed", ModifiedOn = DateTime.Now, StartDate = DateTime.Parse("2007-09-01"), InstructorID = instructors.Single( i => i.LastName == "Harui").ID }, new Department { Name = "Economics", Budget = 100000, CreatedBy = "seed", CreatedOn = DateTime.Now,ModifiedBy = "seed", ModifiedOn = DateTime.Now, StartDate = DateTime.Parse("2007-09-01"), InstructorID = instructors.Single( i => i.LastName == "Kapoor").ID } }; departments.ForEach(s => context.Departments.AddOrUpdate(p => p.Name, s)); 
context.SaveChanges(); var courses = new List<Course> { new Course {CourseID = 1050, Title = "Chemistry", Credits = 3, DepartmentID = departments.Single( s => s.Name == "Engineering").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 4022, Title = "Microeconomics", Credits = 3, DepartmentID = departments.Single( s => s.Name == "Economics").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 4041, Title = "Macroeconomics", Credits = 3, DepartmentID = departments.Single( s => s.Name == "Economics").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 1045, Title = "Calculus", Credits = 4, DepartmentID = departments.Single( s => s.Name == "Mathematics").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 3141, Title = "Trigonometry", Credits = 4, DepartmentID = departments.Single( s => s.Name == "Mathematics").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 2021, Title = "Composition", Credits = 3, DepartmentID = departments.Single( s => s.Name == "English").DepartmentID, Instructors = new List<Instructor>() }, new Course {CourseID = 2042, Title = "Literature", Credits = 4, DepartmentID = departments.Single( s => s.Name == "English").DepartmentID, Instructors = new List<Instructor>() }, }; courses.ForEach(s => context.Courses.AddOrUpdate(p => p.CourseID, s)); context.SaveChanges(); var officeAssignments = new List<OfficeAssignment> { new OfficeAssignment { InstructorID = instructors.Single( i => i.LastName == "Fakhouri").ID, Location = "Smith 17" }, new OfficeAssignment { InstructorID = instructors.Single( i => i.LastName == "Harui").ID, Location = "Gowan 27" }, new OfficeAssignment { InstructorID = instructors.Single( i => i.LastName == "Kapoor").ID, Location = "Thompson 304" }, }; officeAssignments.ForEach(s => context.OfficeAssignments.AddOrUpdate(p => p.InstructorID, s)); context.SaveChanges(); AddOrUpdateInstructor(context, "Chemistry", "Kapoor"); AddOrUpdateInstructor(context, "Chemistry", "Harui"); AddOrUpdateInstructor(context, "Microeconomics", "Zheng"); AddOrUpdateInstructor(context, "Macroeconomics", "Zheng"); AddOrUpdateInstructor(context, "Calculus", "Fakhouri"); AddOrUpdateInstructor(context, "Trigonometry", "Harui"); AddOrUpdateInstructor(context, "Composition", "Abercrombie"); AddOrUpdateInstructor(context, "Literature", "Abercrombie"); context.SaveChanges(); var enrollments = new List<Enrollment> { new Enrollment { StudentID = students.Single(s => s.LastName == "Alexander").ID, CourseID = courses.Single(c => c.Title == "Chemistry" ).CourseID, Grade = Grade.A }, new Enrollment { StudentID = students.Single(s => s.LastName == "Alexander").ID, CourseID = courses.Single(c => c.Title == "Microeconomics" ).CourseID, Grade = Grade.C }, new Enrollment { StudentID = students.Single(s => s.LastName == "Alexander").ID, CourseID = courses.Single(c => c.Title == "Macroeconomics" ).CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Alonso").ID, CourseID = courses.Single(c => c.Title == "Calculus" ).CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Alonso").ID, CourseID = courses.Single(c => c.Title == "Trigonometry" ).CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Alonso").ID, CourseID = courses.Single(c => c.Title == "Composition" ).CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == 
"Anand").ID, CourseID = courses.Single(c => c.Title == "Chemistry" ).CourseID }, new Enrollment { StudentID = students.Single(s => s.LastName == "Anand").ID, CourseID = courses.Single(c => c.Title == "Microeconomics").CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Barzdukas").ID, CourseID = courses.Single(c => c.Title == "Chemistry").CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Li").ID, CourseID = courses.Single(c => c.Title == "Composition").CourseID, Grade = Grade.B }, new Enrollment { StudentID = students.Single(s => s.LastName == "Justice").ID, CourseID = courses.Single(c => c.Title == "Literature").CourseID, Grade = Grade.B } }; foreach (Enrollment e in enrollments) { var enrollmentInDataBase = context.Enrollments.Where( s => s.Student.ID == e.StudentID && s.Course.CourseID == e.CourseID).SingleOrDefault(); if (enrollmentInDataBase == null) { context.Enrollments.Add(e); } } context.SaveChanges(); } void AddOrUpdateInstructor(ContosoDbContext context, string courseTitle, string instructorName) { var crs = context.Courses.SingleOrDefault(c => c.Title == courseTitle); var inst = crs.Instructors.SingleOrDefault(i => i.LastName == instructorName); if (inst == null) crs.Instructors.Add(context.Instructors.Single(i => i.LastName == instructorName)); } } }
using System; using Csla; using Invoices.DataAccess; namespace Invoices.Business { /// <summary> /// ProductTypeDynaItem (dynamic root object).<br/> /// This is a generated <see cref="ProductTypeDynaItem"/> business object. /// </summary> /// <remarks> /// This class is an item of <see cref="ProductTypeDynaColl"/> collection. /// </remarks> [Serializable] public partial class ProductTypeDynaItem : BusinessBase<ProductTypeDynaItem> { #region Static Fields private static int _lastId; #endregion #region Business Properties /// <summary> /// Maintains metadata about <see cref="ProductTypeId"/> property. /// </summary> [NotUndoable] public static readonly PropertyInfo<int> ProductTypeIdProperty = RegisterProperty<int>(p => p.ProductTypeId, "Product Type Id"); /// <summary> /// Gets the Product Type Id. /// </summary> /// <value>The Product Type Id.</value> public int ProductTypeId { get { return GetProperty(ProductTypeIdProperty); } } /// <summary> /// Maintains metadata about <see cref="Name"/> property. /// </summary> public static readonly PropertyInfo<string> NameProperty = RegisterProperty<string>(p => p.Name, "Name"); /// <summary> /// Gets or sets the Name. /// </summary> /// <value>The Name.</value> public string Name { get { return GetProperty(NameProperty); } set { SetProperty(NameProperty, value); } } #endregion #region Constructor /// <summary> /// Initializes a new instance of the <see cref="ProductTypeDynaItem"/> class. /// </summary> /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks> [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] public ProductTypeDynaItem() { // Use factory methods and do not use direct creation. Saved += OnProductTypeDynaItemSaved; ProductTypeDynaItemSaved += ProductTypeDynaItemSavedHandler; } #endregion #region Cache Invalidation // TODO: edit "ProductTypeDynaItem.cs", uncomment the "OnDeserialized" method and add the following line: // TODO: ProductTypeDynaItemSaved += ProductTypeDynaItemSavedHandler; private void ProductTypeDynaItemSavedHandler(object sender, Csla.Core.SavedEventArgs e) { // this runs on the client ProductTypeCachedList.InvalidateCache(); ProductTypeCachedNVL.InvalidateCache(); } /// <summary> /// Called by the server-side DataPortal after calling the requested DataPortal_XYZ method. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> protected override void DataPortal_OnDataPortalInvokeComplete(Csla.DataPortalEventArgs e) { if (ApplicationContext.ExecutionLocation == ApplicationContext.ExecutionLocations.Server && e.Operation == DataPortalOperations.Update) { // this runs on the server ProductTypeCachedNVL.InvalidateCache(); } } #endregion #region Data Access /// <summary> /// Loads default values for the <see cref="ProductTypeDynaItem"/> object properties. /// </summary> [RunLocal] protected override void DataPortal_Create() { LoadProperty(ProductTypeIdProperty, System.Threading.Interlocked.Decrement(ref _lastId)); var args = new DataPortalHookArgs(); OnCreate(args); base.DataPortal_Create(); } /// <summary> /// Loads a <see cref="ProductTypeDynaItem"/> object from the given <see cref="ProductTypeDynaItemDto"/>. 
/// </summary> /// <param name="data">The ProductTypeDynaItemDto to use.</param> private void DataPortal_Fetch(ProductTypeDynaItemDto data) { // Value properties LoadProperty(ProductTypeIdProperty, data.ProductTypeId); LoadProperty(NameProperty, data.Name); var args = new DataPortalHookArgs(data); OnFetchRead(args); // check all object rules and property rules BusinessRules.CheckRules(); } /// <summary> /// Inserts a new <see cref="ProductTypeDynaItem"/> object in the database. /// </summary> [Transactional(TransactionalTypes.TransactionScope)] protected override void DataPortal_Insert() { var dto = new ProductTypeDynaItemDto(); dto.Name = Name; using (var dalManager = DalFactoryInvoices.GetManager()) { var args = new DataPortalHookArgs(dto); OnInsertPre(args); var dal = dalManager.GetProvider<IProductTypeDynaItemDal>(); using (BypassPropertyChecks) { var resultDto = dal.Insert(dto); LoadProperty(ProductTypeIdProperty, resultDto.ProductTypeId); args = new DataPortalHookArgs(resultDto); } OnInsertPost(args); } } /// <summary> /// Updates in the database all changes made to the <see cref="ProductTypeDynaItem"/> object. /// </summary> [Transactional(TransactionalTypes.TransactionScope)] protected override void DataPortal_Update() { var dto = new ProductTypeDynaItemDto(); dto.ProductTypeId = ProductTypeId; dto.Name = Name; using (var dalManager = DalFactoryInvoices.GetManager()) { var args = new DataPortalHookArgs(dto); OnUpdatePre(args); var dal = dalManager.GetProvider<IProductTypeDynaItemDal>(); using (BypassPropertyChecks) { var resultDto = dal.Update(dto); args = new DataPortalHookArgs(resultDto); } OnUpdatePost(args); } } /// <summary> /// Self deletes the <see cref="ProductTypeDynaItem"/> object. /// </summary> [Transactional(TransactionalTypes.TransactionScope)] protected override void DataPortal_DeleteSelf() { DataPortal_Delete(ProductTypeId); } /// <summary> /// Deletes the <see cref="ProductTypeDynaItem"/> object from database. /// </summary> /// <param name="productTypeId">The delete criteria.</param> [Transactional(TransactionalTypes.TransactionScope)] private void DataPortal_Delete(int productTypeId) { using (var dalManager = DalFactoryInvoices.GetManager()) { var args = new DataPortalHookArgs(); OnDeletePre(args); var dal = dalManager.GetProvider<IProductTypeDynaItemDal>(); using (BypassPropertyChecks) { dal.Delete(productTypeId); } OnDeletePost(args); } } #endregion #region Saved Event // TODO: edit "ProductTypeDynaItem.cs", uncomment the "OnDeserialized" method and add the following line: // TODO: Saved += OnProductTypeDynaItemSaved; private void OnProductTypeDynaItemSaved(object sender, Csla.Core.SavedEventArgs e) { if (ProductTypeDynaItemSaved != null) ProductTypeDynaItemSaved(sender, e); } /// <summary> Use this event to signal a <see cref="ProductTypeDynaItem"/> object was saved.</summary> public static event EventHandler<Csla.Core.SavedEventArgs> ProductTypeDynaItemSaved; #endregion #region DataPortal Hooks /// <summary> /// Occurs after setting all defaults for object creation. /// </summary> partial void OnCreate(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation. /// </summary> partial void OnDeletePre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after the delete operation, before Commit(). /// </summary> partial void OnDeletePost(DataPortalHookArgs args); /// <summary> /// Occurs after setting query parameters and before the fetch operation. 
/// </summary> partial void OnFetchPre(DataPortalHookArgs args); /// <summary> /// Occurs after the fetch operation (object or collection is fully loaded and set up). /// </summary> partial void OnFetchPost(DataPortalHookArgs args); /// <summary> /// Occurs after the low level fetch operation, before the data reader is destroyed. /// </summary> partial void OnFetchRead(DataPortalHookArgs args); /// <summary> /// Occurs after setting query parameters and before the update operation. /// </summary> partial void OnUpdatePre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit(). /// </summary> partial void OnUpdatePost(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation. /// </summary> partial void OnInsertPre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit(). /// </summary> partial void OnInsertPost(DataPortalHookArgs args); #endregion } }
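// --------------------------------------------------------------------------
// Illustrative usage sketch only (not generated code): one plausible way a
// caller might exercise the object above through standard CSLA factory calls.
// The real factory methods live in other partial/generated classes that are
// not shown here, so treat this as an assumption-laden example rather than
// the project's actual API surface.
using System;
using Csla;
using Invoices.Business;

public static class ProductTypeDynaItemSketch
{
    public static void Run()
    {
        // React whenever any ProductTypeDynaItem is saved (the static event
        // declared in the Saved Event region above).
        ProductTypeDynaItem.ProductTypeDynaItemSaved +=
            (sender, e) => Console.WriteLine("A ProductTypeDynaItem was saved.");

        // DataPortal.Create routes to DataPortal_Create, which assigns a
        // temporary negative ProductTypeId via Interlocked.Decrement(ref _lastId).
        var item = DataPortal.Create<ProductTypeDynaItem>();
        item.Name = "Hardware";
        Console.WriteLine(item.ProductTypeId); // negative until inserted
    }
}
// --------------------------------------------------------------------------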
// /* // * Copyright (c) 2016, Alachisoft. All Rights Reserved. // * // * Licensed under the Apache License, Version 2.0 (the "License"); // * you may not use this file except in compliance with the License. // * You may obtain a copy of the License at // * // * http://www.apache.org/licenses/LICENSE-2.0 // * // * Unless required by applicable law or agreed to in writing, software // * distributed under the License is distributed on an "AS IS" BASIS, // * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // * See the License for the specific language governing permissions and // * limitations under the License. // */ using System; namespace Alachisoft.NosDB.Common.DataStructures { /// <summary> Elements are added at the tail and removed from the head. Class is thread-safe in that /// 1 producer and 1 consumer may add/remove elements concurrently. The class is not /// explicitely designed for multiple producers or consumers. Implemented as a linked /// list, so that removal of an element at the head does not cause a right-shift of the /// remaining elements (as in a Vector-based implementation). /// </summary> /// <author> Bela Ban /// </author> // public class Queue // { // /// <summary> Returns the first element. Returns null if no elements are available.</summary> // virtual public object First // { // get { return head != null?head.obj:null; } // } // // /// <summary> Returns the last element. Returns null if no elements are available.</summary> // virtual public object Last // { // get { return tail != null?tail.obj:null; } // } // // /*head and the tail of the list so that we can easily add and remove objects*/ // internal Element head = null, tail = null; // // /*flag to determine the state of the queue*/ // internal bool closed = false; // // /*current size of the queue*/ // private int size = 0; // // /* Lock object for synchronization. Is notified when element is added */ // internal object mutex = new object(); // // /// <summary>Lock object for syncing on removes. It is notified when an object is removed </summary> // // Object remove_mutex=new Object(); // // /*the number of end markers that have been added*/ // internal int num_markers = 0; // // /// <summary> if the queue closes during the runtime // /// an endMarker object is added to the end of the queue to indicate that // /// the queue will close automatically when the end marker is encountered // /// This allows for a "soft" close. // /// </summary> // /// <seealso cref="Queue#close"> // /// </seealso> // private static readonly object endMarker = new object(); // // // /// <summary> the class Element indicates an object in the queue. // /// This element allows for the linked list algorithm by always holding a // /// reference to the next element in the list. // /// if Element.next is null, then this element is the tail of the list. 
// /// </summary> // internal class Element // { // /*the actual value stored in the queue*/ // internal object obj = null; // /*pointer to the next item in the (queue) linked list*/ // internal Element next = null; // // /// <summary> creates an Element object holding its value</summary> // /// <param name="o">- the object to be stored in the queue position // /// </param> // internal Element(object o) // { // //this.enclosingInstance = enclosingInstance; // obj = o; // } // // /// <summary> prints out the value of the object</summary> // public override string ToString() // { // return obj != null?obj.ToString():"null"; // } // } // // // /// <summary> creates an empty queue</summary> // public Queue() // { // } // // // /// <summary> returns true if the Queue has been closed // /// however, this method will return false if the queue has been closed // /// using the close(true) method and the last element has yet not been received. // /// </summary> // /// <returns> true if the queue has been closed // /// </returns> // public bool Closed // { // get{return closed;} // } // // /// <summary> adds an object to the tail of this queue // /// If the queue has been closed with close(true) no exception will be // /// thrown if the queue has not been flushed yet. // /// </summary> // /// <param name="obj">- the object to be added to the queue // /// </param> // /// <exception cref=""> QueueClosedException exception if closed() returns true // /// </exception> // public void add(object obj) // { // if (obj == null) // { //// if (log.isErrorEnabled()) //// log.error("argument must not be null"); // return ; // } // if (closed) // throw new QueueClosedException(); // if (this.num_markers > 0) // throw new QueueClosedException("Queue.add(): queue has been closed. You can not add more elements. " + "Waiting for removal of remaining elements."); // // /*lock the queue from other threads*/ // lock (mutex) // { // /*create a new linked list element*/ // Element el = new Element(obj); // /*check the first element*/ // if (head == null) // { // /*the object added is the first element*/ // /*set the head to be this object*/ // head = el; // /*set the tail to be this object*/ // tail = head; // /*set the size to be one, since the queue was empty*/ // size = 1; // } // else // { // /*add the object to the end of the linked list*/ // tail.next = el; // /*set the tail to point to the last element*/ // tail = el; // /*increase the size*/ // size++; // } // /*wake up all the threads that are waiting for the lock to be released*/ // Monitor.PulseAll(mutex); // } // } // // // /// <summary> Adds a new object to the head of the queue // /// basically (obj.equals(queue.remove(queue.add(obj)))) returns true // /// If the queue has been closed with close(true) no exception will be // /// thrown if the queue has not been flushed yet. // /// </summary> // /// <param name="obj">- the object to be added to the queue // /// </param> // /// <exception cref=""> QueueClosedException exception if closed() returns true // /// // /// </exception> // public void addAtHead(object obj) // { // if (obj == null) // { //// if (log.isErrorEnabled()) //// log.error("argument must not be null"); // return ; // } // if (closed) // throw new QueueClosedException(); // if (this.num_markers > 0) // throw new QueueClosedException("Queue.addAtHead(): queue has been closed. You can not add more elements. 
" + "Waiting for removal of remaining elements."); // // /*lock the queue from other threads*/ // lock (mutex) // { // Element el = new Element(obj); // /*check the head element in the list*/ // if (head == null) // { // /*this is the first object, we could have done add(obj) here*/ // head = el; // tail = head; // size = 1; // } // else // { // /*set the head element to be the child of this one*/ // el.next = head; // /*set the head to point to the recently added object*/ // head = el; // /*increase the size*/ // size++; // } // /*wake up all the threads that are waiting for the lock to be released*/ // Monitor.PulseAll(mutex); // } // } // // // /// <summary> Removes 1 element from head or <B>blocks</B> // /// until next element has been added or until queue has been closed // /// </summary> // /// <returns> the first element to be taken of the queue // /// </returns> // public object remove() // { // object retval = null; // try // { // retval = remove(Timeout.Infinite); // } // catch (TimeoutException) // { // } // return retval; // } // // // /// <summary> Removes 1 element from the head. // /// If the queue is empty the operation will wait for timeout ms. // /// if no object is added during the timeout time, a Timout exception is thrown // /// </summary> // /// <param name="timeout">- the number of milli seconds this operation will wait before it times out // /// </param> // /// <returns> the first object in the queue // /// </returns> // public object remove(long timeout) // { // object retval = null; // // /*lock the queue*/ // lock (mutex) // { // /*if the queue size is zero, we want to wait until a new object is added*/ // if (size == 0) // { // if (closed) throw new QueueClosedException(); // // if(!Monitor.Wait(mutex, (int)timeout)) // { // throw new TimeoutException(); // } // } // /*we either timed out, or got notified by the add_mutex lock object*/ // // /*check to see if the object closed*/ // if (closed) throw new QueueClosedException(); // // /*get the next value*/ // retval = removeInternal(); // // /*if we reached an end marker we are going to close the queue*/ // if (retval == endMarker) // { // close(false); // throw new QueueClosedException(); // } // /*at this point we actually did receive a value from the queue, return it*/ // return retval; // } // } // // // /// <summary> removes a specific object from the queue. // /// the object is matched up using the Object.equals method. 
// /// </summary> // /// <param name="obj">the actual object to be removed from the queue // /// </param> // public void removeElement(object obj) // { // Element el, tmp_el; // // if (obj == null) // { //// if (log.isErrorEnabled()) //// log.error("argument must not be null"); // return ; // } // // /*lock the queue*/ // lock (mutex) // { // el = head; // // /*the queue is empty*/ // if (el == null) // return ; // // /*check to see if the head element is the one to be removed*/ // if (el.obj.Equals(obj)) // { // /*the head element matched we will remove it*/ // head = el.next; // el.next = null; // /*check if we only had one object left // *at this time the queue becomes empty // *this will set the tail=head=null // */ // if (size == 1) // tail = head; // null // decrementSize(); // // // if(size == 0) { // // synchronized(remove_mutex) { // // remove_mutex.notifyAll(); // // } // // } // return ; // } // // /*look through the other elements*/ // while (el.next != null) // { // if (el.next.obj.Equals(obj)) // { // tmp_el = el.next; // if (tmp_el == tail) // // if it is the last element, move tail one to the left (bela Sept 20 2002) // tail = el; // el.next = el.next.next; // point to the el past the next one. can be null. // tmp_el.next = null; // decrementSize(); // // if(size == 0) { // // synchronized(remove_mutex) { // // remove_mutex.notifyAll(); // // } // // } // break; // } // el = el.next; // } // } // } // // // /// <summary> returns the first object on the queue, without removing it. // /// If the queue is empty this object blocks until the first queue object has // /// been added // /// </summary> // /// <returns> the first object on the queue // /// </returns> // public object peek() // { // object retval = null; // try // { // retval = peek(Timeout.Infinite); // } // catch (TimeoutException) // { // } // return retval; // } // // // /// <summary> returns the first object on the queue, without removing it. // /// If the queue is empty this object blocks until the first queue object has // /// been added or the operation times out // /// </summary> // /// <param name="timeout">how long in milli seconds will this operation wait for an object to be added to the queue // /// before it times out // /// </param> // /// <returns> the first object on the queue // /// </returns> // // public object peek(long timeout) // { // object retval = null; // // lock (mutex) // { // if (size == 0) // { // if (closed) throw new QueueClosedException(); // // if(!Monitor.Wait(mutex, (int)timeout)) // { // throw new TimeoutException(); // } // } // // if (closed) throw new QueueClosedException(); // retval = head != null ? head.obj:null; // // if (retval == endMarker) // { // close(false); // throw new QueueClosedException(); // } // return retval; // } // } // // // /// <summary>Marks the queues as closed. When an <code>add</code> or <code>remove</code> operation is // /// attempted on a closed queue, an exception is thrown. // /// </summary> // /// <param name="flush_entries">When true, a end-of-entries marker is added to the end of the queue. // /// Entries may be added and removed, but when the end-of-entries marker // /// is encountered, the queue is marked as closed. This allows to flush // /// pending messages before closing the queue. 
// /// </param> // public void close(bool flush_entries) // { // if (flush_entries) // { // try // { // add(endMarker); // add an end-of-entries marker to the end of the queue // num_markers++; // } // catch (QueueClosedException ex) // { // Trace.error("Queue.close()", "exception=" + ex.Message); // } // return ; // } // // lock (mutex) // { // closed = true; // Monitor.PulseAll(mutex); // } // // // synchronized(remove_mutex) { // // remove_mutex.notifyAll(); // // } // } // // // /// <summary> resets the queue. // /// This operation removes all the objects in the queue and marks the queue open // /// </summary> // public void reset() // { // num_markers = 0; // if (!closed) // close(false); // // lock (mutex) // { // size = 0; // head = null; // tail = null; // closed = false; // Monitor.PulseAll(mutex); // } // // // synchronized(remove_mutex) { // // remove_mutex.notifyAll(); // // } // } // // /// <summary> // /// Number of Objects in the queue // /// </summary> // public int Count // { // get{return size - num_markers;} // } // // /// <summary> prints the size of the queue</summary> // public override string ToString() // { // return "Queue (" + Count + ") messages"; // } // // // /// <summary> Removes the first element. Returns null if no elements in queue. // /// Always called with add_mutex locked (we don't have to lock add_mutex ourselves) // /// </summary> // private object removeInternal() // { // Element retval; // // /*if the head is null, the queue is empty*/ // if (head == null) // return null; // // retval = head; // head must be non-null now // // head = head.next; // if (head == null) // tail = null; // // decrementSize(); // // if(size == 0) { // // synchronized(remove_mutex) { // // remove_mutex.notifyAll(); // // } // // } // // if (head != null && head.obj == endMarker) // { // closed = true; // } // // retval.next = null; // return retval.obj; // } // // // void decrementSize() // { // size--; // if (size < 0) // size = 0; // } // // } // internal class QueueClosedException : Exception { /// <summary> /// Basic Exception /// </summary> public QueueClosedException() { } /// <summary> /// Exception with custom message /// </summary> /// <param name="msg">Message to display when exception is thrown</param> public QueueClosedException(String msg) : base(msg) { } /// <summary> /// Creates a String representation of the Exception /// </summary> /// <returns>A String representation of the Exception</returns> public String toString() { if (this.Message != null) return "QueueClosedException:" + this.Message; else return "QueueClosedException"; } } }
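// --------------------------------------------------------------------------
// Not part of the original sources: a minimal sketch of the producer/consumer
// behavior the commented-out Queue above describes, expressed with the
// framework's BlockingCollection<T>. CompleteAdding() plays roughly the role
// of close(true): items already queued can still be drained, further Add
// calls throw, and the consumer finishes once the queue is empty.
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

public static class BlockingQueueSketch
{
    public static void Main()
    {
        using (var queue = new BlockingCollection<string>())
        {
            var consumer = Task.Run(() =>
            {
                // GetConsumingEnumerable blocks until an item arrives and
                // completes once CompleteAdding has been called and the
                // remaining items have been removed.
                foreach (var item in queue.GetConsumingEnumerable())
                {
                    Console.WriteLine("removed: " + item);
                }
            });

            queue.Add("first");     // add(): append at the tail
            queue.Add("second");
            queue.CompleteAdding(); // "soft close": flush remaining entries

            consumer.Wait();
        }
    }
}
// --------------------------------------------------------------------------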
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.Azure; using Microsoft.Azure.Management.Sql.Models; namespace Microsoft.Azure.Management.Sql { /// <summary> /// Represents all the operations for Azure SQL Data Sync. Contains /// operations to: Create, Retrieve, Update, and Delete sync groups, sync /// members and sync agents, and also includes the ability to get the /// synchronization logs. /// </summary> public partial interface IDataSyncOperations { /// <summary> /// Begin creating a new sync agent. To determine the status of the /// operation, call GetCreateOrUpdateSyncAgentOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating the sync agent. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync agent. /// </returns> Task<SyncAgentCreateOrUpdateResponse> BeginCreateOrUpdateSyncAgentAsync(string resourceGroupName, string serverName, string syncAgentName, SyncAgentCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin creating a new sync group. To determine the status of the /// operation, call GetCreateOrUpdateSyncGroupOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating a data sync group /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> BeginCreateOrUpdateSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, SyncGroupCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin creating a new sync member. To determine the status of the /// operation, call GetCreateOrUpdateSyncMemberOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. 
/// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating a sync member /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> BeginCreateOrUpdateSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin deleting an existing sync agent. To determine the status of /// the operation, call GetDeleteSyncAgentOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> BeginDeleteSyncAgentAsync(string resourceGroupName, string serverName, string syncAgentName, CancellationToken cancellationToken); /// <summary> /// Begin deleting an existing sync group. To determine the status of /// the operation, call GetDeleteSyncGroupOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> BeginDeleteSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Begin deleting an existing sync member. To determine the status of /// the operation, call GetDeleteSyncMemberOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of deleting a sync member. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> BeginDeleteSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin starting refreshing the sync schema of a hub database. To /// determine the status of the operation, call /// GetInvokeSyncHubSchemaRefreshOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. 
/// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> BeginInvokeSyncHubSchemaRefreshAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Begin starting refreshing the member database schema of sync /// member. To determine the status of the operation, call /// GetInvokeSyncMemberSchemaRefreshOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies sync group name and sync member name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> BeginInvokeSyncMemberSchemaRefreshAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin updating an existing sync group with the specified /// parameters. To get the status of this operation, call /// GetUpdateSyncGroupOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of updating a sync group. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> BeginUpdateSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, SyncGroupCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Begin updating a existing sync member. To determine the status of /// the operation, call GetUpdateSyncMemberOperationStatus. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating a sync member /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> BeginUpdateSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Create a new sync agent with the specified parameters. 
/// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating the sync agent. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync agent. /// </returns> Task<SyncAgentCreateOrUpdateResponse> CreateOrUpdateSyncAgentAsync(string resourceGroupName, string serverName, string syncAgentName, SyncAgentCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Create a new sync group with the specified parameters. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating a data sync group /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> CreateOrUpdateSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, SyncGroupCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Create a new sync member under a specified sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of creating a sync member /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> CreateOrUpdateSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Generate a sync agent registration key for a specified sync agent. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represents the response to a sync agent key generate request. /// </returns> Task<SyncAgentKeyResponse> CreateSyncAgentKeyAsync(string resourceGroupName, string serverName, string syncAgentName, CancellationToken cancellationToken); /// <summary> /// Delete an existing sync agent. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. 
/// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> DeleteSyncAgentAsync(string resourceGroupName, string serverName, string syncAgentName, CancellationToken cancellationToken); /// <summary> /// Delete an existing sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> DeleteSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Delete an existing sync member. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of deleting a sync member. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> DeleteSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync agent create operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync agent. /// </returns> Task<SyncAgentCreateOrUpdateResponse> GetCreateOrUpdateSyncAgentOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync group create operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> GetCreateOrUpdateSyncGroupOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync member create operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> GetCreateOrUpdateSyncMemberOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync agent delete operation. 
/// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> GetDeleteSyncAgentOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync group delete operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> GetDeleteSyncGroupOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync member delete operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> GetDeleteSyncMemberOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of refreshing the sync schema of a hub database /// operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> GetInvokeSyncHubSchemaRefreshOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of refreshing member database schema operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> GetInvokeSyncMemberSchemaRefreshOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the information about a specified sync agent. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represents a sync agent object. /// </returns> Task<SyncAgentGetResponse> GetSyncAgentAsync(string resourceGroupName, string serverName, string syncAgentName, CancellationToken cancellationToken); /// <summary> /// Get the information about a specified sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. 
/// </param> /// <returns> /// Response for getting sync group operations. /// </returns> Task<SyncGroupGetResponse> GetSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Get the full sync schema of a hub database. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for getting the full sync schema of a member or hub /// database. /// </returns> Task<SyncFullSchemaGetResponse> GetSyncHubSchemaAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Get the information about a specified sync member. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters of getting a sync member. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for getting sync member operations. /// </returns> Task<SyncMemberGetResponse> GetSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Get the full schema of member database of a sync member. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies sync group name and sync member name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for getting the full sync schema of a member or hub /// database. /// </returns> Task<SyncFullSchemaGetResponse> GetSyncMemberSchemaAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync group update operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represent the response of create or update a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> GetUpdateSyncGroupOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Get the status of a sync member update operation. /// </summary> /// <param name='operationStatusLink'> /// Location value returned by the Begin operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. 
/// </param> /// <returns> /// Represent the response of create or update a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> GetUpdateSyncMemberOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken); /// <summary> /// Invoke the sync schema of a hub database refreshing. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> InvokeSyncHubSchemaRefreshAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Trigger the refreshing of member database schema of a sync member. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies sync group name and sync member name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for long running operations. /// </returns> Task<DataSyncOperationResponse> InvokeSyncMemberSchemaRefreshAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberGeneralParameters parameters, CancellationToken cancellationToken); /// <summary> /// Get the synchronization logs of a sync group from next link. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the Azure SQL sync group name. /// </param> /// <param name='nextLink'> /// Specifies the nextLink which is from the previous call. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing sync group log operations. /// </returns> Task<SyncGroupLogListResponse> ListNextSyncGroupLogAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string nextLink, CancellationToken cancellationToken); /// <summary> /// List all the sync agents under a specified resource group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing sync agent operations. /// </returns> Task<SyncAgentListResponse> ListSyncAgentAsync(string resourceGroupName, string serverName, CancellationToken cancellationToken); /// <summary> /// List all the databases connected by the specified sync agent. 
/// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync agent belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name the sync agent belongs to. /// </param> /// <param name='syncAgentName'> /// Specifies the sync agent name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing databases connected by a given sync agent /// operations. /// </returns> Task<SyncAgentLinkedDatabaseListResponse> ListSyncAgentLinkedDatabaseAsync(string resourceGroupName, string serverName, string syncAgentName, CancellationToken cancellationToken); /// <summary> /// List all the sync groups available under a given Azure SQL database. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing sync group operations. /// </returns> Task<SyncGroupListResponse> ListSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, CancellationToken cancellationToken); /// <summary> /// Get the synchronization logs of a sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters for getting logs of a sync group. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing sync group log operations. /// </returns> Task<SyncGroupLogListResponse> ListSyncGroupLogAsync(string resourceGroupName, string serverName, string databaseName, SyncGroupLogGetParameters parameters, CancellationToken cancellationToken); /// <summary> /// List all the sync members under a specified sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync member belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Response for listing sync member operations. /// </returns> Task<SyncMemberListResponse> ListSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Trigger synchronization of a sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. 
/// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> Task<AzureOperationResponse> StartSyncGroupSynchronizationAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Cancel synchronization of a sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='syncGroupName'> /// Specifies the sync group name. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> Task<AzureOperationResponse> StopSyncGroupSynchronizationAsync(string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken); /// <summary> /// Update an existing sync group with the specified parameters. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters for updating the sync group. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represents the response of creating or updating a sync group. /// </returns> Task<SyncGroupCreateOrUpdateResponse> UpdateSyncGroupAsync(string resourceGroupName, string serverName, string databaseName, SyncGroupCreateOrUpdateParameters parameters, CancellationToken cancellationToken); /// <summary> /// Update an existing sync member under a specified sync group. /// </summary> /// <param name='resourceGroupName'> /// Specifies the name of the resource group the sync group belongs to. /// </param> /// <param name='serverName'> /// Specifies the Azure SQL server name. /// </param> /// <param name='databaseName'> /// Specifies the Azure SQL database name. /// </param> /// <param name='parameters'> /// Specifies other parameters for creating a sync member. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Represents the response of creating or updating a sync member. /// </returns> Task<SyncMemberCreateOrUpdateResponse> UpdateSyncMemberAsync(string resourceGroupName, string serverName, string databaseName, SyncMemberCreateOrUpdateParameters parameters, CancellationToken cancellationToken); } }
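// ---------------------------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the generated client). It shows how the
// asynchronous operations declared above might be combined: list the sync groups of a hub
// database, refresh the hub schema, and start a synchronization run. The interface name
// IDataSyncOperations and the way the operations instance is obtained are assumptions; the
// actual interface declaration appears earlier in this file, outside this excerpt.
// ---------------------------------------------------------------------------------------------
internal static class DataSyncUsageSketch
{
    internal static async System.Threading.Tasks.Task SynchronizeAsync(
        IDataSyncOperations operations, // assumed name of the interface declared above
        System.Threading.CancellationToken cancellationToken)
    {
        // Enumerate the sync groups defined for one hub database.
        var syncGroups = await operations.ListSyncGroupAsync(
            "myResourceGroup", "myServer", "myHubDatabase", cancellationToken);

        // Ask the service to re-read the hub database schema before synchronizing.
        var refresh = await operations.InvokeSyncHubSchemaRefreshAsync(
            "myResourceGroup", "myServer", "myHubDatabase", "mySyncGroup", cancellationToken);

        // Kick off a synchronization run; the response carries the HTTP status code and request ID.
        var start = await operations.StartSyncGroupSynchronizationAsync(
            "myResourceGroup", "myServer", "myHubDatabase", "mySyncGroup", cancellationToken);
    }
}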
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.Xml.Schema { using System.IO; using System.Diagnostics; using System.Xml; using System.Text; using System.Collections; #pragma warning disable 618 internal class BaseValidator { private readonly XmlSchemaCollection _schemaCollection; private readonly IValidationEventHandling _eventHandling; private readonly XmlNameTable _nameTable; private SchemaNames _schemaNames; private readonly PositionInfo _positionInfo; private XmlResolver _xmlResolver; private Uri _baseUri; protected SchemaInfo schemaInfo; protected XmlValidatingReaderImpl reader; protected XmlQualifiedName elementName; protected ValidationState context; protected StringBuilder textValue; protected string textString; protected bool hasSibling; protected bool checkDatatype; public BaseValidator(BaseValidator other) { reader = other.reader; _schemaCollection = other._schemaCollection; _eventHandling = other._eventHandling; _nameTable = other._nameTable; _schemaNames = other._schemaNames; _positionInfo = other._positionInfo; _xmlResolver = other._xmlResolver; _baseUri = other._baseUri; elementName = other.elementName; } public BaseValidator(XmlValidatingReaderImpl reader, XmlSchemaCollection schemaCollection, IValidationEventHandling eventHandling) { Debug.Assert(schemaCollection == null || schemaCollection.NameTable == reader.NameTable); this.reader = reader; _schemaCollection = schemaCollection; _eventHandling = eventHandling; _nameTable = reader.NameTable; _positionInfo = PositionInfo.GetPositionInfo(reader); elementName = new XmlQualifiedName(); } public XmlValidatingReaderImpl Reader { get { return reader; } } public XmlSchemaCollection SchemaCollection { get { return _schemaCollection; } } public XmlNameTable NameTable { get { return _nameTable; } } public SchemaNames SchemaNames { get { if (_schemaNames != null) { return _schemaNames; } if (_schemaCollection != null) { _schemaNames = _schemaCollection.GetSchemaNames(_nameTable); } else { _schemaNames = new SchemaNames(_nameTable); } return _schemaNames; } } public PositionInfo PositionInfo { get { return _positionInfo; } } public XmlResolver XmlResolver { get { return _xmlResolver; } set { _xmlResolver = value; } } public Uri BaseUri { get { return _baseUri; } set { _baseUri = value; } } public ValidationEventHandler EventHandler { get { return (ValidationEventHandler)_eventHandling.EventHandler; } } public SchemaInfo SchemaInfo { get { return schemaInfo; } set { schemaInfo = value; } } public IDtdInfo DtdInfo { get { return schemaInfo; } set { SchemaInfo tmpSchemaInfo = value as SchemaInfo; if (tmpSchemaInfo == null) { throw new XmlException(SR.Xml_InternalError, string.Empty); } this.schemaInfo = tmpSchemaInfo; } } public virtual bool PreserveWhitespace { get { return false; } } public virtual void Validate() { } public virtual void CompleteValidation() { } public virtual object FindId(string name) { return null; } public void ValidateText() { if (context.NeedValidateChildren) { if (context.IsNill) { SendValidationEvent(SR.Sch_ContentInNill, XmlSchemaValidator.QNameString(context.LocalName, context.Namespace)); return; } ContentValidator contentValidator = context.ElementDecl.ContentValidator; XmlSchemaContentType contentType = contentValidator.ContentType; if (contentType == XmlSchemaContentType.ElementOnly) { ArrayList names = 
contentValidator.ExpectedElements(context, false); if (names == null) { SendValidationEvent(SR.Sch_InvalidTextInElement, XmlSchemaValidator.BuildElementName(context.LocalName, context.Namespace)); } else { Debug.Assert(names.Count > 0); SendValidationEvent(SR.Sch_InvalidTextInElementExpecting, new string[] { XmlSchemaValidator.BuildElementName(context.LocalName, context.Namespace), XmlSchemaValidator.PrintExpectedElements(names, false) }); } } else if (contentType == XmlSchemaContentType.Empty) { SendValidationEvent(SR.Sch_InvalidTextInEmpty, string.Empty); } if (checkDatatype) { SaveTextValue(reader.Value); } } } public void ValidateWhitespace() { if (context.NeedValidateChildren) { XmlSchemaContentType contentType = context.ElementDecl.ContentValidator.ContentType; if (context.IsNill) { SendValidationEvent(SR.Sch_ContentInNill, XmlSchemaValidator.QNameString(context.LocalName, context.Namespace)); } if (contentType == XmlSchemaContentType.Empty) { SendValidationEvent(SR.Sch_InvalidWhitespaceInEmpty, string.Empty); } if (checkDatatype) { SaveTextValue(reader.Value); } } } private void SaveTextValue(string value) { if (textString.Length == 0) { textString = value; } else { if (!hasSibling) { textValue.Append(textString); hasSibling = true; } textValue.Append(value); } } protected void SendValidationEvent(string code) { SendValidationEvent(code, string.Empty); } protected void SendValidationEvent(string code, string[] args) { SendValidationEvent(new XmlSchemaException(code, args, reader.BaseURI, _positionInfo.LineNumber, _positionInfo.LinePosition)); } protected void SendValidationEvent(string code, string arg) { SendValidationEvent(new XmlSchemaException(code, arg, reader.BaseURI, _positionInfo.LineNumber, _positionInfo.LinePosition)); } protected void SendValidationEvent(XmlSchemaException e) { SendValidationEvent(e, XmlSeverityType.Error); } protected void SendValidationEvent(string code, string msg, XmlSeverityType severity) { SendValidationEvent(new XmlSchemaException(code, msg, reader.BaseURI, _positionInfo.LineNumber, _positionInfo.LinePosition), severity); } protected void SendValidationEvent(string code, string[] args, XmlSeverityType severity) { SendValidationEvent(new XmlSchemaException(code, args, reader.BaseURI, _positionInfo.LineNumber, _positionInfo.LinePosition), severity); } protected void SendValidationEvent(XmlSchemaException e, XmlSeverityType severity) { if (_eventHandling != null) { _eventHandling.SendEvent(e, severity); } else if (severity == XmlSeverityType.Error) { throw e; } } protected static void ProcessEntity(SchemaInfo sinfo, string name, object sender, ValidationEventHandler eventhandler, string baseUri, int lineNumber, int linePosition) { SchemaEntity en; XmlSchemaException e = null; if (!sinfo.GeneralEntities.TryGetValue(new XmlQualifiedName(name), out en)) { // validation error, see xml spec [68] e = new XmlSchemaException(SR.Sch_UndeclaredEntity, name, baseUri, lineNumber, linePosition); } else if (en.NData.IsEmpty) { e = new XmlSchemaException(SR.Sch_UnparsedEntityRef, name, baseUri, lineNumber, linePosition); } if (e != null) { if (eventhandler != null) { eventhandler(sender, new ValidationEventArgs(e)); } else { throw e; } } } protected static void ProcessEntity(SchemaInfo sinfo, string name, IValidationEventHandling eventHandling, string baseUriStr, int lineNumber, int linePosition) { SchemaEntity en; string errorResId = null; if (!sinfo.GeneralEntities.TryGetValue(new XmlQualifiedName(name), out en)) { // validation error, see xml spec [68] errorResId 
= SR.Sch_UndeclaredEntity; } else if (en.NData.IsEmpty) { errorResId = SR.Sch_UnparsedEntityRef; } if (errorResId != null) { XmlSchemaException e = new XmlSchemaException(errorResId, name, baseUriStr, lineNumber, linePosition); if (eventHandling != null) { eventHandling.SendEvent(e, XmlSeverityType.Error); } else { throw e; } } } public static BaseValidator CreateInstance(ValidationType valType, XmlValidatingReaderImpl reader, XmlSchemaCollection schemaCollection, IValidationEventHandling eventHandling, bool processIdentityConstraints) { switch (valType) { case ValidationType.XDR: return new XdrValidator(reader, schemaCollection, eventHandling); case ValidationType.Schema: return new XsdValidator(reader, schemaCollection, eventHandling); case ValidationType.DTD: return new DtdValidator(reader, eventHandling, processIdentityConstraints); case ValidationType.Auto: return new AutoValidator(reader, schemaCollection, eventHandling); case ValidationType.None: return new BaseValidator(reader, schemaCollection, eventHandling); default: break; } return null; } } #pragma warning restore 618 }
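// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the framework source). It shows how the CreateInstance
// factory above is typically consumed: the validating reader picks a concrete validator based
// on the ValidationType it was configured with. The wrapper class and method below are
// hypothetical; only BaseValidator.CreateInstance and the validator types come from the code
// above.
// ---------------------------------------------------------------------------------------------
namespace System.Xml.Schema
{
#pragma warning disable 618
    internal static class ValidatorSelectionSketch
    {
        internal static BaseValidator Select(
            XmlValidatingReaderImpl reader,
            XmlSchemaCollection schemas,
            IValidationEventHandling eventHandling,
            ValidationType validationType)
        {
            // ValidationType.Auto yields an AutoValidator that defers the DTD-versus-schema
            // decision until the document reveals what it declares; ValidationType.None yields
            // the pass-through BaseValidator itself.
            return BaseValidator.CreateInstance(
                validationType, reader, schemas, eventHandling, processIdentityConstraints: true);
        }
    }
#pragma warning restore 618
}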
//------------------------------------------------------------------------------ // // Copyright (c) Microsoft Corporation. // All rights reserved. // // This code is licensed under the MIT License. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files(the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and / or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions : // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // //------------------------------------------------------------------------------ using System; using System.Collections.Generic; using System.Linq; using System.Xml.Linq; namespace Microsoft.Identity.Core.WsTrust { internal enum WsTrustVersion { WsTrust13, WsTrust2005 } internal enum UserAuthType { IntegratedAuth, UsernamePassword } internal class MexDocument { private const string WsTrustSoapTransport = "http://schemas.xmlsoap.org/soap/http"; private readonly Dictionary<string, MexPolicy> _policies = new Dictionary<string, MexPolicy>(); private readonly Dictionary<string, MexPolicy> _bindings = new Dictionary<string, MexPolicy>(); private class MexPolicy { public WsTrustVersion Version { get; set; } public string Id { get; set; } public UserAuthType AuthType { get; set; } public Uri Url { get; set; } } public MexDocument(string responseBody) { var mexDocument = XDocument.Parse(responseBody, LoadOptions.None); ReadPolicies(mexDocument); ReadPolicyBindings(mexDocument); SetPolicyEndpointAddresses(mexDocument); } public WsTrustEndpoint GetWsTrustUsernamePasswordEndpoint() { return GetWsTrustEndpoint(UserAuthType.UsernamePassword); } public WsTrustEndpoint GetWsTrustWindowsTransportEndpoint() { return GetWsTrustEndpoint(UserAuthType.IntegratedAuth); } private WsTrustEndpoint GetWsTrustEndpoint(UserAuthType userAuthType) { MexPolicy policy = SelectPolicy(userAuthType); if (policy == null) { return null; } return new WsTrustEndpoint(policy.Url, policy.Version); } private MexPolicy SelectPolicy(UserAuthType userAuthType) { //try ws-trust 1.3 first return _policies .Values .Where(p => p.Url != null && p.AuthType == userAuthType && p.Version == WsTrustVersion.WsTrust13) .FirstOrDefault() ?? 
_policies .Values .Where(p => p.Url != null && p.AuthType == userAuthType) .FirstOrDefault(); } private void ReadPolicies(XContainer mexDocument) { IEnumerable<XElement> policyElements = mexDocument.Elements().First().Elements(XmlNamespace.Wsp + "Policy"); foreach (XElement policy in policyElements) { XElement exactlyOnePolicy = policy.Elements(XmlNamespace.Wsp + "ExactlyOne").FirstOrDefault(); if (exactlyOnePolicy == null) { continue; } IEnumerable<XElement> all = exactlyOnePolicy.Descendants(XmlNamespace.Wsp + "All"); foreach (XElement element in all) { XNamespace securityPolicy = XmlNamespace.Sp; XElement auth = element.Elements(XmlNamespace.Http + "NegotiateAuthentication").FirstOrDefault(); if (auth != null) { AddPolicy(policy, UserAuthType.IntegratedAuth); } auth = element.Elements(securityPolicy + "SignedEncryptedSupportingTokens").FirstOrDefault(); if (auth == null && ((auth = element.Elements(XmlNamespace.Sp2005 + "SignedSupportingTokens").FirstOrDefault()) == null)) { continue; } securityPolicy = XmlNamespace.Sp2005; XElement wspPolicy = auth.Elements(XmlNamespace.Wsp + "Policy").FirstOrDefault(); if (wspPolicy == null) { continue; } XElement usernameToken = wspPolicy.Elements(securityPolicy + "UsernameToken").FirstOrDefault(); if (usernameToken == null) { continue; } XElement wspPolicy2 = usernameToken.Elements(XmlNamespace.Wsp + "Policy").FirstOrDefault(); if (wspPolicy2 == null) { continue; } XElement wssUsernameToken10 = wspPolicy2.Elements(securityPolicy + "WssUsernameToken10").FirstOrDefault(); if (wssUsernameToken10 != null) { AddPolicy(policy, UserAuthType.UsernamePassword); } } } } private void ReadPolicyBindings(XContainer mexDocument) { IEnumerable<XElement> bindingElements = mexDocument.Elements().First().Elements(XmlNamespace.Wsdl + "binding"); foreach (XElement binding in bindingElements) { IEnumerable<XElement> policyReferences = binding.Elements(XmlNamespace.Wsp + "PolicyReference"); foreach (XElement policyReference in policyReferences) { XAttribute policyUri = policyReference.Attribute("URI"); if (policyUri == null || !_policies.ContainsKey(policyUri.Value)) { continue; } XAttribute bindingName = binding.Attribute("name"); if (bindingName == null) { continue; } XElement bindingOperation = binding.Elements(XmlNamespace.Wsdl + "operation").FirstOrDefault(); if (bindingOperation == null) { continue; } XElement soapOperation = bindingOperation.Elements(XmlNamespace.Soap12 + "operation").FirstOrDefault(); if (soapOperation == null) { continue; } XAttribute soapAction = soapOperation.Attribute("soapAction"); if (soapAction == null || (string.Compare(XmlNamespace.Issue.ToString(), soapAction.Value, StringComparison.OrdinalIgnoreCase) != 0 && string.Compare(XmlNamespace.Issue2005.ToString(), soapAction.Value, StringComparison.OrdinalIgnoreCase) != 0)) { continue; } bool isWsTrust2005 = string.Compare(XmlNamespace.Issue2005.ToString(), soapAction.Value, StringComparison.OrdinalIgnoreCase) == 0; _policies[policyUri.Value].Version = isWsTrust2005 ? 
WsTrustVersion.WsTrust2005:WsTrustVersion.WsTrust13; XElement soapBinding = binding.Elements(XmlNamespace.Soap12 + "binding").FirstOrDefault(); if (soapBinding == null) { continue; } XAttribute soapBindingTransport = soapBinding.Attribute("transport"); if (soapBindingTransport != null && string.Compare(WsTrustSoapTransport, soapBindingTransport.Value, StringComparison.OrdinalIgnoreCase) == 0) { _bindings.Add(bindingName.Value, _policies[policyUri.Value]); } } } } private void SetPolicyEndpointAddresses(XContainer mexDocument) { XElement serviceElement = mexDocument.Elements().First().Elements(XmlNamespace.Wsdl + "service").First(); IEnumerable<XElement> portElements = serviceElement.Elements(XmlNamespace.Wsdl + "port"); foreach (XElement port in portElements) { XAttribute portBinding = port.Attribute("binding"); if (portBinding == null) { continue; } string portBindingName = portBinding.Value; string[] portBindingNameSegments = portBindingName.Split(new[] { ':' }, 2); if (portBindingNameSegments.Length < 2 || !_bindings.ContainsKey(portBindingNameSegments[1])) { continue; } XElement endpointReference = port.Elements(XmlNamespace.Wsa10 + "EndpointReference").FirstOrDefault(); if (endpointReference == null) { continue; } XElement endpointAddress = endpointReference.Elements(XmlNamespace.Wsa10 + "Address").FirstOrDefault(); if (endpointAddress != null && Uri.IsWellFormedUriString(endpointAddress.Value, UriKind.Absolute)) { _bindings[portBindingNameSegments[1]].Url = new Uri(endpointAddress.Value); } } } private void AddPolicy(XElement policy, UserAuthType policyAuthType) { XElement binding = policy.Descendants(XmlNamespace.Sp + "TransportBinding").FirstOrDefault() ?? policy.Descendants(XmlNamespace.Sp2005 + "TransportBinding").FirstOrDefault(); if (binding != null) { XAttribute id = policy.Attribute(XmlNamespace.Wsu + "Id"); if (id != null) { _policies.Add("#" + id.Value, new MexPolicy { Id = id.Value, AuthType = policyAuthType }); } } } } }
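// ---------------------------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the library). MexDocument parses a WS-Trust
// metadata exchange (MEX) response in its constructor (policies, policy bindings, endpoint
// addresses) and then returns the endpoint matching the requested authentication type. Fetching
// the MEX body with a plain HttpClient, and the class/method names below, are assumptions made
// for the example; only MexDocument and WsTrustEndpoint come from the code above.
// ---------------------------------------------------------------------------------------------
namespace Microsoft.Identity.Core.WsTrust
{
    internal static class MexDocumentUsageSketch
    {
        internal static async System.Threading.Tasks.Task<WsTrustEndpoint> GetUsernamePasswordEndpointAsync(
            System.Uri mexUrl)
        {
            using (var httpClient = new System.Net.Http.HttpClient())
            {
                string responseBody = await httpClient.GetStringAsync(mexUrl);

                // All parsing happens in the constructor, so the document is fully indexed here.
                var mexDocument = new MexDocument(responseBody);

                // Prefers a WS-Trust 1.3 username/password policy and falls back to WS-Trust 2005.
                return mexDocument.GetWsTrustUsernamePasswordEndpoint();
            }
        }
    }
}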
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.MirrorRecursiveTypes { using System; using System.Linq; using System.Collections.Generic; using System.Diagnostics; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using Models; /// <summary> /// Some cool documentation. /// </summary> public partial class RecursiveTypesAPI : ServiceClient<RecursiveTypesAPI>, IRecursiveTypesAPI { /// <summary> /// The base URI of the service. /// </summary> public Uri BaseUri { get; set; } /// <summary> /// Gets or sets json serialization settings. /// </summary> public JsonSerializerSettings SerializationSettings { get; private set; } /// <summary> /// Gets or sets json deserialization settings. /// </summary> public JsonSerializerSettings DeserializationSettings { get; private set; } /// <summary> /// Initializes a new instance of the RecursiveTypesAPI class. /// </summary> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public RecursiveTypesAPI(params DelegatingHandler[] handlers) : base(handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the RecursiveTypesAPI class. /// </summary> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public RecursiveTypesAPI(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the RecursiveTypesAPI class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public RecursiveTypesAPI(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the RecursiveTypesAPI class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public RecursiveTypesAPI(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// An optional partial-method to perform custom initialization. ///</summary> partial void CustomInitialize(); /// <summary> /// Initializes client properties. 
/// </summary> private void Initialize() { this.BaseUri = new Uri("https://management.azure.com/"); SerializationSettings = new JsonSerializerSettings { Formatting = Formatting.Indented, DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; DeserializationSettings = new JsonSerializerSettings { DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; CustomInitialize(); } /// <summary> /// Products /// </summary> /// <remarks> /// The Products endpoint returns information about the Uber products offered /// at a given location. The response includes the display name and other /// details about each product, and lists the products in the proper display /// order. /// </remarks> /// <param name='subscriptionId'> /// Subscription Id. /// </param> /// <param name='resourceGroupName'> /// Resource Group Id. /// </param> /// <param name='apiVersion'> /// API Id. /// </param> /// <param name='body'> /// API body mody. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<HttpOperationResponse<Product>> PostWithHttpMessagesAsync(string subscriptionId, string resourceGroupName, string apiVersion, Product body = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (subscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subscriptionId"); } if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (apiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "apiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("subscriptionId", subscriptionId); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("body", body); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Post", tracingParameters); } // Construct URL var _baseUrl = this.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/Microsoft.Cache/Redis").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(subscriptionId)); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{apiVersion}", Uri.EscapeDataString(apiVersion)); // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(body != null) { _requestContent = SafeJsonConvert.SerializeObject(body, this.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<Product>(); _result.Request = _httpRequest; _result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Product>(_responseContent, this.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Generic; using System.IO; using System.Runtime; using System.Runtime.Diagnostics; using System.ServiceModel; using System.ServiceModel.Diagnostics; using System.ServiceModel.Diagnostics.Application; using System.Text; using System.Threading.Tasks; using System.Xml; namespace System.ServiceModel.Channels { internal class BinaryMessageEncoderFactory : MessageEncoderFactory { private const int maxPooledXmlReaderPerMessage = 2; private BinaryMessageEncoder _messageEncoder; private MessageVersion _messageVersion; private int _maxReadPoolSize; private int _maxWritePoolSize; private CompressionFormat _compressionFormat; // Double-checked locking pattern requires volatile for read/write synchronization private volatile SynchronizedPool<BinaryBufferedMessageData> _bufferedDataPool; private volatile SynchronizedPool<BinaryBufferedMessageWriter> _bufferedWriterPool; private volatile SynchronizedPool<RecycledMessageState> _recycledStatePool; private object _thisLock; private int _maxSessionSize; private XmlDictionaryReaderQuotas _readerQuotas; private XmlDictionaryReaderQuotas _bufferedReadReaderQuotas; private BinaryVersion _binaryVersion; public BinaryMessageEncoderFactory(MessageVersion messageVersion, int maxReadPoolSize, int maxWritePoolSize, int maxSessionSize, XmlDictionaryReaderQuotas readerQuotas, long maxReceivedMessageSize, BinaryVersion version, CompressionFormat compressionFormat) { _messageVersion = messageVersion; _maxReadPoolSize = maxReadPoolSize; _maxWritePoolSize = maxWritePoolSize; _maxSessionSize = maxSessionSize; _thisLock = new object(); _readerQuotas = new XmlDictionaryReaderQuotas(); if (readerQuotas != null) { readerQuotas.CopyTo(_readerQuotas); } _bufferedReadReaderQuotas = EncoderHelpers.GetBufferedReadQuotas(_readerQuotas); this.MaxReceivedMessageSize = maxReceivedMessageSize; _binaryVersion = version; _compressionFormat = compressionFormat; _messageEncoder = new BinaryMessageEncoder(this, false, 0); } public static IXmlDictionary XmlDictionary { get { return XD.Dictionary; } } public override MessageEncoder Encoder { get { return _messageEncoder; } } public override MessageVersion MessageVersion { get { return _messageVersion; } } public int MaxWritePoolSize { get { return _maxWritePoolSize; } } public XmlDictionaryReaderQuotas ReaderQuotas { get { return _readerQuotas; } } public int MaxReadPoolSize { get { return _maxReadPoolSize; } } public int MaxSessionSize { get { return _maxSessionSize; } } public CompressionFormat CompressionFormat { get { return _compressionFormat; } } private long MaxReceivedMessageSize { get; set; } private object ThisLock { get { return _thisLock; } } private SynchronizedPool<RecycledMessageState> RecycledStatePool { get { if (_recycledStatePool == null) { lock (ThisLock) { if (_recycledStatePool == null) { //running = true; _recycledStatePool = new SynchronizedPool<RecycledMessageState>(_maxReadPoolSize); } } } return _recycledStatePool; } } public override MessageEncoder CreateSessionEncoder() { return new BinaryMessageEncoder(this, true, _maxSessionSize); } private XmlDictionaryWriter TakeStreamedWriter(Stream stream) { return XmlDictionaryWriter.CreateBinaryWriter(stream, _binaryVersion.Dictionary, null, false); } private void ReturnStreamedWriter(XmlDictionaryWriter xmlWriter) { xmlWriter.Dispose(); } private BinaryBufferedMessageWriter TakeBufferedWriter() { if 
(_bufferedWriterPool == null) { lock (ThisLock) { if (_bufferedWriterPool == null) { //running = true; _bufferedWriterPool = new SynchronizedPool<BinaryBufferedMessageWriter>(_maxWritePoolSize); } } } BinaryBufferedMessageWriter messageWriter = _bufferedWriterPool.Take(); if (messageWriter == null) { messageWriter = new BinaryBufferedMessageWriter(_binaryVersion.Dictionary); if (TD.WritePoolMissIsEnabled()) { TD.WritePoolMiss(messageWriter.GetType().Name); } } return messageWriter; } private void ReturnMessageWriter(BinaryBufferedMessageWriter messageWriter) { _bufferedWriterPool.Return(messageWriter); } private XmlDictionaryReader TakeStreamedReader(Stream stream) { return XmlDictionaryReader.CreateBinaryReader(stream, _binaryVersion.Dictionary, _readerQuotas, null); } private BinaryBufferedMessageData TakeBufferedData(BinaryMessageEncoder messageEncoder) { if (_bufferedDataPool == null) { lock (ThisLock) { if (_bufferedDataPool == null) { //running = true; _bufferedDataPool = new SynchronizedPool<BinaryBufferedMessageData>(_maxReadPoolSize); } } } BinaryBufferedMessageData messageData = _bufferedDataPool.Take(); if (messageData == null) { messageData = new BinaryBufferedMessageData(this, maxPooledXmlReaderPerMessage); if (TD.ReadPoolMissIsEnabled()) { TD.ReadPoolMiss(messageData.GetType().Name); } } messageData.SetMessageEncoder(messageEncoder); return messageData; } private void ReturnBufferedData(BinaryBufferedMessageData messageData) { messageData.SetMessageEncoder(null); _bufferedDataPool.Return(messageData); } internal class BinaryBufferedMessageData : BufferedMessageData { private BinaryMessageEncoderFactory _factory; private BinaryMessageEncoder _messageEncoder; private Pool<XmlDictionaryReader> _readerPool; private OnXmlDictionaryReaderClose _onClose; public BinaryBufferedMessageData(BinaryMessageEncoderFactory factory, int maxPoolSize) : base(factory.RecycledStatePool) { _factory = factory; _readerPool = new Pool<XmlDictionaryReader>(maxPoolSize); _onClose = new OnXmlDictionaryReaderClose(OnXmlReaderClosed); } public override MessageEncoder MessageEncoder { get { return _messageEncoder; } } public override XmlDictionaryReaderQuotas Quotas { get { return _factory._readerQuotas; } } public void SetMessageEncoder(BinaryMessageEncoder messageEncoder) { _messageEncoder = messageEncoder; } protected override XmlDictionaryReader TakeXmlReader() { ArraySegment<byte> buffer = this.Buffer; return XmlDictionaryReader.CreateBinaryReader(buffer.Array, buffer.Offset, buffer.Count, _factory._binaryVersion.Dictionary, _factory._bufferedReadReaderQuotas, _messageEncoder.ReaderSession); } protected override void ReturnXmlReader(XmlDictionaryReader reader) { _readerPool.Return(reader); } protected override void OnClosed() { _factory.ReturnBufferedData(this); } } internal class BinaryBufferedMessageWriter : BufferedMessageWriter { private IXmlDictionary _dictionary; private XmlBinaryWriterSession _session; public BinaryBufferedMessageWriter(IXmlDictionary dictionary) { _dictionary = dictionary; } public BinaryBufferedMessageWriter(IXmlDictionary dictionary, XmlBinaryWriterSession session) { _dictionary = dictionary; _session = session; } protected override XmlDictionaryWriter TakeXmlWriter(Stream stream) { return XmlDictionaryWriter.CreateBinaryWriter(stream, _dictionary, _session, false); } protected override void ReturnXmlWriter(XmlDictionaryWriter writer) { writer.Dispose(); } } internal class BinaryMessageEncoder : MessageEncoder, ICompressedMessageEncoder { private const string 
SupportedCompressionTypesMessageProperty = "BinaryMessageEncoder.SupportedCompressionTypes"; private BinaryMessageEncoderFactory _factory; private bool _isSession; private XmlBinaryWriterSessionWithQuota _writerSession; private BinaryBufferedMessageWriter _sessionMessageWriter; private XmlBinaryReaderSession _readerSession; private XmlBinaryReaderSession _readerSessionForLogging; private bool _readerSessionForLoggingIsInvalid = false; private int _writeIdCounter; private int _idCounter; private int _maxSessionSize; private int _remainingReaderSessionSize; private bool _isReaderSessionInvalid; private MessagePatterns _messagePatterns; private string _contentType; private string _normalContentType; private string _gzipCompressedContentType; private string _deflateCompressedContentType; private CompressionFormat _sessionCompressionFormat; private readonly long _maxReceivedMessageSize; public BinaryMessageEncoder(BinaryMessageEncoderFactory factory, bool isSession, int maxSessionSize) { _factory = factory; _isSession = isSession; _maxSessionSize = maxSessionSize; _remainingReaderSessionSize = maxSessionSize; _normalContentType = isSession ? factory._binaryVersion.SessionContentType : factory._binaryVersion.ContentType; _gzipCompressedContentType = isSession ? BinaryVersion.GZipVersion1.SessionContentType : BinaryVersion.GZipVersion1.ContentType; _deflateCompressedContentType = isSession ? BinaryVersion.DeflateVersion1.SessionContentType : BinaryVersion.DeflateVersion1.ContentType; _sessionCompressionFormat = _factory.CompressionFormat; _maxReceivedMessageSize = _factory.MaxReceivedMessageSize; switch (_factory.CompressionFormat) { case CompressionFormat.Deflate: _contentType = _deflateCompressedContentType; break; case CompressionFormat.GZip: _contentType = _gzipCompressedContentType; break; default: _contentType = _normalContentType; break; } } public override string ContentType { get { return _contentType; } } public override MessageVersion MessageVersion { get { return _factory._messageVersion; } } public override string MediaType { get { return _contentType; } } public XmlBinaryReaderSession ReaderSession { get { return _readerSession; } } public bool CompressionEnabled { get { return _factory.CompressionFormat != CompressionFormat.None; } } private ArraySegment<byte> AddSessionInformationToMessage(ArraySegment<byte> messageData, BufferManager bufferManager, int maxMessageSize) { int dictionarySize = 0; byte[] buffer = messageData.Array; if (_writerSession.HasNewStrings) { IList<XmlDictionaryString> newStrings = _writerSession.GetNewStrings(); for (int i = 0; i < newStrings.Count; i++) { int utf8ValueSize = Encoding.UTF8.GetByteCount(newStrings[i].Value); dictionarySize += IntEncoder.GetEncodedSize(utf8ValueSize) + utf8ValueSize; } int messageSize = messageData.Offset + messageData.Count; int remainingMessageSize = maxMessageSize - messageSize; if (remainingMessageSize - dictionarySize < 0) { string excMsg = SR.Format(SR.MaxSentMessageSizeExceeded, maxMessageSize); if (TD.MaxSentMessageSizeExceededIsEnabled()) { TD.MaxSentMessageSizeExceeded(excMsg); } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new QuotaExceededException(excMsg)); } int requiredBufferSize = messageData.Offset + messageData.Count + dictionarySize; if (buffer.Length < requiredBufferSize) { byte[] newBuffer = bufferManager.TakeBuffer(requiredBufferSize); Buffer.BlockCopy(buffer, messageData.Offset, newBuffer, messageData.Offset, messageData.Count); bufferManager.ReturnBuffer(buffer); buffer = newBuffer; } 
Buffer.BlockCopy(buffer, messageData.Offset, buffer, messageData.Offset + dictionarySize, messageData.Count); int offset = messageData.Offset; for (int i = 0; i < newStrings.Count; i++) { string newString = newStrings[i].Value; int utf8ValueSize = Encoding.UTF8.GetByteCount(newString); offset += IntEncoder.Encode(utf8ValueSize, buffer, offset); offset += Encoding.UTF8.GetBytes(newString, 0, newString.Length, buffer, offset); } _writerSession.ClearNewStrings(); } int headerSize = IntEncoder.GetEncodedSize(dictionarySize); int newOffset = messageData.Offset - headerSize; int newSize = headerSize + messageData.Count + dictionarySize; IntEncoder.Encode(dictionarySize, buffer, newOffset); return new ArraySegment<byte>(buffer, newOffset, newSize); } private ArraySegment<byte> ExtractSessionInformationFromMessage(ArraySegment<byte> messageData) { if (_isReaderSessionInvalid) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.BinaryEncoderSessionInvalid)); } byte[] buffer = messageData.Array; int dictionarySize; int headerSize; int newOffset; int newSize; bool throwing = true; try { IntDecoder decoder = new IntDecoder(); headerSize = decoder.Decode(buffer, messageData.Offset, messageData.Count); dictionarySize = decoder.Value; if (dictionarySize > messageData.Count) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.BinaryEncoderSessionMalformed)); } newOffset = messageData.Offset + headerSize + dictionarySize; newSize = messageData.Count - headerSize - dictionarySize; if (newSize < 0) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.BinaryEncoderSessionMalformed)); } if (dictionarySize > 0) { if (dictionarySize > _remainingReaderSessionSize) { string message = SR.Format(SR.BinaryEncoderSessionTooLarge, _maxSessionSize); if (TD.MaxSessionSizeReachedIsEnabled()) { TD.MaxSessionSizeReached(message); } Exception inner = new QuotaExceededException(message); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new CommunicationException(message, inner)); } else { _remainingReaderSessionSize -= dictionarySize; } int size = dictionarySize; int offset = messageData.Offset + headerSize; while (size > 0) { decoder.Reset(); int bytesDecoded = decoder.Decode(buffer, offset, size); int utf8ValueSize = decoder.Value; offset += bytesDecoded; size -= bytesDecoded; if (utf8ValueSize > size) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataException(SR.BinaryEncoderSessionMalformed)); } string value = Encoding.UTF8.GetString(buffer, offset, utf8ValueSize); offset += utf8ValueSize; size -= utf8ValueSize; _readerSession.Add(_idCounter, value); _idCounter++; } } throwing = false; } finally { if (throwing) { _isReaderSessionInvalid = true; } } return new ArraySegment<byte>(buffer, newOffset, newSize); } public override Message ReadMessage(ArraySegment<byte> buffer, BufferManager bufferManager, string contentType) { if (bufferManager == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("bufferManager"); } CompressionFormat compressionFormat = this.CheckContentType(contentType); if (TD.BinaryMessageDecodingStartIsEnabled()) { TD.BinaryMessageDecodingStart(); } if (compressionFormat != CompressionFormat.None) { MessageEncoderCompressionHandler.DecompressBuffer(ref buffer, bufferManager, compressionFormat, _maxReceivedMessageSize); } if (_isSession) { if (_readerSession == null) { _readerSession = new XmlBinaryReaderSession(); _messagePatterns = new 
MessagePatterns(_factory._binaryVersion.Dictionary, _readerSession, this.MessageVersion); } try { buffer = ExtractSessionInformationFromMessage(buffer); } catch (InvalidDataException) { MessageLogger.LogMessage(buffer, MessageLoggingSource.Malformed); throw; } } BinaryBufferedMessageData messageData = _factory.TakeBufferedData(this); Message message; if (_messagePatterns != null) { message = _messagePatterns.TryCreateMessage(buffer.Array, buffer.Offset, buffer.Count, bufferManager, messageData); } else { message = null; } if (message == null) { messageData.Open(buffer, bufferManager); RecycledMessageState messageState = messageData.TakeMessageState(); if (messageState == null) { messageState = new RecycledMessageState(); } message = new BufferedMessage(messageData, messageState); } message.Properties.Encoder = this; if (MessageLogger.LogMessagesAtTransportLevel) { MessageLogger.LogMessage(ref message, MessageLoggingSource.TransportReceive); } return message; } public override Message ReadMessage(Stream stream, int maxSizeOfHeaders, string contentType) { if (stream == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("stream"); } CompressionFormat compressionFormat = this.CheckContentType(contentType); if (TD.BinaryMessageDecodingStartIsEnabled()) { TD.BinaryMessageDecodingStart(); } if (compressionFormat != CompressionFormat.None) { stream = new MaxMessageSizeStream( MessageEncoderCompressionHandler.GetDecompressStream(stream, compressionFormat), _maxReceivedMessageSize); } XmlDictionaryReader reader = _factory.TakeStreamedReader(stream); Message message = Message.CreateMessage(reader, maxSizeOfHeaders, _factory._messageVersion); message.Properties.Encoder = this; if (TD.StreamedMessageReadByEncoderIsEnabled()) { TD.StreamedMessageReadByEncoder( EventTraceActivityHelper.TryExtractActivity(message, true)); } if (MessageLogger.LogMessagesAtTransportLevel) { MessageLogger.LogMessage(ref message, MessageLoggingSource.TransportReceive); } return message; } public override ArraySegment<byte> WriteMessage(Message message, int maxMessageSize, BufferManager bufferManager, int messageOffset) { if (message == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("message"); } if (bufferManager == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("bufferManager"); } if (maxMessageSize < 0) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("maxMessageSize", maxMessageSize, SR.ValueMustBeNonNegative)); } EventTraceActivity eventTraceActivity = null; if (TD.BinaryMessageEncodingStartIsEnabled()) { eventTraceActivity = EventTraceActivityHelper.TryExtractActivity(message); TD.BinaryMessageEncodingStart(eventTraceActivity); } message.Properties.Encoder = this; if (_isSession) { if (_writerSession == null) { _writerSession = new XmlBinaryWriterSessionWithQuota(_maxSessionSize); _sessionMessageWriter = new BinaryBufferedMessageWriter(_factory._binaryVersion.Dictionary, _writerSession); } messageOffset += IntEncoder.MaxEncodedSize; } if (messageOffset < 0) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("messageOffset", messageOffset, SR.ValueMustBeNonNegative)); } if (messageOffset > maxMessageSize) { string excMsg = SR.Format(SR.MaxSentMessageSizeExceeded, maxMessageSize); if (TD.MaxSentMessageSizeExceededIsEnabled()) { TD.MaxSentMessageSizeExceeded(excMsg); } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new QuotaExceededException(excMsg)); } 
ThrowIfMismatchedMessageVersion(message); BinaryBufferedMessageWriter messageWriter; if (_isSession) { messageWriter = _sessionMessageWriter; } else { messageWriter = _factory.TakeBufferedWriter(); } ArraySegment<byte> messageData = messageWriter.WriteMessage(message, bufferManager, messageOffset, maxMessageSize); if (MessageLogger.LogMessagesAtTransportLevel && !_readerSessionForLoggingIsInvalid) { if (_isSession) { if (_readerSessionForLogging == null) { _readerSessionForLogging = new XmlBinaryReaderSession(); } if (_writerSession.HasNewStrings) { foreach (XmlDictionaryString xmlDictionaryString in _writerSession.GetNewStrings()) { _readerSessionForLogging.Add(_writeIdCounter++, xmlDictionaryString.Value); } } } XmlDictionaryReader xmlDictionaryReader = XmlDictionaryReader.CreateBinaryReader(messageData.Array, messageData.Offset, messageData.Count, XD.Dictionary, XmlDictionaryReaderQuotas.Max, _readerSessionForLogging); MessageLogger.LogMessage(ref message, xmlDictionaryReader, MessageLoggingSource.TransportSend); } else { _readerSessionForLoggingIsInvalid = true; } if (_isSession) { messageData = AddSessionInformationToMessage(messageData, bufferManager, maxMessageSize); } else { _factory.ReturnMessageWriter(messageWriter); } CompressionFormat compressionFormat = this.CheckCompressedWrite(message); if (compressionFormat != CompressionFormat.None) { MessageEncoderCompressionHandler.CompressBuffer(ref messageData, bufferManager, compressionFormat); } return messageData; } public override void WriteMessage(Message message, Stream stream) { if (message == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("message")); } if (stream == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("stream")); } EventTraceActivity eventTraceActivity = null; if (TD.BinaryMessageEncodingStartIsEnabled()) { eventTraceActivity = EventTraceActivityHelper.TryExtractActivity(message); TD.BinaryMessageEncodingStart(eventTraceActivity); } CompressionFormat compressionFormat = this.CheckCompressedWrite(message); if (compressionFormat != CompressionFormat.None) { stream = MessageEncoderCompressionHandler.GetCompressStream(stream, compressionFormat); } ThrowIfMismatchedMessageVersion(message); message.Properties.Encoder = this; XmlDictionaryWriter xmlWriter = _factory.TakeStreamedWriter(stream); message.WriteMessage(xmlWriter); xmlWriter.Flush(); if (TD.StreamedMessageWrittenByEncoderIsEnabled()) { TD.StreamedMessageWrittenByEncoder(eventTraceActivity ?? 
EventTraceActivityHelper.TryExtractActivity(message)); } _factory.ReturnStreamedWriter(xmlWriter); if (MessageLogger.LogMessagesAtTransportLevel) { MessageLogger.LogMessage(ref message, MessageLoggingSource.TransportSend); } if (compressionFormat != CompressionFormat.None) { // Stream.Close() has been replaced with Dispose() stream.Dispose(); } } public override bool IsContentTypeSupported(string contentType) { bool supported = true; if (!base.IsContentTypeSupported(contentType)) { if (this.CompressionEnabled) { supported = (_factory.CompressionFormat == CompressionFormat.GZip && base.IsContentTypeSupported(contentType, _gzipCompressedContentType, _gzipCompressedContentType)) || (_factory.CompressionFormat == CompressionFormat.Deflate && base.IsContentTypeSupported(contentType, _deflateCompressedContentType, _deflateCompressedContentType)) || base.IsContentTypeSupported(contentType, _normalContentType, _normalContentType); } else { supported = false; } } return supported; } public void SetSessionContentType(string contentType) { if (base.IsContentTypeSupported(contentType, _gzipCompressedContentType, _gzipCompressedContentType)) { _sessionCompressionFormat = CompressionFormat.GZip; } else if (base.IsContentTypeSupported(contentType, _deflateCompressedContentType, _deflateCompressedContentType)) { _sessionCompressionFormat = CompressionFormat.Deflate; } else { _sessionCompressionFormat = CompressionFormat.None; } } public void AddCompressedMessageProperties(Message message, string supportedCompressionTypes) { message.Properties.Add(SupportedCompressionTypesMessageProperty, supportedCompressionTypes); } private static bool ContentTypeEqualsOrStartsWith(string contentType, string supportedContentType) { return contentType == supportedContentType || contentType.StartsWith(supportedContentType, StringComparison.OrdinalIgnoreCase); } private CompressionFormat CheckContentType(string contentType) { CompressionFormat compressionFormat = CompressionFormat.None; if (contentType == null) { compressionFormat = _sessionCompressionFormat; } else { if (!this.CompressionEnabled) { if (!ContentTypeEqualsOrStartsWith(contentType, this.ContentType)) { throw FxTrace.Exception.AsError(new ProtocolException(SR.Format(SR.EncoderUnrecognizedContentType, contentType, this.ContentType))); } } else { if (_factory.CompressionFormat == CompressionFormat.GZip && ContentTypeEqualsOrStartsWith(contentType, _gzipCompressedContentType)) { compressionFormat = CompressionFormat.GZip; } else if (_factory.CompressionFormat == CompressionFormat.Deflate && ContentTypeEqualsOrStartsWith(contentType, _deflateCompressedContentType)) { compressionFormat = CompressionFormat.Deflate; } else if (ContentTypeEqualsOrStartsWith(contentType, _normalContentType)) { compressionFormat = CompressionFormat.None; } else { throw FxTrace.Exception.AsError(new ProtocolException(SR.Format(SR.EncoderUnrecognizedContentType, contentType, this.ContentType))); } } } return compressionFormat; } private CompressionFormat CheckCompressedWrite(Message message) { CompressionFormat compressionFormat = _sessionCompressionFormat; if (compressionFormat != CompressionFormat.None && !_isSession) { string acceptEncoding; if (message.Properties.TryGetValue<string>(SupportedCompressionTypesMessageProperty, out acceptEncoding) && acceptEncoding != null) { acceptEncoding = acceptEncoding.ToLowerInvariant(); if ((compressionFormat == CompressionFormat.GZip && !acceptEncoding.Contains(MessageEncoderCompressionHandler.GZipContentEncoding)) || (compressionFormat == 
CompressionFormat.Deflate && !acceptEncoding.Contains(MessageEncoderCompressionHandler.DeflateContentEncoding))) { compressionFormat = CompressionFormat.None; } } } return compressionFormat; } } internal class XmlBinaryWriterSessionWithQuota : XmlBinaryWriterSession { private int _bytesRemaining; private List<XmlDictionaryString> _newStrings; public XmlBinaryWriterSessionWithQuota(int maxSessionSize) { _bytesRemaining = maxSessionSize; } public bool HasNewStrings { get { return _newStrings != null; } } public override bool TryAdd(XmlDictionaryString s, out int key) { if (_bytesRemaining == 0) { key = -1; return false; } int bytesRequired = Encoding.UTF8.GetByteCount(s.Value); bytesRequired += IntEncoder.GetEncodedSize(bytesRequired); if (bytesRequired > _bytesRemaining) { key = -1; _bytesRemaining = 0; return false; } if (base.TryAdd(s, out key)) { if (_newStrings == null) { _newStrings = new List<XmlDictionaryString>(); } _newStrings.Add(s); _bytesRemaining -= bytesRequired; return true; } else { return false; } } public IList<XmlDictionaryString> GetNewStrings() { return _newStrings; } public void ClearNewStrings() { _newStrings = null; } } } internal class BinaryFormatBuilder { private List<byte> _bytes; public BinaryFormatBuilder() { _bytes = new List<byte>(); } public int Count { get { return _bytes.Count; } } public void AppendPrefixDictionaryElement(char prefix, int key) { this.AppendNode(XmlBinaryNodeType.PrefixDictionaryElementA + GetPrefixOffset(prefix)); this.AppendKey(key); } public void AppendDictionaryXmlnsAttribute(char prefix, int key) { this.AppendNode(XmlBinaryNodeType.DictionaryXmlnsAttribute); this.AppendUtf8(prefix); this.AppendKey(key); } public void AppendPrefixDictionaryAttribute(char prefix, int key, char value) { this.AppendNode(XmlBinaryNodeType.PrefixDictionaryAttributeA + GetPrefixOffset(prefix)); this.AppendKey(key); if (value == '1') { this.AppendNode(XmlBinaryNodeType.OneText); } else { this.AppendNode(XmlBinaryNodeType.Chars8Text); this.AppendUtf8(value); } } public void AppendDictionaryAttribute(char prefix, int key, char value) { this.AppendNode(XmlBinaryNodeType.DictionaryAttribute); this.AppendUtf8(prefix); this.AppendKey(key); this.AppendNode(XmlBinaryNodeType.Chars8Text); this.AppendUtf8(value); } public void AppendDictionaryTextWithEndElement(int key) { this.AppendNode(XmlBinaryNodeType.DictionaryTextWithEndElement); this.AppendKey(key); } public void AppendDictionaryTextWithEndElement() { this.AppendNode(XmlBinaryNodeType.DictionaryTextWithEndElement); } public void AppendUniqueIDWithEndElement() { this.AppendNode(XmlBinaryNodeType.UniqueIdTextWithEndElement); } public void AppendEndElement() { this.AppendNode(XmlBinaryNodeType.EndElement); } private void AppendKey(int key) { if (key < 0 || key >= 0x4000) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("key", key, SR.Format(SR.ValueMustBeInRange, 0, 0x4000))); } if (key >= 0x80) { this.AppendByte((key & 0x7f) | 0x80); this.AppendByte(key >> 7); } else { this.AppendByte(key); } } private void AppendNode(XmlBinaryNodeType value) { this.AppendByte((int)value); } private void AppendByte(int value) { if (value < 0 || value > 0xFF) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("value", value, SR.Format(SR.ValueMustBeInRange, 0, 0xFF))); } _bytes.Add((byte)value); } private void AppendUtf8(char value) { AppendByte(1); AppendByte((int)value); } public int GetStaticKey(int value) { return value * 2; } public int 
GetSessionKey(int value) { return value * 2 + 1; } private int GetPrefixOffset(char prefix) { if (prefix < 'a' && prefix > 'z') { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("prefix", prefix, SR.Format(SR.ValueMustBeInRange, 'a', 'z'))); } return prefix - 'a'; } public byte[] ToByteArray() { byte[] array = _bytes.ToArray(); _bytes.Clear(); return array; } } internal static class BinaryFormatParser { public static bool IsSessionKey(int value) { return (value & 1) != 0; } public static int GetSessionKey(int value) { return value / 2; } public static int GetStaticKey(int value) { return value / 2; } public static int ParseInt32(byte[] buffer, int offset, int size) { switch (size) { case 1: return buffer[offset]; case 2: return (buffer[offset] & 0x7f) + (buffer[offset + 1] << 7); case 3: return (buffer[offset] & 0x7f) + ((buffer[offset + 1] & 0x7f) << 7) + (buffer[offset + 2] << 14); case 4: return (buffer[offset] & 0x7f) + ((buffer[offset + 1] & 0x7f) << 7) + ((buffer[offset + 2] & 0x7f) << 14) + (buffer[offset + 3] << 21); default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("size", size, SR.Format(SR.ValueMustBeInRange, 1, 4))); } } public static int ParseKey(byte[] buffer, int offset, int size) { return ParseInt32(buffer, offset, size); } public unsafe static UniqueId ParseUniqueID(byte[] buffer, int offset, int size) { return new UniqueId(buffer, offset); } public static int MatchBytes(byte[] buffer, int offset, int size, byte[] buffer2) { if (size < buffer2.Length) { return 0; } int j = offset; for (int i = 0; i < buffer2.Length; i++, j++) { if (buffer2[i] != buffer[j]) { return 0; } } return buffer2.Length; } public static bool MatchAttributeNode(byte[] buffer, int offset, int size) { const XmlBinaryNodeType minAttribute = XmlBinaryNodeType.ShortAttribute; const XmlBinaryNodeType maxAttribute = XmlBinaryNodeType.DictionaryAttribute; if (size < 1) { return false; } XmlBinaryNodeType nodeType = (XmlBinaryNodeType)buffer[offset]; return nodeType >= minAttribute && nodeType <= maxAttribute; } public static int MatchKey(byte[] buffer, int offset, int size) { return MatchInt32(buffer, offset, size); } public static int MatchInt32(byte[] buffer, int offset, int size) { if (size > 0) { if ((buffer[offset] & 0x80) == 0) { return 1; } } if (size > 1) { if ((buffer[offset + 1] & 0x80) == 0) { return 2; } } if (size > 2) { if ((buffer[offset + 2] & 0x80) == 0) { return 3; } } if (size > 3) { if ((buffer[offset + 3] & 0x80) == 0) { return 4; } } return 0; } public static int MatchUniqueID(byte[] buffer, int offset, int size) { if (size < 16) { return 0; } return 16; } } internal class MessagePatterns { private static readonly byte[] s_commonFragment; // <Envelope><Headers><Action> private static readonly byte[] s_requestFragment1; // </Action><MessageID> private static readonly byte[] s_requestFragment2; // </MessageID><ReplyTo>...</ReplyTo><To>session-to-key</To></Headers><Body> private static readonly byte[] s_responseFragment1; // </Action><RelatesTo> private static readonly byte[] s_responseFragment2; // </RelatesTo><To>static-anonymous-key</To></Headers><Body> private static readonly byte[] s_bodyFragment; // <Envelope><Body> private const int ToValueSessionKey = 1; private IXmlDictionary _dictionary; private XmlBinaryReaderSession _readerSession; private ToHeader _toHeader; private MessageVersion _messageVersion; static MessagePatterns() { BinaryFormatBuilder builder = new BinaryFormatBuilder(); 
MessageDictionary messageDictionary = XD.MessageDictionary; Message12Dictionary message12Dictionary = XD.Message12Dictionary; AddressingDictionary addressingDictionary = XD.AddressingDictionary; Addressing10Dictionary addressing10Dictionary = XD.Addressing10Dictionary; char messagePrefix = MessageStrings.Prefix[0]; char addressingPrefix = AddressingStrings.Prefix[0]; // <s:Envelope xmlns:s="soap-ns" xmlns="addressing-ns"> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Envelope.Key)); builder.AppendDictionaryXmlnsAttribute(messagePrefix, builder.GetStaticKey(message12Dictionary.Namespace.Key)); builder.AppendDictionaryXmlnsAttribute(addressingPrefix, builder.GetStaticKey(addressing10Dictionary.Namespace.Key)); // <s:Header> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Header.Key)); // <a:Action>... builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.Action.Key)); builder.AppendPrefixDictionaryAttribute(messagePrefix, builder.GetStaticKey(messageDictionary.MustUnderstand.Key), '1'); builder.AppendDictionaryTextWithEndElement(); s_commonFragment = builder.ToByteArray(); // <a:MessageID>... builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.MessageId.Key)); builder.AppendUniqueIDWithEndElement(); s_requestFragment1 = builder.ToByteArray(); // <a:ReplyTo><a:Address>static-anonymous-key</a:Address></a:ReplyTo> builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.ReplyTo.Key)); builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.Address.Key)); builder.AppendDictionaryTextWithEndElement(builder.GetStaticKey(addressing10Dictionary.Anonymous.Key)); builder.AppendEndElement(); // <a:To>session-to-key</a:To> builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.To.Key)); builder.AppendPrefixDictionaryAttribute(messagePrefix, builder.GetStaticKey(messageDictionary.MustUnderstand.Key), '1'); builder.AppendDictionaryTextWithEndElement(builder.GetSessionKey(ToValueSessionKey)); // </s:Header> builder.AppendEndElement(); // <s:Body> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Body.Key)); s_requestFragment2 = builder.ToByteArray(); // <a:RelatesTo>... 
builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.RelatesTo.Key)); builder.AppendUniqueIDWithEndElement(); s_responseFragment1 = builder.ToByteArray(); // <a:To>static-anonymous-key</a:To> builder.AppendPrefixDictionaryElement(addressingPrefix, builder.GetStaticKey(addressingDictionary.To.Key)); builder.AppendPrefixDictionaryAttribute(messagePrefix, builder.GetStaticKey(messageDictionary.MustUnderstand.Key), '1'); builder.AppendDictionaryTextWithEndElement(builder.GetStaticKey(addressing10Dictionary.Anonymous.Key)); // </s:Header> builder.AppendEndElement(); // <s:Body> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Body.Key)); s_responseFragment2 = builder.ToByteArray(); // <s:Envelope xmlns:s="soap-ns" xmlns="addressing-ns"> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Envelope.Key)); builder.AppendDictionaryXmlnsAttribute(messagePrefix, builder.GetStaticKey(message12Dictionary.Namespace.Key)); builder.AppendDictionaryXmlnsAttribute(addressingPrefix, builder.GetStaticKey(addressing10Dictionary.Namespace.Key)); // <s:Body> builder.AppendPrefixDictionaryElement(messagePrefix, builder.GetStaticKey(messageDictionary.Body.Key)); s_bodyFragment = builder.ToByteArray(); } public MessagePatterns(IXmlDictionary dictionary, XmlBinaryReaderSession readerSession, MessageVersion messageVersion) { _dictionary = dictionary; _readerSession = readerSession; _messageVersion = messageVersion; } public Message TryCreateMessage(byte[] buffer, int offset, int size, BufferManager bufferManager, BufferedMessageData messageData) { RelatesToHeader relatesToHeader; MessageIDHeader messageIDHeader; XmlDictionaryString toString; int currentOffset = offset; int remainingSize = size; int bytesMatched = BinaryFormatParser.MatchBytes(buffer, currentOffset, remainingSize, s_commonFragment); if (bytesMatched == 0) { return null; } currentOffset += bytesMatched; remainingSize -= bytesMatched; bytesMatched = BinaryFormatParser.MatchKey(buffer, currentOffset, remainingSize); if (bytesMatched == 0) { return null; } int actionOffset = currentOffset; int actionSize = bytesMatched; currentOffset += bytesMatched; remainingSize -= bytesMatched; int totalBytesMatched; bytesMatched = BinaryFormatParser.MatchBytes(buffer, currentOffset, remainingSize, s_requestFragment1); if (bytesMatched != 0) { currentOffset += bytesMatched; remainingSize -= bytesMatched; bytesMatched = BinaryFormatParser.MatchUniqueID(buffer, currentOffset, remainingSize); if (bytesMatched == 0) { return null; } int messageIDOffset = currentOffset; int messageIDSize = bytesMatched; currentOffset += bytesMatched; remainingSize -= bytesMatched; bytesMatched = BinaryFormatParser.MatchBytes(buffer, currentOffset, remainingSize, s_requestFragment2); if (bytesMatched == 0) { return null; } currentOffset += bytesMatched; remainingSize -= bytesMatched; if (BinaryFormatParser.MatchAttributeNode(buffer, currentOffset, remainingSize)) { return null; } UniqueId messageId = BinaryFormatParser.ParseUniqueID(buffer, messageIDOffset, messageIDSize); messageIDHeader = MessageIDHeader.Create(messageId, _messageVersion.Addressing); relatesToHeader = null; if (!_readerSession.TryLookup(ToValueSessionKey, out toString)) { return null; } totalBytesMatched = s_requestFragment1.Length + messageIDSize + s_requestFragment2.Length; } else { bytesMatched = BinaryFormatParser.MatchBytes(buffer, currentOffset, remainingSize, s_responseFragment1); if 
(bytesMatched == 0) { return null; } currentOffset += bytesMatched; remainingSize -= bytesMatched; bytesMatched = BinaryFormatParser.MatchUniqueID(buffer, currentOffset, remainingSize); if (bytesMatched == 0) { return null; } int messageIDOffset = currentOffset; int messageIDSize = bytesMatched; currentOffset += bytesMatched; remainingSize -= bytesMatched; bytesMatched = BinaryFormatParser.MatchBytes(buffer, currentOffset, remainingSize, s_responseFragment2); if (bytesMatched == 0) { return null; } currentOffset += bytesMatched; remainingSize -= bytesMatched; if (BinaryFormatParser.MatchAttributeNode(buffer, currentOffset, remainingSize)) { return null; } UniqueId messageId = BinaryFormatParser.ParseUniqueID(buffer, messageIDOffset, messageIDSize); relatesToHeader = RelatesToHeader.Create(messageId, _messageVersion.Addressing); messageIDHeader = null; toString = XD.Addressing10Dictionary.Anonymous; totalBytesMatched = s_responseFragment1.Length + messageIDSize + s_responseFragment2.Length; } totalBytesMatched += s_commonFragment.Length + actionSize; int actionKey = BinaryFormatParser.ParseKey(buffer, actionOffset, actionSize); XmlDictionaryString actionString; if (!TryLookupKey(actionKey, out actionString)) { return null; } ActionHeader actionHeader = ActionHeader.Create(actionString, _messageVersion.Addressing); if (_toHeader == null) { _toHeader = ToHeader.Create(new Uri(toString.Value), _messageVersion.Addressing); } int abandonedSize = totalBytesMatched - s_bodyFragment.Length; offset += abandonedSize; size -= abandonedSize; Buffer.BlockCopy(s_bodyFragment, 0, buffer, offset, s_bodyFragment.Length); messageData.Open(new ArraySegment<byte>(buffer, offset, size), bufferManager); PatternMessage patternMessage = new PatternMessage(messageData, _messageVersion); MessageHeaders headers = patternMessage.Headers; headers.AddActionHeader(actionHeader); if (messageIDHeader != null) { headers.AddMessageIDHeader(messageIDHeader); headers.AddReplyToHeader(ReplyToHeader.AnonymousReplyTo10); } else { headers.AddRelatesToHeader(relatesToHeader); } headers.AddToHeader(_toHeader); return patternMessage; } private bool TryLookupKey(int key, out XmlDictionaryString result) { if (BinaryFormatParser.IsSessionKey(key)) { return _readerSession.TryLookup(BinaryFormatParser.GetSessionKey(key), out result); } else { return _dictionary.TryLookup(BinaryFormatParser.GetStaticKey(key), out result); } } internal sealed class PatternMessage : ReceivedMessage { private IBufferedMessageData _messageData; private MessageHeaders _headers; private RecycledMessageState _recycledMessageState; private MessageProperties _properties; private XmlDictionaryReader _reader; public PatternMessage(IBufferedMessageData messageData, MessageVersion messageVersion) { _messageData = messageData; _recycledMessageState = messageData.TakeMessageState(); if (_recycledMessageState == null) { _recycledMessageState = new RecycledMessageState(); } _properties = _recycledMessageState.TakeProperties(); if (_properties == null) { _properties = new MessageProperties(); } _headers = _recycledMessageState.TakeHeaders(); if (_headers == null) { _headers = new MessageHeaders(messageVersion); } else { _headers.Init(messageVersion); } XmlDictionaryReader reader = messageData.GetMessageReader(); reader.ReadStartElement(); VerifyStartBody(reader, messageVersion.Envelope); ReadStartBody(reader); _reader = reader; } public PatternMessage(IBufferedMessageData messageData, MessageVersion messageVersion, KeyValuePair<string, object>[] properties, MessageHeaders 
headers) { _messageData = messageData; _messageData.Open(); _recycledMessageState = _messageData.TakeMessageState(); if (_recycledMessageState == null) { _recycledMessageState = new RecycledMessageState(); } _properties = _recycledMessageState.TakeProperties(); if (_properties == null) { _properties = new MessageProperties(); } if (properties != null) { _properties.CopyProperties(properties); } _headers = _recycledMessageState.TakeHeaders(); if (_headers == null) { _headers = new MessageHeaders(messageVersion); } if (headers != null) { _headers.CopyHeadersFrom(headers); } XmlDictionaryReader reader = messageData.GetMessageReader(); reader.ReadStartElement(); VerifyStartBody(reader, messageVersion.Envelope); ReadStartBody(reader); _reader = reader; } public override MessageHeaders Headers { get { if (IsDisposed) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateMessageDisposedException()); } return _headers; } } public override MessageProperties Properties { get { if (IsDisposed) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateMessageDisposedException()); } return _properties; } } public override MessageVersion Version { get { if (IsDisposed) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateMessageDisposedException()); } return _headers.MessageVersion; } } internal override RecycledMessageState RecycledMessageState { get { return _recycledMessageState; } } private XmlDictionaryReader GetBufferedReaderAtBody() { XmlDictionaryReader reader = _messageData.GetMessageReader(); reader.ReadStartElement(); reader.ReadStartElement(); return reader; } protected override void OnBodyToString(XmlDictionaryWriter writer) { using (XmlDictionaryReader reader = GetBufferedReaderAtBody()) { while (reader.NodeType != XmlNodeType.EndElement) { writer.WriteNode(reader, false); } } } protected override void OnClose() { Exception ex = null; try { base.OnClose(); } catch (Exception e) { if (Fx.IsFatal(e)) { throw; } ex = e; } try { _properties.Dispose(); } catch (Exception e) { if (Fx.IsFatal(e)) { throw; } if (ex == null) { ex = e; } } try { if (_reader != null) { _reader.Dispose(); } } catch (Exception e) { if (Fx.IsFatal(e)) { throw; } if (ex == null) { ex = e; } } try { _recycledMessageState.ReturnHeaders(_headers); _recycledMessageState.ReturnProperties(_properties); _messageData.ReturnMessageState(_recycledMessageState); _recycledMessageState = null; _messageData.Close(); _messageData = null; } catch (Exception e) { if (Fx.IsFatal(e)) { throw; } if (ex == null) { ex = e; } } if (ex != null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(ex); } } protected override MessageBuffer OnCreateBufferedCopy(int maxBufferSize) { KeyValuePair<string, object>[] properties = new KeyValuePair<string, object>[Properties.Count]; ((ICollection<KeyValuePair<string, object>>)Properties).CopyTo(properties, 0); _messageData.EnableMultipleUsers(); return new PatternMessageBuffer(_messageData, this.Version, properties, _headers); } protected override XmlDictionaryReader OnGetReaderAtBodyContents() { XmlDictionaryReader reader = _reader; _reader = null; return reader; } protected override string OnGetBodyAttribute(string localName, string ns) { return null; } } internal class PatternMessageBuffer : MessageBuffer { private bool _closed; private MessageHeaders _headers; private IBufferedMessageData _messageDataAtBody; private MessageVersion _messageVersion; private KeyValuePair<string, object>[] _properties; private object _thisLock = new object(); private 
RecycledMessageState _recycledMessageState; public PatternMessageBuffer(IBufferedMessageData messageDataAtBody, MessageVersion messageVersion, KeyValuePair<string, object>[] properties, MessageHeaders headers) { _messageDataAtBody = messageDataAtBody; _messageDataAtBody.Open(); _recycledMessageState = _messageDataAtBody.TakeMessageState(); if (_recycledMessageState == null) { _recycledMessageState = new RecycledMessageState(); } _headers = _recycledMessageState.TakeHeaders(); if (_headers == null) { _headers = new MessageHeaders(messageVersion); } _headers.CopyHeadersFrom(headers); _properties = properties; _messageVersion = messageVersion; } public override int BufferSize { get { lock (this.ThisLock) { if (_closed) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateBufferDisposedException()); } return _messageDataAtBody.Buffer.Count; } } } private object ThisLock { get { return _thisLock; } } public override void Close() { lock (_thisLock) { if (!_closed) { _closed = true; _recycledMessageState.ReturnHeaders(_headers); _messageDataAtBody.ReturnMessageState(_recycledMessageState); _messageDataAtBody.Close(); _recycledMessageState = null; _messageDataAtBody = null; _properties = null; _messageVersion = null; _headers = null; } } } public override Message CreateMessage() { lock (this.ThisLock) { if (_closed) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateBufferDisposedException()); } return new PatternMessage(_messageDataAtBody, _messageVersion, _properties, _headers); } } } } }
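// Standalone sketch (not part of the WCF sources above): BinaryFormatBuilder.AppendKey writes
// dictionary-string keys with a little-endian, 7-bits-per-byte encoding in which the high bit
// of each byte means "another byte follows"; BinaryFormatParser.MatchInt32 finds the length of
// such a value and ParseInt32 reassembles it. AppendKey itself only ever emits one or two bytes,
// because keys are restricted to the range [0, 0x4000). The low bit of a key distinguishes
// session strings (odd) from static dictionary strings (even); see GetStaticKey/GetSessionKey
// and BinaryFormatParser.IsSessionKey. The helper below reproduces that encoding so the byte
// layout is easy to inspect; it is an illustration, not code from the library.
using System;
using System.Collections.Generic;

internal static class MultiByteIntSketch
{
    // Encode like AppendKey: low 7 bits first, continuation bit 0x80 on every byte but the last.
    public static byte[] Encode(int value)
    {
        if (value < 0) throw new ArgumentOutOfRangeException(nameof(value));
        var bytes = new List<byte>();
        while (value >= 0x80)
        {
            bytes.Add((byte)((value & 0x7F) | 0x80));
            value >>= 7;
        }
        bytes.Add((byte)value);
        return bytes.ToArray();
    }

    // Decode like MatchInt32 + ParseInt32: scan until a byte whose high bit is clear,
    // then add up the 7-bit groups from least to most significant.
    public static int Decode(byte[] buffer, int offset, out int size)
    {
        int value = 0;
        int shift = 0;
        int i = offset;
        while (true)
        {
            byte b = buffer[i++];
            value += (b & 0x7F) << shift;
            if ((b & 0x80) == 0) break;
            shift += 7;
        }
        size = i - offset;
        return value;
    }

    public static void Main()
    {
        byte[] encoded = Encode(0x1234);                 // -> 0xB4 0x24
        int decoded = Decode(encoded, 0, out int size);  // -> 0x1234, size 2
        Console.WriteLine($"0x{decoded:X} in {size} byte(s)");
    }
}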
using Lucene.Net.Support; using System; using System.Collections; using System.Collections.Generic; using System.Linq; #if FEATURE_SERIALIZABLE_EXCEPTIONS using System.Runtime.Serialization; #endif using System.Text; namespace Lucene.Net.Search { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using IBits = Lucene.Net.Util.IBits; using IndexReader = Lucene.Net.Index.IndexReader; using Occur_e = Lucene.Net.Search.Occur; using Similarity = Lucene.Net.Search.Similarities.Similarity; using Term = Lucene.Net.Index.Term; using ToStringUtils = Lucene.Net.Util.ToStringUtils; /// <summary> /// A <see cref="Query"/> that matches documents matching boolean combinations of other /// queries, e.g. <see cref="TermQuery"/>s, <see cref="PhraseQuery"/>s or other /// <see cref="BooleanQuery"/>s. /// <para/> /// Collection initializer note: To create and populate a <see cref="BooleanQuery"/> /// in a single statement, you can use the following example as a guide: /// /// <code> /// var booleanQuery = new BooleanQuery() { /// { new WildcardQuery(new Term("field2", "foobar")), Occur.SHOULD }, /// { new MultiPhraseQuery() { /// new Term("field", "microsoft"), /// new Term("field", "office") /// }, Occur.SHOULD } /// }; /// /// // or /// /// var booleanQuery = new BooleanQuery() { /// new BooleanClause(new WildcardQuery(new Term("field2", "foobar")), Occur.SHOULD), /// new BooleanClause(new MultiPhraseQuery() { /// new Term("field", "microsoft"), /// new Term("field", "office") /// }, Occur.SHOULD) /// }; /// </code> /// </summary> #if FEATURE_SERIALIZABLE [Serializable] #endif public class BooleanQuery : Query, IEnumerable<BooleanClause> { private static int maxClauseCount = 1024; /// <summary> /// Thrown when an attempt is made to add more than /// <see cref="MaxClauseCount"/> clauses. This typically happens if /// a <see cref="PrefixQuery"/>, <see cref="FuzzyQuery"/>, <see cref="WildcardQuery"/>, or <see cref="TermRangeQuery"/> /// is expanded to many terms during search. /// </summary> // LUCENENET: It is no longer good practice to use binary serialization. // See: https://github.com/dotnet/corefx/issues/23584#issuecomment-325724568 #if FEATURE_SERIALIZABLE_EXCEPTIONS [Serializable] #endif public class TooManyClausesException : Exception // LUCENENET specific - added Exception suffix { public TooManyClausesException() : base("maxClauseCount is set to " + maxClauseCount) { } #if FEATURE_SERIALIZABLE_EXCEPTIONS /// <summary> /// Initializes a new instance of this class with serialized data. 
/// </summary> /// <param name="info">The <see cref="SerializationInfo"/> that holds the serialized object data about the exception being thrown.</param> /// <param name="context">The <see cref="StreamingContext"/> that contains contextual information about the source or destination.</param> public TooManyClausesException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endif } /// <summary> /// Return the maximum number of clauses permitted, 1024 by default. /// Attempts to add more than the permitted number of clauses cause /// <see cref="TooManyClausesException"/> to be thrown. </summary> public static int MaxClauseCount { get { return maxClauseCount; } set { if (value < 1) { throw new System.ArgumentException("maxClauseCount must be >= 1"); } BooleanQuery.maxClauseCount = value; } } private EquatableList<BooleanClause> clauses = new EquatableList<BooleanClause>(); private readonly bool disableCoord; /// <summary> /// Constructs an empty boolean query. </summary> public BooleanQuery() { disableCoord = false; } /// <summary> /// Constructs an empty boolean query. /// <para/> /// <see cref="Similarity.Coord(int,int)"/> may be disabled in scoring, as /// appropriate. For example, this score factor does not make sense for most /// automatically generated queries, like <see cref="WildcardQuery"/> and /// <see cref="FuzzyQuery"/>. /// </summary> /// <param name="disableCoord"> Disables <see cref="Similarity.Coord(int,int)"/> in scoring. </param> public BooleanQuery(bool disableCoord) { this.disableCoord = disableCoord; } /// <summary> /// Returns true if <see cref="Similarity.Coord(int,int)"/> is disabled in /// scoring for this query instance. </summary> /// <seealso cref="BooleanQuery(bool)"/> public virtual bool CoordDisabled // LUCENENET TODO: API Change to CoordEnabled? Per MSDN, properties should be in the affirmative. { get { return disableCoord; } } /// <summary> /// Specifies a minimum number of the optional <see cref="BooleanClause"/>s /// which must be satisfied. /// /// <para> /// By default no optional clauses are necessary for a match /// (unless there are no required clauses). If this method is used, /// then the specified number of clauses is required. /// </para> /// <para> /// Use of this method is totally independent of specifying that /// any specific clauses are required (or prohibited). This number will /// only be compared against the number of matching optional clauses. /// </para> /// </summary> /// <param name="value"> The number of optional clauses that must match </param> public virtual int MinimumNumberShouldMatch { set { this.m_minNrShouldMatch = value; } get { return m_minNrShouldMatch; } } protected int m_minNrShouldMatch = 0; /// <summary> /// Adds a clause to a boolean query. /// </summary> /// <exception cref="TooManyClausesException"> If the new number of clauses exceeds the maximum clause number </exception> /// <seealso cref="MaxClauseCount"/> public virtual void Add(Query query, Occur occur) { Add(new BooleanClause(query, occur)); } /// <summary> /// Adds a clause to a boolean query. </summary> /// <exception cref="TooManyClausesException"> If the new number of clauses exceeds the maximum clause number </exception> /// <seealso cref="MaxClauseCount"/> public virtual void Add(BooleanClause clause) { if (clauses.Count >= maxClauseCount) { throw new TooManyClausesException(); } clauses.Add(clause); } /// <summary> /// Returns the set of clauses in this query. 
</summary> public virtual BooleanClause[] GetClauses() { return clauses.ToArray(); } /// <summary> /// Returns the list of clauses in this query. </summary> public virtual IList<BooleanClause> Clauses { get { return clauses; } } /// <summary> /// Returns an iterator on the clauses in this query. It implements the <see cref="T:IEnumerable{BooleanClause}"/> interface to /// make it possible to do: /// <code>foreach (BooleanClause clause in booleanQuery) {}</code> /// </summary> public IEnumerator<BooleanClause> GetEnumerator() { return Clauses.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } /// <summary> /// Expert: the <see cref="Weight"/> for <see cref="BooleanQuery"/>, used to /// normalize, score and explain these queries. /// <para/> /// @lucene.experimental /// </summary> public class BooleanWeight : Weight { private readonly BooleanQuery outerInstance; /// <summary> /// The <see cref="Similarities.Similarity"/> implementation. </summary> protected Similarity m_similarity; protected List<Weight> m_weights; protected int m_maxCoord; // num optional + num required private readonly bool disableCoord; public BooleanWeight(BooleanQuery outerInstance, IndexSearcher searcher, bool disableCoord) { this.outerInstance = outerInstance; this.m_similarity = searcher.Similarity; this.disableCoord = disableCoord; m_weights = new List<Weight>(outerInstance.clauses.Count); for (int i = 0; i < outerInstance.clauses.Count; i++) { BooleanClause c = outerInstance.clauses[i]; Weight w = c.Query.CreateWeight(searcher); m_weights.Add(w); if (!c.IsProhibited) { m_maxCoord++; } } } public Similarity Similarity { get { return m_similarity; } } public int MaxCoord { get { return m_maxCoord; } } public override Query Query { get { return outerInstance; } } public override float GetValueForNormalization() { float sum = 0.0f; for (int i = 0; i < m_weights.Count; i++) { // call sumOfSquaredWeights for all clauses in case of side effects float s = m_weights[i].GetValueForNormalization(); // sum sub weights if (!outerInstance.clauses[i].IsProhibited) { // only add to sum for non-prohibited clauses sum += s; } } sum *= outerInstance.Boost * outerInstance.Boost; // boost each sub-weight return sum; } public virtual float Coord(int overlap, int maxOverlap) { // LUCENE-4300: in most cases of maxOverlap=1, BQ rewrites itself away, // so coord() is not applied. But when BQ cannot optimize itself away // for a single clause (minNrShouldMatch, prohibited clauses, etc), its // important not to apply coord(1,1) for consistency, it might not be 1.0F return maxOverlap == 1 ? 
1F : m_similarity.Coord(overlap, maxOverlap); } public override void Normalize(float norm, float topLevelBoost) { topLevelBoost *= outerInstance.Boost; // incorporate boost foreach (Weight w in m_weights) { // normalize all clauses, (even if prohibited in case of side affects) w.Normalize(norm, topLevelBoost); } } public override Explanation Explain(AtomicReaderContext context, int doc) { int minShouldMatch = outerInstance.MinimumNumberShouldMatch; ComplexExplanation sumExpl = new ComplexExplanation(); sumExpl.Description = "sum of:"; int coord = 0; float sum = 0.0f; bool fail = false; int shouldMatchCount = 0; IEnumerator<BooleanClause> cIter = outerInstance.clauses.GetEnumerator(); for (IEnumerator<Weight> wIter = m_weights.GetEnumerator(); wIter.MoveNext(); ) { Weight w = wIter.Current; cIter.MoveNext(); BooleanClause c = cIter.Current; if (w.GetScorer(context, context.AtomicReader.LiveDocs) == null) { if (c.IsRequired) { fail = true; Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); sumExpl.AddDetail(r); } continue; } Explanation e = w.Explain(context, doc); if (e.IsMatch) { if (!c.IsProhibited) { sumExpl.AddDetail(e); sum += e.Value; coord++; } else { Explanation r = new Explanation(0.0f, "match on prohibited clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } if (c.Occur == Occur_e.SHOULD) { shouldMatchCount++; } } else if (c.IsRequired) { Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } } if (fail) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to meet condition(s) of required/prohibited clause(s)"; return sumExpl; } else if (shouldMatchCount < minShouldMatch) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to match minimum number " + "of optional clauses: " + minShouldMatch; return sumExpl; } sumExpl.Match = 0 < coord ? true : false; sumExpl.Value = sum; float coordFactor = disableCoord ? 1.0f : Coord(coord, m_maxCoord); if (coordFactor == 1.0f) { return sumExpl; // eliminate wrapper } else { ComplexExplanation result = new ComplexExplanation(sumExpl.IsMatch, sum * coordFactor, "product of:"); result.AddDetail(sumExpl); result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + m_maxCoord + ")")); return result; } } public override BulkScorer GetBulkScorer(AtomicReaderContext context, bool scoreDocsInOrder, IBits acceptDocs) { if (scoreDocsInOrder || outerInstance.m_minNrShouldMatch > 1) { // TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch // but the same is even true of pure conjunctions... return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); } IList<BulkScorer> prohibited = new List<BulkScorer>(); IList<BulkScorer> optional = new List<BulkScorer>(); IEnumerator<BooleanClause> cIter = outerInstance.clauses.GetEnumerator(); foreach (Weight w in m_weights) { cIter.MoveNext(); BooleanClause c = cIter.Current; BulkScorer subScorer = w.GetBulkScorer(context, false, acceptDocs); if (subScorer == null) { if (c.IsRequired) { return null; } } else if (c.IsRequired) { // TODO: there are some cases where BooleanScorer // would handle conjunctions faster than // BooleanScorer2... 
return base.GetBulkScorer(context, scoreDocsInOrder, acceptDocs); } else if (c.IsProhibited) { prohibited.Add(subScorer); } else { optional.Add(subScorer); } } // Check if we can and should return a BooleanScorer return new BooleanScorer(this, disableCoord, outerInstance.m_minNrShouldMatch, optional, prohibited, m_maxCoord); } public override Scorer GetScorer(AtomicReaderContext context, IBits acceptDocs) { IList<Scorer> required = new List<Scorer>(); IList<Scorer> prohibited = new List<Scorer>(); IList<Scorer> optional = new List<Scorer>(); IEnumerator<BooleanClause> cIter = outerInstance.clauses.GetEnumerator(); foreach (Weight w in m_weights) { cIter.MoveNext(); BooleanClause c = cIter.Current; Scorer subScorer = w.GetScorer(context, acceptDocs); if (subScorer == null) { if (c.IsRequired) { return null; } } else if (c.IsRequired) { required.Add(subScorer); } else if (c.IsProhibited) { prohibited.Add(subScorer); } else { optional.Add(subScorer); } } if (required.Count == 0 && optional.Count == 0) { // no required and optional clauses. return null; } else if (optional.Count < outerInstance.m_minNrShouldMatch) { // either >1 req scorer, or there are 0 req scorers and at least 1 // optional scorer. Therefore if there are not enough optional scorers // no documents will be matched by the query return null; } // simple conjunction if (optional.Count == 0 && prohibited.Count == 0) { float coord = disableCoord ? 1.0f : Coord(required.Count, m_maxCoord); return new ConjunctionScorer(this, required.ToArray(), coord); } // simple disjunction if (required.Count == 0 && prohibited.Count == 0 && outerInstance.m_minNrShouldMatch <= 1 && optional.Count > 1) { var coord = new float[optional.Count + 1]; for (int i = 0; i < coord.Length; i++) { coord[i] = disableCoord ? 1.0f : Coord(i, m_maxCoord); } return new DisjunctionSumScorer(this, optional.ToArray(), coord); } // Return a BooleanScorer2 return new BooleanScorer2(this, disableCoord, outerInstance.m_minNrShouldMatch, required, prohibited, optional, m_maxCoord); } public override bool ScoresDocsOutOfOrder { get { if (outerInstance.m_minNrShouldMatch > 1) { // BS2 (in-order) will be used by scorer() return false; } foreach (BooleanClause c in outerInstance.clauses) { if (c.IsRequired) { // BS2 (in-order) will be used by scorer() return false; } } // scorer() will return an out-of-order scorer if requested. return true; } } } public override Weight CreateWeight(IndexSearcher searcher) { return new BooleanWeight(this, searcher, disableCoord); } public override Query Rewrite(IndexReader reader) { if (m_minNrShouldMatch == 0 && clauses.Count == 1) // optimize 1-clause queries { BooleanClause c = clauses[0]; if (!c.IsProhibited) // just return clause { Query query = c.Query.Rewrite(reader); // rewrite first if (Boost != 1.0f) // incorporate boost { if (query == c.Query) // if rewrite was no-op { query = (Query)query.Clone(); // then clone before boost } // Since the BooleanQuery only has 1 clause, the BooleanQuery will be // written out. 
Therefore the rewritten Query's boost must incorporate both // the clause's boost, and the boost of the BooleanQuery itself query.Boost = Boost * query.Boost; } return query; } } BooleanQuery clone = null; // recursively rewrite for (int i = 0; i < clauses.Count; i++) { BooleanClause c = clauses[i]; Query query = c.Query.Rewrite(reader); if (query != c.Query) // clause rewrote: must clone { if (clone == null) { // The BooleanQuery clone is lazily initialized so only initialize // it if a rewritten clause differs from the original clause (and hasn't been // initialized already). If nothing differs, the clone isn't needlessly created clone = (BooleanQuery)this.Clone(); } clone.clauses[i] = new BooleanClause(query, c.Occur); } } if (clone != null) { return clone; // some clauses rewrote } else { return this; // no clauses rewrote } } // inherit javadoc public override void ExtractTerms(ISet<Term> terms) { foreach (BooleanClause clause in clauses) { if (clause.Occur != Occur_e.MUST_NOT) { clause.Query.ExtractTerms(terms); } } } public override object Clone() { BooleanQuery clone = (BooleanQuery)base.Clone(); clone.clauses = (EquatableList<BooleanClause>)this.clauses.Clone(); return clone; } /// <summary> /// Prints a user-readable version of this query. </summary> public override string ToString(string field) { StringBuilder buffer = new StringBuilder(); bool needParens = Boost != 1.0 || MinimumNumberShouldMatch > 0; if (needParens) { buffer.Append("("); } for (int i = 0; i < clauses.Count; i++) { BooleanClause c = clauses[i]; if (c.IsProhibited) { buffer.Append("-"); } else if (c.IsRequired) { buffer.Append("+"); } Query subQuery = c.Query; if (subQuery != null) { if (subQuery is BooleanQuery) // wrap sub-bools in parens { buffer.Append("("); buffer.Append(subQuery.ToString(field)); buffer.Append(")"); } else { buffer.Append(subQuery.ToString(field)); } } else { buffer.Append("null"); } if (i != clauses.Count - 1) { buffer.Append(" "); } } if (needParens) { buffer.Append(")"); } if (MinimumNumberShouldMatch > 0) { buffer.Append('~'); buffer.Append(MinimumNumberShouldMatch); } if (Boost != 1.0f) { buffer.Append(ToStringUtils.Boost(Boost)); } return buffer.ToString(); } /// <summary> /// Returns <c>true</c> if <paramref name="o"/> is equal to this. </summary> public override bool Equals(object o) { if (!(o is BooleanQuery)) { return false; } BooleanQuery other = (BooleanQuery)o; return this.Boost == other.Boost && this.clauses.Equals(other.clauses) && this.MinimumNumberShouldMatch == other.MinimumNumberShouldMatch && this.disableCoord == other.disableCoord; } /// <summary> /// Returns a hash code value for this object. </summary> public override int GetHashCode() { return Number.SingleToInt32Bits(Boost) ^ clauses.GetHashCode() + MinimumNumberShouldMatch + (disableCoord ? 17 : 0); } } }
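// Usage sketch (not part of the Lucene.NET source above): builds the kind of query that the
// MinimumNumberShouldMatch documentation describes - one required clause, several optional
// clauses of which at least two must match, and one prohibited clause - using the
// collection-initializer form shown in the BooleanQuery class remarks. The field and term
// names are illustrative only.
using Lucene.Net.Index;
using Lucene.Net.Search;

public static class BooleanQueryUsageSketch
{
    public static Query BuildLaptopQuery()
    {
        var query = new BooleanQuery()
        {
            // MUST: every hit has to contain this term.
            { new TermQuery(new Term("category", "laptop")), Occur.MUST },

            // SHOULD: optional on their own, but constrained below.
            { new TermQuery(new Term("feature", "ssd")), Occur.SHOULD },
            { new TermQuery(new Term("feature", "touchscreen")), Occur.SHOULD },
            { new TermQuery(new Term("feature", "backlit")), Occur.SHOULD },

            // MUST_NOT: excludes any document containing this term.
            { new TermQuery(new Term("status", "discontinued")), Occur.MUST_NOT }
        };

        // Require at least two of the three SHOULD clauses to match.
        query.MinimumNumberShouldMatch = 2;

        return query;
    }
}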
using System; using ArrayList = System.Collections.ArrayList; //using CharFormatter = antlr.CharFormatter; namespace antlr.collections.impl { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A BitSet to replace java.util.BitSet. * Primary differences are that most set operators return new sets * as opposed to oring and anding "in place". Further, a number of * operations were added. I cannot contain a BitSet because there * is no way to access the internal bits (which I need for speed) * and, because it is final, I cannot subclass to add functionality. * Consider defining set degree. Without access to the bits, I must * call a method n times to test the ith bit...ack! * * Also seems like or() from util is wrong when size of incoming set is bigger * than this.bits.length. * * @author Terence Parr * @author <br><a href="mailto:[email protected]">Pete Wells</a> */ public class BitSet : ICloneable { protected internal const int BITS = 64; // number of bits / long protected internal const int NIBBLE = 4; protected internal const int LOG_BITS = 6; // 2^6 == 64 /*We will often need to do a mod operator (i mod nbits). Its * turns out that, for powers of two, this mod operation is * same as (i & (nbits-1)). Since mod is slow, we use a * precomputed mod mask to do the mod instead. */ protected internal static readonly int MOD_MASK = BITS - 1; /*The actual data bits */ protected internal long[] dataBits; /*Construct a bitset of size one word (64 bits) */ public BitSet() : this(BITS) { } /*Construction from a static array of longs */ public BitSet(long[] bits_) { dataBits = bits_; } /*Construct a bitset given the size * @param nbits The size of the bitset in bits */ public BitSet(int nbits) { dataBits = new long[((nbits - 1) >> LOG_BITS) + 1]; } /*or this element into this set (grow as necessary to accommodate) */ public virtual void add(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { growToInclude(el); } dataBits[n] |= bitMask(el); } public virtual BitSet and(BitSet a) { BitSet s = (BitSet) this.Clone(); s.andInPlace(a); return s; } public virtual void andInPlace(BitSet a) { int min = (int) (Math.Min(dataBits.Length, a.dataBits.Length)); for (int i = min - 1; i >= 0; i--) { dataBits[i] &= a.dataBits[i]; } // clear all bits in this not present in a (if this bigger than a). 
for (int i = min; i < dataBits.Length; i++) { dataBits[i] = 0; } } private static long bitMask(int bitNumber) { int bitPosition = bitNumber & MOD_MASK; // bitNumber mod BITS return 1L << bitPosition; } public virtual void clear() { for (int i = dataBits.Length - 1; i >= 0; i--) { dataBits[i] = 0; } } public virtual void clear(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { // grow as necessary to accommodate growToInclude(el); } dataBits[n] &= ~ bitMask(el); } public virtual object Clone() { BitSet s; try { s = new BitSet(); s.dataBits = new long[dataBits.Length]; Array.Copy(dataBits, 0, s.dataBits, 0, dataBits.Length); } catch //(System.Exception e) { throw new System.ApplicationException(); } return s; } public virtual int degree() { int deg = 0; for (int i = dataBits.Length - 1; i >= 0; i--) { long word = dataBits[i]; if (word != 0L) { for (int bit = BITS - 1; bit >= 0; bit--) { if ((word & (1L << bit)) != 0) { deg++; } } } } return deg; } override public int GetHashCode() { return dataBits.GetHashCode(); } /*code "inherited" from java.util.BitSet */ override public bool Equals(object obj) { if ((obj != null) && (obj is BitSet)) { BitSet bset = (BitSet) obj; int n = (int) (System.Math.Min(dataBits.Length, bset.dataBits.Length)); for (int i = n; i-- > 0; ) { if (dataBits[i] != bset.dataBits[i]) { return false; } } if (dataBits.Length > n) { for (int i = (int) (dataBits.Length); i-- > n; ) { if (dataBits[i] != 0) { return false; } } } else if (bset.dataBits.Length > n) { for (int i = (int) (bset.dataBits.Length); i-- > n; ) { if (bset.dataBits[i] != 0) { return false; } } } return true; } return false; } /* * Grows the set to a larger number of bits. * @param bit element that must fit in set */ public virtual void growToInclude(int bit) { int newSize = (int) (System.Math.Max(dataBits.Length << 1, numWordsToHold(bit))); long[] newbits = new long[newSize]; Array.Copy(dataBits, 0, newbits, 0, dataBits.Length); dataBits = newbits; } public virtual bool member(int el) { int n = wordNumber(el); if (n >= dataBits.Length) return false; return (dataBits[n] & bitMask(el)) != 0; } public virtual bool nil() { for (int i = dataBits.Length - 1; i >= 0; i--) { if (dataBits[i] != 0) return false; } return true; } public virtual BitSet not() { BitSet s = (BitSet) this.Clone(); s.notInPlace(); return s; } public virtual void notInPlace() { for (int i = dataBits.Length - 1; i >= 0; i--) { dataBits[i] = ~ dataBits[i]; } } /*complement bits in the range 0..maxBit. 
*/ public virtual void notInPlace(int maxBit) { notInPlace(0, maxBit); } /*complement bits in the range minBit..maxBit.*/ public virtual void notInPlace(int minBit, int maxBit) { // make sure that we have room for maxBit growToInclude(maxBit); for (int i = minBit; i <= maxBit; i++) { int n = wordNumber(i); dataBits[n] ^= bitMask(i); } } private int numWordsToHold(int el) { return (el >> LOG_BITS) + 1; } public static BitSet of(int el) { BitSet s = new BitSet(el + 1); s.add(el); return s; } /*return this | a in a new set */ public virtual BitSet or(BitSet a) { BitSet s = (BitSet) this.Clone(); s.orInPlace(a); return s; } public virtual void orInPlace(BitSet a) { // If this is smaller than a, grow this first if (a.dataBits.Length > dataBits.Length) { setSize((int) (a.dataBits.Length)); } int min = (int) (System.Math.Min(dataBits.Length, a.dataBits.Length)); for (int i = min - 1; i >= 0; i--) { dataBits[i] |= a.dataBits[i]; } } // remove this element from this set public virtual void remove(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { growToInclude(el); } dataBits[n] &= ~ bitMask(el); } /* * Sets the size of a set. * @param nwords how many words the new set should be */ private void setSize(int nwords) { long[] newbits = new long[nwords]; int n = (int) (System.Math.Min(nwords, dataBits.Length)); Array.Copy(dataBits, 0, newbits, 0, n); dataBits = newbits; } public virtual int size() { return dataBits.Length << LOG_BITS; // num words * bits per word } /*return how much space is being used by the dataBits array not * how many actually have member bits on. */ public virtual int lengthInLongWords() { return dataBits.Length; } /*Is this contained within a? */ public virtual bool subset(BitSet a) { if (a == null) //(a == null || !(a is BitSet)) return false; return this.and(a).Equals(this); } /*Subtract the elements of 'a' from 'this' in-place. * Basically, just turn off all bits of 'this' that are in 'a'. */ public virtual void subtractInPlace(BitSet a) { if (a == null) return ; // for all words of 'a', turn off corresponding bits of 'this' for (int i = 0; i < dataBits.Length && i < a.dataBits.Length; i++) { dataBits[i] &= ~ a.dataBits[i]; } } public virtual int[] toArray() { int[] elems = new int[degree()]; int en = 0; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { elems[en++] = i; } } return elems; } public virtual long[] toPackedArray() { return dataBits; } override public string ToString() { return ToString(","); } /*Transform a bit set into a string by formatting each element as an integer * @separator The string to put in between elements * @return A commma-separated list of values */ public virtual string ToString(string separator) { string str = ""; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { if (str.Length > 0) { str += separator; } str = str + i; } } return str; } /*Create a string representation where instead of integer elements, the * ith element of vocabulary is displayed instead. Vocabulary is a Vector * of Strings. * @separator The string to put in between elements * @return A commma-separated list of character constants. 
*/ public virtual string ToString(string separator, ArrayList vocabulary) { if (vocabulary == null) { return ToString(separator); } string str = ""; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { if (str.Length > 0) { str += separator; } if (i >= vocabulary.Count) { str += "<bad element " + i + ">"; } else if (vocabulary[i] == null) { str += "<" + i + ">"; } else { str += (string) vocabulary[i]; } } } return str; } /* * Dump a comma-separated list of the words making up the bit set. * Split each 64 bit number into two more manageable 32 bit numbers. * This generates a comma-separated list of C++-like unsigned long constants. */ public virtual string toStringOfHalfWords() { string s = new string("".ToCharArray()); for (int i = 0; i < dataBits.Length; i++) { if (i != 0) s += ", "; long tmp = dataBits[i]; tmp &= 0xFFFFFFFFL; s += (tmp + "UL"); s += ", "; tmp = SupportClass.URShift(dataBits[i], 32); tmp &= 0xFFFFFFFFL; s += (tmp + "UL"); } return s; } /* * Dump a comma-separated list of the words making up the bit set. * This generates a comma-separated list of Java-like long int constants. */ public virtual string toStringOfWords() { string s = new string("".ToCharArray()); for (int i = 0; i < dataBits.Length; i++) { if (i != 0) s += ", "; s += (dataBits[i] + "L"); } return s; } /*Print out the bit set but collapse char ranges. */ /* public virtual string toStringWithRanges(string separator, CharFormatter formatter) { string str = ""; int[] elems = this.toArray(); if (elems.Length == 0) { return ""; } // look for ranges int i = 0; while (i < elems.Length) { int lastInRange; lastInRange = 0; for (int j = i + 1; j < elems.Length; j++) { if (elems[j] != elems[j - 1] + 1) { break; } lastInRange = j; } // found a range if (str.Length > 0) { str += separator; } if (lastInRange - i >= 2) { str += formatter.literalChar(elems[i]); str += ".."; str += formatter.literalChar(elems[lastInRange]); i = lastInRange; // skip past end of range for next range } else { // no range, just print current char and move on str += formatter.literalChar(elems[i]); } i++; } return str; } */ private static int wordNumber(int bit) { return bit >> LOG_BITS; // bit / BITS } } }
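// Usage sketch (not part of the ANTLR source above): exercises the BitSet API defined in this
// file and spells out the index arithmetic its comments rely on - element i lives in word
// (i >> LOG_BITS) at bit position (i & MOD_MASK), so membership is a single mask-and-test.
// Note that the set operators (and, or, not) return new sets rather than mutating in place.
using System;
using antlr.collections.impl;

public static class BitSetUsageSketch
{
    public static void Main()
    {
        BitSet vowels = new BitSet(128);        // one bit per ASCII code point
        foreach (char c in "aeiou")
        {
            vowels.add(c);                      // grows automatically if needed
        }

        Console.WriteLine(vowels.member('e'));  // True
        Console.WriteLine(vowels.member('z'));  // False
        Console.WriteLine(vowels.degree());     // 5 set bits

        // Union with another set; the receiver is left unchanged.
        BitSet extra = BitSet.of('y');
        BitSet union = vowels.or(extra);
        Console.WriteLine(union.ToString(","));  // 97,101,105,111,117,121

        // Element 101 ('e') is stored in word 101 >> 6 == 1, at bit 101 & 63 == 37.
    }
}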
#region Using Statements using System.Collections.Generic; using WaveEngine.Common.Graphics; using WaveEngine.Common.Math; using WaveEngine.Components.Graphics3D; using WaveEngine.Framework; using WaveEngine.Framework.Graphics; using WaveEngine.Framework.Physics3D; using WaveEngine.Framework.Services; using WaveEngine.Materials; using WaveOculusDemoProject.Components; using WaveOculusDemoProject.Entities; using WaveOculusDemoProject.Layers; using WaveOculusDemoProject.Audio; #endregion namespace WaveOculusDemoProject { public class MyScene : Scene { private float fps = 30; private int startFrame = 0; protected override void CreateScene() { this.CreateScriptManager(); this.CreateSounds(); this.CreateLayers(); this.CreateWingman(); this.CreateEnemyFighter(); this.CreateLaunchBase(); this.CreateEnvironment(); this.CreateCockpit(); } /// <summary> /// Create the script manager /// </summary> private void CreateScriptManager() { // Script Manager this.EntityManager.Add(new ScreenplayManagerDecorator("ScreenplayManager") { Fps = this.fps, CurrentFrameTime = this.startFrame }.Entity); } /// <summary> /// Load sounds used in the demo /// </summary> private void CreateSounds() { this.EntityManager.Add(new SoundManagerDecorator("soundManager")); } /// <summary> /// Create the cockpit entity, that contains: /// - The stereoscopic 3D camera, /// - The player controlled fighter, /// - The HUD /// </summary> private void CreateCockpit() { // Materials Dictionary<string, Material> materials = new Dictionary<string, Material>() { { "Seat", new BasicMaterial("Content/Textures/Cockpit.png", typeof(CockpitLayer)) }, { "Trail", new BasicMaterial("Content/Textures/Trail.wpk", DefaultLayers.Additive) { SamplerMode = AddressMode.LinearClamp } } }; // Player fighter entity var playerFighter = new Entity("Player") .AddComponent(new Transform3D()) .AddComponent(new Sound3DEmitter()) .AddComponent(new FollowPathBehavior("Content/Paths/Fighter_1.txt")) .AddComponent(new TrailManager()) .AddComponent(new TrailsRenderer()) .AddComponent(new MaterialsMap(materials)) .AddComponent(new AnimatedParamBehavior("Content/Paths/Fighter1_Lasers.txt")) .AddComponent(new FighterController(FighterState.Player, new List<Vector3>() { new Vector3(-3, -1.6f, -9f), new Vector3(3f, -1.6f, -9f) }, SoundType.Engines_1, SoundType.Shoot)) ; // Cockpit model Entity cockpitEntity = new Entity() .AddComponent(new Transform3D() { LocalScale = Vector3.One * 5, LocalPosition = Vector3.UnitZ * -0.8f }) .AddComponent(new MaterialsMap(materials)) .AddComponent(new Model("Content/Models/Cockpit.FBX")) .AddComponent(new ModelRenderer()) ; playerFighter.AddChild(cockpitEntity); // Hud Entity var hud = new HudDecorator("hud"); playerFighter.AddChild(hud.Entity); // Stereoscopic camera var stereoscopicCamera = new StereoscopicCameraDecorator("stereoCamera"); playerFighter.AddChild(stereoscopicCamera.Entity); // Guns Entity projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(3.376f, -2.689f, -3.499f), Orientation = Quaternion.CreateFromAxisAngle(Vector3.UnitY, MathHelper.Pi) }) .AddComponent(new ProjectileEmitter(800, 7f, 1, 0)); playerFighter.AddChild(projectileEmitter); projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(-3.376f, -2.689f, -3.499f), Orientation = Quaternion.CreateFromAxisAngle(Vector3.UnitY, MathHelper.Pi) }) .AddComponent(new ProjectileEmitter(800, 7f, 1, 0.5f)); playerFighter.AddChild(projectileEmitter); #if WINDOWS // 
In Windows platform, you must set the Oculus Rift rendertarget and view proyections for each eye. var OVRService = WaveServices.GetService<WaveEngine.OculusRift.OVRService>(); if (OVRService != null) { stereoscopicCamera.SetRenderTarget(OVRService.RenderTarget, OVRService.GetProjectionMatrix(WaveEngine.OculusRift.EyeType.Left), OVRService.GetProjectionMatrix(WaveEngine.OculusRift.EyeType.Right)); } #endif this.EntityManager.Add(playerFighter); } /// <summary> /// Create specific render layers /// </summary> private void CreateLayers() { this.RenderManager.RegisterLayerBefore(new StarfieldLayer(this.RenderManager), DefaultLayers.Alpha); this.RenderManager.RegisterLayerBefore(new PlanetLayer(this.RenderManager), DefaultLayers.Alpha); this.RenderManager.RegisterLayerAfter(new CockpitLayer(this.RenderManager), DefaultLayers.Additive); this.RenderManager.RegisterLayerAfter(new CockpitAdditiveLayer(this.RenderManager), typeof(CockpitLayer)); } /// <summary> /// Create scene environment: /// - Projectiles, Explosions /// - Asteroid field, /// - Starfield /// </summary> private void CreateEnvironment() { // Projectiles Entity blueProjectiles = new Entity("Projectiles") .AddComponent(new ProjectileManager()) .AddComponent(new ProjectilesRenderer()) .AddComponent(new MaterialsMap(new BasicMaterial("Content/Textures/Blaster.png", DefaultLayers.Additive))); this.EntityManager.Add(blueProjectiles); // Explosion ExplosionDecorator explosion = new ExplosionDecorator("explosion", 2556); explosion.Transform3D.Position = new Vector3(-1109.993f, 0.437f, -3785.457f); this.EntityManager.Add(explosion.Entity); // Add asteroidfield var asteroidField = new AsteroidFieldDecorator("asteroidField", new Vector3(2000, 2000, 0)); this.EntityManager.Add(asteroidField); // Add starfield var starField = new StarfieldDecorator("starfield"); this.EntityManager.Add(starField); Dictionary<string, Material> materials = new Dictionary<string, Material>() { { "Planet", new BasicMaterial("Content/Textures/planet.png", typeof(PlanetLayer)) }, { "BackSun", new BasicMaterial("Content/Textures/StarShine.png", typeof(StarfieldLayer)){DiffuseColor = Color.White * 0.8f} }, { "FrontSun", new BasicMaterial("Content/Textures/StarShine.png", DefaultLayers.Additive){DiffuseColor = Color.White * 0.4f} } }; // Add background planet var planet = new Entity("planet") .AddComponent(new Transform3D()) .AddComponent(new Model("Content/Models/Planet.FBX")) .AddComponent(new ModelRenderer()) .AddComponent(new MaterialsMap(materials)) .AddComponent(new FollowCameraBehavior()) ; this.EntityManager.Add(planet); PointLight light = new PointLight("light", new Vector3(-30000, 17000, -15000)) { Attenuation = 1900000, IsVisible = true, Color = Color.White }; this.EntityManager.Add(light); } /// <summary> /// Create wingman fighter /// </summary> private void CreateWingman() { Dictionary<string, Material> materials = new Dictionary<string, Material>() { { "Fighter", new NormalMappingMaterial( "Content/Textures/fighter_diffuse.png", "Content/Textures/fighter_normal_spec.png") { AmbientColor = GameResources.AmbientColor } }, { "Glow", new BasicMaterial("Content/Textures/fighter_diffuse.png") { DiffuseColor = GameResources.AmbientColor } }, { "Trail", new BasicMaterial("Content/Textures/Trail.wpk", DefaultLayers.Additive) { SamplerMode = AddressMode.LinearClamp } }, { "Thrust", new BasicMaterial("Content/Textures/Thrust.wpk", DefaultLayers.Additive) { SamplerMode = AddressMode.LinearClamp } } }; Entity fighter = new Entity("Wingman") .AddComponent(new 
Transform3D()) .AddComponent(new Sound3DEmitter()) .AddComponent(new FollowPathBehavior("Content/Paths/Fighter_2.txt", Quaternion.CreateFromAxisAngle(Vector3.UnitY, MathHelper.Pi))) .AddComponent(new AnimatedParamBehavior("Content/Paths/Fighter2_Lasers.txt")) .AddComponent(new Model("Content/Models/Fighter.FBX")) .AddComponent(new MaterialsMap(materials)) .AddComponent(new ModelRenderer()) .AddComponent(new TrailManager()) .AddComponent(new TrailsRenderer()) .AddComponent(new FighterController(FighterState.Wingman, new List<Vector3>() { new Vector3(-3, 0, 6.8f), new Vector3(3f, 0, 6.8f) }, SoundType.Engines_2, SoundType.Shoot)) ; EntityManager.Add(fighter); // Wingman guns Entity projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(3.376f, -0.689f, -3.499f) }) .AddComponent(new ProjectileEmitter(800, 7f, 1, 0)); fighter.AddChild(projectileEmitter); projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(-3.376f, -0.689f, -3.499f) }) .AddComponent(new ProjectileEmitter(800, 7f, 1, 0.5f)); fighter.AddChild(projectileEmitter); } /// <summary> /// Create the enemy fighter entity /// </summary> private void CreateEnemyFighter() { Dictionary<string, Material> materials = new Dictionary<string, Material>() { { "EnemyFighter", new NormalMappingMaterial( "Content/Textures/enemyfighter_diffuse.png", "Content/Textures/enemyfighter_normal_spec.png") { AmbientColor = GameResources.AmbientColor } }, { "EnemyGlow", new BasicMaterial("Content/Textures/enemyfighter_diffuse.png") { DiffuseColor = GameResources.AmbientColor } }, { "Trail", new BasicMaterial("Content/Textures/EnemyTrail.wpk", DefaultLayers.Additive) { SamplerMode = AddressMode.LinearClamp } }, { "EnemyThrust", new BasicMaterial("Content/Textures/EnemyThrust.wpk", DefaultLayers.Additive) { SamplerMode = AddressMode.LinearClamp } } }; Entity fighter = new Entity() .AddComponent(new Transform3D()) .AddComponent(new Sound3DEmitter()) .AddComponent(new FollowPathBehavior("Content/Paths/EnemyFighter.txt", Quaternion.CreateFromAxisAngle(Vector3.UnitY, MathHelper.Pi))) .AddComponent(new AnimatedParamBehavior("Content/Paths/Enemy_Lasers.txt")) .AddComponent(new Model("Content/Models/EnemyFighter.FBX")) .AddComponent(new MaterialsMap(materials)) .AddComponent(new ModelRenderer()) .AddComponent(new TrailManager()) .AddComponent(new TrailsRenderer()) .AddComponent(new FighterController(FighterState.Enemy, new List<Vector3>() { new Vector3(-4.5f, -3.7f, 9), new Vector3(4.5f , -3.7f, 9), new Vector3(-4.5f, 3.7f, 9), new Vector3(4.5f , 3.7f, 9), }, SoundType.Engines_2, SoundType.Shoot)) ; EntityManager.Add(fighter); // Gun entities Entity projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(9.197f, 1.99f, -13.959f) }) .AddComponent(new ProjectileEmitter(800, 5f, 1, 0)); fighter.AddChild(projectileEmitter); projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(-9.197f, 1.99f, -13.959f) }) .AddComponent(new ProjectileEmitter(800, 5f, 1, 0.75f)); fighter.AddChild(projectileEmitter); projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(9.197f, -1.99f, -13.959f) }) .AddComponent(new ProjectileEmitter(800, 5f, 1, 0.5f)); fighter.AddChild(projectileEmitter); projectileEmitter = new Entity() { Tag = "Gun" } .AddComponent(new Transform3D() { LocalPosition = new Vector3(-9.197f, -1.99f, 
-13.959f) }) .AddComponent(new ProjectileEmitter(800, 5f, 1, 0.25f)); fighter.AddChild(projectileEmitter); } /// <summary> /// Create the launch base entity /// </summary> private void CreateLaunchBase() { Dictionary<string, Material> materials = new Dictionary<string, Material>() { { "SpaceStation", new NormalMappingMaterial( "Content/Textures/spacestation_diffuse.jpg", "Content/Textures/spacestation_normal_spec.png") { AmbientColor = GameResources.AmbientColor } }, { "Glow", new BasicMaterial("Content/Textures/spacestation_glow.png", DefaultLayers.Additive) } }; Entity launchBase = new Entity() .AddComponent(new Transform3D()) .AddComponent(new Model("Content/Models/LaunchBase.FBX")) .AddComponent(new BoxCollider()) .AddComponent(new MaterialsMap(materials)) .AddComponent(new ModelRenderer()) ; EntityManager.Add(launchBase); } } }
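// Pattern sketch (not part of the demo scene above): every entity in MyScene is assembled the
// same way - create an Entity, chain AddComponent calls for its transform, model, materials and
// behaviors, optionally attach child entities with AddChild, and finally register the root with
// the scene's EntityManager. Only component types that already appear in this scene are used
// here; the entity name and content paths are illustrative.
using WaveEngine.Common.Math;
using WaveEngine.Components.Graphics3D;
using WaveEngine.Framework;
using WaveEngine.Framework.Graphics;
using WaveEngine.Materials;

namespace WaveOculusDemoProject
{
    public static class EntityCompositionSketch
    {
        public static Entity BuildDrone()
        {
            Entity drone = new Entity("Drone")
                .AddComponent(new Transform3D())
                .AddComponent(new Model("Content/Models/Fighter.FBX"))
                .AddComponent(new MaterialsMap(new BasicMaterial("Content/Textures/fighter_diffuse.png")))
                .AddComponent(new ModelRenderer());

            // Children share the parent's transform, like the cockpit and gun entities above.
            Entity gunMount = new Entity() { Tag = "Gun" }
                .AddComponent(new Transform3D() { LocalPosition = new Vector3(0f, 0f, -3.5f) });
            drone.AddChild(gunMount);

            // Inside a Scene method you would then call: this.EntityManager.Add(drone);
            return drone;
        }
    }
}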
namespace InControl { using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.InteropServices; using System.Text; using UnityEngine; using DeviceHandle = System.UInt32; public class NativeInputDeviceManager : InputDeviceManager { public static Func<NativeDeviceInfo, ReadOnlyCollection<NativeInputDevice>, NativeInputDevice> CustomFindDetachedDevice; List<NativeInputDevice> attachedDevices; List<NativeInputDevice> detachedDevices; List<NativeInputDeviceProfile> systemDeviceProfiles; List<NativeInputDeviceProfile> customDeviceProfiles; DeviceHandle[] deviceEvents; public NativeInputDeviceManager() { attachedDevices = new List<NativeInputDevice>(); detachedDevices = new List<NativeInputDevice>(); systemDeviceProfiles = new List<NativeInputDeviceProfile>( NativeInputDeviceProfileList.Profiles.Length ); customDeviceProfiles = new List<NativeInputDeviceProfile>(); deviceEvents = new DeviceHandle[32]; AddSystemDeviceProfiles(); var options = new NativeInputOptions(); options.enableXInput = InputManager.NativeInputEnableXInput; options.preventSleep = InputManager.NativeInputPreventSleep; if (InputManager.NativeInputUpdateRate > 0) { options.updateRate = (UInt16) InputManager.NativeInputUpdateRate; } else { options.updateRate = (UInt16) Mathf.FloorToInt( 1.0f / Time.fixedDeltaTime ); } Native.Init( options ); } public override void Destroy() { Native.Stop(); } UInt32 NextPowerOfTwo( UInt32 x ) { if (x < 0) { return 0; } --x; x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; x |= x >> 16; return x + 1; } public override void Update( ulong updateTick, float deltaTime ) { IntPtr data; var size = Native.GetDeviceEvents( out data ); if (size > 0) { Utility.ArrayExpand( ref deviceEvents, size ); MarshalUtility.Copy( data, deviceEvents, size ); var index = 0; var attachedEventCount = deviceEvents[index++]; for (var i = 0; i < attachedEventCount; i++) { var deviceHandle = deviceEvents[index++]; var stringBuilder = new StringBuilder( 256 ); stringBuilder.Append( "Attached native device with handle " + deviceHandle + ":\n" ); NativeDeviceInfo deviceInfo; if (Native.GetDeviceInfo( deviceHandle, out deviceInfo )) { stringBuilder.AppendFormat( "Name: {0}\n", deviceInfo.name ); stringBuilder.AppendFormat( "Driver Type: {0}\n", deviceInfo.driverType ); stringBuilder.AppendFormat( "Location ID: {0}\n", deviceInfo.location ); stringBuilder.AppendFormat( "Serial Number: {0}\n", deviceInfo.serialNumber ); stringBuilder.AppendFormat( "Vendor ID: 0x{0:x}\n", deviceInfo.vendorID ); stringBuilder.AppendFormat( "Product ID: 0x{0:x}\n", deviceInfo.productID ); stringBuilder.AppendFormat( "Version Number: 0x{0:x}\n", deviceInfo.versionNumber ); stringBuilder.AppendFormat( "Buttons: {0}\n", deviceInfo.numButtons ); stringBuilder.AppendFormat( "Analogs: {0}\n", deviceInfo.numAnalogs ); DetectDevice( deviceHandle, deviceInfo ); } Logger.LogInfo( stringBuilder.ToString() ); } var detachedEventCount = deviceEvents[index++]; for (var i = 0; i < detachedEventCount; i++) { var deviceHandle = deviceEvents[index++]; Logger.LogInfo( "Detached native device with handle " + deviceHandle + ":" ); var device = FindAttachedDevice( deviceHandle ); if (device != null) { DetachDevice( device ); } else { Logger.LogWarning( "Couldn't find device to detach with handle: " + deviceHandle ); } } } } void DetectDevice( DeviceHandle deviceHandle, NativeDeviceInfo deviceInfo ) { // Try to find a matching profile for this device. 
NativeInputDeviceProfile deviceProfile = null; deviceProfile = deviceProfile ?? customDeviceProfiles.Find( profile => profile.Matches( deviceInfo ) ); deviceProfile = deviceProfile ?? systemDeviceProfiles.Find( profile => profile.Matches( deviceInfo ) ); deviceProfile = deviceProfile ?? customDeviceProfiles.Find( profile => profile.LastResortMatches( deviceInfo ) ); deviceProfile = deviceProfile ?? systemDeviceProfiles.Find( profile => profile.LastResortMatches( deviceInfo ) ); // Find a matching previously attached device or create a new one. var device = FindDetachedDevice( deviceInfo ) ?? new NativeInputDevice(); device.Initialize( deviceHandle, deviceInfo, deviceProfile ); AttachDevice( device ); } void AttachDevice( NativeInputDevice device ) { detachedDevices.Remove( device ); attachedDevices.Add( device ); InputManager.AttachDevice( device ); } void DetachDevice( NativeInputDevice device ) { attachedDevices.Remove( device ); detachedDevices.Add( device ); InputManager.DetachDevice( device ); } NativeInputDevice FindAttachedDevice( DeviceHandle deviceHandle ) { var attachedDevicesCount = attachedDevices.Count; for (var i = 0; i < attachedDevicesCount; i++) { var device = attachedDevices[i]; if (device.Handle == deviceHandle) { return device; } } return null; } NativeInputDevice FindDetachedDevice( NativeDeviceInfo deviceInfo ) { var devices = new ReadOnlyCollection<NativeInputDevice>( detachedDevices ); if (CustomFindDetachedDevice != null) { return CustomFindDetachedDevice( deviceInfo, devices ); } return SystemFindDetachedDevice( deviceInfo, devices ); } static NativeInputDevice SystemFindDetachedDevice( NativeDeviceInfo deviceInfo, ReadOnlyCollection<NativeInputDevice> detachedDevices ) { var detachedDevicesCount = detachedDevices.Count; for (var i = 0; i < detachedDevicesCount; i++) { var device = detachedDevices[i]; if (device.Info.HasSameVendorID( deviceInfo ) && device.Info.HasSameProductID( deviceInfo ) && device.Info.HasSameSerialNumber( deviceInfo )) { return device; } } for (var i = 0; i < detachedDevicesCount; i++) { var device = detachedDevices[i]; if (device.Info.HasSameVendorID( deviceInfo ) && device.Info.HasSameProductID( deviceInfo ) && device.Info.HasSameLocation( deviceInfo )) { return device; } } for (var i = 0; i < detachedDevicesCount; i++) { var device = detachedDevices[i]; if (device.Info.HasSameVendorID( deviceInfo ) && device.Info.HasSameProductID( deviceInfo ) && device.Info.HasSameVersionNumber( deviceInfo )) { return device; } } for (var i = 0; i < detachedDevicesCount; i++) { var device = detachedDevices[i]; if (device.Info.HasSameLocation( deviceInfo )) { return device; } } return null; } void AddSystemDeviceProfile( NativeInputDeviceProfile deviceProfile ) { if (deviceProfile.IsSupportedOnThisPlatform) { systemDeviceProfiles.Add( deviceProfile ); } } void AddSystemDeviceProfiles() { foreach (var typeName in NativeInputDeviceProfileList.Profiles) { var deviceProfile = (NativeInputDeviceProfile) Activator.CreateInstance( Type.GetType( typeName ) ); AddSystemDeviceProfile( deviceProfile ); } } public static bool CheckPlatformSupport( ICollection<string> errors ) { if (Application.platform != RuntimePlatform.OSXPlayer && Application.platform != RuntimePlatform.OSXEditor && Application.platform != RuntimePlatform.WindowsPlayer && Application.platform != RuntimePlatform.WindowsEditor) { errors.Add( "Native input is currently only supported on Windows and Mac." 
); return false; } #if UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 if (!Application.HasProLicense()) { if (errors != null) { errors.Add( "Unity 4 Professional or Unity 5 is required for native input support." ); } return false; } #endif try { NativeVersionInfo versionInfo; Native.GetVersionInfo( out versionInfo ); Logger.LogInfo( "InControl Native (version " + versionInfo.major + "." + versionInfo.minor + "." + versionInfo.patch + ")" ); } catch (DllNotFoundException e) { if (errors != null) { errors.Add( e.Message + Utility.PluginFileExtension() + " could not be found or is missing a dependency." ); } return false; } return true; } internal static bool Enable() { var errors = new List<string>(); if (CheckPlatformSupport( errors )) { InputManager.AddDeviceManager<NativeInputDeviceManager>(); return true; } foreach (var error in errors) { Debug.LogError( "Error enabling NativeInputDeviceManager: " + error ); } return false; } } }
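// --- Illustrative usage sketch (the bootstrap class name is hypothetical; Enable() above
// performs the same two steps internally) ---
// Shows how the public surface above fits together from game code: verify platform support
// first, then register the native device manager so devices start attaching on later updates.
namespace InControl
{
	using System.Collections.Generic;
	using UnityEngine;


	public class NativeInputBootstrapExample : MonoBehaviour
	{
		void OnEnable()
		{
			var errors = new List<string>();
			if (NativeInputDeviceManager.CheckPlatformSupport( errors ))
			{
				InputManager.AddDeviceManager<NativeInputDeviceManager>();
			}
			else
			{
				foreach (var error in errors)
				{
					Debug.LogWarning( "Native input not available: " + error );
				}
			}
		}
	}
}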
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Reflection; using Xunit; namespace System.Linq.Expressions.Tests { public static class UnaryPlusTests { #region Test methods [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusShortTest(bool useInterpreter) { short[] values = new short[] { 0, 1, -1, short.MinValue, short.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusShort(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusUShortTest(bool useInterpreter) { ushort[] values = new ushort[] { 0, 1, ushort.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusUShort(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusIntTest(bool useInterpreter) { int[] values = new int[] { 0, 1, -1, int.MinValue, int.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusInt(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusUIntTest(bool useInterpreter) { uint[] values = new uint[] { 0, 1, uint.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusUInt(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusLongTest(bool useInterpreter) { long[] values = new long[] { 0, 1, -1, long.MinValue, long.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusLong(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusULongTest(bool useInterpreter) { ulong[] values = new ulong[] { 0, 1, ulong.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusULong(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusFloatTest(bool useInterpreter) { float[] values = new float[] { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusFloat(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static void CheckUnaryArithmeticUnaryPlusDoubleTest(bool useInterpreter) { double[] values = new double[] { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusDouble(values[i], useInterpreter); } } [Theory, ClassData(typeof(CompilationTypes))] //[WorkItem(3196, "https://github.com/dotnet/corefx/issues/3196")] public static 
void CheckUnaryArithmeticUnaryPlusDecimalTest(bool useInterpreter) { decimal[] values = new decimal[] { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue }; for (int i = 0; i < values.Length; i++) { VerifyArithmeticUnaryPlusDecimal(values[i], useInterpreter); } } [Fact] public static void ToStringTest() { var e = Expression.UnaryPlus(Expression.Parameter(typeof(int), "x")); Assert.Equal("+x", e.ToString()); } #endregion #region Test verifiers private static void VerifyArithmeticUnaryPlusShort(short value, bool useInterpreter) { Expression<Func<short>> e = Expression.Lambda<Func<short>>( Expression.UnaryPlus(Expression.Constant(value, typeof(short))), Enumerable.Empty<ParameterExpression>()); Func<short> f = e.Compile(useInterpreter); Assert.Equal((short)(+value), f()); } private static void VerifyArithmeticUnaryPlusUShort(ushort value, bool useInterpreter) { Expression<Func<ushort>> e = Expression.Lambda<Func<ushort>>( Expression.UnaryPlus(Expression.Constant(value, typeof(ushort))), Enumerable.Empty<ParameterExpression>()); Func<ushort> f = e.Compile(useInterpreter); Assert.Equal((ushort)(+value), f()); } private static void VerifyArithmeticUnaryPlusInt(int value, bool useInterpreter) { Expression<Func<int>> e = Expression.Lambda<Func<int>>( Expression.UnaryPlus(Expression.Constant(value, typeof(int))), Enumerable.Empty<ParameterExpression>()); Func<int> f = e.Compile(useInterpreter); Assert.Equal((int)(+value), f()); } private static void VerifyArithmeticUnaryPlusUInt(uint value, bool useInterpreter) { Expression<Func<uint>> e = Expression.Lambda<Func<uint>>( Expression.UnaryPlus(Expression.Constant(value, typeof(uint))), Enumerable.Empty<ParameterExpression>()); Func<uint> f = e.Compile(useInterpreter); Assert.Equal((uint)(+value), f()); } private static void VerifyArithmeticUnaryPlusLong(long value, bool useInterpreter) { Expression<Func<long>> e = Expression.Lambda<Func<long>>( Expression.UnaryPlus(Expression.Constant(value, typeof(long))), Enumerable.Empty<ParameterExpression>()); Func<long> f = e.Compile(useInterpreter); Assert.Equal((long)(+value), f()); } private static void VerifyArithmeticUnaryPlusULong(ulong value, bool useInterpreter) { Expression<Func<ulong>> e = Expression.Lambda<Func<ulong>>( Expression.UnaryPlus(Expression.Constant(value, typeof(ulong))), Enumerable.Empty<ParameterExpression>()); Func<ulong> f = e.Compile(useInterpreter); Assert.Equal((ulong)(+value), f()); } private static void VerifyArithmeticUnaryPlusFloat(float value, bool useInterpreter) { Expression<Func<float>> e = Expression.Lambda<Func<float>>( Expression.UnaryPlus(Expression.Constant(value, typeof(float))), Enumerable.Empty<ParameterExpression>()); Func<float> f = e.Compile(useInterpreter); Assert.Equal((float)(+value), f()); } private static void VerifyArithmeticUnaryPlusDouble(double value, bool useInterpreter) { Expression<Func<double>> e = Expression.Lambda<Func<double>>( Expression.UnaryPlus(Expression.Constant(value, typeof(double))), Enumerable.Empty<ParameterExpression>()); Func<double> f = e.Compile(useInterpreter); Assert.Equal((double)(+value), f()); } private static void VerifyArithmeticUnaryPlusDecimal(decimal value, bool useInterpreter) { Expression<Func<decimal>> e = Expression.Lambda<Func<decimal>>( Expression.UnaryPlus(Expression.Constant(value, typeof(decimal))), Enumerable.Empty<ParameterExpression>()); Func<decimal> f = e.Compile(useInterpreter); Assert.Equal((decimal)(+value), f()); } #endregion } }
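// --- Illustrative sketch (not part of the test suite; the class and method names are
// illustrative only) ---
// A standalone version of the pattern the verifiers above use: build an Expression.UnaryPlus
// node over a constant, wrap it in a parameterless lambda, compile it, and invoke it.
using System;
using System.Linq.Expressions;

public static class UnaryPlusExample
{
    public static int EvaluateUnaryPlus(int value)
    {
        Expression<Func<int>> e = Expression.Lambda<Func<int>>(
            Expression.UnaryPlus(Expression.Constant(value, typeof(int))));

        Func<int> f = e.Compile();
        return f(); // unary plus is the identity for the built-in numeric types, so this returns 'value'
    }
}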
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using gcdv = Google.Cloud.Dialogflow.V2; using sys = System; namespace Google.Cloud.Dialogflow.V2 { /// <summary>Resource name for the <c>Intent</c> resource.</summary> public sealed partial class IntentName : gax::IResourceName, sys::IEquatable<IntentName> { /// <summary>The possible contents of <see cref="IntentName"/>.</summary> public enum ResourceNameType { /// <summary>An unparsed resource name.</summary> Unparsed = 0, /// <summary>A resource name with pattern <c>projects/{project}/agent/intents/{intent}</c>.</summary> ProjectIntent = 1, /// <summary> /// A resource name with pattern <c>projects/{project}/locations/{location}/agent/intents/{intent}</c>. /// </summary> ProjectLocationIntent = 2, } private static gax::PathTemplate s_projectIntent = new gax::PathTemplate("projects/{project}/agent/intents/{intent}"); private static gax::PathTemplate s_projectLocationIntent = new gax::PathTemplate("projects/{project}/locations/{location}/agent/intents/{intent}"); /// <summary>Creates a <see cref="IntentName"/> containing an unparsed resource name.</summary> /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param> /// <returns> /// A new instance of <see cref="IntentName"/> containing the provided <paramref name="unparsedResourceName"/>. /// </returns> public static IntentName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) => new IntentName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName))); /// <summary> /// Creates a <see cref="IntentName"/> with the pattern <c>projects/{project}/agent/intents/{intent}</c>. /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. Must not be <c>null</c> or empty.</param> /// <returns>A new instance of <see cref="IntentName"/> constructed from the provided ids.</returns> public static IntentName FromProjectIntent(string projectId, string intentId) => new IntentName(ResourceNameType.ProjectIntent, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), intentId: gax::GaxPreconditions.CheckNotNullOrEmpty(intentId, nameof(intentId))); /// <summary> /// Creates a <see cref="IntentName"/> with the pattern /// <c>projects/{project}/locations/{location}/agent/intents/{intent}</c>. /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. 
Must not be <c>null</c> or empty.</param> /// <returns>A new instance of <see cref="IntentName"/> constructed from the provided ids.</returns> public static IntentName FromProjectLocationIntent(string projectId, string locationId, string intentId) => new IntentName(ResourceNameType.ProjectLocationIntent, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), intentId: gax::GaxPreconditions.CheckNotNullOrEmpty(intentId, nameof(intentId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/agent/intents/{intent}</c>. /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/agent/intents/{intent}</c>. /// </returns> public static string Format(string projectId, string intentId) => FormatProjectIntent(projectId, intentId); /// <summary> /// Formats the IDs into the string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/agent/intents/{intent}</c>. /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/agent/intents/{intent}</c>. /// </returns> public static string FormatProjectIntent(string projectId, string intentId) => s_projectIntent.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(intentId, nameof(intentId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/locations/{location}/agent/intents/{intent}</c>. /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="IntentName"/> with pattern /// <c>projects/{project}/locations/{location}/agent/intents/{intent}</c>. /// </returns> public static string FormatProjectLocationIntent(string projectId, string locationId, string intentId) => s_projectLocationIntent.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(intentId, nameof(intentId))); /// <summary>Parses the given resource name string into a new <see cref="IntentName"/> instance.</summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>projects/{project}/agent/intents/{intent}</c></description></item> /// <item> /// <description><c>projects/{project}/locations/{location}/agent/intents/{intent}</c></description> /// </item> /// </list> /// </remarks> /// <param name="intentName">The resource name in string form. 
Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="IntentName"/> if successful.</returns> public static IntentName Parse(string intentName) => Parse(intentName, false); /// <summary> /// Parses the given resource name string into a new <see cref="IntentName"/> instance; optionally allowing an /// unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>projects/{project}/agent/intents/{intent}</c></description></item> /// <item> /// <description><c>projects/{project}/locations/{location}/agent/intents/{intent}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="intentName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <returns>The parsed <see cref="IntentName"/> if successful.</returns> public static IntentName Parse(string intentName, bool allowUnparsed) => TryParse(intentName, allowUnparsed, out IntentName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern."); /// <summary> /// Tries to parse the given resource name string into a new <see cref="IntentName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>projects/{project}/agent/intents/{intent}</c></description></item> /// <item> /// <description><c>projects/{project}/locations/{location}/agent/intents/{intent}</c></description> /// </item> /// </list> /// </remarks> /// <param name="intentName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="result"> /// When this method returns, the parsed <see cref="IntentName"/>, or <c>null</c> if parsing failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string intentName, out IntentName result) => TryParse(intentName, false, out result); /// <summary> /// Tries to parse the given resource name string into a new <see cref="IntentName"/> instance; optionally /// allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item><description><c>projects/{project}/agent/intents/{intent}</c></description></item> /// <item> /// <description><c>projects/{project}/locations/{location}/agent/intents/{intent}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="intentName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. 
/// </param> /// <param name="result"> /// When this method returns, the parsed <see cref="IntentName"/>, or <c>null</c> if parsing failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string intentName, bool allowUnparsed, out IntentName result) { gax::GaxPreconditions.CheckNotNull(intentName, nameof(intentName)); gax::TemplatedResourceName resourceName; if (s_projectIntent.TryParseName(intentName, out resourceName)) { result = FromProjectIntent(resourceName[0], resourceName[1]); return true; } if (s_projectLocationIntent.TryParseName(intentName, out resourceName)) { result = FromProjectLocationIntent(resourceName[0], resourceName[1], resourceName[2]); return true; } if (allowUnparsed) { if (gax::UnparsedResourceName.TryParse(intentName, out gax::UnparsedResourceName unparsedResourceName)) { result = FromUnparsed(unparsedResourceName); return true; } } result = null; return false; } private IntentName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string intentId = null, string locationId = null, string projectId = null) { Type = type; UnparsedResource = unparsedResourceName; IntentId = intentId; LocationId = locationId; ProjectId = projectId; } /// <summary> /// Constructs a new instance of a <see cref="IntentName"/> class from the component parts of pattern /// <c>projects/{project}/agent/intents/{intent}</c> /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="intentId">The <c>Intent</c> ID. Must not be <c>null</c> or empty.</param> public IntentName(string projectId, string intentId) : this(ResourceNameType.ProjectIntent, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), intentId: gax::GaxPreconditions.CheckNotNullOrEmpty(intentId, nameof(intentId))) { } /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary> public ResourceNameType Type { get; } /// <summary> /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an /// unparsed resource name. /// </summary> public gax::UnparsedResourceName UnparsedResource { get; } /// <summary> /// The <c>Intent</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance. /// </summary> public string IntentId { get; } /// <summary> /// The <c>Location</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance. /// </summary> public string LocationId { get; } /// <summary> /// The <c>Project</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance. 
/// </summary> public string ProjectId { get; } /// <summary>Whether this instance contains a resource name with a known pattern.</summary> public bool IsKnownPattern => Type != ResourceNameType.Unparsed; /// <summary>The string representation of the resource name.</summary> /// <returns>The string representation of the resource name.</returns> public override string ToString() { switch (Type) { case ResourceNameType.Unparsed: return UnparsedResource.ToString(); case ResourceNameType.ProjectIntent: return s_projectIntent.Expand(ProjectId, IntentId); case ResourceNameType.ProjectLocationIntent: return s_projectLocationIntent.Expand(ProjectId, LocationId, IntentId); default: throw new sys::InvalidOperationException("Unrecognized resource-type."); } } /// <summary>Returns a hash code for this resource name.</summary> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc/> public override bool Equals(object obj) => Equals(obj as IntentName); /// <inheritdoc/> public bool Equals(IntentName other) => ToString() == other?.ToString(); /// <inheritdoc/> public static bool operator ==(IntentName a, IntentName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false); /// <inheritdoc/> public static bool operator !=(IntentName a, IntentName b) => !(a == b); } public partial class Intent { /// <summary> /// <see cref="gcdv::IntentName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public gcdv::IntentName IntentName { get => string.IsNullOrEmpty(Name) ? null : gcdv::IntentName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } public partial class ListIntentsRequest { /// <summary><see cref="AgentName"/>-typed view over the <see cref="Parent"/> resource name property.</summary> public AgentName ParentAsAgentName { get => string.IsNullOrEmpty(Parent) ? null : AgentName.Parse(Parent, allowUnparsed: true); set => Parent = value?.ToString() ?? ""; } } public partial class GetIntentRequest { /// <summary> /// <see cref="gcdv::IntentName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public gcdv::IntentName IntentName { get => string.IsNullOrEmpty(Name) ? null : gcdv::IntentName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } public partial class CreateIntentRequest { /// <summary><see cref="AgentName"/>-typed view over the <see cref="Parent"/> resource name property.</summary> public AgentName ParentAsAgentName { get => string.IsNullOrEmpty(Parent) ? null : AgentName.Parse(Parent, allowUnparsed: true); set => Parent = value?.ToString() ?? ""; } } public partial class DeleteIntentRequest { /// <summary> /// <see cref="gcdv::IntentName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public gcdv::IntentName IntentName { get => string.IsNullOrEmpty(Name) ? null : gcdv::IntentName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } public partial class BatchUpdateIntentsRequest { /// <summary><see cref="AgentName"/>-typed view over the <see cref="Parent"/> resource name property.</summary> public AgentName ParentAsAgentName { get => string.IsNullOrEmpty(Parent) ? null : AgentName.Parse(Parent, allowUnparsed: true); set => Parent = value?.ToString() ?? ""; } } public partial class BatchDeleteIntentsRequest { /// <summary><see cref="AgentName"/>-typed view over the <see cref="Parent"/> resource name property.</summary> public AgentName ParentAsAgentName { get => string.IsNullOrEmpty(Parent) ? 
null : AgentName.Parse(Parent, allowUnparsed: true); set => Parent = value?.ToString() ?? ""; } } }
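// --- Illustrative usage sketch (not generated code; the project and intent IDs are placeholders) ---
// Shows the round trip the resource-name class above supports: build an IntentName from IDs,
// render the resource-name string, and parse it back with TryParse.
using Google.Cloud.Dialogflow.V2;

public static class IntentNameExample
{
    public static void Demo()
    {
        IntentName name = IntentName.FromProjectIntent("my-project", "my-intent");
        string resourceName = name.ToString(); // "projects/my-project/agent/intents/my-intent"

        IntentName parsed;
        if (IntentName.TryParse(resourceName, out parsed))
        {
            bool same = parsed == name; // equality compares the canonical string form
        }
    }
}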
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * The PdxSerializer QuickStart Example.
 *
 * This example shows IPdxSerializer usage. It uses the built-in ReflectionBasedAutoSerializer
 * as the IPdxSerializer; that class implements the IPdxSerializer interface and uses reflection
 * to serialize and de-serialize class members.
 * The PdxIdentityField attribute can be placed on member fields to mark them as identity fields;
 * ReflectionBasedAutoSerializer uses the attribute to set the field as an identity field in the Pdx data.
 *
 * The AutoSerializerEx class extends ReflectionBasedAutoSerializer and overrides the WriteTransform
 * and ReadTransform methods, where it handles serialization of the .NET decimal and Guid types.
 *
 * The PdxTypeMapper class demonstrates how an application can map .NET types to pdx (java) types.
 *
 * Domain classes should have a default (zero-argument) constructor.
 *
 * The example then demonstrates querying the .NET objects without corresponding java classes on the server.
 *
 * The example takes the following steps:
 *
 * 1. Create a Geode Cache.
 * 2. Get the Person Region from the Cache.
 * 3. Populate some Person objects on the Region.
 * 4. Get the pool, then get the Query Service from the Pool. The pool is defined in clientPdxSerializer.xml.
 * 5. Execute a query that returns a Result Set.
 * 6. Execute a query that returns a Struct Set.
 * 7. Execute the region shortcut/convenience query methods.
 * 8. Close the Cache.
* */ // Use standard namespaces using System; using System.Reflection; // Use the Geode namespace using Apache.Geode.Client; namespace Apache.Geode.Client.QuickStart { public class Person { private string name; //this is the only field used on server to create hashcode and use in equals method [PdxIdentityField] private int id; private int age; //private decimal salary; private Guid guid = Guid.NewGuid(); public Person() { } public Person(string name, int id, int age) { this.name = name; this.id = id; this.age = age; //this.salary = 217773.12321M; } #region Public Properties public string Name { get { return name; } } public int ID { get { return id; } } public int Age { get { return age; } } #endregion } //This demonstrates, how to extend ReflectionBasedAutoSerializer public class AutoSerializerEx : ReflectionBasedAutoSerializer { public override object WriteTransform(FieldInfo fi, Type type, object originalValue) { if (fi.FieldType.Equals(Type.GetType("System.Guid"))) { return originalValue.ToString(); } else if (fi.FieldType.Equals(Type.GetType("System.Decimal"))) { return originalValue.ToString(); } else return base.WriteTransform(fi, type, originalValue); } public override object ReadTransform(FieldInfo fi, Type type, object serializeValue) { if (fi.FieldType.Equals(Type.GetType("System.Guid"))) { Guid g = new Guid((string)serializeValue); return g; } else if (fi.FieldType.Equals(Type.GetType("System.Decimal"))) { return Convert.ToDecimal((string)serializeValue); } else return base.ReadTransform(fi, type, serializeValue); } public override FieldType GetFieldType(FieldInfo fi, Type type) { if (fi.FieldType.Equals(Type.GetType("System.Guid")) || fi.FieldType.Equals(Type.GetType("System.Decimal"))) return FieldType.STRING; return base.GetFieldType(fi, type); } public override bool IsIdentityField(FieldInfo fi, Type type) { if (fi.Name == "_identityField") return true; return base.IsIdentityField(fi, type); } public override string GetFieldName(FieldInfo fi, Type type) { if (fi.Name == "_nameChange") return fi.Name + "NewName"; return fi.Name ; } public override bool IsFieldIncluded(FieldInfo fi, Type type) { if (fi.Name == "_notInclude") return false; return base.IsFieldIncluded(fi, type); } } //This demonstrates, how to map .NET type to pdx type or java type public class PdxTypeMapper : IPdxTypeMapper { public string ToPdxTypeName(string localTypeName) { return "pdx_" + localTypeName; } public string FromPdxTypeName(string pdxTypeName) { return pdxTypeName.Substring(4);//need to extract "pdx_" } } // The PdxRemoteQuery QuickStart example. class PdxSerializer { static void Main(string[] args) { try { CacheFactory cacheFactory = CacheFactory.CreateCacheFactory(); Console.WriteLine("Connected to the Geode Distributed System"); // Create a Geode Cache with the "clientPdxRemoteQuery.xml" Cache XML file. Cache cache = cacheFactory.Set("cache-xml-file", "XMLs/clientPdxSerializer.xml").Create(); Console.WriteLine("Created the Geode Cache"); // Get the example Region from the Cache which is declared in the Cache XML file. 
IRegion<string, Person> region = cache.GetRegion<string, Person>("Person"); Console.WriteLine("Obtained the Region from the Cache"); //to map .net type tp pdx type or java type Serializable.SetPdxTypeMapper(new PdxTypeMapper()); // Register inbuilt reflection based autoserializer to serialize the domain types(Person class) as pdx format Serializable.RegisterPdxSerializer(new AutoSerializerEx()); Console.WriteLine("Registered Person Query Objects"); // Populate the Region with some PortfolioPdx objects. Person p1 = new Person("John", 1 /*ID*/, 23 /*age*/); Person p2 = new Person("Jack", 2 /*ID*/, 20 /*age*/); Person p3 = new Person("Tony", 3 /*ID*/, 35 /*age*/); region["Key1"] = p1; region["Key2"] = p2; region["Key3"] = p3; Console.WriteLine("Populated some Person Objects"); //find the pool Pool pool = PoolManager.Find("examplePool"); // Get the QueryService from the pool QueryService<string, Person> qrySvc = pool.GetQueryService<string, Person>(); Console.WriteLine("Got the QueryService from the Pool"); // Execute a Query which returns a ResultSet. Query<Person> qry = qrySvc.NewQuery("SELECT DISTINCT * FROM /Person"); ISelectResults<Person> results = qry.Execute(); Console.WriteLine("ResultSet Query returned {0} rows", results.Size); // Execute a Query which returns a StructSet. QueryService<string, Struct> qrySvc1 = pool.GetQueryService<string, Struct>(); Query<Struct> qry1 = qrySvc1.NewQuery("SELECT name, age FROM /Person WHERE id = 1"); ISelectResults<Struct> results1 = qry1.Execute(); Console.WriteLine("StructSet Query returned {0} rows", results1.Size); // Iterate through the rows of the query result. int rowCount = 0; foreach (Struct si in results1) { rowCount++; Console.WriteLine("Row {0} Column 1 is named {1}, value is {2}", rowCount, si.Set.GetFieldName(0), si[0].ToString()); Console.WriteLine("Row {0} Column 2 is named {1}, value is {2}", rowCount, si.Set.GetFieldName(1), si[1].ToString()); } // Close the Geode Cache. cache.Close(); Console.WriteLine("Closed the Geode Cache"); } // An exception should not occur catch (GeodeException gfex) { Console.WriteLine("PdxSerializer Geode Exception: {0}", gfex.Message); } } } }
// // SaslMechanism.cs // // Author: Jeffrey Stedfast <[email protected]> // // Copyright (c) 2013-2014 Xamarin Inc. (www.xamarin.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // using System; using System.Net; using System.Text; namespace MailKit.Security { /// <summary> /// A SASL authentication mechanism. /// </summary> /// <remarks> /// Authenticating via a SASL mechanism may be a multi-step process. /// To determine if the mechanism has completed the necessary steps /// to authentication, check the <see cref="IsAuthenticated"/> after /// each call to <see cref="Challenge(string)"/>. /// </remarks> public abstract class SaslMechanism { /// <summary> /// The supported authentication mechanisms in order of strongest to weakest. /// </summary> /// <remarks> /// Use by the various clients when authenticating via SASL to determine /// which order the SASL mechanisms supported by the server should be tried. /// </remarks> public static readonly string[] AuthMechanismRank = { "XOAUTH2", "SCRAM-SHA-1", "NTLM", "CRAM-MD5", "DIGEST-MD5", "PLAIN", "LOGIN" }; /// <summary> /// Initializes a new instance of the <see cref="MailKit.Security.SaslMechanism"/> class. /// </summary> /// <remarks> /// Creates a new SASL context. /// </remarks> /// <param name="uri">The URI of the service.</param> /// <param name="credentials">The user's credentials.</param> protected SaslMechanism (Uri uri, ICredentials credentials) { Credentials = credentials; Uri = uri; } /// <summary> /// Gets the name of the mechanism. /// </summary> /// <remarks> /// Gets the name of the mechanism. /// </remarks> /// <value>The name of the mechanism.</value> public abstract string MechanismName { get; } /// <summary> /// Gets the user's credentials. /// </summary> /// <remarks> /// Gets the user's credentials. /// </remarks> /// <value>The user's credentials.</value> public ICredentials Credentials { get; private set; } /// <summary> /// Gets whether or not the mechanism supports an initial response (SASL-IR). /// </summary> /// <remarks> /// SASL mechanisms that support sending an initial client response to the server /// should return <value>true</value>. /// </remarks> /// <value><c>true</c> if the mechanism supports an initial response; otherwise, <c>false</c>.</value> public virtual bool SupportsInitialResponse { get { return false; } } /// <summary> /// Gets or sets whether the SASL mechanism has finished authenticating. 
/// </summary> /// <remarks> /// Gets or sets whether the SASL mechanism has finished authenticating. /// </remarks> /// <value><c>true</c> if the SASL mechanism has finished authenticating; otherwise, <c>false</c>.</value> public bool IsAuthenticated { get; protected set; } /// <summary> /// Gets or sets the URI of the service. /// </summary> /// <remarks> /// Gets or sets the URI of the service. /// </remarks> /// <value>The URI of the service.</value> public Uri Uri { get; protected set; } /// <summary> /// Parses the server's challenge token and returns the next challenge response. /// </summary> /// <remarks> /// Parses the server's challenge token and returns the next challenge response. /// </remarks> /// <returns>The next challenge response.</returns> /// <param name="token">The server's challenge token.</param> /// <param name="startIndex">The index into the token specifying where the server's challenge begins.</param> /// <param name="length">The length of the server's challenge.</param> /// <exception cref="System.InvalidOperationException"> /// The SASL mechanism is already authenticated. /// </exception> /// <exception cref="System.NotSupportedException"> /// THe SASL mechanism does not support SASL-IR. /// </exception> /// <exception cref="SaslException"> /// An error has occurred while parsing the server's challenge token. /// </exception> protected abstract byte[] Challenge (byte[] token, int startIndex, int length); /// <summary> /// Decodes the base64-encoded server challenge and returns the next challenge response encoded in base64. /// </summary> /// <remarks> /// Decodes the base64-encoded server challenge and returns the next challenge response encoded in base64. /// </remarks> /// <returns>The next base64-encoded challenge response.</returns> /// <param name="token">The server's base64-encoded challenge token.</param> /// <exception cref="System.InvalidOperationException"> /// The SASL mechanism is already authenticated. /// </exception> /// <exception cref="System.NotSupportedException"> /// THe SASL mechanism does not support SASL-IR. /// </exception> /// <exception cref="SaslException"> /// An error has occurred while parsing the server's challenge token. /// </exception> public string Challenge (string token) { byte[] decoded; int length; if (token != null) { decoded = Convert.FromBase64String (token); length = decoded.Length; } else { decoded = null; length = 0; } var challenge = Challenge (decoded, 0, length); if (challenge == null) return null; return Convert.ToBase64String (challenge); } /// <summary> /// Resets the state of the SASL mechanism. /// </summary> /// <remarks> /// Resets the state of the SASL mechanism. /// </remarks> public virtual void Reset () { IsAuthenticated = false; } /// <summary> /// Determines if the specified SASL mechanism is supported by MailKit. /// </summary> /// <remarks> /// Use this method to make sure that a SASL mechanism is supported before calling /// <see cref="Create"/>. /// </remarks> /// <returns><c>true</c> if the specified SASL mechanism is supported; otherwise, <c>false</c>.</returns> /// <param name="mechanism">The name of the SASL mechanism.</param> /// <exception cref="System.ArgumentNullException"> /// <paramref name="mechanism"/> is <c>null</c>. 
/// </exception> public static bool IsSupported (string mechanism) { if (mechanism == null) throw new ArgumentNullException ("mechanism"); switch (mechanism) { case "SCRAM-SHA-1": return true; case "DIGEST-MD5": return true; case "CRAM-MD5": return true; case "XOAUTH2": return true; case "PLAIN": return true; case "LOGIN": return true; case "NTLM": return true; default: return false; } } /// <summary> /// Create an instance of the specified SASL mechanism using the uri and credentials. /// </summary> /// <remarks> /// If unsure that a particular SASL mechanism is supported, you should first call /// <see cref="IsSupported"/>. /// </remarks> /// <returns>An instance of the requested SASL mechanism if supported; otherwise <c>null</c>.</returns> /// <param name="mechanism">The name of the SASL mechanism.</param> /// <param name="uri">The URI of the service to authenticate against.</param> /// <param name="credentials">The user's credentials.</param> /// <exception cref="System.ArgumentNullException"> /// <para><paramref name="mechanism"/> is <c>null</c>.</para> /// <para>-or-</para> /// <para><paramref name="uri"/> is <c>null</c>.</para> /// <para>-or-</para> /// <para><paramref name="credentials"/> is <c>null</c>.</para> /// </exception> public static SaslMechanism Create (string mechanism, Uri uri, ICredentials credentials) { if (mechanism == null) throw new ArgumentNullException ("mechanism"); if (uri == null) throw new ArgumentNullException ("uri"); if (credentials == null) throw new ArgumentNullException ("credentials"); switch (mechanism) { //case "KERBEROS_V4": return null; case "SCRAM-SHA-1": return new SaslMechanismScramSha1 (uri, credentials); case "DIGEST-MD5": return new SaslMechanismDigestMd5 (uri, credentials); case "CRAM-MD5": return new SaslMechanismCramMd5 (uri, credentials); //case "GSSAPI": return null; case "XOAUTH2": return new SaslMechanismOAuth2 (uri, credentials); case "PLAIN": return new SaslMechanismPlain (uri, credentials); case "LOGIN": return new SaslMechanismLogin (uri, credentials); case "NTLM": return new SaslMechanismNtlm (uri, credentials); default: return null; } } /// <summary> /// Determines if the character is a non-ASCII space. /// </summary> /// <remarks> /// This list was obtained from http://tools.ietf.org/html/rfc3454#appendix-C.1.2 /// </remarks> /// <returns><c>true</c> if the character is a non-ASCII space; otherwise, <c>false</c>.</returns> /// <param name="c">The character.</param> static bool IsNonAsciiSpace (char c) { switch (c) { case '\u00A0': // NO-BREAK SPACE case '\u1680': // OGHAM SPACE MARK case '\u2000': // EN QUAD case '\u2001': // EM QUAD case '\u2002': // EN SPACE case '\u2003': // EM SPACE case '\u2004': // THREE-PER-EM SPACE case '\u2005': // FOUR-PER-EM SPACE case '\u2006': // SIX-PER-EM SPACE case '\u2007': // FIGURE SPACE case '\u2008': // PUNCTUATION SPACE case '\u2009': // THIN SPACE case '\u200A': // HAIR SPACE case '\u200B': // ZERO WIDTH SPACE case '\u202F': // NARROW NO-BREAK SPACE case '\u205F': // MEDIUM MATHEMATICAL SPACE case '\u3000': // IDEOGRAPHIC SPACE return true; default: return false; } } /// <summary> /// Determines if the character is commonly mapped to nothing. 
/// </summary> /// <remarks> /// This list was obtained from http://tools.ietf.org/html/rfc3454#appendix-B.1 /// </remarks> /// <returns><c>true</c> if the character is commonly mapped to nothing; otherwise, <c>false</c>.</returns> /// <param name="c">The character.</param> static bool IsCommonlyMappedToNothing (char c) { switch (c) { case '\u00AD': case '\u034F': case '\u1806': case '\u180B': case '\u180C': case '\u180D': case '\u200B': case '\u200C': case '\u200D': case '\u2060': case '\uFE00': case '\uFE01': case '\uFE02': case '\uFE03': case '\uFE04': case '\uFE05': case '\uFE06': case '\uFE07': case '\uFE08': case '\uFE09': case '\uFE0A': case '\uFE0B': case '\uFE0C': case '\uFE0D': case '\uFE0E': case '\uFE0F': case '\uFEFF': return true; default: return false; } } /// <summary> /// Determines if the character is prohibited. /// </summary> /// <remarks> /// This list was obtained from http://tools.ietf.org/html/rfc3454#appendix-C.3 /// </remarks> /// <returns><c>true</c> if the character is prohibited; otherwise, <c>false</c>.</returns> /// <param name="s">The string.</param> /// <param name="index">The character index.</param> static bool IsProhibited (string s, int index) { int u = char.ConvertToUtf32 (s, index); // Private Use characters: http://tools.ietf.org/html/rfc3454#appendix-C.3 if ((u >= 0xE000 && u <= 0xF8FF) || (u >= 0xF0000 && u <= 0xFFFFD) || (u >= 0x100000 && u <= 0x10FFFD)) return true; // Non-character code points: http://tools.ietf.org/html/rfc3454#appendix-C.4 if ((u >= 0xFDD0 && u <= 0xFDEF) || (u >= 0xFFFE && u <= 0xFFFF) || (u >= 0x1FFFE & u <= 0x1FFFF) || (u >= 0x2FFFE & u <= 0x2FFFF) || (u >= 0x3FFFE & u <= 0x3FFFF) || (u >= 0x4FFFE & u <= 0x4FFFF) || (u >= 0x5FFFE & u <= 0x5FFFF) || (u >= 0x6FFFE & u <= 0x6FFFF) || (u >= 0x7FFFE & u <= 0x7FFFF) || (u >= 0x8FFFE & u <= 0x8FFFF) || (u >= 0x9FFFE & u <= 0x9FFFF) || (u >= 0xAFFFE & u <= 0xAFFFF) || (u >= 0xBFFFE & u <= 0xBFFFF) || (u >= 0xCFFFE & u <= 0xCFFFF) || (u >= 0xDFFFE & u <= 0xDFFFF) || (u >= 0xEFFFE & u <= 0xEFFFF) || (u >= 0xFFFFE & u <= 0xFFFFF) || (u >= 0x10FFFE & u <= 0x10FFFF)) return true; // Surrogate code points: http://tools.ietf.org/html/rfc3454#appendix-C.5 if (u >= 0xD800 && u <= 0xDFFF) return true; // Inappropriate for plain text characters: http://tools.ietf.org/html/rfc3454#appendix-C.6 switch (u) { case 0xFFF9: // INTERLINEAR ANNOTATION ANCHOR case 0xFFFA: // INTERLINEAR ANNOTATION SEPARATOR case 0xFFFB: // INTERLINEAR ANNOTATION TERMINATOR case 0xFFFC: // OBJECT REPLACEMENT CHARACTER case 0xFFFD: // REPLACEMENT CHARACTER return true; } // Inappropriate for canonical representation: http://tools.ietf.org/html/rfc3454#appendix-C.7 if (u >= 0x2FF0 && u <= 0x2FFB) return true; // Change display properties or are deprecated: http://tools.ietf.org/html/rfc3454#appendix-C.8 switch (u) { case 0x0340: // COMBINING GRAVE TONE MARK case 0x0341: // COMBINING ACUTE TONE MARK case 0x200E: // LEFT-TO-RIGHT MARK case 0x200F: // RIGHT-TO-LEFT MARK case 0x202A: // LEFT-TO-RIGHT EMBEDDING case 0x202B: // RIGHT-TO-LEFT EMBEDDING case 0x202C: // POP DIRECTIONAL FORMATTING case 0x202D: // LEFT-TO-RIGHT OVERRIDE case 0x202E: // RIGHT-TO-LEFT OVERRIDE case 0x206A: // INHIBIT SYMMETRIC SWAPPING case 0x206B: // ACTIVATE SYMMETRIC SWAPPING case 0x206C: // INHIBIT ARABIC FORM SHAPING case 0x206D: // ACTIVATE ARABIC FORM SHAPING case 0x206E: // NATIONAL DIGIT SHAPES case 0x206F: // NOMINAL DIGIT SHAPES return true; } // Tagging characters: http://tools.ietf.org/html/rfc3454#appendix-C.9 if (u == 0xE0001 || (u >= 
0xE0020 & u <= 0xE007F)) return true; return false; } /// <summary> /// Prepares the user name or password string. /// </summary> /// <remarks> /// Prepares a user name or password string according to the rules of rfc4013. /// </remarks> /// <returns>The prepared string.</returns> /// <param name="s">The string to prepare.</param> /// <exception cref="System.ArgumentNullException"> /// <paramref name="s"/> is <c>null</c>. /// </exception> /// <exception cref="System.ArgumentException"> /// <paramref name="s"/> contains prohibited characters. /// </exception> public static string SaslPrep (string s) { if (s == null) throw new ArgumentNullException ("s"); if (s.Length == 0) return s; var builder = new StringBuilder (s.Length); for (int i = 0; i < s.Length; i++) { if (IsNonAsciiSpace (s[i])) { // non-ASII space characters [StringPrep, C.1.2] that can be // mapped to SPACE (U+0020). builder.Append (' '); } else if (IsCommonlyMappedToNothing (s[i])) { // the "commonly mapped to nothing" characters [StringPrep, B.1] // that can be mapped to nothing. } else if (char.IsControl (s[i])) { throw new ArgumentException ("Control characters are prohibited.", "s"); } else if (IsProhibited (s, i)) { throw new ArgumentException ("One or more characters in the string are prohibited.", "s"); } else { builder.Append (s[i]); } } #if !NETFX_CORE return builder.ToString ().Normalize (NormalizationForm.FormKC); #else return builder.ToString (); #endif } } }
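// --- Illustrative usage sketch (the exchange delegate and class name below are hypothetical;
// real MailKit clients drive this loop from their own protocol transports) ---
// Shows the negotiation flow the class above supports: create a mechanism by name, feed each
// base64-encoded server challenge to Challenge(), and send the returned response back until
// IsAuthenticated becomes true.
using System;
using System.Net;
using MailKit.Security;

static class SaslMechanismUsageSketch
{
	// 'exchange' stands in for whatever sends a line to the server and returns its reply.
	static bool Authenticate (Func<string, string> exchange, Uri uri, NetworkCredential credentials)
	{
		const string mechanismName = "CRAM-MD5";

		if (!SaslMechanism.IsSupported (mechanismName))
			return false;

		var sasl = SaslMechanism.Create (mechanismName, uri, credentials);

		// CRAM-MD5 does not support an initial response, so the server sends the first challenge.
		string challenge = exchange ("AUTH " + mechanismName);

		while (!sasl.IsAuthenticated) {
			string response = sasl.Challenge (challenge);
			challenge = exchange (response);
		}

		return true;
	}
}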
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; namespace System.Net { internal static partial class HttpKnownHeaderNames { /// <summary> /// Gets a known header name string from a matching char[] array segment, using a case-sensitive /// ordinal comparison. Used to avoid allocating new strings for known header names. /// </summary> public static bool TryGetHeaderName(char[] array, int startIndex, int length, out string name) { CharArrayHelpers.DebugAssertArrayInputs(array, startIndex, length); return TryGetHeaderName( array, startIndex, length, (arr, index) => arr[index], (known, arr, start, len) => CharArrayHelpers.EqualsOrdinal(known, arr, start, len), out name); } /// <summary> /// Gets a known header name string from a matching IntPtr buffer, using a case-sensitive /// ordinal comparison. Used to avoid allocating new strings for known header names. /// </summary> public unsafe static bool TryGetHeaderName(IntPtr buffer, int length, out string name) { Debug.Assert(length >= 0); if (buffer == IntPtr.Zero) { name = null; return false; } // We always pass 0 for the startIndex, as buffer should already point to the start. const int startIndex = 0; return TryGetHeaderName( buffer, startIndex, length, (buf, index) => (char)((byte*)buf)[index], (known, buf, start, len) => EqualsOrdinal(known, buf, len), out name); } private static bool TryGetHeaderName<T>( T key, int startIndex, int length, Func<T, int, char> charAt, Func<string, T, int, int, bool> equals, out string name) { Debug.Assert(key != null); Debug.Assert(startIndex >= 0); Debug.Assert(length >= 0); Debug.Assert(charAt != null); Debug.Assert(equals != null); // When adding a new constant, add it to HttpKnownHeaderNames.cs as well. // The lookup works as follows: first switch on the length of the passed-in key. // // - If there is only one known header of that length, set potentialHeader to that known header // and goto TryMatch to see if the key fully matches potentialHeader. // // - If there are more than one known headers of that length, switch on a unique char from that // set of same-length known headers. Typically this will be the first char, but some sets of // same-length known headers do not have unique chars in the first position, so a char in a // position further in the strings is used. If the char from the key matches one of the // known headers, set potentialHeader to that known header and goto TryMatch to see if the key // fully matches potentialHeader. // // - Otherwise, there is no match, so set the out param to null and return false. // // Matching is case-sensitive: we only want to return a known header that exactly matches the key. 
string potentialHeader = null; switch (length) { case 2: potentialHeader = TE; goto TryMatch; // TE case 3: switch (charAt(key, startIndex)) { case 'A': potentialHeader = Age; goto TryMatch; // [A]ge case 'P': potentialHeader = P3P; goto TryMatch; // [P]3P case 'V': potentialHeader = Via; goto TryMatch; // [V]ia } break; case 4: switch (charAt(key, startIndex)) { case 'D': potentialHeader = Date; goto TryMatch; // [D]ate case 'E': potentialHeader = ETag; goto TryMatch; // [E]Tag case 'F': potentialHeader = From; goto TryMatch; // [F]rom case 'H': potentialHeader = Host; goto TryMatch; // [H]ost case 'V': potentialHeader = Vary; goto TryMatch; // [V]ary } break; case 5: switch (charAt(key, startIndex)) { case 'A': potentialHeader = Allow; goto TryMatch; // [A]llow case 'R': potentialHeader = Range; goto TryMatch; // [R]ange } break; case 6: switch (charAt(key, startIndex)) { case 'A': potentialHeader = Accept; goto TryMatch; // [A]ccept case 'C': potentialHeader = Cookie; goto TryMatch; // [C]ookie case 'E': potentialHeader = Expect; goto TryMatch; // [E]xpect case 'O': potentialHeader = Origin; goto TryMatch; // [O]rigin case 'P': potentialHeader = Pragma; goto TryMatch; // [P]ragma case 'S': potentialHeader = Server; goto TryMatch; // [S]erver } break; case 7: switch (charAt(key, startIndex)) { case 'C': potentialHeader = Cookie2; goto TryMatch; // [C]ookie2 case 'E': potentialHeader = Expires; goto TryMatch; // [E]xpires case 'R': potentialHeader = Referer; goto TryMatch; // [R]eferer case 'T': potentialHeader = Trailer; goto TryMatch; // [T]railer case 'U': potentialHeader = Upgrade; goto TryMatch; // [U]pgrade case 'W': potentialHeader = Warning; goto TryMatch; // [W]arning } break; case 8: switch (charAt(key, startIndex + 3)) { case 'M': potentialHeader = IfMatch; goto TryMatch; // If-[M]atch case 'R': potentialHeader = IfRange; goto TryMatch; // If-[R]ange case 'a': potentialHeader = Location; goto TryMatch; // Loc[a]tion } break; case 10: switch (charAt(key, startIndex)) { case 'C': potentialHeader = Connection; goto TryMatch; // [C]onnection case 'K': potentialHeader = KeepAlive; goto TryMatch; // [K]eep-Alive case 'S': potentialHeader = SetCookie; goto TryMatch; // [S]et-Cookie case 'U': potentialHeader = UserAgent; goto TryMatch; // [U]ser-Agent } break; case 11: switch (charAt(key, startIndex)) { case 'C': potentialHeader = ContentMD5; goto TryMatch; // [C]ontent-MD5 case 'R': potentialHeader = RetryAfter; goto TryMatch; // [R]etry-After case 'S': potentialHeader = SetCookie2; goto TryMatch; // [S]et-Cookie2 } break; case 12: switch (charAt(key, startIndex)) { case 'C': potentialHeader = ContentType; goto TryMatch; // [C]ontent-Type case 'M': potentialHeader = MaxForwards; goto TryMatch; // [M]ax-Forwards case 'X': potentialHeader = XPoweredBy; goto TryMatch; // [X]-Powered-By } break; case 13: switch (charAt(key, startIndex + 6)) { case '-': potentialHeader = AcceptRanges; goto TryMatch; // Accept[-]Ranges case 'i': potentialHeader = Authorization; goto TryMatch; // Author[i]zation case 'C': potentialHeader = CacheControl; goto TryMatch; // Cache-[C]ontrol case 't': potentialHeader = ContentRange; goto TryMatch; // Conten[t]-Range case 'e': potentialHeader = IfNoneMatch; goto TryMatch; // If-Non[e]-Match case 'o': potentialHeader = LastModified; goto TryMatch; // Last-M[o]dified } break; case 14: switch (charAt(key, startIndex)) { case 'A': potentialHeader = AcceptCharset; goto TryMatch; // [A]ccept-Charset case 'C': potentialHeader = ContentLength; goto TryMatch; // 
[C]ontent-Length } break; case 15: switch (charAt(key, startIndex + 7)) { case 'E': potentialHeader = AcceptEncoding; goto TryMatch; // Accept-[E]ncoding case 'L': potentialHeader = AcceptLanguage; goto TryMatch; // Accept-[L]anguage } break; case 16: switch (charAt(key, startIndex + 11)) { case 'o': potentialHeader = ContentEncoding; goto TryMatch; // Content-Enc[o]ding case 'g': potentialHeader = ContentLanguage; goto TryMatch; // Content-Lan[g]uage case 'a': potentialHeader = ContentLocation; goto TryMatch; // Content-Loc[a]tion case 'c': potentialHeader = ProxyConnection; goto TryMatch; // Proxy-Conne[c]tion case 'i': potentialHeader = WWWAuthenticate; goto TryMatch; // WWW-Authent[i]cate case 'r': potentialHeader = XAspNetVersion; goto TryMatch; // X-AspNet-Ve[r]sion } break; case 17: switch (charAt(key, startIndex)) { case 'I': potentialHeader = IfModifiedSince; goto TryMatch; // [I]f-Modified-Since case 'S': potentialHeader = SecWebSocketKey; goto TryMatch; // [S]ec-WebSocket-Key case 'T': potentialHeader = TransferEncoding; goto TryMatch; // [T]ransfer-Encoding } break; case 18: potentialHeader = ProxyAuthenticate; goto TryMatch; // Proxy-Authenticate case 19: switch (charAt(key, startIndex)) { case 'C': potentialHeader = ContentDisposition; goto TryMatch; // [C]ontent-Disposition case 'I': potentialHeader = IfUnmodifiedSince; goto TryMatch; // [I]f-Unmodified-Since case 'P': potentialHeader = ProxyAuthorization; goto TryMatch; // [P]roxy-Authorization } break; case 20: potentialHeader = SecWebSocketAccept; goto TryMatch; // Sec-WebSocket-Accept case 21: potentialHeader = SecWebSocketVersion; goto TryMatch; // Sec-WebSocket-Version case 22: potentialHeader = SecWebSocketProtocol; goto TryMatch; // Sec-WebSocket-Protocol case 24: potentialHeader = SecWebSocketExtensions; goto TryMatch; // Sec-WebSocket-Extensions } name = null; return false; TryMatch: Debug.Assert(potentialHeader != null); return TryMatch(potentialHeader, key, startIndex, length, equals, out name); } /// <summary> /// Returns true if <paramref name="known"/> matches the <paramref name="key"/> char[] array segment, /// using an ordinal comparison. /// </summary> private static bool TryMatch<T>(string known, T key, int startIndex, int length, Func<string, T, int, int, bool> equals, out string name) { Debug.Assert(known != null); Debug.Assert(known.Length > 0); Debug.Assert(startIndex >= 0); Debug.Assert(length > 0); Debug.Assert(equals != null); // The lengths should be equal because this method is only called // from within a "switch (length) { ... }". Debug.Assert(known.Length == length); if (equals(known, key, startIndex, length)) { name = known; return true; } name = null; return false; } private unsafe static bool EqualsOrdinal(string left, IntPtr right, int rightLength) { Debug.Assert(left != null); Debug.Assert(right != IntPtr.Zero); Debug.Assert(rightLength > 0); // At this point the lengths have already been determined to be equal. Debug.Assert(left.Length == rightLength); byte* pRight = (byte*)right; for (int i = 0; i < left.Length; i++) { if (left[i] != pRight[i]) { return false; } } return true; } } }
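// --- Illustrative sketch (hypothetical caller; HttpKnownHeaderNames.TryGetHeaderName is internal
// to System.Net, so real callers live inside the same assembly) ---
// Shows the intended use: a header parser holding the raw header name as a char range asks for
// the interned known-header string first and only allocates a new string for unknown headers.
namespace System.Net
{
    internal static class HeaderNameInterningSketch
    {
        internal static string GetHeaderNameString(char[] buffer, int startIndex, int length)
        {
            string name;
            if (HttpKnownHeaderNames.TryGetHeaderName(buffer, startIndex, length, out name))
            {
                return name; // known header: reuse the cached constant, no allocation
            }

            return new string(buffer, startIndex, length); // unknown header: allocate
        }
    }
}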
using System; using System.IO; namespace NAudio.Wave { /// <summary> /// This class writes WAV data to a .wav file on disk /// </summary> public class WaveFileWriter : IDisposable { private Stream outStream; private BinaryWriter writer; private long dataSizePos; private long factSampleCountPos; private int dataChunkSize = 0; private WaveFormat format; private bool overwriting; private string filename; /// <summary> /// Creates a Wave file by reading all the data from a WaveStream /// </summary> /// <param name="filename">The filename to use</param> /// <param name="stream">The source WaveStream</param> public static void CreateWaveFile(string filename, WaveStream stream) { using (WaveFileWriter writer = new WaveFileWriter(filename, stream.WaveFormat)) { byte[] buffer = new byte[stream.WaveFormat.SampleRate * stream.WaveFormat.Channels * 16]; while (true) { int bytesRead = stream.Read(buffer, 0, buffer.Length); if (bytesRead == 0) break; writer.WriteData(buffer, 0, bytesRead); } } } /// <summary> /// WaveFileWriter that actually writes to a stream /// </summary> /// <param name="outStream">Stream to be written to</param> /// <param name="format">Wave format to use</param> public WaveFileWriter(Stream outStream, WaveFormat format) { this.outStream = outStream; this.writer = new BinaryWriter(outStream, System.Text.Encoding.ASCII); this.writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF")); this.writer.Write((int)0); // placeholder this.writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVEfmt ")); this.format = format; format.Serialize(this.writer); CreateFactChunk(); WriteDataChunkHeader(); } /// <summary> /// Creates a new WaveFileWriter, simply overwriting the samples on an existing file /// </summary> /// <param name="filename">The filename</param> [Obsolete("Not planning to keep supporting this, should create derived WaveFileWriter for this type of behaviour if needed")] public WaveFileWriter(string filename) { this.filename = filename; this.outStream = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.Read); this.writer = new BinaryWriter(outStream); int dataChunkLength; // long dataChunkPosition; WaveFileReader.ReadWaveHeader(outStream, out format, out dataChunkPosition, out dataChunkLength, null); dataSizePos = dataChunkPosition - 4; overwriting = true; } /// <summary> /// Creates a new WaveFileWriter /// </summary> /// <param name="filename">The filename to write to</param> /// <param name="format">The Wave Format of the output data</param> public WaveFileWriter(string filename, WaveFormat format) : this(new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.Read), format) { this.filename = filename; } private void WriteDataChunkHeader() { this.writer.Write(System.Text.Encoding.ASCII.GetBytes("data")); dataSizePos = this.outStream.Position; this.writer.Write((int)0); // placeholder } private void CreateFactChunk() { if (this.format.Encoding != WaveFormatEncoding.Pcm) { this.writer.Write(System.Text.Encoding.ASCII.GetBytes("fact")); this.writer.Write((int)4); factSampleCountPos = this.outStream.Position; this.writer.Write((int)0); // number of samples } } /// <summary> /// The wave file name /// </summary> public string Filename { get { return filename; } } /// <summary> /// Number of bytes of audio /// </summary> public long Length { get { return dataChunkSize; } } /// <summary> /// WaveFormat of this wave file /// </summary> public WaveFormat WaveFormat { get { return format; } } /// <summary> /// Writes bytes to the WaveFile (assumes they 
are already in the correct format) /// </summary> /// <param name="data">the buffer containing the wave data</param> /// <param name="offset">the offset from which to start writing</param> /// <param name="count">the number of bytes to write</param> public void WriteData(byte[] data, int offset, int count) { outStream.Write(data, offset, count); dataChunkSize += count; } private byte[] value24 = new byte[3]; // keep this around to save us creating it every time /// <summary> /// Writes a single sample to the Wave file /// </summary> /// <param name="sample">the sample to write (assumed floating point with 1.0f as max value)</param> public void WriteSample(float sample) { if (WaveFormat.BitsPerSample == 16) { writer.Write((Int16)(Int16.MaxValue * sample)); dataChunkSize += 2; } else if (WaveFormat.BitsPerSample == 24) { var value = BitConverter.GetBytes((Int32)(Int32.MaxValue * sample)); value24[0] = value[1]; value24[1] = value[2]; value24[2] = value[3]; writer.Write(value24); dataChunkSize += 3; } else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) { writer.Write(UInt16.MaxValue * (Int32)sample); dataChunkSize += 4; } else if (WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) { writer.Write(sample); dataChunkSize += 4; } else { throw new ApplicationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); } } /// <summary> /// Writes 16 bit samples to the Wave file /// </summary> /// <param name="data">The buffer containing the wave data</param> /// <param name="offset">The offset from which to start writing</param> /// <param name="count">The number of 16 bit samples to write</param> public void WriteData(short[] data, int offset, int count) { // 16 bit PCM data if (WaveFormat.BitsPerSample == 16) { for (int sample = 0; sample < count; sample++) { writer.Write(data[sample + offset]); } dataChunkSize += (count * 2); } // 24 bit PCM data else if (WaveFormat.BitsPerSample == 24) { byte[] value; for (int sample = 0; sample < count; sample++) { value = BitConverter.GetBytes(UInt16.MaxValue * (Int32)data[sample + offset]); value24[0] = value[1]; value24[1] = value[2]; value24[2] = value[3]; writer.Write(value24); } dataChunkSize += (count * 3); } // 32 bit PCM data else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) { for (int sample = 0; sample < count; sample++) { writer.Write(UInt16.MaxValue * (Int32)data[sample + offset]); } dataChunkSize += (count * 4); } // IEEE float data else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) { for (int sample = 0; sample < count; sample++) { writer.Write((float)data[sample + offset] / (float)(Int16.MaxValue + 1)); } dataChunkSize += (count * 4); } else { throw new ApplicationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); } } /// <summary> /// Writes float samples to the Wave file /// </summary> /// <param name="data">The buffer containing the wave data</param> /// <param name="offset">The offset from which to start writing</param> /// <param name="count">The number of float samples to write</param> [Obsolete("Use the WriteSample method instead")] public void WriteData(float[][] data, int offset, int count) { // 16 bit PCM data if (WaveFormat.BitsPerSample == 16) { for (int sample = 0; sample < count; sample++) { for (int channel = 0; channel < WaveFormat.Channels; channel++) { writer.Write((Int16)(Int16.MaxValue * data[channel][sample + offset])); dataChunkSize += 2; } } } // 
24 bit PCM data else if (WaveFormat.BitsPerSample == 24) { byte[] value; byte[] value24 = new byte[3]; for (int sample = 0; sample < count; sample++) { for (int channel = 0; channel < WaveFormat.Channels; channel++) { value = BitConverter.GetBytes((Int32)(Int32.MaxValue * data[channel][sample + offset])); value24[0] = value[1]; value24[1] = value[2]; value24[2] = value[3]; writer.Write(value24); dataChunkSize += 3; } } } // 32 bit PCM data else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.Extensible) { for (int sample = 0; sample < count; sample++) { for (int channel = 0; channel < WaveFormat.Channels; channel++) { writer.Write((Int32)(Int32.MaxValue * data[channel][sample + offset])); dataChunkSize += 4; } } } // IEEE float data else if (WaveFormat.BitsPerSample == 32 && WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat) { for (int sample = 0; sample < count; sample++) { for (int channel = 0; channel < WaveFormat.Channels; channel++) { writer.Write(data[channel][sample + offset]); dataChunkSize += 4; } } } else { throw new ApplicationException("Only 16, 24 or 32 bit PCM or IEEE float audio data supported"); } } /// <summary> /// Ensures data is written to disk /// </summary> public void Flush() { outStream.Flush(); } #region IDisposable Members /// <summary> /// Closes this WaveFile (calls <see>Dispose</see>) /// </summary> public void Close() { Dispose(); } /// <summary> /// Closes this WaveFile /// </summary> public void Dispose() { GC.SuppressFinalize(this); Dispose(true); } /// <summary> /// Actually performs the close,making sure the header contains the correct data /// </summary> /// <param name="disposing">True if called from <see>Dispose</see></param> protected virtual void Dispose(bool disposing) { if (disposing) { if (outStream != null) { try { if (!overwriting) { UpdateHeader(writer); } } finally { // in a finally block as we don't want the FileStream to run its disposer in // the GC thread if the code above caused an IOException (e.g. due to disk full) outStream.Close(); // will close the underlying base stream outStream = null; } } } } /// <summary> /// Updates the header with file size information /// </summary> protected virtual void UpdateHeader(BinaryWriter writer) { // in overwrite mode, we will not change the length set at the start // irrespective of whether we actually wrote less or more outStream.Flush(); writer.Seek(4, SeekOrigin.Begin); writer.Write((int)(outStream.Length - 8)); if (format.Encoding != WaveFormatEncoding.Pcm) { int bitsPerSample = (format.BitsPerSample * format.Channels); if (bitsPerSample != 0) { writer.Seek((int)factSampleCountPos, SeekOrigin.Begin); writer.Write((int)((dataChunkSize * 8) / bitsPerSample)); } } writer.Seek((int)dataSizePos, SeekOrigin.Begin); writer.Write((int)(dataChunkSize)); } /// <summary> /// Finaliser - should only be called if the user forgot to close this WaveFileWriter /// </summary> ~WaveFileWriter() { System.Diagnostics.Debug.Assert(false, "WaveFileWriter was not disposed"); Dispose(false); } #endregion } }
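// Illustrative usage sketch (not part of the class above): writes one second of a 440 Hz sine
// tone as 16-bit mono PCM, exercising the WaveFileWriter(filename, format) constructor and
// WriteSample shown above. The output path "tone.wav" is a placeholder.
using System;
using NAudio.Wave;

class ToneWriterExample
{
    static void Main()
    {
        const int sampleRate = 44100;
        var format = new WaveFormat(sampleRate, 16, 1); // 16-bit PCM, mono

        using (var writer = new WaveFileWriter("tone.wav", format))
        {
            for (int n = 0; n < sampleRate; n++)
            {
                // WriteSample expects a float in [-1.0, 1.0] and scales it to the target bit depth.
                float sample = (float)(0.25 * Math.Sin(2 * Math.PI * 440.0 * n / sampleRate));
                writer.WriteSample(sample);
            }
        } // Dispose() runs UpdateHeader, patching the RIFF and data chunk sizes on close.
    }
}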
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the elastictranscoder-2012-09-25.normal.json service model. */ using System; using System.IO; using System.Text; using Microsoft.VisualStudio.TestTools.UnitTesting; using Amazon.ElasticTranscoder; using Amazon.ElasticTranscoder.Model; using Amazon.ElasticTranscoder.Model.Internal.MarshallTransformations; using Amazon.Runtime.Internal.Transform; using Amazon.Util; using ServiceClientGenerator; using AWSSDK_DotNet35.UnitTests.TestTools; namespace AWSSDK_DotNet35.UnitTests.Marshalling { [TestClass] public partial class ElasticTranscoderMarshallingTests { static readonly ServiceModel service_model = Utils.LoadServiceModel("elastictranscoder-2012-09-25.normal.json", "elastictranscoder.customizations.json"); [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void CancelJobMarshallTest() { var operation = service_model.FindOperation("CancelJob"); var request = InstantiateClassGenerator.Execute<CancelJobRequest>(); var marshaller = new CancelJobRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("CancelJob", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = CancelJobResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as CancelJobResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void CreateJobMarshallTest() { var operation = service_model.FindOperation("CreateJob"); var request = InstantiateClassGenerator.Execute<CreateJobRequest>(); var marshaller = new CreateJobRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("CreateJob", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = CreateJobResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as CreateJobResponse; 
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void CreatePipelineMarshallTest() { var operation = service_model.FindOperation("CreatePipeline"); var request = InstantiateClassGenerator.Execute<CreatePipelineRequest>(); var marshaller = new CreatePipelineRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("CreatePipeline", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = CreatePipelineResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as CreatePipelineResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void CreatePresetMarshallTest() { var operation = service_model.FindOperation("CreatePreset"); var request = InstantiateClassGenerator.Execute<CreatePresetRequest>(); var marshaller = new CreatePresetRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("CreatePreset", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = CreatePresetResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as CreatePresetResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void DeletePipelineMarshallTest() { var operation = service_model.FindOperation("DeletePipeline"); var request = InstantiateClassGenerator.Execute<DeletePipelineRequest>(); var marshaller = new DeletePipelineRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("DeletePipeline", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = DeletePipelineResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as DeletePipelineResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] 
[TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void DeletePresetMarshallTest() { var operation = service_model.FindOperation("DeletePreset"); var request = InstantiateClassGenerator.Execute<DeletePresetRequest>(); var marshaller = new DeletePresetRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("DeletePreset", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = DeletePresetResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as DeletePresetResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ListJobsByPipelineMarshallTest() { var operation = service_model.FindOperation("ListJobsByPipeline"); var request = InstantiateClassGenerator.Execute<ListJobsByPipelineRequest>(); var marshaller = new ListJobsByPipelineRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ListJobsByPipeline", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ListJobsByPipelineResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ListJobsByPipelineResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ListJobsByStatusMarshallTest() { var operation = service_model.FindOperation("ListJobsByStatus"); var request = InstantiateClassGenerator.Execute<ListJobsByStatusRequest>(); var marshaller = new ListJobsByStatusRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ListJobsByStatus", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ListJobsByStatusResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ListJobsByStatusResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void 
ListPipelinesMarshallTest() { var operation = service_model.FindOperation("ListPipelines"); var request = InstantiateClassGenerator.Execute<ListPipelinesRequest>(); var marshaller = new ListPipelinesRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ListPipelines", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ListPipelinesResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ListPipelinesResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ListPresetsMarshallTest() { var operation = service_model.FindOperation("ListPresets"); var request = InstantiateClassGenerator.Execute<ListPresetsRequest>(); var marshaller = new ListPresetsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ListPresets", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ListPresetsResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ListPresetsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ReadJobMarshallTest() { var operation = service_model.FindOperation("ReadJob"); var request = InstantiateClassGenerator.Execute<ReadJobRequest>(); var marshaller = new ReadJobRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ReadJob", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ReadJobResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ReadJobResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ReadPipelineMarshallTest() { var operation = service_model.FindOperation("ReadPipeline"); var request = InstantiateClassGenerator.Execute<ReadPipelineRequest>(); var marshaller = new 
ReadPipelineRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ReadPipeline", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ReadPipelineResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ReadPipelineResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void ReadPresetMarshallTest() { var operation = service_model.FindOperation("ReadPreset"); var request = InstantiateClassGenerator.Execute<ReadPresetRequest>(); var marshaller = new ReadPresetRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("ReadPreset", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = ReadPresetResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as ReadPresetResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void TestRoleMarshallTest() { var operation = service_model.FindOperation("TestRole"); var request = InstantiateClassGenerator.Execute<TestRoleRequest>(); var marshaller = new TestRoleRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("TestRole", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = TestRoleResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as TestRoleResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void UpdatePipelineMarshallTest() { var operation = service_model.FindOperation("UpdatePipeline"); var request = InstantiateClassGenerator.Execute<UpdatePipelineRequest>(); var marshaller = new UpdatePipelineRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("UpdatePipeline", request, internalRequest, service_model); var webResponse 
= new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = UpdatePipelineResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as UpdatePipelineResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void UpdatePipelineNotificationsMarshallTest() { var operation = service_model.FindOperation("UpdatePipelineNotifications"); var request = InstantiateClassGenerator.Execute<UpdatePipelineNotificationsRequest>(); var marshaller = new UpdatePipelineNotificationsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("UpdatePipelineNotifications", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = UpdatePipelineNotificationsResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as UpdatePipelineNotificationsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Rest_Json")] [TestCategory("ElasticTranscoder")] public void UpdatePipelineStatusMarshallTest() { var operation = service_model.FindOperation("UpdatePipelineStatus"); var request = InstantiateClassGenerator.Execute<UpdatePipelineStatusRequest>(); var marshaller = new UpdatePipelineStatusRequestMarshaller(); var internalRequest = marshaller.Marshall(request); RequestValidator.Validate("UpdatePipelineStatus", request, internalRequest, service_model); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString()); var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse); ResponseUnmarshaller unmarshaller = UpdatePipelineStatusResponseUnmarshaller.Instance; var response = unmarshaller.Unmarshall(context) as UpdatePipelineStatusResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } } }
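// Hypothetical refactoring sketch (not part of the generated code): every test above repeats
// the same marshall -> validate -> fake response -> unmarshall -> validate steps. Because the
// test class is declared partial, a hand-written helper like the one below could host that
// shared pattern. It is assumed to live in the same file as the tests so the existing using
// directives apply; the utilities it calls (RequestValidator, JsonSampleGenerator,
// InstantiateClassGenerator, Utils, WebResponseData) are the ones already used above, while
// the helper itself and its signature are assumptions.
namespace AWSSDK_DotNet35.UnitTests.Marshalling
{
    public partial class ElasticTranscoderMarshallingTests
    {
        private static void RunMarshallTest<TRequest, TResponse>(
            string operationName,
            Func<TRequest, Amazon.Runtime.Internal.IRequest> marshall,
            ResponseUnmarshaller unmarshaller)
            where TRequest : new()
            where TResponse : class
        {
            var operation = service_model.FindOperation(operationName);
            var request = InstantiateClassGenerator.Execute<TRequest>();
            var internalRequest = marshall(request);
            RequestValidator.Validate(operationName, request, internalRequest, service_model);

            var webResponse = new WebResponseData
            {
                Headers = {
                    { "x-amzn-RequestId", Guid.NewGuid().ToString() },
                    { "x-amz-crc32", "0" }
                }
            };
            var payloadResponse = new JsonSampleGenerator(service_model, operation.ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(payloadResponse).Length.ToString());

            var context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
            var response = unmarshaller.Unmarshall(context) as TResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        // Example call, equivalent to CancelJobMarshallTest above:
        //   RunMarshallTest<CancelJobRequest, CancelJobResponse>(
        //       "CancelJob",
        //       r => new CancelJobRequestMarshaller().Marshall(r),
        //       CancelJobResponseUnmarshaller.Instance);
    }
}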
//----------------------------------------------------------------------- // <copyright file="SubStream.cs" company="Microsoft"> // Copyright 2016 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> //----------------------------------------------------------------------- namespace Microsoft.Azure.Storage.Blob { using System; using System.IO; using Microsoft.Azure.Storage.Core.Util; using System.Threading.Tasks; using System.Threading; using Microsoft.Azure.Storage.Shared.Protocol; using Microsoft.Azure.Storage.Core; /// <summary> /// A wrapper class that creates a logical substream from a region within an existing seekable stream. /// Allows for concurrent, asynchronous read and seek operations on the wrapped stream. /// Ensures thread-safe operations between related substream instances via a shared, user-supplied synchronization mutex. /// This class will buffer read requests to minimize overhead on the underlying stream. /// </summary> internal sealed class SubStream : Stream { // Stream to be logically wrapped. private Stream wrappedStream; // Position in the wrapped stream at which the SubStream should logically begin. private long streamBeginIndex; // Total length of the substream. private long substreamLength; // Tracks the current position in the substream. private long substreamCurrentIndex; // Stream to manage read buffer, lazily initialized when read or seek operations commence. private Lazy<MemoryStream> readBufferStream; // Internal read buffer, lazily initialized when read or seek operations commence. private Lazy<byte[]> readBuffer; // Tracks the valid bytes remaining in the readBuffer private int readBufferLength; // Determines where to update the position of the readbuffer stream depending on the scenario) private bool shouldSeek = false; // Non-blocking semaphore for controlling read operations between related SubStream instances. public SemaphoreSlim Mutex { get; set; } // Current relative position in the substream. public override long Position { get { return this.substreamCurrentIndex; } set { CommonUtility.AssertInBounds("Position", value, 0, this.substreamLength); // Check if we can potentially advance substream position without reallocating the read buffer. if (value >= this.substreamCurrentIndex) { long offset = value - this.substreamCurrentIndex; // New position is within the valid bytes stored in the readBuffer. if (offset <= this.readBufferLength) { this.readBufferLength -= (int)offset; if (shouldSeek) { this.readBufferStream.Value.Seek(offset, SeekOrigin.Current); } } else { // Resets the read buffer. this.readBufferLength = 0; this.readBufferStream.Value.Seek(0, SeekOrigin.End); } } else { // Resets the read buffer. this.readBufferLength = 0; this.readBufferStream.Value.Seek(0, SeekOrigin.End); } this.substreamCurrentIndex = value; } } // Total length of the substream. 
public override long Length { get { return this.substreamLength; } } public override bool CanRead { get { return true; } } public override bool CanSeek { get { return true; } } public override bool CanWrite { get { return false; } } private void CheckDisposed() { if (this.wrappedStream == null) { throw new ObjectDisposedException("SubStreamWrapper"); } } protected override void Dispose(bool disposing) { this.wrappedStream = null; this.readBufferStream = null; this.readBuffer = null; } public override void Flush() { throw new NotSupportedException(); } // Initiates the new buffer size to be used for read operations. public int ReadBufferSize { get { return this.readBuffer.Value.Length; } set { if (value < 2 * Constants.DefaultBufferSize) { throw new ArgumentOutOfRangeException(string.Format(SR.ArgumentTooSmallError, "ReadBufferSize", 2 * Constants.DefaultBufferSize)); } this.readBuffer = new Lazy<byte[]>(() => new byte[value]); this.readBufferStream = new Lazy<MemoryStream>(() => new MemoryStream(this.readBuffer.Value, 0, value, true)); this.readBufferStream.Value.Seek(0, SeekOrigin.End); } } /// <summary> /// Creates a new SubStream instance. /// </summary> /// <param name="stream">A seekable source stream.</param> /// <param name="streamBeginIndex">The index in the wrapped stream where the logical SubStream should begin.</param> /// <param name="substreamLength">The length of the SubStream.</param> /// <param name="globalSemaphore"> A <see cref="SemaphoreSlim"/> object that is shared between related SubStream instances.</param> /// <remarks> /// The source stream to be wrapped must be seekable. /// The Semaphore object provided must have the initialCount thread parameter set to one to ensure only one concurrent request is granted at a time. /// </remarks> public SubStream(Stream stream, long streamBeginIndex, long substreamLength, SemaphoreSlim globalSemaphore) { if (stream == null) { throw new ArgumentNullException("Stream."); } else if (!stream.CanSeek) { throw new NotSupportedException("Stream must be seekable."); } else if (globalSemaphore == null) { throw new ArgumentNullException("globalSemaphore"); } CommonUtility.AssertInBounds("streamBeginIndex", streamBeginIndex, 0, stream.Length); this.streamBeginIndex = streamBeginIndex; this.wrappedStream = stream; this.Mutex = globalSemaphore; this.substreamLength = Math.Min(substreamLength, stream.Length - streamBeginIndex); this.readBufferLength = 0; this.Position = 0; this.ReadBufferSize = Constants.DefaultSubStreamBufferSize; } #if !(WINDOWS_RT || NETCORE) public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) { return this.ReadAsync(buffer, offset, count, CancellationToken.None).AsApm<int>(callback, state); } public override int EndRead(IAsyncResult asyncResult) { CommonUtility.AssertNotNull("AsyncResult", asyncResult); return CommonUtility.RunWithoutSynchronizationContext(() => ((Task<int>)asyncResult).Result); } #endif /// <summary> /// Reads a block of bytes asynchronously from the substream read buffer. 
/// </summary> /// <param name="buffer">When this method returns, the buffer contains the specified byte array with the values between offset and (offset + count - 1) replaced by the bytes read from the current source.</param> /// <param name="offset">The zero-based byte offset in buffer at which to begin storing the data read from the current stream.</param> /// <param name="count">The maximum number of bytes to be read.</param> /// <param name="cancellationToken">An object of type <see cref="CancellationToken"/> that propagates notification that operation should be canceled.</param> /// <returns>The total number of bytes read into the buffer. This can be less than the number of bytes requested if that many bytes are not currently available, or zero if the end of the substream has been reached.</returns> /// <remarks> /// If the read request cannot be satisfied because the read buffer is empty or contains less than the requested number of the bytes, /// the wrapped stream will be called to refill the read buffer. /// Only one read request to the underlying wrapped stream will be allowed at a time and concurrent requests will be queued up by effect of the shared semaphore mutex. /// </remarks> public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { this.CheckDisposed(); try { int readCount = this.CheckAdjustReadCount(count, offset, buffer.Length); int bytesRead = await this.readBufferStream.Value.ReadAsync(buffer, offset, Math.Min(readCount, this.readBufferLength), cancellationToken).ConfigureAwait(false); int bytesLeft = readCount - bytesRead; // must adjust readbufferLength this.shouldSeek = false; this.Position += bytesRead; if (bytesLeft > 0 && readBufferLength == 0) { this.readBufferStream.Value.Position = 0; int bytesAdded = await this.ReadAsyncHelper(this.readBuffer.Value, 0, this.readBuffer.Value.Length, cancellationToken).ConfigureAwait(false); this.readBufferLength = bytesAdded; if (bytesAdded > 0) { bytesLeft = Math.Min(bytesAdded, bytesLeft); int secondRead = await this.readBufferStream.Value.ReadAsync(buffer, bytesRead + offset, bytesLeft, cancellationToken).ConfigureAwait(false); bytesRead += secondRead; this.Position += secondRead; } } return bytesRead; } finally { this.shouldSeek = true; } } /// <summary> /// Reads a block of bytes from the wrapped stream asynchronously and writes the data to the SubStream buffer. /// </summary> /// <param name="buffer">When this method returns, the substream read buffer contains the specified byte array with the values between offset and (offset + count - 1) replaced by the bytes read from the current source.</param> /// <param name="offset">The zero-based byte offset in buffer at which to begin storing the data read from the current stream.</param> /// <param name="count">The maximum number of bytes to be read.</param> /// <param name="cancellationToken">An object of type <see cref="CancellationToken"/> that propagates notification that operation should be canceled.</param> /// <returns>The total number of bytes read into the buffer. This can be less than the number of bytes requested if that many bytes are not currently available, or zero if the end of the substream has been reached.</returns> /// <remarks> /// This method will allow only one read request to the underlying wrapped stream at a time, /// while concurrent requests will be queued up by effect of the shared semaphore mutex. /// The caller is responsible for adjusting the substream position after a successful read. 
/// </remarks> private async Task<int> ReadAsyncHelper(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { await this.Mutex.WaitAsync(cancellationToken).ConfigureAwait(false); int result = 0; try { this.CheckDisposed(); // Check if read is out of range and adjust to read only up to the substream bounds. count = this.CheckAdjustReadCount(count, offset, buffer.Length); // Only seek if wrapped stream is misaligned with the substream position. if (this.wrappedStream.Position != this.streamBeginIndex + this.Position) { this.wrappedStream.Seek(this.streamBeginIndex + this.Position, SeekOrigin.Begin); } result = await this.wrappedStream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false); } finally { this.Mutex.Release(); } return result; } public override int Read(byte[] buffer, int offset, int count) { return CommonUtility.RunWithoutSynchronizationContext(() => this.ReadAsync(buffer, offset, count).Result); } /// <summary> /// Sets the position within the current substream. /// This operation does not perform a seek on the wrapped stream. /// </summary> /// <param name="offset">A byte offset relative to the origin parameter.</param> /// <param name="origin">A value of type System.IO.SeekOrigin indicating the reference point used to obtain the new position.</param> /// <returns>The new position within the current substream.</returns> /// <exception cref="NotSupportedException">Thrown if using the unsupported <paramref name="origin"/> SeekOrigin.End </exception> /// <exception cref="ArgumentOutOfRangeException">Thrown if <paramref name="offset"/> is invalid for SeekOrigin.</exception> public override long Seek(long offset, SeekOrigin origin) { this.CheckDisposed(); long startIndex; // Map offset to the specified SeekOrigin of the substream. switch (origin) { case SeekOrigin.Begin: startIndex = 0; break; case SeekOrigin.Current: startIndex = this.Position; break; case SeekOrigin.End: throw new NotSupportedException(); default: throw new ArgumentOutOfRangeException(); } this.Position = startIndex + offset; return this.Position; } public override void SetLength(long value) { throw new NotSupportedException(); } public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } private int CheckAdjustReadCount(int count, int offset, int bufferLength) { if (offset < 0 || count < 0 || offset + count > bufferLength) { throw new ArgumentOutOfRangeException(); } long currentPos = this.streamBeginIndex + this.Position; long endPos = this.streamBeginIndex + this.substreamLength; if (currentPos + count > endPos) { return (int)(endPos - currentPos); } else { return count; } } } }
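// Illustrative usage sketch: splits one seekable FileStream into two logical 4 MB substreams
// that issue reads concurrently while a shared SemaphoreSlim (initial count 1, as the class
// remarks require) serializes access to the wrapped stream. SubStream is internal, so this
// sketch assumes it is called from inside the assembly (or via InternalsVisibleTo), and the
// file at 'path' is assumed to be at least 8 MB so both regions exist.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Storage.Blob;

static class SubStreamSketch
{
    static async Task ReadHalvesAsync(string path)
    {
        const long blockSize = 4 * 1024 * 1024;

        using (var source = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (var mutex = new SemaphoreSlim(1, 1)) // one concurrent read on the wrapped stream
        using (var first = new SubStream(source, 0, blockSize, mutex))
        using (var second = new SubStream(source, blockSize, blockSize, mutex))
        {
            var bufferA = new byte[64 * 1024];
            var bufferB = new byte[64 * 1024];

            // Both reads are issued at once; each SubStream keeps its own position and read
            // buffer, and the shared semaphore serializes the seeks/reads on 'source'.
            var taskA = first.ReadAsync(bufferA, 0, bufferA.Length, CancellationToken.None);
            var taskB = second.ReadAsync(bufferB, 0, bufferB.Length, CancellationToken.None);
            await Task.WhenAll(taskA, taskB);

            Console.WriteLine($"first: {taskA.Result} bytes, second: {taskB.Result} bytes");
        }
    }
}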
using System.Linq; namespace NetGore { /// <summary> /// Extensions for multiple numeric types to clamp the value between a range. /// </summary> public static class ClampExtensions { /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static int Clamp(this int num, int min, int max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static uint Clamp(this uint num, uint min, uint max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static short Clamp(this short num, short min, short max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static ushort Clamp(this ushort num, ushort min, ushort max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. 
Else, the original value of <paramref name="num"/> is returned.</returns> public static byte Clamp(this byte num, byte min, byte max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static sbyte Clamp(this sbyte num, sbyte min, sbyte max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static float Clamp(this float num, float min, float max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static double Clamp(this double num, double min, double max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static decimal Clamp(this decimal num, decimal min, decimal max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. 
Else, the original value of <paramref name="num"/> is returned.</returns> public static long Clamp(this long num, long min, long max) { if (num < min) return min; if (num > max) return max; return num; } /// <summary> /// Clamps a number into a specified range of values. /// </summary> /// <param name="num">Number to clamp.</param> /// <param name="min">Minimum allowed value.</param> /// <param name="max">Maximum allowed value.</param> /// <returns>If <paramref name="num"/> is less than <paramref name="min"/>, <paramref name="min"/> /// is returned. If <paramref name="num"/> is greater than <paramref name="max"/>, <paramref name="max"/> /// is returned. Else, the original value of <paramref name="num"/> is returned.</returns> public static ulong Clamp(this ulong num, ulong min, ulong max) { if (num < min) return min; if (num > max) return max; return num; } } }
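// Small usage sketch for the Clamp overloads above; the values are arbitrary. Per-type
// overloads are used (rather than a single generic IComparable-based method) so each call
// compiles to plain comparisons with no interface dispatch or boxing.
using System;
using NetGore;

class ClampExample
{
    static void Main()
    {
        int health = 117;
        Console.WriteLine(health.Clamp(0, 100));   // 100   (capped at max)
        Console.WriteLine((-5f).Clamp(0f, 1f));    // 0     (raised to min)
        Console.WriteLine(0.42.Clamp(0.0, 1.0));   // 0.42  (already in range)
    }
}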
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; using System.Collections.Concurrent; using System.Drawing; using System.Drawing.Imaging; using System.IO; using System.Linq; using System.Reflection; using System.Threading; using Nini.Config; using OpenMetaverse; using Aurora.Framework; using OpenSim.Region.Framework.Interfaces; using GridRegion = OpenSim.Services.Interfaces.GridRegion; namespace Aurora.Modules.WorldMap { public class AuroraWorldMapModule : INonSharedRegionModule, IWorldMapModule { private const string DEFAULT_WORLD_MAP_EXPORT_PATH = "exportmap.jpg"; protected IScene m_scene; protected bool m_Enabled; private readonly ExpiringCache<ulong, List< mapItemReply>> m_mapItemCache = new ExpiringCache<ulong, List<mapItemReply>>(); private readonly ConcurrentQueue<MapItemRequester> m_itemsToRequest = new ConcurrentQueue<MapItemRequester>(); private bool itemRequesterIsRunning; private static AuroraThreadPool threadpool; private static AuroraThreadPool blockthreadpool; private int MapViewLength = 8; #region INonSharedRegionModule Members public virtual void Initialise(IConfigSource source) { if (source.Configs["MapModule"] != null) { if (source.Configs["MapModule"].GetString( "WorldMapModule", "AuroraWorldMapModule") != "AuroraWorldMapModule") return; m_Enabled = true; MapViewLength = source.Configs["MapModule"].GetInt("MapViewLength", MapViewLength); } } public virtual void AddRegion (IScene scene) { if (!m_Enabled) return; lock (scene) { m_scene = scene; m_scene.RegisterModuleInterface<IWorldMapModule>(this); if (MainConsole.Instance != null) { MainConsole.Instance.Commands.AddCommand ( "export-map", "export-map [<path>]", "Save an image of the world map", HandleExportWorldMapConsoleCommand); } AddHandlers(); } } public virtual void RemoveRegion (IScene scene) { if (!m_Enabled) return; lock (m_scene) { m_Enabled = false; RemoveHandlers(); m_scene = null; } } public virtual void RegionLoaded (IScene scene) { if (!m_Enabled) return; 
AuroraThreadPoolStartInfo info = new AuroraThreadPoolStartInfo {priority = ThreadPriority.Lowest, Threads = 1}; threadpool = new AuroraThreadPool(info); blockthreadpool = new AuroraThreadPool(info); } public virtual void Close() { } public Type ReplaceableInterface { get { return null; } } public virtual string Name { get { return "AuroraWorldMapModule"; } } #endregion // this has to be called with a lock on m_scene protected virtual void AddHandlers() { string regionimage = "regionImage" + m_scene.RegionInfo.RegionID.ToString(); regionimage = regionimage.Replace("-", ""); MainConsole.Instance.Debug("[WORLD MAP]: JPEG Map location: " + MainServer.Instance.ServerURI + "/index.php?method=" + regionimage); MainServer.Instance.AddHTTPHandler(regionimage, OnHTTPGetMapImage); m_scene.EventManager.OnNewClient += OnNewClient; m_scene.EventManager.OnClosingClient += OnClosingClient; } // this has to be called with a lock on m_scene protected virtual void RemoveHandlers() { m_scene.EventManager.OnNewClient -= OnNewClient; m_scene.EventManager.OnClosingClient -= OnClosingClient; string regionimage = "regionImage" + m_scene.RegionInfo.RegionID.ToString(); regionimage = regionimage.Replace("-", ""); MainServer.Instance.RemoveHTTPHandler("", regionimage); } #region EventHandlers /// <summary> /// Registered for event /// </summary> /// <param name="client"></param> private void OnNewClient(IClientAPI client) { client.OnRequestMapBlocks += RequestMapBlocks; client.OnMapItemRequest += HandleMapItemRequest; client.OnMapNameRequest += OnMapNameRequest; } private void OnClosingClient(IClientAPI client) { client.OnRequestMapBlocks -= RequestMapBlocks; client.OnMapItemRequest -= HandleMapItemRequest; client.OnMapNameRequest -= OnMapNameRequest; } #endregion public virtual void HandleMapItemRequest(IClientAPI remoteClient, uint flags, uint EstateID, bool godlike, uint itemtype, ulong regionhandle) { if (remoteClient.Scene.GetScenePresence (remoteClient.AgentId).IsChildAgent) return;//No child agent requests uint xstart; uint ystart; Utils.LongToUInts(m_scene.RegionInfo.RegionHandle, out xstart, out ystart); List<mapItemReply> mapitems = new List<mapItemReply>(); int tc = Environment.TickCount; if (itemtype == (int)GridItemType.AgentLocations) { //If its local, just let it do it on its own. 
if (regionhandle == 0 || regionhandle == m_scene.RegionInfo.RegionHandle) { //Only one person here, send a zero person response mapItemReply mapitem; IEntityCountModule entityCountModule = m_scene.RequestModuleInterface<IEntityCountModule>(); if (entityCountModule != null && entityCountModule.RootAgents <= 1) { mapitem = new mapItemReply { x = xstart + 1, y = ystart + 1, id = UUID.Zero, name = Util.Md5Hash(m_scene.RegionInfo.RegionName + tc.ToString()), Extra = 0, Extra2 = 0 }; mapitems.Add(mapitem); remoteClient.SendMapItemReply(mapitems.ToArray(), itemtype, flags); return; } m_scene.ForEachScenePresence(delegate(IScenePresence sp) { // Don't send a green dot for yourself if (!sp.IsChildAgent && sp.UUID != remoteClient.AgentId) { mapitem = new mapItemReply { x = (uint) (xstart + sp.AbsolutePosition.X), y = (uint) (ystart + sp.AbsolutePosition.Y), id = UUID.Zero, name = Util.Md5Hash(m_scene.RegionInfo.RegionName + tc.ToString()), Extra = 1, Extra2 = 0 }; mapitems.Add(mapitem); } }); remoteClient.SendMapItemReply(mapitems.ToArray(), itemtype, flags); } else { List<mapItemReply> reply; if (!m_mapItemCache.TryGetValue(regionhandle, out reply)) { m_itemsToRequest.Enqueue(new MapItemRequester { flags = flags, itemtype = itemtype, regionhandle = regionhandle, remoteClient = remoteClient }); if(!itemRequesterIsRunning) threadpool.QueueEvent(GetMapItems, 3); } else { //Send the cached items rather than the (empty) local list remoteClient.SendMapItemReply(reply.ToArray(), itemtype, flags); } } } } private void GetMapItems() { itemRequesterIsRunning = true; while(true) { MapItemRequester item = null; if(!m_itemsToRequest.TryDequeue(out item)) break; //Nothing in the queue List<mapItemReply> mapitems; if (!m_mapItemCache.TryGetValue(item.regionhandle, out mapitems)) //try again, might have gotten picked up by this already { multipleMapItemReply allmapitems = m_scene.GridService.GetMapItems(item.remoteClient.AllScopeIDs, item.regionhandle, (GridItemType)item.itemtype); if (allmapitems == null) continue; //Send out the update if (allmapitems.items.ContainsKey(item.regionhandle)) { mapitems = allmapitems.items[item.regionhandle]; //Update the cache foreach (KeyValuePair<ulong, List<mapItemReply>> kvp in allmapitems.items) { m_mapItemCache.AddOrUpdate(kvp.Key, kvp.Value, 3 * 60); //cache for 3 minutes } } } if(mapitems != null) item.remoteClient.SendMapItemReply (mapitems.ToArray (), item.itemtype, item.flags); Thread.Sleep (5); } itemRequesterIsRunning = false; } /// <summary> /// Requests map blocks in the area of minX, maxX, minY, maxY in world coordinates /// </summary> /// <param name="remoteClient"></param> /// <param name="minX"></param> /// <param name="minY"></param> /// <param name="maxX"></param> /// <param name="maxY"></param> /// <param name="flag"></param> public virtual void RequestMapBlocks(IClientAPI remoteClient, int minX, int minY, int maxX, int maxY, uint flag) { if ((flag & 0x10000) != 0) // user clicked on the map a tile that isn't visible { ClickedOnTile(remoteClient, minX, minY, maxX, maxY, flag); } else if (flag == 0) //Terrain and objects { // normal mapblock request. Use the provided values GetAndSendMapBlocks(remoteClient, minX, minY, maxX, maxY, flag); } else if ((flag & 1) == 1) //Terrain only { // normal terrain only request.
Use the provided values GetAndSendTerrainBlocks(remoteClient, minX, minY, maxX, maxY, flag); } else { if (flag != 2) //Land sales MainConsole.Instance.Warn("[World Map] : Got new flag, " + flag + " RequestMapBlocks()"); } } protected virtual void ClickedOnTile(IClientAPI remoteClient, int minX, int minY, int maxX, int maxY, uint flag) { m_blockitemsToRequest.Enqueue (new MapBlockRequester { maxX = maxX, maxY = maxY, minX = minX, minY = minY, mapBlocks = (uint)(flag & ~0x10000), remoteClient = remoteClient }); if (!blockRequesterIsRunning) blockthreadpool.QueueEvent(GetMapBlocks, 3); } protected virtual void GetAndSendMapBlocks(IClientAPI remoteClient, int minX, int minY, int maxX, int maxY, uint flag) { m_blockitemsToRequest.Enqueue(new MapBlockRequester { maxX = maxX, maxY = maxY, minX = minX, minY = minY, mapBlocks = 0,//Map remoteClient = remoteClient }); if (!blockRequesterIsRunning) blockthreadpool.QueueEvent(GetMapBlocks, 3); } protected virtual void GetAndSendTerrainBlocks(IClientAPI remoteClient, int minX, int minY, int maxX, int maxY, uint flag) { m_blockitemsToRequest.Enqueue (new MapBlockRequester { maxX = maxX, maxY = maxY, minX = minX, minY = minY, mapBlocks = 1,//Terrain remoteClient = remoteClient }); if (!blockRequesterIsRunning) blockthreadpool.QueueEvent(GetMapBlocks, 3); } private bool blockRequesterIsRunning; private readonly ConcurrentQueue<MapBlockRequester> m_blockitemsToRequest = new ConcurrentQueue<MapBlockRequester>(); private class MapBlockRequester { public int minX; public int minY; public int maxX; public int maxY; public uint mapBlocks; public IClientAPI remoteClient; } private void GetMapBlocks() { try { blockRequesterIsRunning = true; while(true) { MapBlockRequester item = null; if(!m_blockitemsToRequest.TryDequeue(out item)) break; List<MapBlockData> mapBlocks = new List<MapBlockData>(); List<GridRegion> regions = m_scene.GridService.GetRegionRange(item.remoteClient.AllScopeIDs, (item.minX - 4) * Constants.RegionSize, (item.maxX + 4) * Constants.RegionSize, (item.minY - 4) * Constants.RegionSize, (item.maxY + 4) * Constants.RegionSize); foreach (GridRegion region in regions) { if ((item.mapBlocks & 0) == 0 || (item.mapBlocks & 0x10000) != 0) mapBlocks.Add(MapBlockFromGridRegion(region)); else if ((item.mapBlocks & 1) == 1) mapBlocks.Add(TerrainBlockFromGridRegion(region)); else if ((item.mapBlocks & 2) == 2) //V2 viewer, we need to deal with it a bit mapBlocks.AddRange (Map2BlockFromGridRegion (region)); } item.remoteClient.SendMapBlock(mapBlocks, item.mapBlocks); Thread.Sleep (5); } } catch (Exception) { } blockRequesterIsRunning = false; } protected MapBlockData MapBlockFromGridRegion(GridRegion r) { MapBlockData block = new MapBlockData(); if (r == null) { block.Access = (byte)SimAccess.Down; block.MapImageID = UUID.Zero; return block; } block.Access = r.Access; block.MapImageID = r.TerrainImage; if ((r.Access & (byte)SimAccess.Down) == (byte)SimAccess.Down) block.Name = r.RegionName + " (offline)"; else block.Name = r.RegionName; block.X = (ushort)(r.RegionLocX / Constants.RegionSize); block.Y = (ushort)(r.RegionLocY / Constants.RegionSize); block.SizeX = (ushort)r.RegionSizeX; block.SizeY = (ushort)r.RegionSizeY; return block; } protected List<MapBlockData> Map2BlockFromGridRegion (GridRegion r) { List<MapBlockData> blocks = new List<MapBlockData> (); MapBlockData block = new MapBlockData (); if (r == null) { block.Access = (byte)SimAccess.Down; block.MapImageID = UUID.Zero; blocks.Add (block); return blocks; } block.Access = r.Access; 
block.MapImageID = r.TerrainImage; if ((r.Access & (byte)SimAccess.Down) == (byte)SimAccess.Down) block.Name = r.RegionName + " (offline)"; else block.Name = r.RegionName; block.X = (ushort)(r.RegionLocX / Constants.RegionSize); block.Y = (ushort)(r.RegionLocY / Constants.RegionSize); block.SizeX = (ushort)r.RegionSizeX; block.SizeY = (ushort)r.RegionSizeY; blocks.Add(block); if (r.RegionSizeX > Constants.RegionSize || r.RegionSizeY > Constants.RegionSize) { for (int x = 0; x < r.RegionSizeX / Constants.RegionSize; x++) { for (int y = 0; y < r.RegionSizeY / Constants.RegionSize; y++) { if (x == 0 && y == 0) continue; block = new MapBlockData { Access = r.Access, MapImageID = r.TerrainImage, Name = r.RegionName, X = (ushort) ((r.RegionLocX/Constants.RegionSize) + x), Y = (ushort) ((r.RegionLocY/Constants.RegionSize) + y), SizeX = (ushort) r.RegionSizeX, SizeY = (ushort) r.RegionSizeY }; //Child piece, so ignore it blocks.Add (block); } } } return blocks; } private void OnMapNameRequest (IClientAPI remoteClient, string mapName, uint flags) { if (mapName.Length < 1) { remoteClient.SendAlertMessage("Use a search string with at least 1 character"); return; } bool TryCoordsSearch = false; int XCoord = 0; int YCoord = 0; string[] splitSearch = mapName.Split(','); if (splitSearch.Length != 1) { if (splitSearch[1].StartsWith (" ")) splitSearch[1] = splitSearch[1].Remove (0, 1); if (int.TryParse(splitSearch[0], out XCoord) && int.TryParse(splitSearch[1], out YCoord)) TryCoordsSearch = true; } List<MapBlockData> blocks = new List<MapBlockData>(); List<GridRegion> regionInfos = m_scene.GridService.GetRegionsByName(remoteClient.AllScopeIDs, mapName, 0, 20); if (TryCoordsSearch) { GridRegion region = m_scene.GridService.GetRegionByPosition(remoteClient.AllScopeIDs, XCoord * Constants.RegionSize, YCoord * Constants.RegionSize); if (region != null) { region.RegionName = mapName + " - " + region.RegionName; regionInfos.Add (region); } } List<GridRegion> allRegions = new List<GridRegion> (); if (regionInfos != null) { foreach (GridRegion region in regionInfos) { //Add the found in search region first if (!allRegions.Contains(region)) { allRegions.Add(region); blocks.Add(SearchMapBlockFromGridRegion(region)); } //Then send surrounding regions List<GridRegion> regions = m_scene.GridService.GetRegionRange(remoteClient.AllScopeIDs, (region.RegionLocX - (4 * Constants.RegionSize)), (region.RegionLocX + (4 * Constants.RegionSize)), (region.RegionLocY - (4 * Constants.RegionSize)), (region.RegionLocY + (4 * Constants.RegionSize))); if (regions != null) { foreach (GridRegion r in regions) { if (!allRegions.Contains(r)) { allRegions.Add(r); blocks.Add(SearchMapBlockFromGridRegion(r)); } } } } } // final block, closing the search result MapBlockData data = new MapBlockData { Agents = 0, Access = 255, MapImageID = UUID.Zero, Name = mapName, RegionFlags = 0, WaterHeight = 0, X = 0, Y = 0, SizeX = 256, SizeY = 256 }; // not used blocks.Add(data); remoteClient.SendMapBlock (blocks, flags); } protected MapBlockData SearchMapBlockFromGridRegion(GridRegion r) { MapBlockData block = new MapBlockData (); if (r == null) { block.Access = (byte)SimAccess.Down; block.MapImageID = UUID.Zero; return block; } block.Access = r.Access; if ((r.Access & (byte)SimAccess.Down) == (byte)SimAccess.Down) block.Name = r.RegionName + " (offline)"; else block.Name = r.RegionName; block.MapImageID = r.TerrainImage; block.X = (ushort)(r.RegionLocX / Constants.RegionSize); block.Y = (ushort)(r.RegionLocY /
Constants.RegionSize); block.SizeX = (ushort)r.RegionSizeX; block.SizeY = (ushort)r.RegionSizeY; return block; } protected MapBlockData TerrainBlockFromGridRegion(GridRegion r) { MapBlockData block = new MapBlockData(); if (r == null) { block.Access = (byte)SimAccess.Down; block.MapImageID = UUID.Zero; return block; } block.Access = r.Access; block.MapImageID = r.TerrainMapImage; if ((r.Access & (byte)SimAccess.Down) == (byte)SimAccess.Down) block.Name = r.RegionName + " (offline)"; else block.Name = r.RegionName; block.X = (ushort)(r.RegionLocX / Constants.RegionSize); block.Y = (ushort)(r.RegionLocY / Constants.RegionSize); block.SizeX = (ushort)r.RegionSizeX; block.SizeY = (ushort)r.RegionSizeY; return block; } public Hashtable OnHTTPGetMapImage(Hashtable keysvals) { Hashtable reply = new Hashtable(); string regionImage = "regionImage" + m_scene.RegionInfo.RegionID.ToString(); regionImage = regionImage.Replace("-", ""); if (keysvals["method"].ToString() != regionImage) return reply; MainConsole.Instance.Debug("[WORLD MAP]: Sending map image jpeg"); const int statuscode = 200; byte[] jpeg = new byte[0]; MemoryStream imgstream = new MemoryStream(); Bitmap mapTexture = new Bitmap(1, 1); Image image = (Image)mapTexture; try { // Taking our jpeg2000 data, decoding it, then saving it to a byte array with regular jpeg data imgstream = new MemoryStream(); // non-async because we know we have the asset immediately. AssetBase mapasset = m_scene.AssetService.Get(m_scene.RegionInfo.RegionSettings.TerrainImageID.ToString()); if (mapasset != null) { image = m_scene.RequestModuleInterface<IJ2KDecoder>().DecodeToImage(mapasset.Data); // Decode image to System.Drawing.Image if (image != null) { // Save to bitmap mapTexture = new Bitmap(image); EncoderParameters myEncoderParameters = new EncoderParameters(); myEncoderParameters.Param[0] = new EncoderParameter(Encoder.Quality, 95L); // Save bitmap to stream mapTexture.Save(imgstream, GetEncoderInfo("image/jpeg"), myEncoderParameters); // Write the stream to a byte array for output jpeg = imgstream.ToArray(); } } } catch (Exception) { // Dummy! MainConsole.Instance.Warn("[WORLD MAP]: Unable to generate Map image"); } finally { // Reclaim memory, these are unmanaged resources // If we encountered an exception, one or more of these will be null mapTexture.Dispose(); if (image != null) image.Dispose(); imgstream.Close(); imgstream.Dispose(); } reply["str_response_string"] = Convert.ToBase64String(jpeg); reply["int_response_code"] = statuscode; reply["content_type"] = "image/jpeg"; return reply; } // From msdn private static ImageCodecInfo GetEncoderInfo(String mimeType) { ImageCodecInfo[] encoders = ImageCodecInfo.GetImageEncoders(); #if (!ISWIN) foreach (ImageCodecInfo t in encoders) { if (t.MimeType == mimeType) return t; } return null; #else return encoders.FirstOrDefault(t => t.MimeType == mimeType); #endif } /// <summary> /// Export the world map /// </summary> /// <param name="cmdparams"></param> public void HandleExportWorldMapConsoleCommand(string[] cmdparams) { if (MainConsole.Instance.ConsoleScene != m_scene) { if (MainConsole.Instance.ConsoleScene == null && !MainConsole.Instance.HasProcessedCurrentCommand) MainConsole.Instance.HasProcessedCurrentCommand = true; else return; } string exportPath = cmdparams.Length > 1 ? 
cmdparams[1] : DEFAULT_WORLD_MAP_EXPORT_PATH; MainConsole.Instance.InfoFormat( "[WORLD MAP]: Exporting world map for {0} to {1}", m_scene.RegionInfo.RegionName, exportPath); List<GridRegion> regions = m_scene.GridService.GetRegionRange(null, m_scene.RegionInfo.RegionLocX - (9 * Constants.RegionSize), m_scene.RegionInfo.RegionLocX + (9 * Constants.RegionSize), m_scene.RegionInfo.RegionLocY - (9 * Constants.RegionSize), m_scene.RegionInfo.RegionLocY + (9 * Constants.RegionSize)); List<Image> bitImages = new List<Image>(); #if (!ISWIN) List<AssetBase> textures = new List<AssetBase>(); foreach (GridRegion r in regions) { AssetBase texAsset = m_scene.AssetService.Get(r.TerrainImage.ToString()); if (texAsset != null) textures.Add(texAsset); } #else List<AssetBase> textures = regions.Select(r => m_scene.AssetService.Get(r.TerrainImage.ToString())).Where(texAsset => texAsset != null).ToList(); #endif foreach (AssetBase asset in textures) { Image image; if ((image = m_scene.RequestModuleInterface<IJ2KDecoder> ().DecodeToImage(asset.Data)) != null) bitImages.Add(image); } const int size = 2560; const int offsetSize = size / 10 / 2; Bitmap mapTexture = new Bitmap(size, size); Graphics g = Graphics.FromImage(mapTexture); SolidBrush sea = new SolidBrush(Color.DarkBlue); g.FillRectangle(sea, 0, 0, size, size); int regionXOffset = (m_scene.RegionInfo.RegionSizeX / 2 - 128) * -1;//Neg because the image is upside down const int regionYOffset = 0; // (m_scene.RegionInfo.RegionSizeY / 2 - 128) * -1; for (int i = 0; i < regions.Count; i++) { int regionSizeOffset = regions[i].RegionSizeX / 2 - 128; int x = ((regions[i].RegionLocX - m_scene.RegionInfo.RegionLocX) / Constants.RegionSize) + 10; int y = ((regions[i].RegionLocY - m_scene.RegionInfo.RegionLocY) / Constants.RegionSize) + 10; if(i < bitImages.Count) g.DrawImage(bitImages[i], (x * offsetSize) + regionXOffset, size - (y * offsetSize + regionSizeOffset) + regionYOffset, regions[i].RegionSizeX / 2, regions[i].RegionSizeY / 2); // y origin is top } mapTexture.Save(exportPath, ImageFormat.Jpeg); MainConsole.Instance.InfoFormat( "[WORLD MAP]: Successfully exported world map for {0} to {1}", m_scene.RegionInfo.RegionName, exportPath); } private class MapItemRequester { public ulong regionhandle = 0; public uint itemtype = 0; public IClientAPI remoteClient = null; public uint flags = 0; } } }
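// A minimal standalone sketch (illustrative only, not part of Aurora-Sim) of the coordinate math used by
// MapBlockFromGridRegion above: map blocks are indexed per region, so a region's world position in meters
// is divided by the region size. The region size of 256 and the sample coordinates are assumptions.
using System;

internal static class MapBlockCoordinateSketch
{
    private const int RegionSize = 256; // assumed value of Constants.RegionSize

    public static void Main()
    {
        // A region's location is stored in world coordinates (meters).
        int regionLocX = 256000;
        int regionLocY = 255744;

        // Convert to map-block (grid) coordinates, as MapBlockFromGridRegion does.
        ushort blockX = (ushort)(regionLocX / RegionSize);
        ushort blockY = (ushort)(regionLocY / RegionSize);

        Console.WriteLine($"map block = ({blockX}, {blockY})"); // prints (1000, 999)
    }
}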
using System; using System.Linq; using System.IO; using System.Collections.Generic; namespace FastCgiNet.Streams { /// <summary> /// Use this Stream whenever you have to work with a record's contents. This stream is specially designed /// to be optimal for socket operations (avoids unnecessary buffering) and to check for invalid operations when it comes to FastCgi records. /// </summary> public class RecordContentsStream : Stream { //TODO: Watch out for large byte arrays, since this would promote them straight to Gen2 of the GC, // while they are in fact short lived objects private long position; /// <summary> /// The blocks of memory that have been written to this stream. /// </summary> internal LinkedList<byte[]> MemoryBlocks; private int length; /// <summary> /// Whether this record's contents reached its maximum size. /// </summary> public bool IsFull { get { return length == RecordBase.MaxContentLength; } } public override void Flush() { } public override int Read(byte[] buffer, int offset, int count) { if (buffer == null) throw new ArgumentNullException("buffer"); else if (buffer.Length < offset + count) throw new ArgumentException("The sum of offset and count is larger than the buffer length"); else if (offset < 0 || count < 0) throw new ArgumentOutOfRangeException("offset or count is negative"); if (position == length) return 0; int positionSoFar = 0; int bytesCopied = 0; foreach (var arr in MemoryBlocks) { if (positionSoFar + arr.Length > position && positionSoFar < position + count) { int toArrayOffset = offset + bytesCopied; int startCopyingFrom = (int)position - positionSoFar; if (startCopyingFrom < 0) startCopyingFrom = 0; int copyLength = count - bytesCopied; if (copyLength + startCopyingFrom > arr.Length) copyLength = arr.Length - startCopyingFrom; Array.Copy(arr, startCopyingFrom, buffer, toArrayOffset, copyLength); bytesCopied += copyLength; } positionSoFar += arr.Length; } // Advances the stream position += bytesCopied; return bytesCopied; } public override long Seek(long offset, SeekOrigin origin) { // Users can seek anywhere, they just can't write or read out of bounds.. if (origin == SeekOrigin.Begin) position = offset; else if (origin == SeekOrigin.Current) position = position + offset; else if (origin == SeekOrigin.End) position = length + offset; return position; } public override void SetLength(long value) { throw new NotSupportedException(); } /// <summary> /// Writes to this stream, effectively copying bytes from <paramref name="buffer"/> to internal buffers. 
/// </summary> public override void Write(byte[] buffer, int offset, int count) { //TODO: Proper Write in positions other than the end of the stream if (position != length) throw new NotImplementedException("At the moment, only writing at the end of the stream is supported"); var internalBuffer = new byte[count]; Array.Copy (buffer, offset, internalBuffer, 0, count); MemoryBlocks.AddLast(internalBuffer); length += count; position += count; // Check that we didn't go over the size limit if (length > RecordBase.MaxContentLength) throw new InvalidOperationException("You can't write more than " + RecordBase.MaxContentLength + " bytes to a record's contents"); } public override bool CanRead { get { return true; } } public override bool CanSeek { get { return true; } } public override bool CanWrite { get { return true; } } public override long Length { get { return length; } } public override long Position { get { return position; } set { Seek(value, SeekOrigin.Begin); } } public override bool Equals (object obj) { if (obj == null) return false; var b = obj as RecordContentsStream; if (b == null) return false; // Check lengths first if (this.Length != b.Length) return false; // Compare byte by byte.. kind of expensive byte[] bufForB = new byte[128]; byte[] bufForA = new byte[128]; this.Position = 0; b.Position = 0; while (b.Read(bufForB, 0, bufForB.Length) > 0) { this.Read(bufForA, 0, bufForA.Length); if (!ByteUtils.AreEqual(bufForA, bufForB)) return false; } return true; } public override int GetHashCode() { return length + 31 * MemoryBlocks.Sum(mb => mb.GetHashCode()); } public RecordContentsStream() { MemoryBlocks = new LinkedList<byte[]>(); length = 0; position = 0; } } }
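// A minimal usage sketch (assumption: not part of FastCgiNet itself) showing how RecordContentsStream
// keeps each Write as its own memory block and reads the content back transparently across block
// boundaries, up to RecordBase.MaxContentLength bytes in total.
using System;
using System.Text;
using FastCgiNet.Streams;

internal static class RecordContentsStreamSketch
{
    public static void Main()
    {
        var contents = new RecordContentsStream();

        // Each Write becomes one entry in the internal linked list of memory blocks.
        byte[] first = Encoding.ASCII.GetBytes("Hello, ");
        byte[] second = Encoding.ASCII.GetBytes("FastCgi!");
        contents.Write(first, 0, first.Length);
        contents.Write(second, 0, second.Length);

        // Rewind and read everything back; Read copies across the block boundary.
        contents.Position = 0;
        var buffer = new byte[contents.Length];
        int read = contents.Read(buffer, 0, buffer.Length);

        Console.WriteLine(Encoding.ASCII.GetString(buffer, 0, read)); // "Hello, FastCgi!"
    }
}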
// SF API version v50.0 // Custom fields included: False // Relationship objects included: True using System; using NetCoreForce.Client.Models; using NetCoreForce.Client.Attributes; using Newtonsoft.Json; namespace NetCoreForce.Models { ///<summary> /// Refund Line Payment ///<para>SObject Name: RefundLinePayment</para> ///<para>Custom Object: False</para> ///</summary> public class SfRefundLinePayment : SObject { [JsonIgnore] public static string SObjectTypeName { get { return "RefundLinePayment"; } } ///<summary> /// Refund Line Payment ID /// <para>Name: Id</para> /// <para>SF Type: id</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "id")] [Updateable(false), Createable(false)] public string Id { get; set; } ///<summary> /// Deleted /// <para>Name: IsDeleted</para> /// <para>SF Type: boolean</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "isDeleted")] [Updateable(false), Createable(false)] public bool? IsDeleted { get; set; } ///<summary> /// Refund Line Payment Number /// <para>Name: RefundLinePaymentNumber</para> /// <para>SF Type: string</para> /// <para>AutoNumber field</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "refundLinePaymentNumber")] [Updateable(false), Createable(false)] public string RefundLinePaymentNumber { get; set; } ///<summary> /// Created Date /// <para>Name: CreatedDate</para> /// <para>SF Type: datetime</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "createdDate")] [Updateable(false), Createable(false)] public DateTimeOffset? CreatedDate { get; set; } ///<summary> /// Created By ID /// <para>Name: CreatedById</para> /// <para>SF Type: reference</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "createdById")] [Updateable(false), Createable(false)] public string CreatedById { get; set; } ///<summary> /// ReferenceTo: User /// <para>RelationshipName: CreatedBy</para> ///</summary> [JsonProperty(PropertyName = "createdBy")] [Updateable(false), Createable(false)] public SfUser CreatedBy { get; set; } ///<summary> /// Last Modified Date /// <para>Name: LastModifiedDate</para> /// <para>SF Type: datetime</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "lastModifiedDate")] [Updateable(false), Createable(false)] public DateTimeOffset? LastModifiedDate { get; set; } ///<summary> /// Last Modified By ID /// <para>Name: LastModifiedById</para> /// <para>SF Type: reference</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "lastModifiedById")] [Updateable(false), Createable(false)] public string LastModifiedById { get; set; } ///<summary> /// ReferenceTo: User /// <para>RelationshipName: LastModifiedBy</para> ///</summary> [JsonProperty(PropertyName = "lastModifiedBy")] [Updateable(false), Createable(false)] public SfUser LastModifiedBy { get; set; } ///<summary> /// System Modstamp /// <para>Name: SystemModstamp</para> /// <para>SF Type: datetime</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "systemModstamp")] [Updateable(false), Createable(false)] public DateTimeOffset? 
SystemModstamp { get; set; } ///<summary> /// Payment ID /// <para>Name: PaymentId</para> /// <para>SF Type: reference</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "paymentId")] [Updateable(false), Createable(true)] public string PaymentId { get; set; } ///<summary> /// ReferenceTo: Payment /// <para>RelationshipName: Payment</para> ///</summary> [JsonProperty(PropertyName = "payment")] [Updateable(false), Createable(false)] public SfPayment Payment { get; set; } ///<summary> /// Refund ID /// <para>Name: RefundId</para> /// <para>SF Type: reference</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "refundId")] [Updateable(false), Createable(true)] public string RefundId { get; set; } ///<summary> /// ReferenceTo: Refund /// <para>RelationshipName: Refund</para> ///</summary> [JsonProperty(PropertyName = "refund")] [Updateable(false), Createable(false)] public SfRefund Refund { get; set; } ///<summary> /// Amount /// <para>Name: Amount</para> /// <para>SF Type: currency</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "amount")] [Updateable(false), Createable(true)] public decimal? Amount { get; set; } ///<summary> /// Type /// <para>Name: Type</para> /// <para>SF Type: picklist</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "type")] [Updateable(false), Createable(true)] public string Type { get; set; } ///<summary> /// Has Been Unapplied /// <para>Name: HasBeenUnapplied</para> /// <para>SF Type: picklist</para> /// <para>Nillable: False</para> ///</summary> [JsonProperty(PropertyName = "hasBeenUnapplied")] [Updateable(false), Createable(true)] public string HasBeenUnapplied { get; set; } ///<summary> /// Comments /// <para>Name: Comments</para> /// <para>SF Type: textarea</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "comments")] public string Comments { get; set; } ///<summary> /// Date /// <para>Name: Date</para> /// <para>SF Type: datetime</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "date")] [Updateable(false), Createable(true)] public DateTimeOffset? Date { get; set; } ///<summary> /// Applied Date /// <para>Name: AppliedDate</para> /// <para>SF Type: datetime</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "appliedDate")] [Updateable(false), Createable(true)] public DateTimeOffset? AppliedDate { get; set; } ///<summary> /// Effective Date /// <para>Name: EffectiveDate</para> /// <para>SF Type: datetime</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "effectiveDate")] [Updateable(false), Createable(true)] public DateTimeOffset? EffectiveDate { get; set; } ///<summary> /// Unapplied Date /// <para>Name: UnappliedDate</para> /// <para>SF Type: datetime</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "unappliedDate")] [Updateable(false), Createable(true)] public DateTimeOffset? 
UnappliedDate { get; set; } ///<summary> /// Account ID /// <para>Name: AssociatedAccountId</para> /// <para>SF Type: reference</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "associatedAccountId")] [Updateable(false), Createable(true)] public string AssociatedAccountId { get; set; } ///<summary> /// ReferenceTo: Account /// <para>RelationshipName: AssociatedAccount</para> ///</summary> [JsonProperty(PropertyName = "associatedAccount")] [Updateable(false), Createable(false)] public SfAccount AssociatedAccount { get; set; } ///<summary> /// Refund Line Payment ID /// <para>Name: AssociatedRefundLinePaymentId</para> /// <para>SF Type: reference</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "associatedRefundLinePaymentId")] [Updateable(false), Createable(true)] public string AssociatedRefundLinePaymentId { get; set; } ///<summary> /// ReferenceTo: RefundLinePayment /// <para>RelationshipName: AssociatedRefundLinePayment</para> ///</summary> [JsonProperty(PropertyName = "associatedRefundLinePayment")] [Updateable(false), Createable(false)] public SfRefundLinePayment AssociatedRefundLinePayment { get; set; } ///<summary> /// Impact Amount /// <para>Name: ImpactAmount</para> /// <para>SF Type: currency</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "impactAmount")] [Updateable(false), Createable(false)] public decimal? ImpactAmount { get; set; } ///<summary> /// Effective Impact Amount /// <para>Name: EffectiveImpactAmount</para> /// <para>SF Type: currency</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "effectiveImpactAmount")] [Updateable(false), Createable(false)] public decimal? EffectiveImpactAmount { get; set; } ///<summary> /// Refund Balance /// <para>Name: RefundBalance</para> /// <para>SF Type: currency</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "refundBalance")] [Updateable(false), Createable(false)] public decimal? RefundBalance { get; set; } ///<summary> /// Payment Balance /// <para>Name: PaymentBalance</para> /// <para>SF Type: currency</para> /// <para>Nillable: True</para> ///</summary> [JsonProperty(PropertyName = "paymentBalance")] [Updateable(false), Createable(false)] public decimal? PaymentBalance { get; set; } } }
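// Hypothetical usage sketch (the JSON payload and field values below are invented): the camelCase names
// in the payload match the [JsonProperty] attributes on SfRefundLinePayment, so a raw Salesforce REST
// response row can be mapped straight onto the strongly typed model with Json.NET.
using System;
using Newtonsoft.Json;
using NetCoreForce.Models;

internal static class RefundLinePaymentSketch
{
    public static void Main()
    {
        const string json = @"{
            ""id"": ""0QLxx0000000001AAA"",
            ""amount"": 25.00,
            ""type"": ""Applied"",
            ""hasBeenUnapplied"": ""No""
        }";

        // Deserialize the record; properties absent from the payload simply stay null.
        SfRefundLinePayment line = JsonConvert.DeserializeObject<SfRefundLinePayment>(json);
        Console.WriteLine($"{line.Id}: {line.Amount:0.00} ({line.Type})");
    }
}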
using Lucene.Net.Randomized.Generators; using Lucene.Net.Util; using System; using System.Collections.Generic; using System.Diagnostics; using Console = Lucene.Net.Support.SystemConsole; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using Analyzer = Lucene.Net.Analysis.Analyzer; using BytesRef = Lucene.Net.Util.BytesRef; using Codec = Lucene.Net.Codecs.Codec; using Directory = Lucene.Net.Store.Directory; using InfoStream = Lucene.Net.Util.InfoStream; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using NullInfoStream = Lucene.Net.Util.NullInfoStream; using Query = Lucene.Net.Search.Query; using Similarity = Search.Similarities.Similarity; using TestUtil = Lucene.Net.Util.TestUtil; /// <summary> /// Silly class that randomizes the indexing experience. EG /// it may swap in a different merge policy/scheduler; may /// commit periodically; may or may not forceMerge in the end, /// may flush by doc count instead of RAM, etc. /// </summary> public class RandomIndexWriter : IDisposable { public IndexWriter w; private readonly Random r; internal int DocCount; internal int FlushAt; private double FlushAtFactor = 1.0; private bool GetReaderCalled; private readonly Codec Codec; // sugar public static IndexWriter MockIndexWriter(Directory dir, IndexWriterConfig conf, Random r) { // Randomly calls Thread.yield so we mixup thread scheduling Random random = new Random(r.Next()); return MockIndexWriter(dir, conf, new TestPointAnonymousInnerClassHelper(random)); } private class TestPointAnonymousInnerClassHelper : TestPoint { private Random Random; public TestPointAnonymousInnerClassHelper(Random random) { this.Random = random; } public virtual void Apply(string message) { if (Random.Next(4) == 2) { System.Threading.Thread.Sleep(0); } } } public static IndexWriter MockIndexWriter(Directory dir, IndexWriterConfig conf, TestPoint testPoint) { conf.SetInfoStream(new TestPointInfoStream(conf.InfoStream, testPoint)); return new IndexWriter(dir, conf); } /// <summary> /// create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT and MockAnalyzer /// /// LUCENENET specific /// Similarity and TimeZone parameters allow a RandomIndexWriter to be /// created without adding a dependency on /// <see cref="LuceneTestCase.ClassEnv.Similarity"/> and /// <see cref="LuceneTestCase.ClassEnv.TimeZone"/> /// </summary> public RandomIndexWriter(Random r, Directory dir, Similarity similarity, TimeZoneInfo timezone) : this(r, dir, LuceneTestCase.NewIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r), similarity, timezone)) { } /// <summary> /// create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT /// /// LUCENENET specific /// Similarity and TimeZone 
parameters allow a RandomIndexWriter to be /// created without adding a dependency on /// <see cref="LuceneTestCase.ClassEnv.Similarity"/> and /// <see cref="LuceneTestCase.ClassEnv.TimeZone"/> /// </summary> public RandomIndexWriter(Random r, Directory dir, Analyzer a, Similarity similarity, TimeZoneInfo timezone) : this(r, dir, LuceneTestCase.NewIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, a, similarity, timezone)) { } /// <summary> /// Creates a RandomIndexWriter with a random config /// /// LUCENENET specific /// Similarity and TimeZone parameters allow a RandomIndexWriter to be /// created without adding a dependency on /// <see cref="LuceneTestCase.ClassEnv.Similarity"/> and /// <see cref="LuceneTestCase.ClassEnv.TimeZone"/> /// </summary> public RandomIndexWriter(Random r, Directory dir, LuceneVersion v, Analyzer a, Similarity similarity, TimeZoneInfo timezone) : this(r, dir, LuceneTestCase.NewIndexWriterConfig(r, v, a, similarity, timezone)) { } /// <summary> /// create a RandomIndexWriter with the provided config </summary> public RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c) { // TODO: this should be solved in a different way; Random should not be shared (!). this.r = new Random(r.Next()); w = MockIndexWriter(dir, c, r); FlushAt = TestUtil.NextInt(r, 10, 1000); Codec = w.Config.Codec; if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW dir=" + dir + " config=" + w.Config); Console.WriteLine("codec default=" + Codec.Name); } // Make sure we sometimes test indices that don't get // any forced merges: DoRandomForceMerge_Renamed = !(c.MergePolicy is NoMergePolicy) && r.NextBoolean(); } /// <summary> /// Adds a Document. </summary> /// <seealso cref= IndexWriter#addDocument(Iterable) </seealso> public virtual void AddDocument(IEnumerable<IIndexableField> doc) { AddDocument(doc, w.Analyzer); } public virtual void AddDocument(IEnumerable<IIndexableField> doc, Analyzer a) { if (r.Next(5) == 3) { // TODO: maybe, we should simply buffer up added docs // (but we need to clone them), and only when // getReader, commit, etc. are called, we do an // addDocuments? Would be better testing. 
w.AddDocuments(new IterableAnonymousInnerClassHelper<IIndexableField>(this, doc), a); } else { w.AddDocument(doc, a); } MaybeCommit(); } private class IterableAnonymousInnerClassHelper<IndexableField> : IEnumerable<IEnumerable<IndexableField>> { private readonly RandomIndexWriter OuterInstance; private IEnumerable<IndexableField> Doc; public IterableAnonymousInnerClassHelper(RandomIndexWriter outerInstance, IEnumerable<IndexableField> doc) { this.OuterInstance = outerInstance; this.Doc = doc; } public IEnumerator<IEnumerable<IndexableField>> GetEnumerator() { return new IteratorAnonymousInnerClassHelper(this); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return GetEnumerator(); } private class IteratorAnonymousInnerClassHelper : IEnumerator<IEnumerable<IndexableField>> { private readonly IterableAnonymousInnerClassHelper<IndexableField> OuterInstance; public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper<IndexableField> outerInstance) { this.OuterInstance = outerInstance; } internal bool done; private IEnumerable<IndexableField> current; public bool MoveNext() { if (done) { return false; } done = true; current = OuterInstance.Doc; return true; } public IEnumerable<IndexableField> Current { get { return current; } } object System.Collections.IEnumerator.Current { get { return Current; } } public void Reset() { throw new NotImplementedException(); } public void Dispose() { } } } private void MaybeCommit() { if (DocCount++ == FlushAt) { if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW.add/updateDocument: now doing a commit at docCount=" + DocCount); } w.Commit(); FlushAt += TestUtil.NextInt(r, (int)(FlushAtFactor * 10), (int)(FlushAtFactor * 1000)); if (FlushAtFactor < 2e6) { // gradually but exponentially increase time b/w flushes FlushAtFactor *= 1.05; } } } public virtual void AddDocuments(IEnumerable<IEnumerable<IIndexableField>> docs) { w.AddDocuments(docs); MaybeCommit(); } public virtual void UpdateDocuments(Term delTerm, IEnumerable<IEnumerable<IIndexableField>> docs) { w.UpdateDocuments(delTerm, docs); MaybeCommit(); } /// <summary> /// Updates a document. 
</summary> /// <seealso cref= IndexWriter#updateDocument(Term, Iterable) </seealso> public virtual void UpdateDocument(Term t, IEnumerable<IIndexableField> doc) { if (r.Next(5) == 3) { w.UpdateDocuments(t, new IterableAnonymousInnerClassHelper2(this, doc)); } else { w.UpdateDocument(t, doc); } MaybeCommit(); } private class IterableAnonymousInnerClassHelper2 : IEnumerable<IEnumerable<IIndexableField>> { private readonly RandomIndexWriter OuterInstance; private IEnumerable<IIndexableField> Doc; public IterableAnonymousInnerClassHelper2(RandomIndexWriter outerInstance, IEnumerable<IIndexableField> doc) { this.OuterInstance = outerInstance; this.Doc = doc; } public IEnumerator<IEnumerable<IIndexableField>> GetEnumerator() { return new IteratorAnonymousInnerClassHelper2(this); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return GetEnumerator(); } private class IteratorAnonymousInnerClassHelper2 : IEnumerator<IEnumerable<IIndexableField>> { private readonly IterableAnonymousInnerClassHelper2 OuterInstance; public IteratorAnonymousInnerClassHelper2(IterableAnonymousInnerClassHelper2 outerInstance) { this.OuterInstance = outerInstance; } internal bool done; private IEnumerable<IIndexableField> current; public bool MoveNext() { if (done) { return false; } done = true; current = OuterInstance.Doc; return true; } public IEnumerable<IIndexableField> Current { get { return current; } } object System.Collections.IEnumerator.Current { get { return Current; } } public virtual void Reset() { throw new NotImplementedException(); } public void Dispose() { } } } public virtual void AddIndexes(params Directory[] dirs) { w.AddIndexes(dirs); } public virtual void AddIndexes(params IndexReader[] readers) { w.AddIndexes(readers); } public virtual void UpdateNumericDocValue(Term term, string field, long? 
value) { w.UpdateNumericDocValue(term, field, value); } public virtual void UpdateBinaryDocValue(Term term, string field, BytesRef value) { w.UpdateBinaryDocValue(term, field, value); } public virtual void DeleteDocuments(Term term) { w.DeleteDocuments(term); } public virtual void DeleteDocuments(Query q) { w.DeleteDocuments(q); } public virtual void Commit() { w.Commit(); } public virtual int NumDocs { get { return w.NumDocs; } } public virtual int MaxDoc { get { return w.MaxDoc; } } public virtual void DeleteAll() { w.DeleteAll(); } public virtual DirectoryReader Reader { get { return GetReader(true); } } private bool DoRandomForceMerge_Renamed = true; private bool DoRandomForceMergeAssert_Renamed = true; public virtual void ForceMergeDeletes(bool doWait) { w.ForceMergeDeletes(doWait); } public virtual void ForceMergeDeletes() { w.ForceMergeDeletes(); } public virtual bool RandomForceMerge { set { DoRandomForceMerge_Renamed = value; } } public virtual bool DoRandomForceMergeAssert { set { DoRandomForceMergeAssert_Renamed = value; } } private void DoRandomForceMerge() { if (DoRandomForceMerge_Renamed) { int segCount = w.SegmentCount; if (r.NextBoolean() || segCount == 0) { // full forceMerge if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW: doRandomForceMerge(1)"); } w.ForceMerge(1); } else { // partial forceMerge int limit = TestUtil.NextInt(r, 1, segCount); if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW: doRandomForceMerge(" + limit + ")"); } w.ForceMerge(limit); Debug.Assert(!DoRandomForceMergeAssert_Renamed || w.SegmentCount <= limit, "limit=" + limit + " actual=" + w.SegmentCount); } } } public virtual DirectoryReader GetReader(bool applyDeletions) { GetReaderCalled = true; if (r.Next(20) == 2) { DoRandomForceMerge(); } // If we are writing with PreFlexRW, force a full // IndexReader.open so terms are sorted in codepoint // order during searching: if (!applyDeletions || !Codec.Name.Equals("Lucene3x", StringComparison.Ordinal) && r.NextBoolean()) { if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW.getReader: use NRT reader"); } if (r.Next(5) == 1) { w.Commit(); } return w.GetReader(applyDeletions); } else { if (LuceneTestCase.VERBOSE) { Console.WriteLine("RIW.getReader: open new reader"); } w.Commit(); if (r.NextBoolean()) { return DirectoryReader.Open(w.Directory, TestUtil.NextInt(r, 1, 10)); } else { return w.GetReader(applyDeletions); } } } /// <summary> /// Close this writer. </summary> /// <seealso cref= IndexWriter#close() </seealso> public void Dispose() { // if someone isn't using getReader() API, we want to be sure to // forceMerge since presumably they might open a reader on the dir. if (GetReaderCalled == false && r.Next(8) == 2) { DoRandomForceMerge(); } w.Dispose(); } /// <summary> /// Forces a forceMerge. /// <p> /// NOTE: this should be avoided in tests unless absolutely necessary, /// as it will result in less test coverage. </summary> /// <seealso cref= IndexWriter#forceMerge(int) </seealso> public virtual void ForceMerge(int maxSegmentCount) { w.ForceMerge(maxSegmentCount); } public sealed class TestPointInfoStream : InfoStream { internal readonly InfoStream @delegate; internal readonly TestPoint TestPoint; public TestPointInfoStream(InfoStream @delegate, TestPoint testPoint) { this.@delegate = @delegate ?? 
new NullInfoStream(); this.TestPoint = testPoint; } protected override void Dispose(bool disposing) { if (disposing) { @delegate.Dispose(); } } public override void Message(string component, string message) { if ("TP".Equals(component, StringComparison.Ordinal)) { TestPoint.Apply(message); } if (@delegate.IsEnabled(component)) { @delegate.Message(component, message); } } public override bool IsEnabled(string component) { return "TP".Equals(component, StringComparison.Ordinal) || @delegate.IsEnabled(component); } } /// <summary> /// Simple interface that is executed for each <tt>TP</tt> <seealso cref="InfoStream"/> component /// message. See also <seealso cref="RandomIndexWriter#mockIndexWriter(Directory, IndexWriterConfig, TestPoint)"/> /// </summary> public interface TestPoint { void Apply(string message); } } }
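// A usage sketch (illustrative only, not taken from the Lucene.NET test suite) that drives RandomIndexWriter
// with an explicit IndexWriterConfig, letting it randomize commits, flushes and force-merges around an
// ordinary add/read cycle. The field names and seed are assumptions.
using System;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

internal static class RandomIndexWriterSketch
{
    public static void Main()
    {
        var random = new Random(42);
        using (Directory dir = new RAMDirectory())
        {
            var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, new MockAnalyzer(random));
            var writer = new RandomIndexWriter(random, dir, config);

            var doc = new Document();
            doc.Add(new StringField("id", "1", Field.Store.YES));
            writer.AddDocument(doc); // may buffer, commit, or yield randomly under the hood

            using (DirectoryReader reader = writer.GetReader(true))
            {
                Console.WriteLine("docs visible: " + reader.NumDocs);
            }

            writer.Dispose(); // may trigger a random ForceMerge before closing
        }
    }
}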
namespace IdSharpHarness { partial class ID3v1UserControl { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.txtFilename = new System.Windows.Forms.TextBox(); this.lblArtist = new System.Windows.Forms.Label(); this.txtArtist = new System.Windows.Forms.TextBox(); this.txtTitle = new System.Windows.Forms.TextBox(); this.txtAlbum = new System.Windows.Forms.TextBox(); this.txtYear = new System.Windows.Forms.TextBox(); this.lblTitle = new System.Windows.Forms.Label(); this.lblAlbum = new System.Windows.Forms.Label(); this.lblTrack = new System.Windows.Forms.Label(); this.lblGenre = new System.Windows.Forms.Label(); this.txtTrackNumber = new System.Windows.Forms.TextBox(); this.lblYear = new System.Windows.Forms.Label(); this.cmbID3v1 = new System.Windows.Forms.ComboBox(); this.lblFilename = new System.Windows.Forms.Label(); this.lblID3v1 = new System.Windows.Forms.Label(); this.cmbGenre = new System.Windows.Forms.ComboBox(); this.txtComment = new System.Windows.Forms.TextBox(); this.lblComment = new System.Windows.Forms.Label(); this.SuspendLayout(); // // txtFilename // this.txtFilename.Location = new System.Drawing.Point(94, 9); this.txtFilename.Name = "txtFilename"; this.txtFilename.ReadOnly = true; this.txtFilename.Size = new System.Drawing.Size(310, 20); this.txtFilename.TabIndex = 101; this.txtFilename.TabStop = false; // // lblArtist // this.lblArtist.AutoSize = true; this.lblArtist.Location = new System.Drawing.Point(10, 65); this.lblArtist.Name = "lblArtist"; this.lblArtist.Size = new System.Drawing.Size(30, 13); this.lblArtist.TabIndex = 106; this.lblArtist.Text = "Artist"; // // txtArtist // this.txtArtist.Location = new System.Drawing.Point(94, 62); this.txtArtist.MaxLength = 30; this.txtArtist.Name = "txtArtist"; this.txtArtist.Size = new System.Drawing.Size(310, 20); this.txtArtist.TabIndex = 104; // // txtTitle // this.txtTitle.Location = new System.Drawing.Point(94, 88); this.txtTitle.MaxLength = 30; this.txtTitle.Name = "txtTitle"; this.txtTitle.Size = new System.Drawing.Size(310, 20); this.txtTitle.TabIndex = 105; // // txtAlbum // this.txtAlbum.Location = new System.Drawing.Point(94, 114); this.txtAlbum.MaxLength = 30; this.txtAlbum.Name = "txtAlbum"; this.txtAlbum.Size = new System.Drawing.Size(310, 20); this.txtAlbum.TabIndex = 107; // // txtYear // this.txtYear.Location = new System.Drawing.Point(94, 166); this.txtYear.MaxLength = 4; this.txtYear.Name = "txtYear"; this.txtYear.Size = new System.Drawing.Size(75, 20); this.txtYear.TabIndex = 114; // // lblTitle // this.lblTitle.AutoSize = true; this.lblTitle.Location = new System.Drawing.Point(10, 91); this.lblTitle.Name = "lblTitle"; this.lblTitle.Size = new System.Drawing.Size(27, 13); this.lblTitle.TabIndex = 108; this.lblTitle.Text = "Title"; // // lblAlbum // this.lblAlbum.AutoSize = true; this.lblAlbum.Location = new System.Drawing.Point(10, 117); this.lblAlbum.Name = "lblAlbum"; this.lblAlbum.Size = new 
System.Drawing.Size(36, 13); this.lblAlbum.TabIndex = 109; this.lblAlbum.Text = "Album"; // // lblTrack // this.lblTrack.AutoSize = true; this.lblTrack.Location = new System.Drawing.Point(10, 195); this.lblTrack.Name = "lblTrack"; this.lblTrack.Size = new System.Drawing.Size(35, 13); this.lblTrack.TabIndex = 122; this.lblTrack.Text = "Track"; // // lblGenre // this.lblGenre.AutoSize = true; this.lblGenre.Location = new System.Drawing.Point(10, 143); this.lblGenre.Name = "lblGenre"; this.lblGenre.Size = new System.Drawing.Size(36, 13); this.lblGenre.TabIndex = 110; this.lblGenre.Text = "Genre"; // // txtTrackNumber // this.txtTrackNumber.Location = new System.Drawing.Point(94, 192); this.txtTrackNumber.MaxLength = 3; this.txtTrackNumber.Name = "txtTrackNumber"; this.txtTrackNumber.Size = new System.Drawing.Size(75, 20); this.txtTrackNumber.TabIndex = 115; // // lblYear // this.lblYear.AutoSize = true; this.lblYear.Location = new System.Drawing.Point(10, 169); this.lblYear.Name = "lblYear"; this.lblYear.Size = new System.Drawing.Size(29, 13); this.lblYear.TabIndex = 112; this.lblYear.Text = "Year"; // // cmbID3v1 // this.cmbID3v1.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cmbID3v1.FormattingEnabled = true; this.cmbID3v1.Items.AddRange(new object[] { "ID3v1.0", "ID3v1.1"}); this.cmbID3v1.Location = new System.Drawing.Point(94, 35); this.cmbID3v1.Name = "cmbID3v1"; this.cmbID3v1.Size = new System.Drawing.Size(75, 21); this.cmbID3v1.TabIndex = 103; this.cmbID3v1.SelectedIndexChanged += new System.EventHandler(this.cmbID3v1_SelectedIndexChanged); // // lblFilename // this.lblFilename.AutoSize = true; this.lblFilename.Location = new System.Drawing.Point(10, 12); this.lblFilename.Name = "lblFilename"; this.lblFilename.Size = new System.Drawing.Size(52, 13); this.lblFilename.TabIndex = 113; this.lblFilename.Text = "File name"; // // lblID3v1 // this.lblID3v1.AutoSize = true; this.lblID3v1.Location = new System.Drawing.Point(10, 38); this.lblID3v1.Name = "lblID3v1"; this.lblID3v1.Size = new System.Drawing.Size(36, 13); this.lblID3v1.TabIndex = 121; this.lblID3v1.Text = "ID3v1"; // // cmbGenre // this.cmbGenre.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cmbGenre.FormattingEnabled = true; this.cmbGenre.Location = new System.Drawing.Point(94, 140); this.cmbGenre.Name = "cmbGenre"; this.cmbGenre.Size = new System.Drawing.Size(146, 21); this.cmbGenre.TabIndex = 111; // // txtComment // this.txtComment.Location = new System.Drawing.Point(94, 218); this.txtComment.Name = "txtComment"; this.txtComment.Size = new System.Drawing.Size(310, 20); this.txtComment.TabIndex = 123; // // lblComment // this.lblComment.AutoSize = true; this.lblComment.Location = new System.Drawing.Point(10, 221); this.lblComment.Name = "lblComment"; this.lblComment.Size = new System.Drawing.Size(51, 13); this.lblComment.TabIndex = 124; this.lblComment.Text = "Comment"; // // ID3v1UserControl // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.Controls.Add(this.txtComment); this.Controls.Add(this.lblComment); this.Controls.Add(this.txtFilename); this.Controls.Add(this.lblArtist); this.Controls.Add(this.txtArtist); this.Controls.Add(this.txtTitle); this.Controls.Add(this.txtAlbum); this.Controls.Add(this.txtYear); this.Controls.Add(this.lblTitle); this.Controls.Add(this.lblAlbum); this.Controls.Add(this.lblTrack); this.Controls.Add(this.lblGenre); this.Controls.Add(this.txtTrackNumber); 
this.Controls.Add(this.lblYear); this.Controls.Add(this.cmbID3v1); this.Controls.Add(this.lblFilename); this.Controls.Add(this.lblID3v1); this.Controls.Add(this.cmbGenre); this.Name = "ID3v1UserControl"; this.Size = new System.Drawing.Size(704, 333); this.ResumeLayout(false); this.PerformLayout(); } #endregion private System.Windows.Forms.TextBox txtFilename; private System.Windows.Forms.Label lblArtist; private System.Windows.Forms.TextBox txtArtist; private System.Windows.Forms.TextBox txtTitle; private System.Windows.Forms.TextBox txtAlbum; private System.Windows.Forms.TextBox txtYear; private System.Windows.Forms.Label lblTitle; private System.Windows.Forms.Label lblAlbum; private System.Windows.Forms.Label lblTrack; private System.Windows.Forms.Label lblGenre; private System.Windows.Forms.TextBox txtTrackNumber; private System.Windows.Forms.Label lblYear; private System.Windows.Forms.ComboBox cmbID3v1; private System.Windows.Forms.Label lblFilename; private System.Windows.Forms.Label lblID3v1; private System.Windows.Forms.ComboBox cmbGenre; private System.Windows.Forms.TextBox txtComment; private System.Windows.Forms.Label lblComment; } }
// Copyright (c) The Avalonia Project. All rights reserved. // Licensed under the MIT license. See licence.md file in the project root for full license information. using System; using System.Reactive.Disposables; using System.Reactive.Linq; using Avalonia.Controls.Platform; using Avalonia.Controls.Primitives; using Avalonia.Input; using Avalonia.Input.Raw; using Avalonia.Layout; using Avalonia.Logging; using Avalonia.Platform; using Avalonia.Rendering; using Avalonia.Styling; using Avalonia.VisualTree; namespace Avalonia.Controls { /// <summary> /// Base class for top-level windows. /// </summary> /// <remarks> /// This class acts as a base for top level windows such as <see cref="Window"/> and /// <see cref="PopupRoot"/>. It handles scheduling layout, styling and rendering as well as /// tracking the window <see cref="ClientSize"/> and <see cref="IsActive"/> state. /// </remarks> public abstract class TopLevel : ContentControl, IInputRoot, ILayoutRoot, IRenderRoot, ICloseable, IStyleRoot { /// <summary> /// Defines the <see cref="ClientSize"/> property. /// </summary> public static readonly DirectProperty<TopLevel, Size> ClientSizeProperty = AvaloniaProperty.RegisterDirect<TopLevel, Size>(nameof(ClientSize), o => o.ClientSize); /// <summary> /// Defines the <see cref="IsActive"/> property. /// </summary> public static readonly DirectProperty<TopLevel, bool> IsActiveProperty = AvaloniaProperty.RegisterDirect<TopLevel, bool>(nameof(IsActive), o => o.IsActive); /// <summary> /// Defines the <see cref="IInputRoot.PointerOverElement"/> property. /// </summary> public static readonly StyledProperty<IInputElement> PointerOverElementProperty = AvaloniaProperty.Register<TopLevel, IInputElement>(nameof(IInputRoot.PointerOverElement)); private readonly IInputManager _inputManager; private readonly IAccessKeyHandler _accessKeyHandler; private readonly IKeyboardNavigationHandler _keyboardNavigationHandler; private readonly IApplicationLifecycle _applicationLifecycle; private readonly IPlatformRenderInterface _renderInterface; private Size _clientSize; private bool _isActive; /// <summary> /// Initializes static members of the <see cref="TopLevel"/> class. /// </summary> static TopLevel() { AffectsMeasure(ClientSizeProperty); } /// <summary> /// Initializes a new instance of the <see cref="TopLevel"/> class. /// </summary> /// <param name="impl">The platform-specific window implementation.</param> public TopLevel(ITopLevelImpl impl) : this(impl, AvaloniaLocator.Current) { } /// <summary> /// Initializes a new instance of the <see cref="TopLevel"/> class. /// </summary> /// <param name="impl">The platform-specific window implementation.</param> /// <param name="dependencyResolver"> /// The dependency resolver to use. If null the default dependency resolver will be used. /// </param> public TopLevel(ITopLevelImpl impl, IAvaloniaDependencyResolver dependencyResolver) { if (impl == null) { throw new InvalidOperationException( "Could not create window implementation: maybe no windowing subsystem was initialized?"); } PlatformImpl = impl; dependencyResolver = dependencyResolver ?? 
AvaloniaLocator.Current; var styler = TryGetService<IStyler>(dependencyResolver); _accessKeyHandler = TryGetService<IAccessKeyHandler>(dependencyResolver); _inputManager = TryGetService<IInputManager>(dependencyResolver); _keyboardNavigationHandler = TryGetService<IKeyboardNavigationHandler>(dependencyResolver); _applicationLifecycle = TryGetService<IApplicationLifecycle>(dependencyResolver); _renderInterface = TryGetService<IPlatformRenderInterface>(dependencyResolver); var renderLoop = TryGetService<IRenderLoop>(dependencyResolver); var rendererFactory = TryGetService<IRendererFactory>(dependencyResolver); Renderer = rendererFactory?.CreateRenderer(this, renderLoop); PlatformImpl.SetInputRoot(this); PlatformImpl.Activated = HandleActivated; PlatformImpl.Deactivated = HandleDeactivated; PlatformImpl.Closed = HandleClosed; PlatformImpl.Input = HandleInput; PlatformImpl.Paint = Renderer != null ? (Action<Rect>)Renderer.Render : null; PlatformImpl.Resized = HandleResized; PlatformImpl.ScalingChanged = HandleScalingChanged; PlatformImpl.PositionChanged = HandlePositionChanged; _keyboardNavigationHandler?.SetOwner(this); _accessKeyHandler?.SetOwner(this); styler?.ApplyStyles(this); ClientSize = PlatformImpl.ClientSize; this.GetObservable(ClientSizeProperty).Skip(1).Subscribe(x => PlatformImpl.ClientSize = x); this.GetObservable(PointerOverElementProperty) .Select( x => (x as InputElement)?.GetObservable(CursorProperty) ?? Observable.Empty<Cursor>()) .Switch().Subscribe(cursor => PlatformImpl.SetCursor(cursor?.PlatformCursor)); if (_applicationLifecycle != null) { _applicationLifecycle.OnExit += OnApplicationExiting; } } /// <summary> /// Fired when the window is activated. /// </summary> public event EventHandler Activated; /// <summary> /// Fired when the window is closed. /// </summary> public event EventHandler Closed; /// <summary> /// Fired when the window is deactivated. /// </summary> public event EventHandler Deactivated; /// <summary> /// Fired when the window position is changed. /// </summary> public event EventHandler<PointEventArgs> PositionChanged; /// <summary> /// Gets or sets the client size of the window. /// </summary> public Size ClientSize { get { return _clientSize; } private set { SetAndRaise(ClientSizeProperty, ref _clientSize, value); } } /// <summary> /// Gets a value that indicates whether the window is active. /// </summary> public bool IsActive { get { return _isActive; } private set { SetAndRaise(IsActiveProperty, ref _isActive, value); } } /// <summary> /// Gets or sets the window position in screen coordinates. /// </summary> public Point Position { get { return PlatformImpl.Position; } set { PlatformImpl.Position = value; } } /// <summary> /// Gets the platform-specific window implementation. /// </summary> public ITopLevelImpl PlatformImpl { get; } /// <summary> /// Gets the renderer for the window. /// </summary> public IRenderer Renderer { get; } /// <summary> /// Gets the access key handler for the window. /// </summary> IAccessKeyHandler IInputRoot.AccessKeyHandler => _accessKeyHandler; /// <summary> /// Gets or sets the keyboard navigation handler for the window. /// </summary> IKeyboardNavigationHandler IInputRoot.KeyboardNavigationHandler => _keyboardNavigationHandler; /// <summary> /// Gets or sets the input element that the pointer is currently over. 
/// </summary> IInputElement IInputRoot.PointerOverElement { get { return GetValue(PointerOverElementProperty); } set { SetValue(PointerOverElementProperty, value); } } /// <summary> /// Gets or sets a value indicating whether access keys are shown in the window. /// </summary> bool IInputRoot.ShowAccessKeys { get { return GetValue(AccessText.ShowAccessKeyProperty); } set { SetValue(AccessText.ShowAccessKeyProperty, value); } } /// <inheritdoc/> Size ILayoutRoot.MaxClientSize => Size.Infinity; /// <inheritdoc/> double ILayoutRoot.LayoutScaling => PlatformImpl.Scaling; IStyleHost IStyleHost.StylingParent { get { return AvaloniaLocator.Current.GetService<IGlobalStyles>(); } } /// <summary> /// Whether an auto-size operation is in progress. /// </summary> protected bool AutoSizing { get; private set; } /// <inheritdoc/> IRenderTarget IRenderRoot.CreateRenderTarget() { return _renderInterface.CreateRenderTarget(PlatformImpl.Handle); } /// <inheritdoc/> void IRenderRoot.Invalidate(Rect rect) { PlatformImpl.Invalidate(rect); } /// <inheritdoc/> Point IRenderRoot.PointToClient(Point p) { return PlatformImpl.PointToClient(p); } /// <inheritdoc/> Point IRenderRoot.PointToScreen(Point p) { return PlatformImpl.PointToScreen(p); } /// <summary> /// Activates the window. /// </summary> public void Activate() { PlatformImpl.Activate(); } /// <summary> /// Begins an auto-resize operation. /// </summary> /// <returns>A disposable used to finish the operation.</returns> /// <remarks> /// When an auto-resize operation is in progress any resize events received will not be /// cause the new size to be written to the <see cref="Layoutable.Width"/> and /// <see cref="Layoutable.Height"/> properties. /// </remarks> protected IDisposable BeginAutoSizing() { AutoSizing = true; return Disposable.Create(() => AutoSizing = false); } /// <summary> /// Carries out the arrange pass of the window. /// </summary> /// <param name="finalSize">The final window size.</param> /// <returns>The <paramref name="finalSize"/> parameter unchanged.</returns> protected override Size ArrangeOverride(Size finalSize) { using (BeginAutoSizing()) { PlatformImpl.ClientSize = finalSize; } return base.ArrangeOverride(PlatformImpl.ClientSize); } /// <summary> /// Handles a resize notification from <see cref="ITopLevelImpl.Resized"/>. /// </summary> /// <param name="clientSize">The new client size.</param> protected virtual void HandleResized(Size clientSize) { if (!AutoSizing) { Width = clientSize.Width; Height = clientSize.Height; } ClientSize = clientSize; LayoutManager.Instance.ExecuteLayoutPass(); PlatformImpl.Invalidate(new Rect(clientSize)); } /// <summary> /// Handles a window scaling change notification from /// <see cref="ITopLevelImpl.ScalingChanged"/>. /// </summary> /// <param name="scaling">The window scaling.</param> protected virtual void HandleScalingChanged(double scaling) { foreach (ILayoutable control in this.GetSelfAndVisualDescendents()) { control.InvalidateMeasure(); } } /// <inheritdoc/> protected override void OnAttachedToVisualTree(VisualTreeAttachmentEventArgs e) { base.OnAttachedToVisualTree(e); throw new InvalidOperationException( $"Control '{GetType().Name}' is a top level control and cannot be added as a child."); } /// <summary> /// Tries to get a service from an <see cref="IAvaloniaDependencyResolver"/>, logging a /// warning if not found. 
/// </summary> /// <typeparam name="T">The service type.</typeparam> /// <param name="resolver">The resolver.</param> /// <returns>The service.</returns> private T TryGetService<T>(IAvaloniaDependencyResolver resolver) where T : class { var result = resolver.GetService<T>(); if (result == null) { Logger.Warning( LogArea.Control, this, "Could not create {Service} : maybe Application.RegisterServices() wasn't called?", typeof(T)); } return result; } /// <summary> /// Handles an activated notification from <see cref="ITopLevelImpl.Activated"/>. /// </summary> private void HandleActivated() { Activated?.Invoke(this, EventArgs.Empty); var scope = this as IFocusScope; if (scope != null) { FocusManager.Instance.SetFocusScope(scope); } IsActive = true; } /// <summary> /// Handles a closed notification from <see cref="ITopLevelImpl.Closed"/>. /// </summary> private void HandleClosed() { Closed?.Invoke(this, EventArgs.Empty); _applicationLifecycle.OnExit -= OnApplicationExiting; } private void OnApplicationExiting(object sender, EventArgs args) { HandleApplicationExiting(); } /// <summary> /// Handles the application exiting, either from the last window closing, or a call to <see cref="IApplicationLifecycle.Exit"/>. /// </summary> protected virtual void HandleApplicationExiting() { } /// <summary> /// Handles a deactivated notification from <see cref="ITopLevelImpl.Deactivated"/>. /// </summary> private void HandleDeactivated() { IsActive = false; Deactivated?.Invoke(this, EventArgs.Empty); } /// <summary> /// Handles input from <see cref="ITopLevelImpl.Input"/>. /// </summary> /// <param name="e">The event args.</param> private void HandleInput(RawInputEventArgs e) { _inputManager.ProcessInput(e); } /// <summary> /// Handles a window position change notification from /// <see cref="ITopLevelImpl.PositionChanged"/>. /// </summary> /// <param name="pos">The window position.</param> private void HandlePositionChanged(Point pos) { PositionChanged?.Invoke(this, new PointEventArgs(pos)); } /// <summary> /// Starts moving a window with left button being held. Should be called from left mouse button press event handler /// </summary> public void BeginMoveDrag() => PlatformImpl.BeginMoveDrag(); /// <summary> /// Starts resizing a window. This function is used if an application has window resizing controls. /// Should be called from left mouse button press event handler /// </summary> public void BeginResizeDrag(WindowEdge edge) => PlatformImpl.BeginResizeDrag(edge); } }
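// --------------------------------------------------------------------------------------------
// A minimal standalone sketch of the optional-service lookup pattern used above in
// TopLevel.TryGetService<T>: ask a resolver for a service, log a warning rather than throw when
// it is missing, and let the caller null-check the result. The IServiceResolver, DictionaryResolver
// and OptionalServices types below are hypothetical stand-ins for illustration only; they are not
// Avalonia APIs.
// --------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;

namespace TopLevelSketch
{
    public interface IServiceResolver
    {
        T GetService<T>() where T : class;
    }

    public sealed class DictionaryResolver : IServiceResolver
    {
        private readonly Dictionary<Type, object> _services = new Dictionary<Type, object>();

        public void Register<T>(T instance) where T : class => _services[typeof(T)] = instance;

        public T GetService<T>() where T : class =>
            _services.TryGetValue(typeof(T), out var value) ? (T)value : null;
    }

    public static class OptionalServices
    {
        // Mirrors the shape of TryGetService<T>: a null result is logged but tolerated,
        // so optional subsystems (renderer, input manager, ...) can simply be absent.
        public static T TryGet<T>(IServiceResolver resolver) where T : class
        {
            var result = resolver.GetService<T>();
            if (result == null)
            {
                Console.WriteLine($"Warning: could not resolve {typeof(T).Name}; was it registered?");
            }
            return result;
        }
    }
}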
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Video.Transcoder.V1.Snippets { using Google.Api.Gax; using Google.Api.Gax.ResourceNames; using System; using System.Linq; using System.Threading.Tasks; /// <summary>Generated snippets.</summary> public sealed class GeneratedTranscoderServiceClientSnippets { /// <summary>Snippet for CreateJob</summary> public void CreateJobRequestObject() { // Snippet: CreateJob(CreateJobRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) CreateJobRequest request = new CreateJobRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Job = new Job(), }; // Make the request Job response = transcoderServiceClient.CreateJob(request); // End snippet } /// <summary>Snippet for CreateJobAsync</summary> public async Task CreateJobRequestObjectAsync() { // Snippet: CreateJobAsync(CreateJobRequest, CallSettings) // Additional: CreateJobAsync(CreateJobRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) CreateJobRequest request = new CreateJobRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Job = new Job(), }; // Make the request Job response = await transcoderServiceClient.CreateJobAsync(request); // End snippet } /// <summary>Snippet for CreateJob</summary> public void CreateJob() { // Snippet: CreateJob(string, Job, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; Job job = new Job(); // Make the request Job response = transcoderServiceClient.CreateJob(parent, job); // End snippet } /// <summary>Snippet for CreateJobAsync</summary> public async Task CreateJobAsync() { // Snippet: CreateJobAsync(string, Job, CallSettings) // Additional: CreateJobAsync(string, Job, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; Job job = new Job(); // Make the request Job response = await transcoderServiceClient.CreateJobAsync(parent, job); // End snippet } /// <summary>Snippet for CreateJob</summary> public void CreateJobResourceNames() { // Snippet: CreateJob(LocationName, Job, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); Job job = new Job(); // Make the request Job response = transcoderServiceClient.CreateJob(parent, job); // End snippet } /// <summary>Snippet for 
CreateJobAsync</summary> public async Task CreateJobResourceNamesAsync() { // Snippet: CreateJobAsync(LocationName, Job, CallSettings) // Additional: CreateJobAsync(LocationName, Job, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); Job job = new Job(); // Make the request Job response = await transcoderServiceClient.CreateJobAsync(parent, job); // End snippet } /// <summary>Snippet for ListJobs</summary> public void ListJobsRequestObject() { // Snippet: ListJobs(ListJobsRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) ListJobsRequest request = new ListJobsRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Filter = "", OrderBy = "", }; // Make the request PagedEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobs(request); // Iterate over all response items, lazily performing RPCs as required foreach (Job item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobsAsync</summary> public async Task ListJobsRequestObjectAsync() { // Snippet: ListJobsAsync(ListJobsRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) ListJobsRequest request = new ListJobsRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Filter = "", OrderBy = "", }; // Make the request PagedAsyncEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobsAsync(request); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Job item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobs</summary> public void ListJobs() { // Snippet: ListJobs(string, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; // Make the request PagedEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobs(parent); // Iterate over all response items, lazily performing RPCs as required foreach (Job item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobsAsync</summary> public async Task ListJobsAsync() { // Snippet: ListJobsAsync(string, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; // Make the request PagedAsyncEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobsAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Job item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobs</summary> public void ListJobsResourceNames() { // Snippet: ListJobs(LocationName, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); // Make the request PagedEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobs(parent); // Iterate over all response items, lazily performing RPCs as required foreach (Job item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobsAsync</summary> public async Task ListJobsResourceNamesAsync() { // Snippet: ListJobsAsync(LocationName, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); // Make the request PagedAsyncEnumerable<ListJobsResponse, Job> response = transcoderServiceClient.ListJobsAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Job item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Job item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Job> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Job item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for GetJob</summary> public void GetJobRequestObject() { // Snippet: GetJob(GetJobRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"), }; // Make the request Job response = transcoderServiceClient.GetJob(request); // End snippet } /// <summary>Snippet for GetJobAsync</summary> public async Task GetJobRequestObjectAsync() { // Snippet: GetJobAsync(GetJobRequest, CallSettings) // Additional: GetJobAsync(GetJobRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"), }; // Make the request Job response = await transcoderServiceClient.GetJobAsync(request); // End snippet } /// <summary>Snippet for GetJob</summary> public void GetJob() { // Snippet: GetJob(string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobs/[JOB]"; // Make the request Job response = transcoderServiceClient.GetJob(name); // End snippet } /// <summary>Snippet for GetJobAsync</summary> public async Task GetJobAsync() { // Snippet: GetJobAsync(string, CallSettings) // Additional: GetJobAsync(string, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobs/[JOB]"; // Make the request Job response = await transcoderServiceClient.GetJobAsync(name); 
// End snippet } /// <summary>Snippet for GetJob</summary> public void GetJobResourceNames() { // Snippet: GetJob(JobName, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) JobName name = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"); // Make the request Job response = transcoderServiceClient.GetJob(name); // End snippet } /// <summary>Snippet for GetJobAsync</summary> public async Task GetJobResourceNamesAsync() { // Snippet: GetJobAsync(JobName, CallSettings) // Additional: GetJobAsync(JobName, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) JobName name = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"); // Make the request Job response = await transcoderServiceClient.GetJobAsync(name); // End snippet } /// <summary>Snippet for DeleteJob</summary> public void DeleteJobRequestObject() { // Snippet: DeleteJob(DeleteJobRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"), AllowMissing = false, }; // Make the request transcoderServiceClient.DeleteJob(request); // End snippet } /// <summary>Snippet for DeleteJobAsync</summary> public async Task DeleteJobRequestObjectAsync() { // Snippet: DeleteJobAsync(DeleteJobRequest, CallSettings) // Additional: DeleteJobAsync(DeleteJobRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"), AllowMissing = false, }; // Make the request await transcoderServiceClient.DeleteJobAsync(request); // End snippet } /// <summary>Snippet for DeleteJob</summary> public void DeleteJob() { // Snippet: DeleteJob(string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobs/[JOB]"; // Make the request transcoderServiceClient.DeleteJob(name); // End snippet } /// <summary>Snippet for DeleteJobAsync</summary> public async Task DeleteJobAsync() { // Snippet: DeleteJobAsync(string, CallSettings) // Additional: DeleteJobAsync(string, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobs/[JOB]"; // Make the request await transcoderServiceClient.DeleteJobAsync(name); // End snippet } /// <summary>Snippet for DeleteJob</summary> public void DeleteJobResourceNames() { // Snippet: DeleteJob(JobName, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) JobName name = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"); // Make the request transcoderServiceClient.DeleteJob(name); // End snippet } /// <summary>Snippet for DeleteJobAsync</summary> public async Task DeleteJobResourceNamesAsync() { // Snippet: DeleteJobAsync(JobName, 
CallSettings) // Additional: DeleteJobAsync(JobName, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) JobName name = JobName.FromProjectLocationJob("[PROJECT]", "[LOCATION]", "[JOB]"); // Make the request await transcoderServiceClient.DeleteJobAsync(name); // End snippet } /// <summary>Snippet for CreateJobTemplate</summary> public void CreateJobTemplateRequestObject() { // Snippet: CreateJobTemplate(CreateJobTemplateRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) CreateJobTemplateRequest request = new CreateJobTemplateRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), JobTemplate = new JobTemplate(), JobTemplateId = "", }; // Make the request JobTemplate response = transcoderServiceClient.CreateJobTemplate(request); // End snippet } /// <summary>Snippet for CreateJobTemplateAsync</summary> public async Task CreateJobTemplateRequestObjectAsync() { // Snippet: CreateJobTemplateAsync(CreateJobTemplateRequest, CallSettings) // Additional: CreateJobTemplateAsync(CreateJobTemplateRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) CreateJobTemplateRequest request = new CreateJobTemplateRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), JobTemplate = new JobTemplate(), JobTemplateId = "", }; // Make the request JobTemplate response = await transcoderServiceClient.CreateJobTemplateAsync(request); // End snippet } /// <summary>Snippet for CreateJobTemplate</summary> public void CreateJobTemplate() { // Snippet: CreateJobTemplate(string, JobTemplate, string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; JobTemplate jobTemplate = new JobTemplate(); string jobTemplateId = ""; // Make the request JobTemplate response = transcoderServiceClient.CreateJobTemplate(parent, jobTemplate, jobTemplateId); // End snippet } /// <summary>Snippet for CreateJobTemplateAsync</summary> public async Task CreateJobTemplateAsync() { // Snippet: CreateJobTemplateAsync(string, JobTemplate, string, CallSettings) // Additional: CreateJobTemplateAsync(string, JobTemplate, string, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; JobTemplate jobTemplate = new JobTemplate(); string jobTemplateId = ""; // Make the request JobTemplate response = await transcoderServiceClient.CreateJobTemplateAsync(parent, jobTemplate, jobTemplateId); // End snippet } /// <summary>Snippet for CreateJobTemplate</summary> public void CreateJobTemplateResourceNames() { // Snippet: CreateJobTemplate(LocationName, JobTemplate, string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); JobTemplate jobTemplate = new JobTemplate(); string jobTemplateId = ""; // Make the request JobTemplate response = 
transcoderServiceClient.CreateJobTemplate(parent, jobTemplate, jobTemplateId); // End snippet } /// <summary>Snippet for CreateJobTemplateAsync</summary> public async Task CreateJobTemplateResourceNamesAsync() { // Snippet: CreateJobTemplateAsync(LocationName, JobTemplate, string, CallSettings) // Additional: CreateJobTemplateAsync(LocationName, JobTemplate, string, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); JobTemplate jobTemplate = new JobTemplate(); string jobTemplateId = ""; // Make the request JobTemplate response = await transcoderServiceClient.CreateJobTemplateAsync(parent, jobTemplate, jobTemplateId); // End snippet } /// <summary>Snippet for ListJobTemplates</summary> public void ListJobTemplatesRequestObject() { // Snippet: ListJobTemplates(ListJobTemplatesRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) ListJobTemplatesRequest request = new ListJobTemplatesRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Filter = "", OrderBy = "", }; // Make the request PagedEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplates(request); // Iterate over all response items, lazily performing RPCs as required foreach (JobTemplate item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobTemplatesResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobTemplatesAsync</summary> public async Task ListJobTemplatesRequestObjectAsync() { // Snippet: ListJobTemplatesAsync(ListJobTemplatesRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) ListJobTemplatesRequest request = new ListJobTemplatesRequest { ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"), Filter = "", OrderBy = "", }; // Make the request PagedAsyncEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplatesAsync(request); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((JobTemplate item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobTemplatesResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobTemplates</summary> public void ListJobTemplates() { // Snippet: ListJobTemplates(string, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; // Make the request PagedEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplates(parent); // Iterate over all response items, lazily performing RPCs as required foreach (JobTemplate item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobTemplatesResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobTemplatesAsync</summary> public async Task ListJobTemplatesAsync() { // Snippet: ListJobTemplatesAsync(string, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string parent = "projects/[PROJECT]/locations/[LOCATION]"; // Make the request PagedAsyncEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplatesAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((JobTemplate item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobTemplatesResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobTemplates</summary> public void ListJobTemplatesResourceNames() { // Snippet: ListJobTemplates(LocationName, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); // Make the request PagedEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplates(parent); // Iterate over all response items, lazily performing RPCs as required foreach (JobTemplate item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListJobTemplatesResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListJobTemplatesAsync</summary> public async Task ListJobTemplatesResourceNamesAsync() { // Snippet: ListJobTemplatesAsync(LocationName, string, int?, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); // Make the request PagedAsyncEnumerable<ListJobTemplatesResponse, JobTemplate> response = transcoderServiceClient.ListJobTemplatesAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((JobTemplate item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListJobTemplatesResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (JobTemplate item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<JobTemplate> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (JobTemplate item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for GetJobTemplate</summary> public void GetJobTemplateRequestObject() { // Snippet: GetJobTemplate(GetJobTemplateRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) GetJobTemplateRequest request = new GetJobTemplateRequest { JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"), }; // Make the request JobTemplate response = transcoderServiceClient.GetJobTemplate(request); // End snippet } /// <summary>Snippet for GetJobTemplateAsync</summary> public async Task GetJobTemplateRequestObjectAsync() { // Snippet: GetJobTemplateAsync(GetJobTemplateRequest, CallSettings) // Additional: GetJobTemplateAsync(GetJobTemplateRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) GetJobTemplateRequest request = new GetJobTemplateRequest { JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"), }; // Make the request JobTemplate response = await transcoderServiceClient.GetJobTemplateAsync(request); // End snippet } /// <summary>Snippet for GetJobTemplate</summary> public void GetJobTemplate() { // Snippet: GetJobTemplate(string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobTemplates/[JOB_TEMPLATE]"; // Make the request JobTemplate response = transcoderServiceClient.GetJobTemplate(name); // End snippet } /// <summary>Snippet for GetJobTemplateAsync</summary> public async Task GetJobTemplateAsync() { // Snippet: 
GetJobTemplateAsync(string, CallSettings) // Additional: GetJobTemplateAsync(string, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobTemplates/[JOB_TEMPLATE]"; // Make the request JobTemplate response = await transcoderServiceClient.GetJobTemplateAsync(name); // End snippet } /// <summary>Snippet for GetJobTemplate</summary> public void GetJobTemplateResourceNames() { // Snippet: GetJobTemplate(JobTemplateName, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) JobTemplateName name = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"); // Make the request JobTemplate response = transcoderServiceClient.GetJobTemplate(name); // End snippet } /// <summary>Snippet for GetJobTemplateAsync</summary> public async Task GetJobTemplateResourceNamesAsync() { // Snippet: GetJobTemplateAsync(JobTemplateName, CallSettings) // Additional: GetJobTemplateAsync(JobTemplateName, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) JobTemplateName name = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"); // Make the request JobTemplate response = await transcoderServiceClient.GetJobTemplateAsync(name); // End snippet } /// <summary>Snippet for DeleteJobTemplate</summary> public void DeleteJobTemplateRequestObject() { // Snippet: DeleteJobTemplate(DeleteJobTemplateRequest, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) DeleteJobTemplateRequest request = new DeleteJobTemplateRequest { JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"), AllowMissing = false, }; // Make the request transcoderServiceClient.DeleteJobTemplate(request); // End snippet } /// <summary>Snippet for DeleteJobTemplateAsync</summary> public async Task DeleteJobTemplateRequestObjectAsync() { // Snippet: DeleteJobTemplateAsync(DeleteJobTemplateRequest, CallSettings) // Additional: DeleteJobTemplateAsync(DeleteJobTemplateRequest, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) DeleteJobTemplateRequest request = new DeleteJobTemplateRequest { JobTemplateName = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"), AllowMissing = false, }; // Make the request await transcoderServiceClient.DeleteJobTemplateAsync(request); // End snippet } /// <summary>Snippet for DeleteJobTemplate</summary> public void DeleteJobTemplate() { // Snippet: DeleteJobTemplate(string, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobTemplates/[JOB_TEMPLATE]"; // Make the request transcoderServiceClient.DeleteJobTemplate(name); // End snippet } /// <summary>Snippet for DeleteJobTemplateAsync</summary> public async Task DeleteJobTemplateAsync() { // Snippet: DeleteJobTemplateAsync(string, CallSettings) // Additional: DeleteJobTemplateAsync(string, 
CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) string name = "projects/[PROJECT]/locations/[LOCATION]/jobTemplates/[JOB_TEMPLATE]"; // Make the request await transcoderServiceClient.DeleteJobTemplateAsync(name); // End snippet } /// <summary>Snippet for DeleteJobTemplate</summary> public void DeleteJobTemplateResourceNames() { // Snippet: DeleteJobTemplate(JobTemplateName, CallSettings) // Create client TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.Create(); // Initialize request argument(s) JobTemplateName name = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"); // Make the request transcoderServiceClient.DeleteJobTemplate(name); // End snippet } /// <summary>Snippet for DeleteJobTemplateAsync</summary> public async Task DeleteJobTemplateResourceNamesAsync() { // Snippet: DeleteJobTemplateAsync(JobTemplateName, CallSettings) // Additional: DeleteJobTemplateAsync(JobTemplateName, CancellationToken) // Create client TranscoderServiceClient transcoderServiceClient = await TranscoderServiceClient.CreateAsync(); // Initialize request argument(s) JobTemplateName name = JobTemplateName.FromProjectLocationJobTemplate("[PROJECT]", "[LOCATION]", "[JOB_TEMPLATE]"); // Make the request await transcoderServiceClient.DeleteJobTemplateAsync(name); // End snippet } } }
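// --------------------------------------------------------------------------------------------
// The generated snippets above read a single page and store singlePage.NextPageToken but do not
// show how that token is used afterwards. Below is a minimal sketch of resuming pagination from a
// stored token, based on the ListJobs(string, string, int?, CallSettings) overload referenced in
// the snippet headers; treat it as illustrative rather than generated reference code, and note
// that the pageToken/pageSize parameter names are assumed from the standard GAX client shape.
// --------------------------------------------------------------------------------------------
using Google.Api.Gax;
using Google.Cloud.Video.Transcoder.V1;
using System;

public static class ListJobsResumeSketch
{
    public static void ResumeFromToken(string parent, string savedPageToken)
    {
        TranscoderServiceClient client = TranscoderServiceClient.Create();

        // Passing the previously stored token as pageToken continues listing where the
        // earlier page left off; a null or empty token starts from the beginning.
        PagedEnumerable<ListJobsResponse, Job> response =
            client.ListJobs(parent, pageToken: savedPageToken, pageSize: 10);

        Page<Job> page = response.ReadPage(10);
        foreach (Job item in page)
        {
            Console.WriteLine(item);
        }

        // Persist page.NextPageToken again if a further resume point is needed.
    }
}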
#pragma warning disable 109, 114, 219, 429, 168, 162 namespace haxe.lang{ public class StringExt { public StringExt(){ unchecked { #line 26 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" { } } #line default } public static string charAt(string me, int index){ if ( ((uint) index) >= me.Length) return ""; else return new string(me[index], 1); } public static global::haxe.lang.Null<int> charCodeAt(string me, int index){ if ( ((uint) index) >= me.Length) return default(haxe.lang.Null<int>); else return new haxe.lang.Null<int>((int)me[index], true); } public static int indexOf(string me, string str, global::haxe.lang.Null<int> startIndex){ uint sIndex = (startIndex.hasValue) ? ((uint) startIndex.@value) : 0; if (sIndex >= me.Length) return -1; return me.IndexOf(str, (int)sIndex); } public static int lastIndexOf(string me, string str, global::haxe.lang.Null<int> startIndex){ int sIndex = (startIndex.hasValue) ? (startIndex.@value) : (me.Length - 1); if (sIndex >= me.Length) sIndex = me.Length - 1; else if (sIndex < 0) return -1; //TestBaseTypes.hx@133 fix if (startIndex.hasValue) { for(int i = sIndex; i >= 0; i--) { bool found = true; for(int j = 0; j < str.Length; j++) { if(me[i + j] != str[j]) { found = false; break; } } if (found) return i; } return -1; } else { return me.LastIndexOf(str, sIndex); } } public static global::Array<object> split(string me, string delimiter){ string[] native; if (delimiter.Length == 0) { int len = me.Length; native = new string[len]; for (int i = 0; i < len; i++) native[i] = new string(me[i], 1); } else { native = me.Split(new string[] { delimiter }, System.StringSplitOptions.None); } return new Array<object>(native); } public static string substr(string me, int pos, global::haxe.lang.Null<int> len){ int meLen = me.Length; int targetLen = meLen; if (len.hasValue) { targetLen = len.@value; if (targetLen == 0) return ""; if( pos != 0 && targetLen < 0 ){ return ""; } } if( pos < 0 ){ pos = meLen + pos; if( pos < 0 ) pos = 0; } else if( targetLen < 0 ){ targetLen = meLen + targetLen - pos; } if( pos + targetLen > meLen ){ targetLen = meLen - pos; } if ( pos < 0 || targetLen <= 0 ) return ""; return me.Substring(pos, targetLen); } public static string substring(string me, int startIndex, global::haxe.lang.Null<int> endIndex){ int endIdx; int len = me.Length; if ( !endIndex.hasValue ) { endIdx = len; } else if ( (endIdx = endIndex.@value) < 0 ) { endIdx = 0; } else if ( endIdx > len ) { endIdx = len; } if ( startIndex < 0 ) { startIndex = 0; } else if ( startIndex > len ) { startIndex = len; } if ( startIndex > endIdx ) { int tmp = startIndex; startIndex = endIdx; endIdx = tmp; } return me.Substring(startIndex, endIdx - startIndex); } public static string toString(string me){ unchecked { #line 181 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return me; } #line default } public static string toLowerCase(string me){ return me.ToLower(); } public static string toUpperCase(string me){ return me.ToUpper(); } public static string toNativeString(string me){ unchecked { #line 202 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return me; } #line default } public static string fromCharCode(int code){ return new string( (char) code, 1 ); } } } #pragma warning disable 109, 114, 219, 429, 168, 162 namespace haxe.lang{ public class StringRefl { static StringRefl() { #line 216 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" global::haxe.lang.StringRefl.fields = new global::Array<object>(new object[]{"length", "toUpperCase", "toLowerCase", 
"charAt", "charCodeAt", "indexOf", "lastIndexOf", "split", "substr", "substring"}); } public StringRefl(){ unchecked { #line 214 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" { } } #line default } public static global::Array<object> fields; public static object handleGetField(string str, string f, bool throwErrors){ unchecked { #line 220 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" switch (f){ case "length": { #line 222 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return str.Length; } case "toUpperCase":case "toLowerCase":case "charAt":case "charCodeAt":case "indexOf":case "lastIndexOf":case "split":case "substr":case "substring": { #line 224 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return new global::haxe.lang.Closure(((object) (str) ), ((string) (f) ), ((int) (0) )); } default: { #line 226 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" if (throwErrors) { #line 227 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" throw global::haxe.lang.HaxeException.wrap(global::haxe.lang.Runtime.concat(global::haxe.lang.Runtime.concat("Field not found: \'", f), "\' in String")); } else { #line 229 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return default(object); } } } } #line default } public static object handleCallField(string str, string f, global::Array args){ unchecked { #line 235 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" global::Array _args = new global::Array<object>(new object[]{str}); if (( args == default(global::Array) )) { #line 237 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" args = _args; } else { #line 239 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" args = ((global::Array) (global::haxe.lang.Runtime.callField(_args, "concat", 1204816148, new global::Array<object>(new object[]{args}))) ); } #line 241 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\StringExt.hx" return global::haxe.lang.Runtime.slowCallField(typeof(global::haxe.lang.StringExt), f, args); } #line default } } }
#if !UNITY_WINRT || UNITY_EDITOR || UNITY_WP8 #region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections.Generic; using System.Text; using System.IO; #if !JSONNET_XMLDISABLE using System.Xml; #endif using System.Globalization; using Newtonsoft.Json.Utilities; namespace Newtonsoft.Json { /// <summary> /// Represents a reader that provides fast, non-cached, forward-only access to serialized Json data. /// </summary> public class JsonTextReader : JsonReader, IJsonLineInfo { private enum ReadType { Read, ReadAsBytes, ReadAsDecimal, ReadAsDateTimeOffset } private readonly TextReader _reader; private readonly StringBuffer _buffer; private char? _lastChar; private int _currentLinePosition; private int _currentLineNumber; private bool _end; private ReadType _readType; private CultureInfo _culture; /// <summary> /// Gets or sets the culture used when reading JSON. Defaults to <see cref="CultureInfo.CurrentCulture"/>. /// </summary> public CultureInfo Culture { get { return _culture ?? CultureInfo.CurrentCulture; } set { _culture = value; } } /// <summary> /// Initializes a new instance of the <see cref="JsonReader"/> class with the specified <see cref="TextReader"/>. /// </summary> /// <param name="reader">The <c>TextReader</c> containing the XML data to read.</param> public JsonTextReader(TextReader reader) { if (reader == null) throw new ArgumentNullException("reader"); _reader = reader; _buffer = new StringBuffer(4096); _currentLineNumber = 1; } private void ParseString(char quote) { ReadStringIntoBuffer(quote); if (_readType == ReadType.ReadAsBytes) { byte[] data; if (_buffer.Position == 0) { data = new byte[0]; } else { data = Convert.FromBase64CharArray(_buffer.GetInternalBuffer(), 0, _buffer.Position); _buffer.Position = 0; } SetToken(JsonToken.Bytes, data); } else { string text = _buffer.ToString(); _buffer.Position = 0; if (text.StartsWith("/Date(", StringComparison.Ordinal) && text.EndsWith(")/", StringComparison.Ordinal)) { ParseDate(text); } else { SetToken(JsonToken.String, text); QuoteChar = quote; } } } private void ReadStringIntoBuffer(char quote) { while (true) { char currentChar = MoveNext(); switch (currentChar) { case '\0': if (_end) throw CreateJsonReaderException("Unterminated string. Expected delimiter: {0}. 
Line {1}, position {2}.", quote, _currentLineNumber, _currentLinePosition); _buffer.Append('\0'); break; case '\\': if ((currentChar = MoveNext()) != '\0' || !_end) { switch (currentChar) { case 'b': _buffer.Append('\b'); break; case 't': _buffer.Append('\t'); break; case 'n': _buffer.Append('\n'); break; case 'f': _buffer.Append('\f'); break; case 'r': _buffer.Append('\r'); break; case '\\': _buffer.Append('\\'); break; case '"': case '\'': case '/': _buffer.Append(currentChar); break; case 'u': char[] hexValues = new char[4]; for (int i = 0; i < hexValues.Length; i++) { if ((currentChar = MoveNext()) != '\0' || !_end) hexValues[i] = currentChar; else throw CreateJsonReaderException("Unexpected end while parsing unicode character. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } char hexChar = Convert.ToChar(int.Parse(new string(hexValues), NumberStyles.HexNumber, NumberFormatInfo.InvariantInfo)); _buffer.Append(hexChar); break; default: throw CreateJsonReaderException("Bad JSON escape sequence: {0}. Line {1}, position {2}.", @"\" + currentChar, _currentLineNumber, _currentLinePosition); } } else { throw CreateJsonReaderException("Unterminated string. Expected delimiter: {0}. Line {1}, position {2}.", quote, _currentLineNumber, _currentLinePosition); } break; case '"': case '\'': if (currentChar == quote) { return; } else { _buffer.Append(currentChar); } break; default: _buffer.Append(currentChar); break; } } } private JsonReaderException CreateJsonReaderException(string format, params object[] args) { string message = format.FormatWith(CultureInfo.InvariantCulture, args); return new JsonReaderException(message, null, _currentLineNumber, _currentLinePosition); } private TimeSpan ReadOffset(string offsetText) { bool negative = (offsetText[0] == '-'); int hours = int.Parse(offsetText.Substring(1, 2), NumberStyles.Integer, CultureInfo.InvariantCulture); int minutes = 0; if (offsetText.Length >= 5) minutes = int.Parse(offsetText.Substring(3, 2), NumberStyles.Integer, CultureInfo.InvariantCulture); TimeSpan offset = TimeSpan.FromHours(hours) + TimeSpan.FromMinutes(minutes); if (negative) offset = offset.Negate(); return offset; } private void ParseDate(string text) { string value = text.Substring(6, text.Length - 8); DateTimeKind kind = DateTimeKind.Utc; int index = value.IndexOf('+', 1); if (index == -1) index = value.IndexOf('-', 1); TimeSpan offset = TimeSpan.Zero; if (index != -1) { kind = DateTimeKind.Local; offset = ReadOffset(value.Substring(index)); value = value.Substring(0, index); } long javaScriptTicks = long.Parse(value, NumberStyles.Integer, CultureInfo.InvariantCulture); DateTime utcDateTime = JsonConvert.ConvertJavaScriptTicksToDateTime(javaScriptTicks); if (_readType == ReadType.ReadAsDateTimeOffset) { SetToken(JsonToken.Date, new DateTimeOffset(utcDateTime.Add(offset).Ticks, offset)); } else { DateTime dateTime; switch (kind) { case DateTimeKind.Unspecified: dateTime = DateTime.SpecifyKind(utcDateTime.ToLocalTime(), DateTimeKind.Unspecified); break; case DateTimeKind.Local: dateTime = utcDateTime.ToLocalTime(); break; default: dateTime = utcDateTime; break; } SetToken(JsonToken.Date, dateTime); } } private const int LineFeedValue = StringUtils.LineFeed; private const int CarriageReturnValue = StringUtils.CarriageReturn; private char MoveNext() { int value = _reader.Read(); switch (value) { case -1: _end = true; return '\0'; case CarriageReturnValue: if (_reader.Peek() == LineFeedValue) _reader.Read(); _currentLineNumber++; _currentLinePosition = 0; 
break; case LineFeedValue: _currentLineNumber++; _currentLinePosition = 0; break; default: _currentLinePosition++; break; } return (char)value; } private bool HasNext() { return (_reader.Peek() != -1); } private int PeekNext() { return _reader.Peek(); } /// <summary> /// Reads the next JSON token from the stream. /// </summary> /// <returns> /// true if the next token was read successfully; false if there are no more tokens to read. /// </returns> public override bool Read() { _readType = ReadType.Read; return ReadInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="T:Byte[]"/>. /// </summary> /// <returns> /// A <see cref="T:Byte[]"/> or a null reference if the next JSON token is null. /// </returns> public override byte[] ReadAsBytes() { _readType = ReadType.ReadAsBytes; do { if (!ReadInternal()) throw CreateJsonReaderException("Unexpected end when reading bytes: Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } while (TokenType == JsonToken.Comment); if (TokenType == JsonToken.Null) return null; if (TokenType == JsonToken.Bytes) return (byte[]) Value; if (TokenType == JsonToken.StartArray) { List<byte> data = new List<byte>(); while (ReadInternal()) { switch (TokenType) { case JsonToken.Integer: data.Add(Convert.ToByte(Value, CultureInfo.InvariantCulture)); break; case JsonToken.EndArray: byte[] d = data.ToArray(); SetToken(JsonToken.Bytes, d); return d; case JsonToken.Comment: // skip break; default: throw CreateJsonReaderException("Unexpected token when reading bytes: {0}. Line {1}, position {2}.", TokenType, _currentLineNumber, _currentLinePosition); } } throw CreateJsonReaderException("Unexpected end when reading bytes: Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } throw CreateJsonReaderException("Unexpected token when reading bytes: {0}. Line {1}, position {2}.", TokenType, _currentLineNumber, _currentLinePosition); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{Decimal}"/>. /// </summary> /// <returns>A <see cref="Nullable{Decimal}"/>.</returns> public override decimal? ReadAsDecimal() { _readType = ReadType.ReadAsDecimal; do { if (!ReadInternal()) throw CreateJsonReaderException("Unexpected end when reading decimal: Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } while (TokenType == JsonToken.Comment); if (TokenType == JsonToken.Null) return null; if (TokenType == JsonToken.Float) return (decimal?)Value; decimal d; if (TokenType == JsonToken.String && decimal.TryParse((string)Value, NumberStyles.Number, Culture, out d)) { SetToken(JsonToken.Float, d); return d; } throw CreateJsonReaderException("Unexpected token when reading decimal: {0}. Line {1}, position {2}.", TokenType, _currentLineNumber, _currentLinePosition); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTimeOffset}"/>. /// </summary> /// <returns>A <see cref="DateTimeOffset"/>.</returns> public override DateTimeOffset? 
ReadAsDateTimeOffset() { _readType = ReadType.ReadAsDateTimeOffset; do { if (!ReadInternal()) throw CreateJsonReaderException("Unexpected end when reading date: Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } while (TokenType == JsonToken.Comment); if (TokenType == JsonToken.Null) return null; if (TokenType == JsonToken.Date) return (DateTimeOffset)Value; DateTimeOffset dt; if (TokenType == JsonToken.String && DateTimeOffset.TryParse((string)Value, Culture, DateTimeStyles.None, out dt)) { SetToken(JsonToken.Date, dt); return dt; } throw CreateJsonReaderException("Unexpected token when reading date: {0}. Line {1}, position {2}.", TokenType, _currentLineNumber, _currentLinePosition); } private bool ReadInternal() { while (true) { char currentChar; if (_lastChar != null) { currentChar = _lastChar.Value; _lastChar = null; } else { currentChar = MoveNext(); } if (currentChar == '\0' && _end) return false; switch (CurrentState) { case State.Start: case State.Property: case State.Array: case State.ArrayStart: case State.Constructor: case State.ConstructorStart: return ParseValue(currentChar); case State.Complete: break; case State.Object: case State.ObjectStart: return ParseObject(currentChar); case State.PostValue: // returns true if it hits // end of object or array if (ParsePostValue(currentChar)) return true; break; case State.Closed: break; case State.Error: break; default: throw CreateJsonReaderException("Unexpected state: {0}. Line {1}, position {2}.", CurrentState, _currentLineNumber, _currentLinePosition); } } } private bool ParsePostValue(char currentChar) { do { switch (currentChar) { case '}': SetToken(JsonToken.EndObject); return true; case ']': SetToken(JsonToken.EndArray); return true; case ')': SetToken(JsonToken.EndConstructor); return true; case '/': ParseComment(); return true; case ',': // finished parsing SetStateBasedOnCurrent(); return false; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: // eat break; default: if (char.IsWhiteSpace(currentChar)) { // eat } else { throw CreateJsonReaderException("After parsing a value an unexpected character was encountered: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } break; } } while ((currentChar = MoveNext()) != '\0' || !_end); return false; } private bool ParseObject(char currentChar) { do { switch (currentChar) { case '}': SetToken(JsonToken.EndObject); return true; case '/': ParseComment(); return true; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: // eat break; default: if (char.IsWhiteSpace(currentChar)) { // eat } else { return ParseProperty(currentChar); } break; } } while ((currentChar = MoveNext()) != '\0' || !_end); return false; } private bool ParseProperty(char firstChar) { char currentChar = firstChar; char quoteChar; if (ValidIdentifierChar(currentChar)) { quoteChar = '\0'; currentChar = ParseUnquotedProperty(currentChar); } else if (currentChar == '"' || currentChar == '\'') { quoteChar = currentChar; ReadStringIntoBuffer(quoteChar); currentChar = MoveNext(); } else { throw CreateJsonReaderException("Invalid property identifier character: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } if (currentChar != ':') { currentChar = MoveNext(); // finished property. 
skip any whitespace and move to colon EatWhitespace(currentChar, false, out currentChar); if (currentChar != ':') throw CreateJsonReaderException("Invalid character after parsing property name. Expected ':' but got: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } SetToken(JsonToken.PropertyName, _buffer.ToString()); QuoteChar = quoteChar; _buffer.Position = 0; return true; } private bool ValidIdentifierChar(char value) { return (char.IsLetterOrDigit(value) || value == '_' || value == '$'); } private char ParseUnquotedProperty(char firstChar) { // parse unquoted property name until whitespace or colon _buffer.Append(firstChar); char currentChar; while ((currentChar = MoveNext()) != '\0' || !_end) { if (char.IsWhiteSpace(currentChar) || currentChar == ':') { return currentChar; } else if (ValidIdentifierChar(currentChar)) { _buffer.Append(currentChar); } else { throw CreateJsonReaderException("Invalid JavaScript property identifier character: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } } throw CreateJsonReaderException("Unexpected end when parsing unquoted property name. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } private bool ParseValue(char currentChar) { do { switch (currentChar) { case '"': case '\'': ParseString(currentChar); return true; case 't': ParseTrue(); return true; case 'f': ParseFalse(); return true; case 'n': if (HasNext()) { char next = (char)PeekNext(); if (next == 'u') ParseNull(); else if (next == 'e') ParseConstructor(); else throw CreateJsonReaderException("Unexpected character encountered while parsing value: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } else { throw CreateJsonReaderException("Unexpected end. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } return true; case 'N': ParseNumberNaN(); return true; case 'I': ParseNumberPositiveInfinity(); return true; case '-': if (PeekNext() == 'I') ParseNumberNegativeInfinity(); else ParseNumber(currentChar); return true; case '/': ParseComment(); return true; case 'u': ParseUndefined(); return true; case '{': SetToken(JsonToken.StartObject); return true; case '[': SetToken(JsonToken.StartArray); return true; case '}': SetToken(JsonToken.EndObject); return true; case ']': SetToken(JsonToken.EndArray); return true; case ',': SetToken(JsonToken.Undefined); return true; case ')': SetToken(JsonToken.EndConstructor); return true; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: // eat break; default: if (char.IsWhiteSpace(currentChar)) { // eat } else if (char.IsNumber(currentChar) || currentChar == '-' || currentChar == '.') { ParseNumber(currentChar); return true; } else { throw CreateJsonReaderException("Unexpected character encountered while parsing value: {0}. 
Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); } break; } } while ((currentChar = MoveNext()) != '\0' || !_end); return false; } private bool EatWhitespace(char initialChar, bool oneOrMore, out char finalChar) { bool whitespace = false; char currentChar = initialChar; while (currentChar == ' ' || char.IsWhiteSpace(currentChar)) { whitespace = true; currentChar = MoveNext(); } finalChar = currentChar; return (!oneOrMore || whitespace); } private void ParseConstructor() { if (MatchValue('n', "new", true)) { char currentChar = MoveNext(); if (EatWhitespace(currentChar, true, out currentChar)) { while (char.IsLetter(currentChar)) { _buffer.Append(currentChar); currentChar = MoveNext(); } EatWhitespace(currentChar, false, out currentChar); if (currentChar != '(') throw CreateJsonReaderException("Unexpected character while parsing constructor: {0}. Line {1}, position {2}.", currentChar, _currentLineNumber, _currentLinePosition); string constructorName = _buffer.ToString(); _buffer.Position = 0; SetToken(JsonToken.StartConstructor, constructorName); } } } private void ParseNumber(char firstChar) { char currentChar = firstChar; // parse until seperator character or end bool end = false; do { if (IsSeperator(currentChar)) { end = true; _lastChar = currentChar; } else { _buffer.Append(currentChar); } } while (!end && ((currentChar = MoveNext()) != '\0' || !_end)); string number = _buffer.ToString(); object numberValue; JsonToken numberType; bool nonBase10 = (firstChar == '0' && !number.StartsWith("0.", StringComparison.OrdinalIgnoreCase)); if (_readType == ReadType.ReadAsDecimal) { if (nonBase10) { // decimal.Parse doesn't support parsing hexadecimal values long integer = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); numberValue = Convert.ToDecimal(integer); } else { numberValue = decimal.Parse(number, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture); } numberType = JsonToken.Float; } else { if (nonBase10) { numberValue = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); numberType = JsonToken.Integer; } else if (number.IndexOf(".", StringComparison.OrdinalIgnoreCase) != -1 || number.IndexOf("e", StringComparison.OrdinalIgnoreCase) != -1) { numberValue = Convert.ToDouble(number, CultureInfo.InvariantCulture); numberType = JsonToken.Float; } else { try { numberValue = Convert.ToInt64(number, CultureInfo.InvariantCulture); } catch (OverflowException ex) { throw new JsonReaderException("JSON integer {0} is too large or small for an Int64.".FormatWith(CultureInfo.InvariantCulture, number), ex); } numberType = JsonToken.Integer; } } _buffer.Position = 0; SetToken(numberType, numberValue); } private void ParseComment() { // should have already parsed / character before reaching this method char currentChar = MoveNext(); if (currentChar == '*') { while ((currentChar = MoveNext()) != '\0' || !_end) { if (currentChar == '*') { if ((currentChar = MoveNext()) != '\0' || !_end) { if (currentChar == '/') { break; } else { _buffer.Append('*'); _buffer.Append(currentChar); } } } else { _buffer.Append(currentChar); } } } else { throw CreateJsonReaderException("Error parsing comment. Expected: *. 
Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } SetToken(JsonToken.Comment, _buffer.ToString()); _buffer.Position = 0; } private bool MatchValue(char firstChar, string value) { char currentChar = firstChar; int i = 0; do { if (currentChar != value[i]) { break; } i++; } while (i < value.Length && ((currentChar = MoveNext()) != '\0' || !_end)); return (i == value.Length); } private bool MatchValue(char firstChar, string value, bool noTrailingNonSeperatorCharacters) { // will match value and then move to the next character, checking that it is a seperator character bool match = MatchValue(firstChar, value); if (!noTrailingNonSeperatorCharacters) { return match; } else { int c = PeekNext(); char next = (c != -1) ? (char) c : '\0'; bool matchAndNoTrainingNonSeperatorCharacters = (match && (next == '\0' || IsSeperator(next))); return matchAndNoTrainingNonSeperatorCharacters; } } private bool IsSeperator(char c) { switch (c) { case '}': case ']': case ',': return true; case '/': // check next character to see if start of a comment return (HasNext() && PeekNext() == '*'); case ')': if (CurrentState == State.Constructor || CurrentState == State.ConstructorStart) return true; break; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: return true; default: if (char.IsWhiteSpace(c)) return true; break; } return false; } private void ParseTrue() { // check characters equal 'true' // and that it is followed by either a seperator character // or the text ends if (MatchValue('t', JsonConvert.True, true)) { SetToken(JsonToken.Boolean, true); } else { throw CreateJsonReaderException("Error parsing boolean value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseNull() { if (MatchValue('n', JsonConvert.Null, true)) { SetToken(JsonToken.Null); } else { throw CreateJsonReaderException("Error parsing null value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseUndefined() { if (MatchValue('u', JsonConvert.Undefined, true)) { SetToken(JsonToken.Undefined); } else { throw CreateJsonReaderException("Error parsing undefined value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseFalse() { if (MatchValue('f', JsonConvert.False, true)) { SetToken(JsonToken.Boolean, false); } else { throw CreateJsonReaderException("Error parsing boolean value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseNumberNegativeInfinity() { if (MatchValue('-', JsonConvert.NegativeInfinity, true)) { SetToken(JsonToken.Float, double.NegativeInfinity); } else { throw CreateJsonReaderException("Error parsing negative infinity value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseNumberPositiveInfinity() { if (MatchValue('I', JsonConvert.PositiveInfinity, true)) { SetToken(JsonToken.Float, double.PositiveInfinity); } else { throw CreateJsonReaderException("Error parsing positive infinity value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } private void ParseNumberNaN() { if (MatchValue('N', JsonConvert.NaN, true)) { SetToken(JsonToken.Float, double.NaN); } else { throw CreateJsonReaderException("Error parsing NaN value. Line {0}, position {1}.", _currentLineNumber, _currentLinePosition); } } /// <summary> /// Changes the state to closed. 
/// </summary> public override void Close() { base.Close(); if (CloseInput && _reader != null) _reader.Close(); if (_buffer != null) _buffer.Clear(); } /// <summary> /// Gets a value indicating whether the class can return line information. /// </summary> /// <returns> /// <c>true</c> if LineNumber and LinePosition can be provided; otherwise, <c>false</c>. /// </returns> public bool HasLineInfo() { return true; } /// <summary> /// Gets the current line number. /// </summary> /// <value> /// The current line number or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LineNumber { get { if (CurrentState == State.Start) return 0; return _currentLineNumber; } } /// <summary> /// Gets the current line position. /// </summary> /// <value> /// The current line position or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LinePosition { get { return _currentLinePosition; } } } } #endif
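// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not part of the Json.NET source above).
// Walks a small JSON document with JsonTextReader and prints each token along
// with its line information, exercising Read(), TokenType, Value and the
// IJsonLineInfo members defined in the class above. The JSON literal is an
// arbitrary example.
// ---------------------------------------------------------------------------
using System;
using System.IO;
using Newtonsoft.Json;

static class JsonTextReaderUsageSketch
{
    public static void Run()
    {
        const string json = "{ \"name\": \"solid\", \"count\": 3, \"tags\": [ \"a\", \"b\" ] }";

        JsonTextReader reader = new JsonTextReader(new StringReader(json));
        try
        {
            // Read() advances to the next token; TokenType and Value describe it,
            // and LineNumber/LinePosition come from the IJsonLineInfo implementation above.
            while (reader.Read())
            {
                Console.WriteLine("{0,-14} {1} (line {2}, position {3})",
                    reader.TokenType, reader.Value, reader.LineNumber, reader.LinePosition);
            }
        }
        finally
        {
            reader.Close();
        }
    }
}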
using System; using System.IO; using System.Text; using System.Runtime.InteropServices; using Microsoft.Win32.SafeHandles; namespace UsbLibrary { #region Custom exception /// <summary> /// Generic HID device exception /// </summary> public class HIDDeviceException : ApplicationException { public HIDDeviceException(string strMessage) : base(strMessage) { } public static HIDDeviceException GenerateWithWinError(string strMessage) { return new HIDDeviceException(string.Format("Msg:{0} WinEr:{1:X8}", strMessage, Marshal.GetLastWin32Error())); } public static HIDDeviceException GenerateError(string strMessage) { return new HIDDeviceException(string.Format("Msg:{0}", strMessage)); } } #endregion /// <summary> /// Abstract HID device : Derive your new device controller class from this /// </summary> public abstract class HIDDevice : Win32Usb, IDisposable { #region Privates variables /// <summary>Filestream we can use to read/write from</summary> private FileStream m_oFile; /// <summary>Length of input report : device gives us this</summary> private int m_nInputReportLength; /// <summary>Length if output report : device gives us this</summary> private int m_nOutputReportLength; /// <summary>Handle to the device</summary> private IntPtr m_hHandle; #endregion #region IDisposable Members /// <summary> /// Dispose method /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// Disposer called by both dispose and finalise /// </summary> /// <param name="bDisposing">True if disposing</param> protected virtual void Dispose(bool bDisposing) { try { if (bDisposing) // if we are disposing, need to close the managed resources { if (m_oFile != null) { m_oFile.Close(); m_oFile = null; } } if (m_hHandle != IntPtr.Zero) // Dispose and finalize, get rid of unmanaged resources { CloseHandle(m_hHandle); } } catch (Exception ex) { Console.WriteLine(ex.ToString()); } } #endregion #region Privates/protected /// <summary> /// Initialises the device /// </summary> /// <param name="strPath">Path to the device</param> private void Initialise(string strPath) { // Create the file from the device path m_hHandle = CreateFile(strPath, GENERIC_READ | GENERIC_WRITE, 0, IntPtr.Zero, OPEN_EXISTING, FILE_FLAG_OVERLAPPED, IntPtr.Zero); if ( m_hHandle != InvalidHandleValue || m_hHandle == null) // if the open worked... { IntPtr lpData; if (HidD_GetPreparsedData(m_hHandle, out lpData)) // get windows to read the device data into an internal buffer { try { HidCaps oCaps; HidP_GetCaps(lpData, out oCaps); // extract the device capabilities from the internal buffer m_nInputReportLength = oCaps.InputReportByteLength; // get the input... m_nOutputReportLength = oCaps.OutputReportByteLength; // ... and output report lengths //m_oFile = new FileStream(m_hHandle, FileAccess.Read | FileAccess.Write, true, m_nInputReportLength, true); m_oFile = new FileStream(new SafeFileHandle(m_hHandle, false), FileAccess.Read | FileAccess.Write, m_nInputReportLength, true); BeginAsyncRead(); // kick off the first asynchronous read } catch (Exception ex) { throw HIDDeviceException.GenerateWithWinError("Failed to get the detailed data from the hid."); } finally { HidD_FreePreparsedData(ref lpData); // before we quit the funtion, we must free the internal buffer reserved in GetPreparsedData } } else // GetPreparsedData failed? Chuck an exception { throw HIDDeviceException.GenerateWithWinError("GetPreparsedData failed"); } } else // File open failed? 
Chuck an exception { m_hHandle = IntPtr.Zero; throw HIDDeviceException.GenerateWithWinError("Failed to create device file"); } } /// <summary> /// Kicks off an asynchronous read which completes when data is read or when the device /// is disconnected. Uses a callback. /// </summary> private void BeginAsyncRead() { byte[] arrInputReport = new byte[m_nInputReportLength]; // put the buff we used to receive the stuff as the async state then we can get at it when the read completes m_oFile.BeginRead(arrInputReport, 0, m_nInputReportLength, new AsyncCallback(ReadCompleted), arrInputReport); } /// <summary> /// Callback for above. Care with this as it will be called on the background thread from the async read /// </summary> /// <param name="iResult">Async result parameter</param> protected void ReadCompleted(IAsyncResult iResult) { byte[] arrBuff = (byte[])iResult.AsyncState; // retrieve the read buffer try { m_oFile.EndRead(iResult); // call end read : this throws any exceptions that happened during the read try { InputReport oInRep = CreateInputReport(); // Create the input report for the device oInRep.SetData(arrBuff); // and set the data portion - this processes the data received into a more easily understood format depending upon the report type HandleDataReceived(oInRep); // pass the new input report on to the higher level handler } finally { BeginAsyncRead(); // when all that is done, kick off another read for the next report } } catch(IOException ex) // if we got an IO exception, the device was removed { HandleDeviceRemoved(); if (OnDeviceRemoved != null) { OnDeviceRemoved(this, new EventArgs()); } Dispose(); } } /// <summary> /// Write an output report to the device. /// </summary> /// <param name="oOutRep">Output report to write</param> protected void Write(OutputReport oOutRep) { try { m_oFile.Write(oOutRep.Buffer, 0, oOutRep.BufferLength); } catch (IOException ex) { //Console.WriteLine(ex.ToString()); // The device was removed! throw new HIDDeviceException("Probbaly the device was removed"); } catch(Exception exx) { Console.WriteLine(exx.ToString()); } } /// <summary> /// virtual handler for any action to be taken when data is received. Override to use. /// </summary> /// <param name="oInRep">The input report that was received</param> protected virtual void HandleDataReceived(InputReport oInRep) { } /// <summary> /// Virtual handler for any action to be taken when a device is removed. Override to use. /// </summary> protected virtual void HandleDeviceRemoved() { } /// <summary> /// Helper method to return the device path given a DeviceInterfaceData structure and an InfoSet handle. /// Used in 'FindDevice' so check that method out to see how to get an InfoSet handle and a DeviceInterfaceData. /// </summary> /// <param name="hInfoSet">Handle to the InfoSet</param> /// <param name="oInterface">DeviceInterfaceData structure</param> /// <returns>The device path or null if there was some problem</returns> private static string GetDevicePath(IntPtr hInfoSet, ref DeviceInterfaceData oInterface) { uint nRequiredSize = 0; // Get the device interface details if (!SetupDiGetDeviceInterfaceDetail(hInfoSet, ref oInterface, IntPtr.Zero, 0, ref nRequiredSize, IntPtr.Zero)) { DeviceInterfaceDetailData oDetail = new DeviceInterfaceDetailData(); oDetail.Size = 5; // hardcoded to 5! Sorry, but this works and trying more future proof versions by setting the size to the struct sizeof failed miserably. If you manage to sort it, mail me! 
Thx if (SetupDiGetDeviceInterfaceDetail(hInfoSet, ref oInterface, ref oDetail, nRequiredSize, ref nRequiredSize, IntPtr.Zero)) { return oDetail.DevicePath; } } return null; } #endregion #region Public static /// <summary> /// Finds a device given its PID and VID /// </summary> /// <param name="nVid">Vendor id for device (VID)</param> /// <param name="nPid">Product id for device (PID)</param> /// <param name="oType">Type of device class to create</param> /// <returns>A new device class of the given type or null</returns> public static HIDDevice FindDevice(int nVid, int nPid, Type oType) { string strPath = string.Empty; string strSearch = string.Format("vid_{0:x4}&pid_{1:x4}", nVid, nPid); // first, build the path search string Guid gHid = HIDGuid; //HidD_GetHidGuid(out gHid); // next, get the GUID from Windows that it uses to represent the HID USB interface IntPtr hInfoSet = SetupDiGetClassDevs(ref gHid, null, IntPtr.Zero, DIGCF_DEVICEINTERFACE | DIGCF_PRESENT); // this gets a list of all HID devices currently connected to the computer (InfoSet) try { DeviceInterfaceData oInterface = new DeviceInterfaceData(); // build up a device interface data block oInterface.Size = Marshal.SizeOf(oInterface); // Now iterate through the InfoSet memory block assigned within Windows in the call to SetupDiGetClassDevs // to get device details for each device connected int nIndex = 0; while (SetupDiEnumDeviceInterfaces(hInfoSet, 0, ref gHid, (uint)nIndex, ref oInterface)) // this gets the device interface information for a device at index 'nIndex' in the memory block { string strDevicePath = GetDevicePath(hInfoSet, ref oInterface); // get the device path (see helper method 'GetDevicePath') if (strDevicePath.IndexOf(strSearch) >= 0) // do a string search, if we find the VID/PID string then we found our device! { HIDDevice oNewDevice = (HIDDevice)Activator.CreateInstance(oType); // create an instance of the class for this device oNewDevice.Initialise(strDevicePath); // initialise it with the device path return oNewDevice; // and return it } nIndex++; // if we get here, we didn't find our device. So move on to the next one. } } catch(Exception ex) { throw HIDDeviceException.GenerateError(ex.ToString()); //Console.WriteLine(ex.ToString()); } finally { // Before we go, we have to free up the InfoSet memory reserved by SetupDiGetClassDevs SetupDiDestroyDeviceInfoList(hInfoSet); } return null; // oops, didn't find our device } #endregion #region Publics /// <summary> /// Event handler called when device has been removed /// </summary> public event EventHandler OnDeviceRemoved; /// <summary> /// Accessor for output report length /// </summary> public int OutputReportLength { get { return m_nOutputReportLength; } } /// <summary> /// Accessor for input report length /// </summary> public int InputReportLength { get { return m_nInputReportLength; } } /// <summary> /// Virtual method to create an input report for this device. Override to use. /// </summary> /// <returns>A shiny new input report</returns> public virtual InputReport CreateInputReport() { return null; } #endregion } }
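// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not part of the UsbLibrary source above).
// Shows a minimal device class derived from the abstract HIDDevice and a lookup
// by VID/PID via FindDevice(). The VID/PID values are placeholders; a real
// device would also override CreateInputReport() to return its own InputReport
// subclass, since the default implementation returns null and ReadCompleted()
// above calls SetData() on that result.
// ---------------------------------------------------------------------------
using System;
using UsbLibrary;

class ExampleHidDevice : HIDDevice
{
    protected override void HandleDataReceived(InputReport oInRep)
    {
        // Called on a background thread for every completed asynchronous read.
        Console.WriteLine("Input report received ({0} bytes per report).", InputReportLength);
    }
}

static class HidDeviceUsageSketch
{
    public static void Run()
    {
        // 0x1234 / 0x5678 are placeholder vendor and product ids for illustration only.
        HIDDevice device = HIDDevice.FindDevice(0x1234, 0x5678, typeof(ExampleHidDevice));
        if (device == null)
        {
            Console.WriteLine("Device not found or not connected.");
            return;
        }

        device.OnDeviceRemoved += (sender, e) => Console.WriteLine("Device removed.");

        // ... talk to the device, then release the file stream and handle:
        device.Dispose();
    }
}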
// <copyright file="Permutation.cs" company="Math.NET"> // Math.NET Numerics, part of the Math.NET Project // http://numerics.mathdotnet.com // http://github.com/mathnet/mathnet-numerics // http://mathnetnumerics.codeplex.com // // Copyright (c) 2009-2010 Math.NET // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. // </copyright> namespace MathNet.Numerics { using System; using Properties; /// <summary> /// Class to represent a permutation for a subset of the natural numbers. /// </summary> [Serializable] public class Permutation { #region fields /// <summary> /// Entry _indices[i] represents the location to which i is permuted to. /// </summary> private readonly int[] _indices; #endregion fields #region Constructor /// <summary> /// Initializes a new instance of the Permutation class. /// </summary> /// <param name="indices">An array which represents where each integer is permuted too: indices[i] represents that integer i /// is permuted to location indices[i].</param> public Permutation(int[] indices) { if (!CheckForProperPermutation(indices)) { throw new ArgumentException(Resources.PermutationAsIntArrayInvalid, "indices"); } _indices = (int[])indices.Clone(); } #endregion /// <summary> /// Gets the number of elements this permutation is over. /// </summary> public int Dimension { get { return _indices.Length; } } /// <summary> /// Computes where <paramref name="idx"/> permutes too. /// </summary> /// <param name="idx">The index to permute from.</param> /// <returns>The index which is permuted to.</returns> public int this[int idx] { get { return _indices[idx]; } } /// <summary> /// Computes the inverse of the permutation. /// </summary> /// <returns>The inverse of the permutation.</returns> public Permutation Inverse() { var invIdx = new int[Dimension]; for (int i = 0; i < invIdx.Length; i++) { invIdx[_indices[i]] = i; } return new Permutation(invIdx); } /// <summary> /// Construct an array from a sequence of inversions. /// </summary> /// <example> /// From wikipedia: the permutation 12043 has the inversions (0,2), (1,2) and (3,4). This would be /// encoded using the array [22244]. 
/// </example> /// <param name="inv">The set of inversions to construct the permutation from.</param> /// <returns>A permutation generated from a sequence of inversions.</returns> public static Permutation FromInversions(int[] inv) { var idx = new int[inv.Length]; for (int i = 0; i < inv.Length; i++) { idx[i] = i; } for (int i = inv.Length - 1; i >= 0; i--) { if (idx[i] != inv[i]) { int t = idx[i]; idx[i] = idx[inv[i]]; idx[inv[i]] = t; } } return new Permutation(idx); } /// <summary> /// Construct a sequence of inversions from the permutation. /// </summary> /// <example> /// From wikipedia: the permutation 12043 has the inversions (0,2), (1,2) and (3,4). This would be /// encoded using the array [22244]. /// </example> /// <returns>A sequence of inversions.</returns> public int[] ToInversions() { var idx = (int[])_indices.Clone(); for (int i = 0; i < idx.Length; i++) { if (idx[i] != i) { #if !PORTABLE int q = Array.FindIndex(idx, i + 1, x => x == i); #else int q = -1; for(int j = i+1; j < Dimension; j++) { if(idx[j] == i) { q = j; break; } } #endif var t = idx[i]; idx[i] = q; idx[q] = t; } } return idx; } /// <summary> /// Checks whether the <paramref name="indices"/> array represents a proper permutation. /// </summary> /// <param name="indices">An array which represents where each integer is permuted too: indices[i] represents that integer i /// is permuted to location indices[i].</param> /// <returns>True if <paramref name="indices"/> represents a proper permutation, <c>false</c> otherwise.</returns> static private bool CheckForProperPermutation(int[] indices) { var idxCheck = new bool[indices.Length]; for (int i = 0; i < indices.Length; i++) { if (indices[i] >= indices.Length || indices[i] < 0) { return false; } idxCheck[indices[i]] = true; } for (int i = 0; i < indices.Length; i++) { if (idxCheck[i] == false) { return false; } } return true; } } }
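// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not part of the Math.NET Numerics source
// above). Builds a three-element permutation, queries where an index is sent,
// computes the inverse, and round-trips through the inversion encoding. The
// index arrays are arbitrary example data.
// ---------------------------------------------------------------------------
using System;
using MathNet.Numerics;

static class PermutationUsageSketch
{
    public static void Run()
    {
        // indices[i] is the location that element i is permuted to: 0->1, 1->2, 2->0.
        var p = new Permutation(new[] { 1, 2, 0 });

        Console.WriteLine(p.Dimension);   // 3
        Console.WriteLine(p[0]);          // 1: element 0 moves to position 1

        // The inverse sends each target position back to its source: 0->2, 1->0, 2->1.
        Permutation q = p.Inverse();
        Console.WriteLine(q[1]);          // 0

        // ToInversions()/FromInversions() encode and decode the same permutation.
        int[] inv = p.ToInversions();
        Permutation r = Permutation.FromInversions(inv);
        Console.WriteLine(r[0]);          // 1, same as p[0]
    }
}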
using System; using System.Collections.Generic; using System.Text; namespace LumiSoft.Net.Mime.vCard { /// <summary> /// vCard phone number implementation. /// </summary> public class PhoneNumber { private Item m_pItem = null; private PhoneNumberType_enum m_Type = PhoneNumberType_enum.Voice; private string m_Number = ""; /// <summary> /// Default constructor. /// </summary> /// <param name="item">Owner vCard item.</param> /// <param name="type">Phone number type. Note: This value can be flagged value !</param> /// <param name="number">Phone number.</param> internal PhoneNumber(Item item,PhoneNumberType_enum type,string number) { m_pItem = item; m_Type = type; m_Number = number; } #region method Changed /// <summary> /// This method is called when some property has changed, wee need to update underlaying vCard item. /// </summary> private void Changed() { m_pItem.ParametersString = PhoneTypeToString(m_Type); m_pItem.Value = m_Number; } #endregion #region internal static method Parse /// <summary> /// Parses phone from vCard TEL structure string. /// </summary> /// <param name="item">vCard TEL item.</param> internal static PhoneNumber Parse(Item item) { PhoneNumberType_enum type = PhoneNumberType_enum.NotSpecified; if(item.ParametersString.ToUpper().IndexOf("PREF") != -1){ type |= PhoneNumberType_enum.Preferred; } if(item.ParametersString.ToUpper().IndexOf("HOME") != -1){ type |= PhoneNumberType_enum.Home; } if(item.ParametersString.ToUpper().IndexOf("MSG") != -1){ type |= PhoneNumberType_enum.Msg; } if(item.ParametersString.ToUpper().IndexOf("WORK") != -1){ type |= PhoneNumberType_enum.Work; } if(item.ParametersString.ToUpper().IndexOf("VOICE") != -1){ type |= PhoneNumberType_enum.Voice; } if(item.ParametersString.ToUpper().IndexOf("FAX") != -1){ type |= PhoneNumberType_enum.Fax; } if(item.ParametersString.ToUpper().IndexOf("CELL") != -1){ type |= PhoneNumberType_enum.Cellular; } if(item.ParametersString.ToUpper().IndexOf("VIDEO") != -1){ type |= PhoneNumberType_enum.Video; } if(item.ParametersString.ToUpper().IndexOf("PAGER") != -1){ type |= PhoneNumberType_enum.Pager; } if(item.ParametersString.ToUpper().IndexOf("BBS") != -1){ type |= PhoneNumberType_enum.BBS; } if(item.ParametersString.ToUpper().IndexOf("MODEM") != -1){ type |= PhoneNumberType_enum.Modem; } if(item.ParametersString.ToUpper().IndexOf("CAR") != -1){ type |= PhoneNumberType_enum.Car; } if(item.ParametersString.ToUpper().IndexOf("ISDN") != -1){ type |= PhoneNumberType_enum.ISDN; } if(item.ParametersString.ToUpper().IndexOf("PCS") != -1){ type |= PhoneNumberType_enum.PCS; } return new PhoneNumber(item,type,item.Value); } #endregion #region internal static PhoneTypeToString /// <summary> /// Converts PhoneNumberType_enum to vCard item parameters string. 
/// </summary> /// <param name="type">Value to convert.</param> /// <returns></returns> internal static string PhoneTypeToString(PhoneNumberType_enum type) { string retVal = ""; if((type & PhoneNumberType_enum.BBS) != 0){ retVal += "BBS,"; } if((type & PhoneNumberType_enum.Car) != 0){ retVal += "CAR,"; } if((type & PhoneNumberType_enum.Cellular) != 0){ retVal += "CELL,"; } if((type & PhoneNumberType_enum.Fax) != 0){ retVal += "FAX,"; } if((type & PhoneNumberType_enum.Home) != 0){ retVal += "HOME,"; } if((type & PhoneNumberType_enum.ISDN) != 0){ retVal += "ISDN,"; } if((type & PhoneNumberType_enum.Modem) != 0){ retVal += "MODEM,"; } if((type & PhoneNumberType_enum.Msg) != 0){ retVal += "MSG,"; } if((type & PhoneNumberType_enum.Pager) != 0){ retVal += "PAGER,"; } if((type & PhoneNumberType_enum.PCS) != 0){ retVal += "PCS,"; } if((type & PhoneNumberType_enum.Preferred) != 0){ retVal += "PREF,"; } if((type & PhoneNumberType_enum.Video) != 0){ retVal += "VIDEO,"; } if((type & PhoneNumberType_enum.Voice) != 0){ retVal += "VOICE,"; } if((type & PhoneNumberType_enum.Work) != 0){ retVal += "WORK,"; } if(retVal.EndsWith(",")){ retVal = retVal.Substring(0,retVal.Length - 1); } return retVal; } #endregion #region Properties Implementation /// <summary> /// Gets underlaying vCrad item. /// </summary> public Item Item { get{ return m_pItem; } } /// <summary> /// Gets or sets phone number type. Note: This property can be flagged value ! /// </summary> public PhoneNumberType_enum NumberType { get{ return m_Type; } set{ m_Type = value; Changed(); } } /// <summary> /// Gets or sets phone number. /// </summary> public string Number { get{ return m_Number; } set{ m_Number = value; Changed(); } } #endregion } }
// // MetadataDisplay.cs // // Author: // Ruben Vermeersch <[email protected]> // Stephen Shaw <[email protected]> // // Copyright (c) 2014 Stephen Shaw // Copyright (C) 2008-2010 Novell, Inc. // Copyright (C) 2008, 2010 Ruben Vermeersch // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using Gtk; using Mono.Unix; using FSpot.Core; using FSpot.Utils; using FSpot.Extensions; namespace FSpot.Widgets { public class MetadataDisplayPage : SidebarPage { public MetadataDisplayPage () : base (new MetadataDisplayWidget (), Catalog.GetString ("Metadata"), "gtk-info") { (SidebarWidget as MetadataDisplayWidget).Page = this; } protected override void AddedToSidebar () { var widget = SidebarWidget as MetadataDisplayWidget; (Sidebar as Sidebar).SelectionChanged += widget.HandleSelectionChanged; (Sidebar as Sidebar).SelectionItemsChanged += widget.HandleSelectionItemsChanged; } } public class MetadataDisplayWidget : ScrolledWindow { DelayedOperation update_delay; /* This VBox only contains exif-data, so it is seperated from other information */ readonly VBox metadata_vbox; readonly VBox main_vbox; readonly Label metadata_message; DisplayState display; public MetadataDisplayPage Page { get; set; } // stores list of the expanded expanders List<string> open_list; ListStore extended_metadata; bool up_to_date; enum DisplayState { metadata, message }; public MetadataDisplayWidget () { main_vbox = new VBox (); main_vbox.Spacing = 6; metadata_message = new Label (string.Empty); metadata_message.UseMarkup = true; metadata_message.LineWrap = true; metadata_vbox = new VBox (); metadata_vbox.Spacing = 6; main_vbox.PackStart (metadata_vbox, false, false, 0); AddWithViewport (metadata_message); ((Viewport)Child).ShadowType = ShadowType.None; BorderWidth = 3; display = DisplayState.message; ExposeEvent += HandleExposeEvent; open_list = new List<string> (); // Create Expander and TreeView for // extended metadata var tree_view = new TreeView (); tree_view.HeadersVisible = false; tree_view.RulesHint = true; var col = new TreeViewColumn (); col.Sizing = TreeViewColumnSizing.Autosize; CellRenderer colr = new CellRendererText (); col.PackStart (colr, false); col.AddAttribute (colr, "markup", 0); tree_view.AppendColumn (col); extended_metadata = new ListStore (typeof(string)); tree_view.Model = extended_metadata; var expander = new Expander (string.Format ("<span weight=\"bold\"><small>{0}</small></span>", Catalog.GetString ("Extended Metadata"))); expander.UseMarkup = 
true; expander.Add (tree_view); expander.Expanded = true; main_vbox.PackStart (expander, false, false, 6); expander.ShowAll (); update_delay = new DelayedOperation (Update); update_delay.Start (); } IPhoto photo; public IPhoto Photo { get { return photo; } set { photo = value; if (!Visible) { up_to_date = false; } else { update_delay.Start (); } } } void HandleExposeEvent (object sender, ExposeEventArgs args) { if (!up_to_date) { update_delay.Start (); } } internal void HandleSelectionChanged (IBrowsableCollection collection) { // Don't show metadata when multiple photos are selected. Photo = (collection != null && collection.Count == 1) ? collection [0] : null; } internal void HandleSelectionItemsChanged (IBrowsableCollection collection, BrowsableEventArgs args) { if (!args.Changes.MetadataChanged) return; if (!Visible) { up_to_date = false; } else { update_delay.Start (); } } new bool Visible { get { return (Page.Sidebar as Sidebar).IsActive (Page); } } ListStore AddExpander (string name, int pos) { TreeView tree_view = new TreeView (); tree_view.HeadersVisible = false; tree_view.RulesHint = true; TreeViewColumn col = new TreeViewColumn (); col.Sizing = TreeViewColumnSizing.Autosize; CellRenderer colr = new CellRendererText (); col.PackStart (colr, false); col.AddAttribute (colr, "markup", 0); tree_view.AppendColumn (col); ListStore model = new ListStore (typeof(string)); tree_view.Model = model; Expander expander = new Expander (string.Format ("<span weight=\"bold\"><small>{0}</small></span>", name)); expander.UseMarkup = true; expander.Add (tree_view); expander.Expanded = true; metadata_vbox.PackStart (expander, false, false, 6); metadata_vbox.ReorderChild (expander, pos); if (open_list.Contains (name)) expander.Expanded = true; expander.Activated += HandleExpanderActivated; expander.ShowAll (); return model; } public void HandleExpanderActivated (object sender, EventArgs e) { Expander expander = (Expander)sender; if (expander.Expanded) open_list.Add (expander.Label); else open_list.Remove (expander.Label); } bool Update () { bool empty = true; int index_of_expander = 0; bool missing = false; Exception error = null; /* // FIXME: The stuff below needs to be ported to Taglib#. 
TreeIter iter; ListStore model; string name; up_to_date = true; int i = 0; // Write Exif-Data if (exif_info != null) { foreach (ExifContent content in exif_info.GetContents ()) { ExifEntry [] entries = content.GetEntries (); i++; if (entries.Length < 1) continue; empty = false; name = ExifUtil.GetIfdNameExtended ((Ifd)i - 1); if (index_of_expander >= metadata_vbox.Children.Length) model = AddExpander (name, index_of_expander); else { Expander expander = (Expander)metadata_vbox.Children[index_of_expander]; if (expander.Label == name) model = (ListStore)((TreeView)expander.Child).Model; else { model = AddExpander (name, index_of_expander); } } model.GetIterFirst(out iter); foreach (ExifEntry entry in entries) { string s; if (entry.Title != null) s = string.Format ("{0}\n\t<small>{1}</small>", entry.Title, entry.Value); else s = string.Format ("Unknown Tag ID={0}\n\t<small>{1}</small>", entry.Tag.ToString (), entry.Value); if (model.IterIsValid(iter)) { model.SetValue (iter, 0, s); model.IterNext(ref iter); } else model.AppendValues (s); } // remove rows, that are not used while (model.IterIsValid(iter)) { model.Remove (ref iter); } index_of_expander++; } } // Write Extended Metadata if (photo != null) { MetadataStore store = new MetadataStore (); try { using (var img = ImageFile.Create (photo.DefaultVersion.Uri)) { if (img is SemWeb.StatementSource) { StatementSource source = (StatementSource)img; source.Select (store); } } } catch (System.IO.FileNotFoundException) { missing = true; } catch (System.Exception e){ // Sometimes we don't get the right exception, check for the file if (!System.IO.File.Exists (photo.DefaultVersion.Uri.LocalPath)) { missing = true; } else { // if the file is there but we still got an exception display it. error = e; } } model = extended_metadata; model.GetIterFirst(out iter); if (store.StatementCount > 0) { empty = false; foreach (Statement stmt in store) { // Skip anonymous subjects because they are // probably part of a collection if (stmt.Subject.Uri == null && store.SelectSubjects (null, stmt.Subject).Length > 0) continue; string title; string value; string s; Description.GetDescription (store, stmt, out title, out value); if (value == null) { MemoryStore substore = store.Select (new Statement ((Entity)stmt.Object, null, null, null)).Load(); StringBuilder collection = new StringBuilder (); collection.Append (title); WriteCollection (substore, collection); if (model.IterIsValid(iter)) { model.SetValue (iter, 0, collection.ToString ()); model.IterNext(ref iter); } else model.AppendValues (collection.ToString ()); } else { s = string.Format ("{0}\n\t<small>{1}</small>", title, value); if (model.IterIsValid(iter)) { model.SetValue (iter, 0, s); model.IterNext(ref iter); } else model.AppendValues (s); } } } else { // clear Extended Metadata String s = string.Format ("<small>{0}</small>", Catalog.GetString ("No Extended Metadata Available")); if (model.IterIsValid(iter)) { model.SetValue (iter, 0, s); model.IterNext(ref iter); } else model.AppendValues (s); } // remove rows, that are not used while (model.IterIsValid(iter)) { model.Remove (ref iter); } }*/ if (empty) { string msg; if (photo == null) { msg = Catalog.GetString ("No active photo"); } else if (missing) { msg = string.Format (Catalog.GetString ("The photo \"{0}\" does not exist"), photo.DefaultVersion.Uri); } else { msg = Catalog.GetString ("No metadata available"); if (error != null) { msg = string.Format ("<i>{0}</i>", error); } } metadata_message.Markup = "<span weight=\"bold\">" + msg + "</span>"; if 
(display == DisplayState.metadata) { // Child is a Viewport, (AddWithViewport in ctor) ((Viewport)Child).Remove (main_vbox); ((Viewport)Child).Add (metadata_message); display = DisplayState.message; metadata_message.Show (); } } else { // remove Expanders, that are not used while (index_of_expander < metadata_vbox.Children.Length) metadata_vbox.Remove (metadata_vbox.Children [index_of_expander]); if (display == DisplayState.message) { // Child is a Viewport, (AddWithViewport in ctor) ((Viewport)Child).Remove (metadata_message); ((Viewport)Child).Add (main_vbox); display = DisplayState.metadata; main_vbox.ShowAll (); } } return false; } /* private void WriteCollection (MemoryStore substore, StringBuilder collection) { string type = null; foreach (Statement stmt in substore) { if (stmt.Predicate.Uri == MetadataStore.Namespaces.Resolve ("rdf:type")) { string prefix; MetadataStore.Namespaces.Normalize (stmt.Object.Uri, out prefix, out type); } } foreach (Statement sub in substore) { if (sub.Object is SemWeb.Literal) { string title; string value = ((SemWeb.Literal)sub.Object).Value; Description.GetDescription (substore, sub, out title, out value); if (type == null) collection.AppendFormat ("\n\t<small>{0}: {1}</small>", title, value); else collection.AppendFormat ("\n\t<small>{0}</small>", value); } else { if (type == null) { MemoryStore substore2 = substore.Select (new Statement ((Entity)sub.Object, null, null, null)).Load(); if (substore.StatementCount > 0) WriteCollection (substore2, collection); } } } }*/ } }
// Copyright (c) .NET Foundation and contributors. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices; using FluentAssertions; using Microsoft.DotNet.Cli; using Microsoft.DotNet.Cli.CommandLine; using Microsoft.DotNet.Cli.Utils; using Microsoft.DotNet.ToolPackage; using Microsoft.DotNet.Tools; using Microsoft.DotNet.Tools.Tool.Install; using Microsoft.DotNet.Tools.Tool.Uninstall; using Microsoft.DotNet.Tools.Tests.ComponentMocks; using Microsoft.DotNet.Tools.Test.Utilities; using Microsoft.Extensions.DependencyModel.Tests; using Microsoft.Extensions.EnvironmentAbstractions; using Xunit; using Parser = Microsoft.DotNet.Cli.Parser; using LocalizableStrings = Microsoft.DotNet.Tools.Tool.Uninstall.LocalizableStrings; using InstallLocalizableStrings = Microsoft.DotNet.Tools.Tool.Install.LocalizableStrings; using Microsoft.DotNet.ShellShim; namespace Microsoft.DotNet.Tests.Commands { public class ToolUninstallGlobalOrToolPathCommandTests { private readonly BufferedReporter _reporter; private readonly IFileSystem _fileSystem; private readonly EnvironmentPathInstructionMock _environmentPathInstructionMock; private const string PackageId = "global.tool.console.demo"; private const string PackageVersion = "1.0.4"; private readonly string _shimsDirectory; private readonly string _toolsDirectory; public ToolUninstallGlobalOrToolPathCommandTests() { _reporter = new BufferedReporter(); _fileSystem = new FileSystemMockBuilder().UseCurrentSystemTemporaryDirectory().Build(); var tempDirectory = _fileSystem.Directory.CreateTemporaryDirectory().DirectoryPath; _shimsDirectory = Path.Combine(tempDirectory, "shims"); _toolsDirectory = Path.Combine(tempDirectory, "tools"); _environmentPathInstructionMock = new EnvironmentPathInstructionMock(_reporter, _shimsDirectory); } [Fact] public void GivenANonExistentPackageItErrors() { var packageId = "does.not.exist"; var command = CreateUninstallCommand($"-g {packageId}"); Action a = () => command.Execute(); a.ShouldThrow<GracefulException>() .And .Message .Should() .Be(string.Format(LocalizableStrings.ToolNotInstalled, packageId)); } [Fact] public void GivenAPackageItUninstalls() { CreateInstallCommand($"-g {PackageId}").Execute().Should().Be(0); _reporter .Lines .Last() .Should() .Contain(string.Format( InstallLocalizableStrings.InstallationSucceeded, ProjectRestorerMock.DefaultToolCommandName, PackageId, PackageVersion)); var packageDirectory = new DirectoryPath(Path.GetFullPath(_toolsDirectory)) .WithSubDirectories(PackageId, PackageVersion); var shimPath = Path.Combine( _shimsDirectory, ProjectRestorerMock.DefaultToolCommandName + (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? 
".exe" : "")); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeTrue(); _fileSystem.File.Exists(shimPath).Should().BeTrue(); _reporter.Lines.Clear(); CreateUninstallCommand($"-g {PackageId}").Execute().Should().Be(0); _reporter .Lines .Single() .Should() .Contain(string.Format( LocalizableStrings.UninstallSucceeded, PackageId, PackageVersion)); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeFalse(); _fileSystem.File.Exists(shimPath).Should().BeFalse(); } [Fact] public void GivenAPackageWhenCallFromUninstallRedirectCommandItUninstalls() { CreateInstallCommand($"-g {PackageId}").Execute().Should().Be(0); _reporter .Lines .Last() .Should() .Contain(string.Format( InstallLocalizableStrings.InstallationSucceeded, ProjectRestorerMock.DefaultToolCommandName, PackageId, PackageVersion)); var packageDirectory = new DirectoryPath(Path.GetFullPath(_toolsDirectory)) .WithSubDirectories(PackageId, PackageVersion); var shimPath = Path.Combine( _shimsDirectory, ProjectRestorerMock.DefaultToolCommandName + (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? ".exe" : "")); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeTrue(); _fileSystem.File.Exists(shimPath).Should().BeTrue(); _reporter.Lines.Clear(); ParseResult result = Parser.Instance.Parse("dotnet tool uninstall " + $"-g {PackageId}"); (IToolPackageStore, IToolPackageStoreQuery, IToolPackageUninstaller) CreateToolPackageStoreAndUninstaller( DirectoryPath? directoryPath) { var store = new ToolPackageStoreMock( new DirectoryPath(_toolsDirectory), _fileSystem); var packageUninstaller = new ToolPackageUninstallerMock(_fileSystem, store); return (store, store, packageUninstaller); } var toolUninstallGlobalOrToolPathCommand = new ToolUninstallGlobalOrToolPathCommand( result["dotnet"]["tool"]["uninstall"], result, CreateToolPackageStoreAndUninstaller, (_) => new ShellShimRepository( new DirectoryPath(_shimsDirectory), fileSystem: _fileSystem, appHostShellShimMaker: new AppHostShellShimMakerMock(_fileSystem)), _reporter); var uninstallCommand = new ToolUninstallCommand( result["dotnet"]["tool"]["uninstall"], result, toolUninstallGlobalOrToolPathCommand: toolUninstallGlobalOrToolPathCommand) ; uninstallCommand.Execute().Should().Be(0); _reporter .Lines .Single() .Should() .Contain(string.Format( LocalizableStrings.UninstallSucceeded, PackageId, PackageVersion)); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeFalse(); _fileSystem.File.Exists(shimPath).Should().BeFalse(); } [Fact] public void GivenAFailureToUninstallItLeavesItInstalled() { CreateInstallCommand($"-g {PackageId}").Execute().Should().Be(0); _reporter .Lines .Last() .Should() .Contain(string.Format( InstallLocalizableStrings.InstallationSucceeded, ProjectRestorerMock.DefaultToolCommandName, PackageId, PackageVersion)); var packageDirectory = new DirectoryPath(Path.GetFullPath(_toolsDirectory)) .WithSubDirectories(PackageId, PackageVersion); var shimPath = Path.Combine( _shimsDirectory, ProjectRestorerMock.DefaultToolCommandName + (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? 
".exe" : "")); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeTrue(); _fileSystem.File.Exists(shimPath).Should().BeTrue(); Action a = () => CreateUninstallCommand( options: $"-g {PackageId}", uninstallCallback: () => throw new IOException("simulated error")) .Execute(); a.ShouldThrow<GracefulException>() .And .Message .Should() .Be(string.Format( CommonLocalizableStrings.FailedToUninstallToolPackage, PackageId, "simulated error")); _fileSystem.Directory.Exists(packageDirectory.Value).Should().BeTrue(); _fileSystem.File.Exists(shimPath).Should().BeTrue(); } [Fact] public void GivenAnInvalidToolPathItThrowsException() { var toolPath = "tool-path-does-not-exist"; var uninstallCommand = CreateUninstallCommand($"--tool-path {toolPath} {PackageId}"); Action a = () => uninstallCommand.Execute(); a.ShouldThrow<GracefulException>() .And .Message .Should() .Be(string.Format(LocalizableStrings.InvalidToolPathOption, toolPath)); } private ToolInstallGlobalOrToolPathCommand CreateInstallCommand(string options) { ParseResult result = Parser.Instance.Parse("dotnet tool install " + options); var store = new ToolPackageStoreMock(new DirectoryPath(_toolsDirectory), _fileSystem); var packageInstallerMock = new ToolPackageInstallerMock( _fileSystem, store, new ProjectRestorerMock( _fileSystem, _reporter)); return new ToolInstallGlobalOrToolPathCommand( result["dotnet"]["tool"]["install"], result, (location, forwardArguments) => (store, store, packageInstallerMock), (_) => new ShellShimRepository( new DirectoryPath(_shimsDirectory), fileSystem: _fileSystem, appHostShellShimMaker: new AppHostShellShimMakerMock(_fileSystem)), _environmentPathInstructionMock, _reporter); } private ToolUninstallGlobalOrToolPathCommand CreateUninstallCommand(string options, Action uninstallCallback = null) { ParseResult result = Parser.Instance.Parse("dotnet tool uninstall " + options); (IToolPackageStore, IToolPackageStoreQuery, IToolPackageUninstaller) createToolPackageStoreAndUninstaller( DirectoryPath? directoryPath) { var store = new ToolPackageStoreMock( new DirectoryPath(_toolsDirectory), _fileSystem); var packageUninstaller = new ToolPackageUninstallerMock(_fileSystem, store, uninstallCallback); return (store, store, packageUninstaller); } return new ToolUninstallGlobalOrToolPathCommand( result["dotnet"]["tool"]["uninstall"], result, createToolPackageStoreAndUninstaller, (_) => new ShellShimRepository( new DirectoryPath(_shimsDirectory), fileSystem: _fileSystem, appHostShellShimMaker: new AppHostShellShimMakerMock(_fileSystem)), _reporter); } } }
using System.Collections; using System.Collections.Generic; using System.Linq; using ILRepacking.Steps.SourceServerData; using NUnit.Framework; namespace ILRepack.Tests.Steps.SourceServerData { internal static class HttpSourceServerDescriptorTest { public static IEnumerable SuccessfulParsingTestCases { get { yield return new TestCaseData( @"SRCSRV: ini ------------------------------------------------ VERSION=2 SRCSRV: variables ------------------------------------------ SRCSRVVERCTRL=http SRCSRVTRG=http://website/raw/myrepo/commithash/%var2% SRCSRV: source files --------------------------------------- C:\Users\vagrant\workspace\myrepo\src\File1.cs*src/File1.cs C:\Users\vagrant\workspace\myrepo\src\FileTwo.cs*src/FileTow.cs C:\Users\vagrant\workspace\myrepo\src\FileXxx.cs*src/FileXxx.cs SRCSRV: end ------------------------------------------------", 2, "http", "http://website/raw/myrepo/commithash/%var2%", 3).SetName("3 files, http"); yield return new TestCaseData( @"SRCSRV: ini ------------------------------------------------ VERSION=1 SRCSRV: variables ------------------------------------------ SRCSRVTRG=https://website/raw/anotherrepo/commithash/%var2% SRCSRVVERCTRL=https SRCSRV: source files --------------------------------------- SRCSRV: end ------------------------------------------------", 1, "https", "https://website/raw/anotherrepo/commithash/%var2%", 0).SetName("No files, https"); yield return new TestCaseData( @"SRCSRV: ini ------------------------------------------------ VERSION=2 SRCSRV: variables ------------------------------------------ SRCSRVVERCTRL=http SRCSRVTRG=http://website/raw/myrepo/commithash/%var2% SRCSRV: source files --------------------------------------- C:\Users\vagrant\workspace\myrepo\src\File1.cs*src/File1.cs C:\Users\vagrant\workspace\myrepo\src\FileTwo.cs*src/FileTow.cs SRCSRV: end ------------------------------------------------", 2, "http", "http://website/raw/myrepo/commithash/%var2%", 2).SetName("2 files, http, different return line"); yield return new TestCaseData( @"SRCSRV: ini ------------------------------------------------ VERSION=1 SRCSRV: variables ------------------------------------------ SRCSRVTRG=https://website/raw/anotherrepo/commit=hash/%var2% SRCSRVVERCTRL=http SRCSRV: source files --------------------------------------- C:\Users\vagrant\workspace\myrepo\src\OnlyOneFile.cs*src/OnlyOneFile.cs SRCSRV: end ------------------------------------------------", 1, "http", "https://website/raw/anotherrepo/commit=hash/%var2%", 1).SetName("1 file, http, equal charater in SRCSRVTRG"); } } [TestCaseSource(nameof(SuccessfulParsingTestCases))] public static void GivenHttpRawSourceServerData_WhenParsing_ThenValuesShouldTHeExpectedOne( string raw, int expectedVersion, string expectedVersionControl, string expectedTarget, int expectedSourceFilesCount) { HttpSourceServerDescriptor descriptor; var success = HttpSourceServerDescriptor.TryParse(raw, out descriptor); Assert.IsTrue(success); Assert.That(descriptor.Version, Is.EqualTo(expectedVersion)); Assert.That(descriptor.VersionControl, Is.EqualTo(expectedVersionControl)); Assert.That(descriptor.Target, Is.EqualTo(expectedTarget)); Assert.That(descriptor.SourceFiles.Length, Is.EqualTo(expectedSourceFilesCount)); } public static IEnumerable FailingParsingTestCases { get { yield return @"SRCSRV: ini ------------------------------------------------ VERSION=2 SRCSRV: variables ------------------------------------------ SRCSRVVERCTRL=tfs SRCSRV: source files --------------------------------------- SRCSRV: end 
------------------------------------------------"; yield return ""; } } [TestCaseSource(nameof(FailingParsingTestCases))] public static void GivenInvliadRawSourceServerData_WhenParsing_ThenValuesShouldTHeExpectedOne(string raw) { HttpSourceServerDescriptor descriptor; var success = HttpSourceServerDescriptor.TryParse(raw, out descriptor); Assert.IsFalse(success); } public static IEnumerable ToStringTestCases { get { yield return new TestCaseData(new HttpSourceServerDescriptor( 3, "http", "http://website/raw/therepo/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\project\folder\file1.cs", "folder/file1.cs"), new SourceFileDescriptor(@"c:\project\folder\file2.cs", "folder/file2.cs") })) .Returns(@"SRCSRV: ini ------------------------------------------------ VERSION=3 SRCSRV: variables ------------------------------------------ SRCSRVVERCTRL=http SRCSRVTRG=http://website/raw/therepo/commit-hash/%var2% SRCSRV: source files --------------------------------------- c:\project\folder\file1.cs*folder/file1.cs c:\project\folder\file2.cs*folder/file2.cs SRCSRV: end ------------------------------------------------".GetLines()); yield return new TestCaseData(new HttpSourceServerDescriptor( 50, "", "", new SourceFileDescriptor[0])) .Returns(@"SRCSRV: ini ------------------------------------------------ VERSION=50 SRCSRV: variables ------------------------------------------ SRCSRVVERCTRL= SRCSRVTRG= SRCSRV: source files --------------------------------------- SRCSRV: end ------------------------------------------------".GetLines()); } } [TestCaseSource(nameof(ToStringTestCases))] public static IEnumerable<string> GivenSourceServerDataObject_WhenCallingToString_ThenAProperRawValueIGenerated(HttpSourceServerDescriptor descriptor) { return descriptor.ToString().GetLines(); } public static IEnumerable MergingTestCases { get { yield return new TestCaseData( 2, "http", "%var2%", new Dictionary<string, string> { { @"c:\proj\src\file.cs", "http://server/raw/repo/commit-hash/src/files.cs" }, { @"c:\proj\test\file.cs", "http://server/raw/repo/commit-hash/test/files.cs" } }, new HttpSourceServerDescriptor( 2, "http", "http://server/raw/repo/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\proj\src\file.cs", "src/files.cs"), new SourceFileDescriptor(@"c:\proj\test\file.cs", "test/files.cs") }), new HttpSourceServerDescriptor[0]).SetName("One assembly"); yield return new TestCaseData( 2, "http", "%var2%", new Dictionary<string, string> { { @"c:\primary\src\file1.cs", "http://server/raw/primary/primary-commit-hash/src/files1.cs" }, { @"c:\proj1\sources\file2.cs", "http://server/raw/proj1/commit-hash/sources/files2.cs" }, { @"c:\proj2\main\file3.cs", "http://server/raw/proj2/commit-hash/main/files3.cs" } }, new HttpSourceServerDescriptor( 2, "http", "http://server/raw/primary/primary-commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\primary\src\file1.cs", "src/files1.cs") }), new[] { new HttpSourceServerDescriptor( 2, "http", "http://server/raw/proj1/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\proj1\sources\file2.cs", "sources/files2.cs") }), new HttpSourceServerDescriptor( 2, "http", "http://server/raw/proj2/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\proj2\main\file3.cs", "main/files3.cs") }) }) .SetName("Tree assemblies, same root web server, same version control, same version."); yield return new TestCaseData( 2, "http", "%var2%", new Dictionary<string, string> { { @"c:\primary\src\file1.cs", "http://server/raw/primary/primary-commit-hash/src/files1.cs" }, 
{ @"c:\other-proj\src\file2.cs", "http://server/raw/other-proj/commit-hash/src/files2.cs" } }, new HttpSourceServerDescriptor( 2, "http", "http://server/raw/primary/primary-commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\primary\src\file1.cs", "src/files1.cs") }), new[] { new HttpSourceServerDescriptor( 55, "http", "http://server/raw/other-proj/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\other-proj\src\file2.cs", "src/files2.cs") }) }) .SetName("Two assemblies, same root web server, same version control, different version."); yield return new TestCaseData( 2, "http", "%var2%", new Dictionary<string, string> { { @"c:\primary\src\file1.cs", "http://server/raw/primary/primary-commit-hash/src/files1.cs" } }, new HttpSourceServerDescriptor( 2, "http", "http://server/raw/primary/primary-commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\primary\src\file1.cs", "src/files1.cs") }), new[] { new HttpSourceServerDescriptor( 2, "tfs", "tfs://server/raw/other-proj/commit-hash/%var2%", new[] { new SourceFileDescriptor(@"c:\other-proj\src\file2.cs", "src/files2.cs") }) }) .SetName("Two assemblies, different version control"); } } [TestCaseSource(nameof(MergingTestCases))] public static void GivenAListOfSourceServerDescriptor_WhenMergingThem_ThenItShouldBeProperlyMerged( int expectedVersion, string expectedVersionControl, string expectedTarget, IDictionary<string, string> expectedSourceFiles, HttpSourceServerDescriptor primary, IEnumerable<HttpSourceServerDescriptor> other) { var merged = primary.MergeWith(other); Assert.That(merged.Version, Is.EqualTo(expectedVersion)); Assert.That(merged.VersionControl, Is.EqualTo(expectedVersionControl)); Assert.That(merged.Target, Is.EqualTo(expectedTarget)); CollectionAssert.AreEquivalent( expectedSourceFiles, merged.SourceFiles.Select(file => file.Variables) .ToDictionary(vars => vars.ElementAt(0), vars => vars.ElementAt(1))); } } }
// Copyright (c) The Avalonia Project. All rights reserved. // Licensed under the MIT license. See licence.md file in the project root for full license information. using Avalonia.Input.Platform; using System; using System.Collections.Generic; using System.Reactive.Disposables; using System.Diagnostics.CodeAnalysis; using System.Runtime.InteropServices; using System.Threading; using Avalonia.Controls.Platform; using Avalonia.Input; using Avalonia.Platform; using Avalonia.Win32.Input; using Avalonia.Win32.Interop; using Avalonia.Controls; using Avalonia.Rendering; #if NETSTANDARD using Win32Exception = Avalonia.Win32.NetStandard.AvaloniaWin32Exception; #else using System.ComponentModel; #endif namespace Avalonia { public static class Win32ApplicationExtensions { public static T UseWin32<T>( this T builder, bool deferredRendering = true) where T : AppBuilderBase<T>, new() { return builder.UseWindowingSubsystem( () => Win32.Win32Platform.Initialize(deferredRendering), "Win32"); } } } namespace Avalonia.Win32 { partial class Win32Platform : IPlatformThreadingInterface, IPlatformSettings, IWindowingPlatform, IPlatformIconLoader { private static readonly Win32Platform s_instance = new Win32Platform(); private static uint _uiThread; private UnmanagedMethods.WndProc _wndProcDelegate; private IntPtr _hwnd; private readonly List<Delegate> _delegates = new List<Delegate>(); public Win32Platform() { // Declare that this process is aware of per monitor DPI if (UnmanagedMethods.ShCoreAvailable) { UnmanagedMethods.SetProcessDpiAwareness(UnmanagedMethods.PROCESS_DPI_AWARENESS.PROCESS_PER_MONITOR_DPI_AWARE); } CreateMessageWindow(); } public static bool UseDeferredRendering { get; set; } public Size DoubleClickSize => new Size( UnmanagedMethods.GetSystemMetrics(UnmanagedMethods.SystemMetric.SM_CXDOUBLECLK), UnmanagedMethods.GetSystemMetrics(UnmanagedMethods.SystemMetric.SM_CYDOUBLECLK)); public TimeSpan DoubleClickTime => TimeSpan.FromMilliseconds(UnmanagedMethods.GetDoubleClickTime()); public static void Initialize() { Initialize(true); } public static void Initialize(bool deferredRendering = true) { AvaloniaLocator.CurrentMutable .Bind<IClipboard>().ToSingleton<ClipboardImpl>() .Bind<IStandardCursorFactory>().ToConstant(CursorFactory.Instance) .Bind<IKeyboardDevice>().ToConstant(WindowsKeyboardDevice.Instance) .Bind<IPlatformSettings>().ToConstant(s_instance) .Bind<IPlatformThreadingInterface>().ToConstant(s_instance) .Bind<IRenderLoop>().ToConstant(new RenderLoop(60)) .Bind<ISystemDialogImpl>().ToSingleton<SystemDialogImpl>() .Bind<IWindowingPlatform>().ToConstant(s_instance) .Bind<IPlatformIconLoader>().ToConstant(s_instance); UseDeferredRendering = deferredRendering; _uiThread = UnmanagedMethods.GetCurrentThreadId(); } public bool HasMessages() { UnmanagedMethods.MSG msg; return UnmanagedMethods.PeekMessage(out msg, IntPtr.Zero, 0, 0, 0); } public void ProcessMessage() { UnmanagedMethods.MSG msg; UnmanagedMethods.GetMessage(out msg, IntPtr.Zero, 0, 0); UnmanagedMethods.TranslateMessage(ref msg); UnmanagedMethods.DispatchMessage(ref msg); } public void RunLoop(CancellationToken cancellationToken) { while (!cancellationToken.IsCancellationRequested) { UnmanagedMethods.MSG msg; UnmanagedMethods.GetMessage(out msg, IntPtr.Zero, 0, 0); UnmanagedMethods.TranslateMessage(ref msg); UnmanagedMethods.DispatchMessage(ref msg); } } public IDisposable StartTimer(TimeSpan interval, Action callback) { UnmanagedMethods.TimerProc timerDelegate = (hWnd, uMsg, nIDEvent, dwTime) => callback(); IntPtr handle = 
UnmanagedMethods.SetTimer( IntPtr.Zero, IntPtr.Zero, (uint)interval.TotalMilliseconds, timerDelegate); // Prevent timerDelegate being garbage collected. _delegates.Add(timerDelegate); return Disposable.Create(() => { _delegates.Remove(timerDelegate); UnmanagedMethods.KillTimer(IntPtr.Zero, handle); }); } private static readonly int SignalW = unchecked((int) 0xdeadbeaf); private static readonly int SignalL = unchecked((int)0x12345678); public void Signal() { UnmanagedMethods.PostMessage( _hwnd, (int) UnmanagedMethods.WindowsMessage.WM_DISPATCH_WORK_ITEM, new IntPtr(SignalW), new IntPtr(SignalL)); } public bool CurrentThreadIsLoopThread => _uiThread == UnmanagedMethods.GetCurrentThreadId(); public event Action Signaled; [SuppressMessage("Microsoft.StyleCop.CSharp.NamingRules", "SA1305:FieldNamesMustNotUseHungarianNotation", Justification = "Using Win32 naming for consistency.")] private IntPtr WndProc(IntPtr hWnd, uint msg, IntPtr wParam, IntPtr lParam) { if (msg == (int) UnmanagedMethods.WindowsMessage.WM_DISPATCH_WORK_ITEM && wParam.ToInt64() == SignalW && lParam.ToInt64() == SignalL) { Signaled?.Invoke(); } return UnmanagedMethods.DefWindowProc(hWnd, msg, wParam, lParam); } private void CreateMessageWindow() { // Ensure that the delegate doesn't get garbage collected by storing it as a field. _wndProcDelegate = new UnmanagedMethods.WndProc(WndProc); UnmanagedMethods.WNDCLASSEX wndClassEx = new UnmanagedMethods.WNDCLASSEX { cbSize = Marshal.SizeOf(typeof(UnmanagedMethods.WNDCLASSEX)), lpfnWndProc = _wndProcDelegate, hInstance = UnmanagedMethods.GetModuleHandle(null), lpszClassName = "AvaloniaMessageWindow " + Guid.NewGuid(), }; ushort atom = UnmanagedMethods.RegisterClassEx(ref wndClassEx); if (atom == 0) { throw new Win32Exception(); } _hwnd = UnmanagedMethods.CreateWindowEx(0, atom, null, 0, 0, 0, 0, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero); if (_hwnd == IntPtr.Zero) { throw new Win32Exception(); } } public IWindowImpl CreateWindow() { return new WindowImpl(); } public IEmbeddableWindowImpl CreateEmbeddableWindow() { #if NETSTANDARD throw new NotSupportedException(); #else var embedded = new EmbeddedWindowImpl(); embedded.Show(); return embedded; #endif } public IPopupImpl CreatePopup() { return new PopupImpl(); } } }
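// A standalone sketch (not Avalonia code) of why the timer delegate above is kept
// in the _delegates list: a delegate passed to native code is not rooted by the
// unmanaged side, so without a managed reference the GC may collect it and the
// native callback would then crash. The P/Invoke signatures below only loosely
// mirror UnmanagedMethods and are assumptions for illustration.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;

internal static class TimerDelegateRootingSketch
{
    private delegate void TimerProc(IntPtr hWnd, uint uMsg, IntPtr nIDEvent, uint dwTime);

    [DllImport("user32.dll")]
    private static extern IntPtr SetTimer(IntPtr hWnd, IntPtr nIDEvent, uint uElapse, TimerProc lpTimerFunc);

    [DllImport("user32.dll")]
    private static extern bool KillTimer(IntPtr hWnd, IntPtr uIDEvent);

    private static readonly List<Delegate> Roots = new List<Delegate>();

    public static IDisposable Start(TimeSpan interval, Action callback)
    {
        TimerProc proc = (hWnd, msg, id, time) => callback();
        Roots.Add(proc);   // keep the delegate alive for the timer's lifetime
        IntPtr handle = SetTimer(IntPtr.Zero, IntPtr.Zero, (uint)interval.TotalMilliseconds, proc);
        return new DisposeAction(() => { KillTimer(IntPtr.Zero, handle); Roots.Remove(proc); });
    }

    private sealed class DisposeAction : IDisposable
    {
        private readonly Action _dispose;
        public DisposeAction(Action dispose) { _dispose = dispose; }
        public void Dispose() { _dispose(); }
    }
}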
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.Runtime.InteropServices; using System.Text; namespace System { internal static class IriHelper { // // Checks if provided non surrogate char lies in iri range // internal static bool CheckIriUnicodeRange(char unicode, bool isQuery) { return ((unicode >= '\u00A0' && unicode <= '\uD7FF') || (unicode >= '\uF900' && unicode <= '\uFDCF') || (unicode >= '\uFDF0' && unicode <= '\uFFEF') || (isQuery && unicode >= '\uE000' && unicode <= '\uF8FF')); } // // Check if highSurr and lowSurr are a surrogate pair then // it checks if the combined char is in the range // Takes in isQuery because iri restrictions for query are different // internal static bool CheckIriUnicodeRange(char highSurr, char lowSurr, ref bool surrogatePair, bool isQuery) { bool inRange = false; surrogatePair = false; Debug.Assert(char.IsHighSurrogate(highSurr)); if (char.IsSurrogatePair(highSurr, lowSurr)) { surrogatePair = true; ReadOnlySpan<char> chars = stackalloc char[2] { highSurr, lowSurr }; string surrPair = new string(chars); if (((string.CompareOrdinal(surrPair, "\U00010000") >= 0) && (string.CompareOrdinal(surrPair, "\U0001FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00020000") >= 0) && (string.CompareOrdinal(surrPair, "\U0002FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00030000") >= 0) && (string.CompareOrdinal(surrPair, "\U0003FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00040000") >= 0) && (string.CompareOrdinal(surrPair, "\U0004FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00050000") >= 0) && (string.CompareOrdinal(surrPair, "\U0005FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00060000") >= 0) && (string.CompareOrdinal(surrPair, "\U0006FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00070000") >= 0) && (string.CompareOrdinal(surrPair, "\U0007FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00080000") >= 0) && (string.CompareOrdinal(surrPair, "\U0008FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00090000") >= 0) && (string.CompareOrdinal(surrPair, "\U0009FFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U000A0000") >= 0) && (string.CompareOrdinal(surrPair, "\U000AFFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U000B0000") >= 0) && (string.CompareOrdinal(surrPair, "\U000BFFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U000C0000") >= 0) && (string.CompareOrdinal(surrPair, "\U000CFFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U000D0000") >= 0) && (string.CompareOrdinal(surrPair, "\U000DFFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U000E1000") >= 0) && (string.CompareOrdinal(surrPair, "\U000EFFFD") <= 0)) || (isQuery && (((string.CompareOrdinal(surrPair, "\U000F0000") >= 0) && (string.CompareOrdinal(surrPair, "\U000FFFFD") <= 0)) || ((string.CompareOrdinal(surrPair, "\U00100000") >= 0) && (string.CompareOrdinal(surrPair, "\U0010FFFD") <= 0))))) { inRange = true; } } return inRange; } // // Check reserved chars according to RFC 3987 in a specific component // internal static bool CheckIsReserved(char ch, UriComponents component) { if ((component != UriComponents.Scheme) && (component != UriComponents.UserInfo) && (component != UriComponents.Host) && (component != UriComponents.Port) && (component != UriComponents.Path) && (component != UriComponents.Query) && (component != UriComponents.Fragment) ) { 
return (component == (UriComponents)0) ? UriHelper.IsGenDelim(ch) : false; } else if (UriParser.DontEnableStrictRFC3986ReservedCharacterSets) { // Since we aren't enabling strict RFC 3986 reserved sets, we stick with the old behavior // (for app-compat) which was a broken mix of RFCs 2396 and 3986. switch (component) { case UriComponents.UserInfo: if (ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']' || ch == '@') return true; break; case UriComponents.Host: if (ch == ':' || ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']' || ch == '@') return true; break; case UriComponents.Path: if (ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']') return true; break; case UriComponents.Query: if (ch == '#' || ch == '[' || ch == ']') return true; break; case UriComponents.Fragment: if (ch == '#' || ch == '[' || ch == ']') return true; break; default: break; } return false; } else { return (UriHelper.RFC3986ReservedMarks.IndexOf(ch) >= 0); } } // // IRI normalization for strings containing characters that are not allowed or // escaped characters that should be unescaped in the context of the specified Uri component. // internal static unsafe string EscapeUnescapeIri(char* pInput, int start, int end, UriComponents component) { char[] dest = new char[end - start]; byte[] bytes = null; // Pin the array to do pointer accesses GCHandle destHandle = GCHandle.Alloc(dest, GCHandleType.Pinned); char* pDest = (char*)destHandle.AddrOfPinnedObject(); const int percentEncodingLen = 3; // Escaped UTF-8 will take 3 chars: %AB. const int bufferCapacityIncrease = 30 * percentEncodingLen; int bufferRemaining = 0; int next = start; int destOffset = 0; char ch; bool escape = false; bool surrogatePair = false; for (; next < end; ++next) { escape = false; surrogatePair = false; if ((ch = pInput[next]) == '%') { if (next + 2 < end) { ch = UriHelper.EscapedAscii(pInput[next + 1], pInput[next + 2]); // Do not unescape a reserved char if (ch == Uri.c_DummyChar || ch == '%' || CheckIsReserved(ch, component) || UriHelper.IsNotSafeForUnescape(ch)) { // keep as is Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next++]; Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next++]; Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next]; continue; } else if (ch <= '\x7F') { Debug.Assert(ch < 0xFF, "Expecting ASCII character."); Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); //ASCII pDest[destOffset++] = ch; next += 2; continue; } else { // possibly utf8 encoded sequence of unicode // check if safe to unescape according to Iri rules Debug.Assert(ch < 0xFF, "Expecting ASCII character."); int startSeq = next; int byteCount = 1; // lazy initialization of max size, will reuse the array for next sequences if ((object)bytes == null) bytes = new byte[end - next]; bytes[0] = (byte)ch; next += 3; while (next < end) { // Check on exit criterion if ((ch = pInput[next]) != '%' || next + 2 >= end) break; // already made sure we have 3 characters in str ch = UriHelper.EscapedAscii(pInput[next + 1], pInput[next + 2]); //invalid hex sequence ? if (ch == Uri.c_DummyChar) break; // character is not part of a UTF-8 sequence ? 
else if (ch < '\x80') break; else { //a UTF-8 sequence bytes[byteCount++] = (byte)ch; next += 3; } Debug.Assert(ch < 0xFF, "Expecting ASCII character."); } next--; // for loop will increment // Using encoder with no replacement fall-back will skip all invalid UTF-8 sequences. Encoding noFallbackCharUTF8 = Encoding.GetEncoding( Encoding.UTF8.CodePage, new EncoderReplacementFallback(""), new DecoderReplacementFallback("")); char[] unescapedChars = new char[bytes.Length]; int charCount = noFallbackCharUTF8.GetChars(bytes, 0, byteCount, unescapedChars, 0); if (charCount != 0) { // If invalid sequences were present in the original escaped string, we need to // copy the escaped versions of those sequences. // Decoded Unicode values will be kept only when they are allowed by the URI/IRI RFC // rules. UriHelper.MatchUTF8Sequence(pDest, dest, ref destOffset, unescapedChars, charCount, bytes, byteCount, component == UriComponents.Query, true); } else { // copy escaped sequence as is for (int i = startSeq; i <= next; ++i) { Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[i]; } } } } else { Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next]; } } else if (ch > '\x7f') { // unicode char ch2; if ((char.IsHighSurrogate(ch)) && (next + 1 < end)) { ch2 = pInput[next + 1]; escape = !CheckIriUnicodeRange(ch, ch2, ref surrogatePair, component == UriComponents.Query); if (!escape) { // copy the two chars Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next++]; Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next]; } } else { if (CheckIriUnicodeRange(ch, component == UriComponents.Query)) { if (!UriHelper.IsBidiControlCharacter(ch) || !UriParser.DontKeepUnicodeBidiFormattingCharacters) { // copy it Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next]; } } else { // escape it escape = true; } } } else { // just copy the character Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset."); pDest[destOffset++] = pInput[next]; } if (escape) { const int MaxNumberOfBytesEncoded = 4; if (bufferRemaining < MaxNumberOfBytesEncoded * percentEncodingLen) { int newBufferLength = 0; checked { // may need more memory since we didn't anticipate escaping newBufferLength = dest.Length + bufferCapacityIncrease; bufferRemaining += bufferCapacityIncrease; } char[] newDest = new char[newBufferLength]; fixed (char* pNewDest = newDest) { Buffer.MemoryCopy((byte*)pDest, (byte*)pNewDest, newBufferLength * sizeof(char), destOffset * sizeof(char)); } if (destHandle.IsAllocated) { destHandle.Free(); } dest = newDest; // re-pin new dest[] array destHandle = GCHandle.Alloc(dest, GCHandleType.Pinned); pDest = (char*)destHandle.AddrOfPinnedObject(); } byte[] encodedBytes = new byte[MaxNumberOfBytesEncoded]; fixed (byte* pEncodedBytes = &encodedBytes[0]) { int encodedBytesCount = Encoding.UTF8.GetBytes(pInput + next, surrogatePair ? 
2 : 1, pEncodedBytes, MaxNumberOfBytesEncoded); Debug.Assert(encodedBytesCount <= MaxNumberOfBytesEncoded, "UTF8 encoder should not exceed specified byteCount"); bufferRemaining -= encodedBytesCount * percentEncodingLen; for (int count = 0; count < encodedBytesCount; ++count) { UriHelper.EscapeAsciiChar((char)encodedBytes[count], dest, ref destOffset); } } } } if (destHandle.IsAllocated) destHandle.Free(); Debug.Assert(destOffset <= dest.Length, "Destination length met or exceeded destination offset."); return new string(dest, 0, destOffset); } } }
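// A compact restatement (for illustration only) of the ranges checked above,
// expressed over a single Unicode code point instead of char/surrogate pairs.
// It mirrors RFC 3987 ucschar plus, for query components, iprivate. The method
// name IsAllowedIriCodePoint is an assumption, not part of the class above; a
// caller would typically obtain the code point via char.ConvertToUtf32.
using System;

internal static class IriRangeSketch
{
    public static bool IsAllowedIriCodePoint(int cp, bool isQuery)
    {
        if (cp >= 0x00A0 && cp <= 0xD7FF) return true;
        if (cp >= 0xF900 && cp <= 0xFDCF) return true;
        if (cp >= 0xFDF0 && cp <= 0xFFEF) return true;
        if (isQuery && cp >= 0xE000 && cp <= 0xF8FF) return true;   // iprivate, BMP part

        // Supplementary planes 1..14: each allows offsets x0000..xFFFD, except that
        // plane 14 only starts at xE1000 (matching the \U000E1000 bound used above).
        if (cp >= 0x10000 && cp <= 0xEFFFD)
        {
            if (cp >= 0xE0000 && cp < 0xE1000) return false;
            return (cp & 0xFFFF) <= 0xFFFD;
        }

        // Planes 15 and 16 are private use; allowed only in query components.
        if (isQuery && cp >= 0xF0000 && cp <= 0x10FFFD)
            return (cp & 0xFFFF) <= 0xFFFD;

        return false;
    }
}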
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using log4net; using OpenMetaverse; using OpenSim.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Services.Interfaces; using System; using System.Diagnostics; using System.Drawing; using System.Drawing.Imaging; namespace OpenSim.Region.CoreModules.World.Warp3DMap { public static class TerrainSplat { #region Constants private static readonly Color[] DEFAULT_TERRAIN_COLOR = new Color[] { Color.FromArgb(255, 164, 136, 117), Color.FromArgb(255, 65, 87, 47), Color.FromArgb(255, 157, 145, 131), Color.FromArgb(255, 125, 128, 130) }; private static readonly UUID[] DEFAULT_TERRAIN_DETAIL = new UUID[] { DIRT_DETAIL, GRASS_DETAIL, MOUNTAIN_DETAIL, ROCK_DETAIL }; private static readonly UUID DIRT_DETAIL = new UUID("0bc58228-74a0-7e83-89bc-5c23464bcec5"); private static readonly UUID GRASS_DETAIL = new UUID("63338ede-0037-c4fd-855b-015d77112fc8"); private static readonly UUID MOUNTAIN_DETAIL = new UUID("303cd381-8560-7579-23f1-f0a880799740"); private static readonly UUID ROCK_DETAIL = new UUID("53a2f406-4895-1d13-d541-d2e3b86bc19c"); private static readonly UUID TERRAIN_CACHE_MAGIC = new UUID("2c0c7ef2-56be-4eb8-aacb-76712c535b4b"); #endregion Constants private static readonly ILog m_log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name); private static string LogHeader = "[WARP3D TERRAIN SPLAT]"; public static Bitmap ResizeBitmap(Bitmap b, int nWidth, int nHeight) { m_log.DebugFormat("{0} ResizeBitmap. 
From <{1},{2}> to <{3},{4}>", LogHeader, b.Width, b.Height, nWidth, nHeight); Bitmap result = new Bitmap(nWidth, nHeight); using (Graphics g = Graphics.FromImage(result)) g.DrawImage(b, 0, 0, nWidth, nHeight); b.Dispose(); return result; } /// <summary> /// Builds a composited terrain texture given the region texture /// and heightmap settings /// </summary> /// <param name="terrain">Terrain heightmap</param> /// <param name="regionInfo">Region information including terrain texture parameters</param> /// <returns>A 256x256 square RGB texture ready for rendering</returns> /// <remarks>Based on the algorithm described at http://opensimulator.org/wiki/Terrain_Splatting /// Note we create a 256x256 dimension texture even if the actual terrain is larger. /// </remarks> public static Bitmap Splat(ITerrainChannel terrain, UUID[] textureIDs, float[] startHeights, float[] heightRanges, Vector3d regionPosition, IAssetService assetService, bool textureTerrain) { Debug.Assert(textureIDs.Length == 4); Debug.Assert(startHeights.Length == 4); Debug.Assert(heightRanges.Length == 4); Bitmap[] detailTexture = new Bitmap[4]; if (textureTerrain) { // Swap empty terrain textureIDs with default IDs for (int i = 0; i < textureIDs.Length; i++) { if (textureIDs[i] == UUID.Zero) textureIDs[i] = DEFAULT_TERRAIN_DETAIL[i]; } #region Texture Fetching if (assetService != null) { for (int i = 0; i < 4; i++) { AssetBase asset; UUID cacheID = UUID.Combine(TERRAIN_CACHE_MAGIC, textureIDs[i]); // Try to fetch a cached copy of the decoded/resized version of this texture asset = assetService.GetCached(cacheID.ToString()); if (asset != null) { try { using (System.IO.MemoryStream stream = new System.IO.MemoryStream(asset.Data)) detailTexture[i] = (Bitmap)Image.FromStream(stream); } catch (Exception ex) { m_log.Warn("Failed to decode cached terrain texture " + cacheID + " (textureID: " + textureIDs[i] + "): " + ex.Message); } } if (detailTexture[i] == null) { // Try to fetch the original JPEG2000 texture, resize if needed, and cache as PNG asset = assetService.Get(textureIDs[i].ToString()); if (asset != null) { // m_log.DebugFormat( // "[TERRAIN SPLAT]: Got cached original JPEG2000 terrain texture {0} {1}", i, asset.ID); try { detailTexture[i] = (Bitmap)CSJ2K.J2kImage.FromBytes(asset.Data); } catch (Exception ex) { m_log.Warn("Failed to decode terrain texture " + asset.ID + ": " + ex.Message); } } if (detailTexture[i] != null) { // Make sure this texture is the correct size, otherwise resize if (detailTexture[i].Width != 256 || detailTexture[i].Height != 256) { using (Bitmap origBitmap = detailTexture[i]) { detailTexture[i] = ImageUtils.ResizeImage(origBitmap, 256, 256); } } // Save the decoded and resized texture to the cache byte[] data; using (System.IO.MemoryStream stream = new System.IO.MemoryStream()) { detailTexture[i].Save(stream, ImageFormat.Png); data = stream.ToArray(); } // Cache a PNG copy of this terrain texture AssetBase newAsset = new AssetBase { Data = data, Description = "PNG", Flags = AssetFlags.Collectable, FullID = cacheID, ID = cacheID.ToString(), Local = true, Name = String.Empty, Temporary = true, Type = (sbyte)AssetType.Unknown }; newAsset.Metadata.ContentType = "image/png"; assetService.Store(newAsset); } } } } #endregion Texture Fetching } // Fill in any missing textures with a solid color for (int i = 0; i < 4; i++) { if (detailTexture[i] == null) { m_log.DebugFormat("{0} Missing terrain texture for layer {1}. 
Filling with solid default color", LogHeader, i); // Create a solid color texture for this layer detailTexture[i] = new Bitmap(256, 256, PixelFormat.Format24bppRgb); using (Graphics gfx = Graphics.FromImage(detailTexture[i])) { using (SolidBrush brush = new SolidBrush(DEFAULT_TERRAIN_COLOR[i])) gfx.FillRectangle(brush, 0, 0, 256, 256); } } else { if (detailTexture[i].Width != 256 || detailTexture[i].Height != 256) { detailTexture[i] = ResizeBitmap(detailTexture[i], 256, 256); } } } #region Layer Map float[,] layermap = new float[256, 256]; // Scale difference between actual region size and the 256 texture being created int xFactor = terrain.Width / 256; int yFactor = terrain.Height / 256; // Create 'layermap' where each value is the fractional layer number to place // at that point. For instance, a value of 1.345 gives the blending of // layer 1 and layer 2 for that point. for (int y = 0; y < 256; y++) { for (int x = 0; x < 256; x++) { float height = (float)terrain[x * xFactor, y * yFactor]; float pctX = (float)x / 255f; float pctY = (float)y / 255f; // Use bilinear interpolation between the four corners of start height and // height range to select the current values at this position float startHeight = ImageUtils.Bilinear( startHeights[0], startHeights[2], startHeights[1], startHeights[3], pctX, pctY); startHeight = Utils.Clamp(startHeight, 0f, 255f); float heightRange = ImageUtils.Bilinear( heightRanges[0], heightRanges[2], heightRanges[1], heightRanges[3], pctX, pctY); heightRange = Utils.Clamp(heightRange, 0f, 255f); // Generate two frequencies of perlin noise based on our global position // The magic values were taken from http://opensimulator.org/wiki/Terrain_Splatting Vector3 vec = new Vector3 ( ((float)regionPosition.X + (x * xFactor)) * 0.20319f, ((float)regionPosition.Y + (y * yFactor)) * 0.20319f, height * 0.25f ); float lowFreq = Perlin.noise2(vec.X * 0.222222f, vec.Y * 0.222222f) * 6.5f; float highFreq = Perlin.turbulence2(vec.X, vec.Y, 2f) * 2.25f; float noise = (lowFreq + highFreq) * 2f; // Combine the current height, generated noise, start height, and height range parameters, then scale all of it float layer = ((height + noise - startHeight) / heightRange) * 4f; if (Single.IsNaN(layer)) layer = 0f; layermap[x, y] = Utils.Clamp(layer, 0f, 3f); } } #endregion Layer Map #region Texture Compositing Bitmap output = new Bitmap(256, 256, PixelFormat.Format24bppRgb); BitmapData outputData = output.LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb); // Unsafe work as we lock down the source textures for quicker access and access the // pixel data directly unsafe { // Get handles to all of the texture data arrays BitmapData[] datas = new BitmapData[] { detailTexture[0].LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.ReadOnly, detailTexture[0].PixelFormat), detailTexture[1].LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.ReadOnly, detailTexture[1].PixelFormat), detailTexture[2].LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.ReadOnly, detailTexture[2].PixelFormat), detailTexture[3].LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.ReadOnly, detailTexture[3].PixelFormat) }; // Compute size of each pixel data (used to address into the pixel data array) int[] comps = new int[] { (datas[0].PixelFormat == PixelFormat.Format32bppArgb) ? 4 : 3, (datas[1].PixelFormat == PixelFormat.Format32bppArgb) ? 4 : 3, (datas[2].PixelFormat == PixelFormat.Format32bppArgb) ? 4 : 3, (datas[3].PixelFormat == PixelFormat.Format32bppArgb) ? 
4 : 3 }; for (int y = 0; y < 256; y++) { for (int x = 0; x < 256; x++) { float layer = layermap[x, y]; // Select two textures int l0 = (int)Math.Floor(layer); int l1 = Math.Min(l0 + 1, 3); byte* ptrA = (byte*)datas[l0].Scan0 + y * datas[l0].Stride + x * comps[l0]; byte* ptrB = (byte*)datas[l1].Scan0 + y * datas[l1].Stride + x * comps[l1]; byte* ptrO = (byte*)outputData.Scan0 + y * outputData.Stride + x * 3; float aB = *(ptrA + 0); float aG = *(ptrA + 1); float aR = *(ptrA + 2); float bB = *(ptrB + 0); float bG = *(ptrB + 1); float bR = *(ptrB + 2); float layerDiff = layer - l0; // Interpolate between the two selected textures *(ptrO + 0) = (byte)Math.Floor(aB + layerDiff * (bB - aB)); *(ptrO + 1) = (byte)Math.Floor(aG + layerDiff * (bG - aG)); *(ptrO + 2) = (byte)Math.Floor(aR + layerDiff * (bR - aR)); } } for (int i = 0; i < detailTexture.Length; i++) detailTexture[i].UnlockBits(datas[i]); } for (int i = 0; i < detailTexture.Length; i++) if (detailTexture[i] != null) detailTexture[i].Dispose(); output.UnlockBits(outputData); // We generated the texture upside down, so flip it output.RotateFlip(RotateFlipType.RotateNoneFlipY); #endregion Texture Compositing return output; } public static Bitmap SplatSimple(float[] heightmap) { const float BASE_HSV_H = 93f / 360f; const float BASE_HSV_S = 44f / 100f; const float BASE_HSV_V = 34f / 100f; Bitmap img = new Bitmap(256, 256); BitmapData bitmapData = img.LockBits(new Rectangle(0, 0, 256, 256), ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb); unsafe { for (int y = 255; y >= 0; y--) { for (int x = 0; x < 256; x++) { float normHeight = heightmap[y * 256 + x] / 255f; normHeight = Utils.Clamp(normHeight, BASE_HSV_V, 1.0f); Color4 color = Color4.FromHSV(BASE_HSV_H, BASE_HSV_S, normHeight); byte* ptr = (byte*)bitmapData.Scan0 + y * bitmapData.Stride + x * 3; *(ptr + 0) = (byte)(color.B * 255f); *(ptr + 1) = (byte)(color.G * 255f); *(ptr + 2) = (byte)(color.R * 255f); } } } img.UnlockBits(bitmapData); return img; } } }
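// A minimal, self-contained sketch of the per-pixel layer selection used in the
// Splat() layer map above: bilinearly interpolate the four corner start heights
// and height ranges, add noise, and map the result onto the 0..3 layer range.
// Bilinear() here is a local stand-in for ImageUtils.Bilinear, and the single
// noise argument stands in for the two Perlin frequencies; both are assumptions.
using System;

internal static class LayerMapSketch
{
    private static float Bilinear(float v00, float v01, float v10, float v11, float tx, float ty)
    {
        float a = v00 + (v01 - v00) * tx;   // interpolate along X on one edge
        float b = v10 + (v11 - v10) * tx;   // interpolate along X on the other edge
        return a + (b - a) * ty;            // then interpolate along Y
    }

    public static float LayerAt(float height, float noise,
        float[] startHeights, float[] heightRanges, float pctX, float pctY)
    {
        float startHeight = Bilinear(startHeights[0], startHeights[2], startHeights[1], startHeights[3], pctX, pctY);
        float heightRange = Bilinear(heightRanges[0], heightRanges[2], heightRanges[1], heightRanges[3], pctX, pctY);

        // Same formula as the layer map loop above.
        float layer = ((height + noise - startHeight) / heightRange) * 4f;
        if (float.IsNaN(layer)) layer = 0f;
        return Math.Min(3f, Math.Max(0f, layer));   // clamp to the four texture layers
    }
}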
/* This code is derived from NServiceBus 2.0 * https://github.com/NServiceBus/NServiceBus/blob/2.0/src/impl/Serializers/NServiceBus.Serializers.XML/MessageSerializer.cs * * Which is licensed under Apache Licence, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0 */ using System; using System.Linq; using System.Collections; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Reflection; using System.Text; using System.Xml; using System.Runtime.Serialization; using CWServiceBus.Reflection; using System.Xml.Serialization; using log4net; namespace CWServiceBus.Serializers.XML { /// <summary> /// Implementation of the message serializer over XML supporting interface-based messages. /// </summary> public class XmlMessageSerializer : IMessageSerializer { private readonly IMessageMapper mapper; /// <summary> /// The namespace to place in outgoing XML. /// </summary> public string Namespace { get { return nameSpace; } set { nameSpace = value; } } private List<Type> messageTypes; /// <summary> /// Scans the given type storing maps to fields and properties to save on reflection at runtime. /// </summary> /// <param name="t"></param> public void InitType(Type t) { logger.Debug("Initializing type: " + t.AssemblyQualifiedName); if (t.IsSimpleType()) return; if (typeof(IEnumerable).IsAssignableFrom(t)) { if (t.IsArray) typesToCreateForArrays[t] = typeof(List<>).MakeGenericType(t.GetElementType()); foreach (Type g in t.GetGenericArguments()) InitType(g); //Handle dictionaries - initalize relevant KeyValuePair<T,K> types. foreach (Type interfaceType in t.GetInterfaces()) { Type[] arr = interfaceType.GetGenericArguments(); if (arr.Length == 1) if (typeof(IEnumerable<>).MakeGenericType(arr[0]).IsAssignableFrom(t)) InitType(arr[0]); } if (t.IsGenericType && t.IsInterface) { var g = t.GetGenericArguments(); var e = typeof(IEnumerable<>).MakeGenericType(g); if (e.IsAssignableFrom(t)) typesToCreateForEnumerables[t] = typeof(List<>).MakeGenericType(g); } if (t.IsGenericType && t.GetGenericArguments().Length == 1) { Type setType = typeof(ISet<>).MakeGenericType(t.GetGenericArguments()); if (setType.IsAssignableFrom(t)) //handle ISet<Something> { var g = t.GetGenericArguments(); var e = typeof(IEnumerable<>).MakeGenericType(g); if (e.IsAssignableFrom(t)) typesToCreateForEnumerables[t] = typeof(List<>).MakeGenericType(g); } } return; } var isKeyValuePair = false; var args = t.GetGenericArguments(); if (args.Length == 2) { isKeyValuePair = (typeof (KeyValuePair<,>).MakeGenericType(args) == t); } if (args.Length == 1 && args[0].IsValueType) { if (args[0].GetGenericArguments().Any() || typeof(Nullable<>).MakeGenericType(args) == t) { InitType(args[0]); if (!args[0].GetGenericArguments().Any()) return; } } //already in the process of initializing this type (prevents infinite recursion). 
if (typesBeingInitialized.Contains(t)) return; typesBeingInitialized.Add(t); var props = GetAllPropertiesForType(t, isKeyValuePair); typeToProperties[t] = props; var fields = GetAllFieldsForType(t); typeToFields[t] = fields; foreach (var p in props) { logger.Debug("Handling property: " + p.Name); propertyInfoToLateBoundProperty[p] = DelegateFactory.Create(p); if (!isKeyValuePair) propertyInfoToLateBoundPropertySet[p] = DelegateFactory.CreateSet(p); InitType(p.PropertyType); } foreach (var f in fields) { logger.Debug("Handling field: " + f.Name); fieldInfoToLateBoundField[f] = DelegateFactory.Create(f); if (!isKeyValuePair) fieldInfoToLateBoundFieldSet[f] = DelegateFactory.CreateSet(f); InitType(f.FieldType); } } /// <summary> /// Gets a PropertyInfo for each property of the given type. /// </summary> /// <param name="t"></param> /// <param name="isKeyValuePair"></param> /// <returns></returns> IEnumerable<PropertyInfo> GetAllPropertiesForType(Type t, bool isKeyValuePair) { var result = new List<PropertyInfo>(); foreach (var prop in t.GetProperties()) { if (typeof(IList) == prop.PropertyType) throw new NotSupportedException("IList is not a supported property type for serialization, use List instead. Type: " + t.FullName + " Property: " + prop.Name); var args = prop.PropertyType.GetGenericArguments(); if (args.Length == 1) { if (typeof(IList<>).MakeGenericType(args) == prop.PropertyType) throw new NotSupportedException("IList<T> is not a supported property type for serialization, use List<T> instead. Type: " + t.FullName + " Property: " + prop.Name); if (typeof(ISet<>).MakeGenericType(args) == prop.PropertyType) throw new NotSupportedException("ISet<T> is not a supported property type for serialization, use HashSet<T> instead. Type: " + t.FullName + " Property: " + prop.Name); } if (args.Length == 2) { if (typeof(IDictionary<,>).MakeGenericType(args) == prop.PropertyType) throw new NotSupportedException("IDictionary<T, K> is not a supported property type for serialization, use Dictionary<T,K> instead. Type: " + t.FullName + " Property: " + prop.Name + ". Consider using a concrete Dictionary<T, K> instead, where T and K cannot be of type 'System.Object'"); if (args[0].FullName == "System.Object" || args[1].FullName == "System.Object") throw new NotSupportedException("Dictionary<T, K> is not a supported when Key or Value is of Type System.Object. Type: " + t.FullName + " Property: " + prop.Name + ". Consider using a concrete Dictionary<T, K> where T and K are not of type 'System.Object'"); } if (!prop.CanWrite && !isKeyValuePair) continue; if (prop.GetCustomAttributes(typeof(XmlIgnoreAttribute), false).Length > 0) continue; result.Add(prop); } if (t.IsInterface) foreach (Type interfaceType in t.GetInterfaces()) result.AddRange(GetAllPropertiesForType(interfaceType, false)); return result.Distinct(); } /// <summary> /// Gets a FieldInfo for each field in the given type. /// </summary> /// <param name="t"></param> /// <returns></returns> IEnumerable<FieldInfo> GetAllFieldsForType(Type t) { return t.GetFields(BindingFlags.FlattenHierarchy | BindingFlags.Instance | BindingFlags.Public); } #region Deserialize /// <summary> /// Deserializes the given stream to an array of messages which are returned. 
/// </summary> /// <param name="stream"></param> /// <returns></returns> public object[] Deserialize(Stream stream) { if (stream == null) return null; prefixesToNamespaces = new Dictionary<string, string>(); messageBaseTypes = new List<Type>(); var result = new List<object>(); var doc = new XmlDocument { PreserveWhitespace = true }; doc.Load(XmlReader.Create(stream, new XmlReaderSettings {CheckCharacters = false})); if (doc.DocumentElement == null) return result.ToArray(); foreach (XmlAttribute attr in doc.DocumentElement.Attributes) { if (attr.Name == "xmlns") defaultNameSpace = attr.Value.Substring(attr.Value.LastIndexOf("/") + 1); else { if (attr.Name.Contains("xmlns:")) { int colonIndex = attr.Name.LastIndexOf(":"); string prefix = attr.Name.Substring(colonIndex + 1); if (prefix.Contains(BASETYPE)) { Type baseType = mapper.GetMappedTypeFor(attr.Value); if (baseType != null) messageBaseTypes.Add(baseType); } else prefixesToNamespaces[prefix] = attr.Value; } } } if (doc.DocumentElement.Name.ToLower() != "messages") { object m = Process(doc.DocumentElement, null); if (m == null) throw new SerializationException("Could not deserialize message."); result.Add(m ); } else { foreach (XmlNode node in doc.DocumentElement.ChildNodes) { if (node.NodeType == XmlNodeType.Whitespace) continue; object m = Process(node, null); result.Add(m); } } defaultNameSpace = null; return result.ToArray(); } private object Process(XmlNode node, object parent) { string name = node.Name; string typeName = defaultNameSpace + "." + name; if (name.Contains(":")) { int colonIndex = node.Name.IndexOf(":"); name = name.Substring(colonIndex + 1); string prefix = node.Name.Substring(0, colonIndex); string ns = prefixesToNamespaces[prefix]; typeName = ns.Substring(ns.LastIndexOf("/") + 1) + "." + name; } if (name.Contains("NServiceBus.")) typeName = name; if (parent != null) { if (parent is IEnumerable) { if (parent.GetType().IsArray) return GetObjectOfTypeFromNode(parent.GetType().GetElementType(), node); var args = parent.GetType().GetGenericArguments(); if (args.Length == 1) return GetObjectOfTypeFromNode(args[0], node); } PropertyInfo prop = parent.GetType().GetProperty(name); if (prop != null) return GetObjectOfTypeFromNode(prop.PropertyType, node); } Type t = mapper.GetMappedTypeFor(typeName); if (t == null) { logger.Debug("Could not load " + typeName + ". Trying base types..."); foreach(Type baseType in messageBaseTypes) try { logger.Debug("Trying to deserialize message to " + baseType.FullName); return GetObjectOfTypeFromNode(baseType, node); } // ReSharper disable EmptyGeneralCatchClause catch { } // intentionally swallow exception // ReSharper restore EmptyGeneralCatchClause throw new TypeLoadException("Could not handle type '" + typeName + "'."); } return GetObjectOfTypeFromNode(t, node); } private object GetObjectOfTypeFromNode(Type t, XmlNode node) { if (t.IsSimpleType() || t == typeof(Uri)) return GetPropertyValue(t, node); if (typeof(IEnumerable).IsAssignableFrom(t)) return GetPropertyValue(t, node); object result = mapper.CreateInstance(t); foreach (XmlNode n in node.ChildNodes) { Type type = null; if (n.Name.Contains(":")) type = Type.GetType("System." + n.Name.Substring(0, n.Name.IndexOf(":")), false, true); var prop = GetProperty(t, n.Name); if (prop != null) { var val = GetPropertyValue(type ?? prop.PropertyType, n); if (val != null) { propertyInfoToLateBoundPropertySet[prop].Invoke(result, val); continue; } } var field = GetField(t, n.Name); if (field != null) { object val = GetPropertyValue(type ?? 
field.FieldType, n); if (val != null) { fieldInfoToLateBoundFieldSet[field].Invoke(result, val); continue; } } } return result; } private static PropertyInfo GetProperty(Type t, string name) { IEnumerable<PropertyInfo> props; typeToProperties.TryGetValue(t, out props); if (props == null) return null; string n = GetNameAfterColon(name); foreach (PropertyInfo prop in props) if (prop.Name == n) return prop; return null; } private static string GetNameAfterColon(string name) { var n = name; if (name.Contains(":")) n = name.Substring(name.IndexOf(":") + 1, name.Length - name.IndexOf(":") - 1); return n; } private FieldInfo GetField(Type t, string name) { IEnumerable<FieldInfo> fields; typeToFields.TryGetValue(t, out fields); if (fields == null) return null; foreach (FieldInfo f in fields) if (f.Name == name) return f; return null; } private object GetPropertyValue(Type type, XmlNode n) { if (n.ChildNodes.Count == 1 && n.ChildNodes[0] is XmlCharacterData) { var text = n.ChildNodes[0].InnerText; var args = type.GetGenericArguments(); if (args.Length == 1 && args[0].IsValueType) { if (args[0].GetGenericArguments().Any()) return GetPropertyValue(args[0], n); var nullableType = typeof(Nullable<>).MakeGenericType(args); if (type == nullableType) { if (text.ToLower() == "null") return null; return GetPropertyValue(args[0], n); } } if (type == typeof(string)) return text; if (type == typeof(Boolean)) return XmlConvert.ToBoolean(text); if (type == typeof(Byte)) return XmlConvert.ToByte(text); if (type == typeof(Char)) return XmlConvert.ToChar(text); if (type == typeof(DateTime)) return XmlConvert.ToDateTime(text, XmlDateTimeSerializationMode.RoundtripKind); if (type == typeof(DateTimeOffset)) return XmlConvert.ToDateTimeOffset(text); if (type == typeof(decimal)) return XmlConvert.ToDecimal(text); if (type == typeof(double)) return XmlConvert.ToDouble(text); if (type == typeof(Guid)) return XmlConvert.ToGuid(text); if (type == typeof(Int16)) return XmlConvert.ToInt16(text); if (type == typeof(Int32)) return XmlConvert.ToInt32(text); if (type == typeof(Int64)) return XmlConvert.ToInt64(text); if (type == typeof(sbyte)) return XmlConvert.ToSByte(text); if (type == typeof(Single)) return XmlConvert.ToSingle(text); if (type == typeof(TimeSpan)) return XmlConvert.ToTimeSpan(text); if (type == typeof(UInt16)) return XmlConvert.ToUInt16(text); if (type == typeof(UInt32)) return XmlConvert.ToUInt32(text); if (type == typeof(UInt64)) return XmlConvert.ToUInt64(text); if (type.IsEnum) return Enum.Parse(type, text); if (type == typeof(byte[])) return Convert.FromBase64String(text); if (type == typeof(Uri)) return new Uri(text); if (n.ChildNodes[0] is XmlWhitespace) return Activator.CreateInstance(type); throw new Exception("Type not supported by the serializer: " + type.AssemblyQualifiedName); } //Handle dictionaries if (typeof(IDictionary).IsAssignableFrom(type)) { var result = Activator.CreateInstance(type) as IDictionary; var keyType = typeof(object); var valueType = typeof(object); foreach (var interfaceType in type.GetInterfaces()) { var args = interfaceType.GetGenericArguments(); if (args.Length == 2) if (typeof(IDictionary<,>).MakeGenericType(args).IsAssignableFrom(type)) { keyType = args[0]; valueType = args[1]; break; } } foreach (XmlNode xn in n.ChildNodes) // go over KeyValuePairs { object key = null; object value = null; foreach (XmlNode node in xn.ChildNodes) { if (node.Name == "Key") key = GetObjectOfTypeFromNode(keyType, node); if (node.Name == "Value") value = GetObjectOfTypeFromNode(valueType, 
node); } if (result != null && key != null) result[key] = value; } return result; } if (typeof(IEnumerable).IsAssignableFrom(type) && type != typeof(string)) { bool isArray = type.IsArray; bool isISet = false; if (type.IsGenericType && type.GetGenericArguments().Length == 1) { Type setType = typeof(ISet<>).MakeGenericType(type.GetGenericArguments()); isISet = setType.IsAssignableFrom(type); } Type typeToCreate = type; if (isArray) typeToCreate = typesToCreateForArrays[type]; if (typesToCreateForEnumerables.ContainsKey(type)) //handle IEnumerable<Something> typeToCreate = typesToCreateForEnumerables[type]; if (typeof(IList).IsAssignableFrom(typeToCreate)) { var list = Activator.CreateInstance(typeToCreate) as IList; if (list != null) { foreach (XmlNode xn in n.ChildNodes) { if (xn.NodeType == XmlNodeType.Whitespace) continue; object m = Process(xn, list); list.Add(m); } if (isArray) return typeToCreate.GetMethod("ToArray").Invoke(list, null); #if !NET35 if (isISet) return Activator.CreateInstance(type, typeToCreate.GetMethod("ToArray").Invoke(list, null)); #endif } return list; } } if (n.ChildNodes.Count == 0) if (type == typeof(string)) return string.Empty; else return null; return GetObjectOfTypeFromNode(type, n); } #endregion #region Serialize /// <summary> /// Serializes the given messages to the given stream. /// </summary> /// <param name="messages"></param> /// <param name="stream"></param> public void Serialize(object[] messages, Stream stream) { namespacesToPrefix = new Dictionary<string, string>(); namespacesToAdd = new List<Type>(); var namespaces = GetNamespaces(messages); for (int i = 0; i < namespaces.Count; i++) { string prefix = "q" + i; if (i == 0) prefix = ""; if (namespaces[i] != null) namespacesToPrefix[namespaces[i]] = prefix; } var messageBuilder = new StringBuilder(); foreach (var m in messages) { var t = mapper.GetMappedTypeFor(m.GetType()); WriteObject(t.Name, t, m, messageBuilder); } var builder = new StringBuilder(); List<string> baseTypes = GetBaseTypes(messages); builder.AppendLine("<?xml version=\"1.0\" ?>"); builder.Append("<Messages xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""); for (int i = 0; i < namespaces.Count; i++) { string prefix = "q" + i; if (i == 0) prefix = ""; builder.AppendFormat(" xmlns{0}=\"{1}/{2}\"", (prefix != "" ? ":" + prefix : prefix), nameSpace, namespaces[i]); } foreach (var type in namespacesToAdd) builder.AppendFormat(" xmlns:{0}=\"{1}\"", type.Name.ToLower(), type.Name); for (int i = 0; i < baseTypes.Count; i++) { string prefix = BASETYPE; if (i != 0) prefix += i; builder.AppendFormat(" xmlns:{0}=\"{1}\"", prefix, baseTypes[i]); } builder.Append(">\n"); builder.Append(messageBuilder.ToString()); builder.AppendLine("</Messages>"); byte[] buffer = Encoding.UTF8.GetBytes(builder.ToString()); stream.Write(buffer, 0, buffer.Length); } private void Write(StringBuilder builder, Type t, object obj) { if (obj == null) return; if (!typeToProperties.ContainsKey(t)) throw new InvalidOperationException("Type " + t.FullName + " was not registered in the serializer. 
Check that it appears in the list of configured assemblies/types to scan."); foreach (PropertyInfo prop in typeToProperties[t]) WriteEntry(prop.Name, prop.PropertyType, propertyInfoToLateBoundProperty[prop].Invoke(obj), builder); foreach(FieldInfo field in typeToFields[t]) WriteEntry(field.Name, field.FieldType, fieldInfoToLateBoundField[field].Invoke(obj), builder); } private void WriteObject(string name, Type type, object value, StringBuilder builder) { string element = name; string prefix; namespacesToPrefix.TryGetValue(type.Namespace, out prefix); if (string.IsNullOrEmpty(prefix) && type == typeof(object) && (value.GetType().IsSimpleType())) { if (!namespacesToAdd.Contains(value.GetType())) namespacesToAdd.Add(value.GetType()); builder.AppendFormat("<{0}>{1}</{0}>\n", value.GetType().Name.ToLower() + ":" + name, FormatAsString(value)); return; } if (!string.IsNullOrEmpty(prefix)) element = prefix + ":" + name; builder.AppendFormat("<{0}>\n", element); Write(builder, type, value); builder.AppendFormat("</{0}>\n", element); } private void WriteEntry(string name, Type type, object value, StringBuilder builder) { if (value == null) { if (typeof(IEnumerable).IsAssignableFrom(type)) return; var args = type.GetGenericArguments(); if (args.Length == 1 && args[0].IsValueType) { var nullableType = typeof (Nullable<>).MakeGenericType(args); if (type == nullableType) { WriteEntry(name, typeof(string), "null", builder); return; } } return; } if (type.IsValueType || type == typeof(string) || type == typeof(Uri)) { builder.AppendFormat("<{0}>{1}</{0}>\n", name, FormatAsString(value)); return; } if (typeof(IEnumerable).IsAssignableFrom(type)) { builder.AppendFormat("<{0}>\n", name); if (type == typeof(byte[])) { var str = Convert.ToBase64String((byte[])value); builder.Append(str); } else { Type baseType = typeof(object); //Get generic type from list: T for List<T>, KeyValuePair<T,K> for IDictionary<T,K> foreach (Type interfaceType in type.GetInterfaces()) { Type[] arr = interfaceType.GetGenericArguments(); if (arr.Length == 1) if (typeof(IEnumerable<>).MakeGenericType(arr[0]).IsAssignableFrom(type)) { baseType = arr[0]; break; } } foreach (object obj in ((IEnumerable)value)) if (obj.GetType().IsSimpleType()) WriteEntry(obj.GetType().Name, obj.GetType(), obj, builder); else WriteObject(baseType.SerializationFriendlyName(), baseType, obj, builder); } builder.AppendFormat("</{0}>\n", name); return; } WriteObject(name, type, value, builder); } private static string FormatAsString(object value) { if (value is bool) return XmlConvert.ToString((bool)value); if (value is byte) return XmlConvert.ToString((byte)value); if (value is char) return XmlConvert.ToString((char)value); if (value is double) return XmlConvert.ToString((double)value); if (value is ulong) return XmlConvert.ToString((ulong)value); if (value is uint) return XmlConvert.ToString((uint)value); if (value is ushort) return XmlConvert.ToString((ushort)value); if (value is long) return XmlConvert.ToString((long)value); if (value is int) return XmlConvert.ToString((int)value); if (value is short) return XmlConvert.ToString((short)value); if (value is sbyte) return XmlConvert.ToString((sbyte)value); if (value is decimal) return XmlConvert.ToString((decimal)value); if (value is float) return XmlConvert.ToString((float)value); if (value is Guid) return XmlConvert.ToString((Guid)value); if (value is DateTime) return XmlConvert.ToString((DateTime)value, XmlDateTimeSerializationMode.RoundtripKind); if (value is DateTimeOffset) return 
XmlConvert.ToString((DateTimeOffset)value); if (value is TimeSpan) return XmlConvert.ToString((TimeSpan)value); if (value is string) return System.Security.SecurityElement.Escape(value as string); return value.ToString(); } private List<string> GetNamespaces(object[] messages) { var result = new List<string>(); foreach (var m in messages) { string ns = mapper.GetMappedTypeFor(m.GetType()).Namespace; if (!result.Contains(ns)) result.Add(ns); } return result; } private List<string> GetBaseTypes(object[] messages) { var result = new List<string>(); foreach (var m in messages) { Type t = mapper.GetMappedTypeFor(m.GetType()); Type baseType = t.BaseType; while (baseType != typeof(object) && baseType != null) { if (mapper.IsMessageType(baseType)) if (!result.Contains(baseType.FullName)) result.Add(baseType.FullName); baseType = baseType.BaseType; } foreach (Type i in t.GetInterfaces()) if (i != typeof(IMessage) && mapper.IsMessageType(baseType)) if (!result.Contains(i.FullName)) result.Add(i.FullName); } return result; } #endregion #region members private const string XMLPREFIX = "d1p1"; private const string XMLTYPE = XMLPREFIX + ":type"; private const string BASETYPE = "baseType"; private static readonly Dictionary<Type, IEnumerable<PropertyInfo>> typeToProperties = new Dictionary<Type, IEnumerable<PropertyInfo>>(); private static readonly Dictionary<Type, IEnumerable<FieldInfo>> typeToFields = new Dictionary<Type, IEnumerable<FieldInfo>>(); private static readonly Dictionary<Type, Type> typesToCreateForArrays = new Dictionary<Type, Type>(); private static readonly Dictionary<Type, Type> typesToCreateForEnumerables = new Dictionary<Type, Type>(); private static readonly List<Type> typesBeingInitialized = new List<Type>(); private static readonly Dictionary<PropertyInfo, LateBoundProperty> propertyInfoToLateBoundProperty = new Dictionary<PropertyInfo, LateBoundProperty>(); private static readonly Dictionary<FieldInfo, LateBoundField> fieldInfoToLateBoundField = new Dictionary<FieldInfo, LateBoundField>(); private static readonly Dictionary<PropertyInfo, LateBoundPropertySet> propertyInfoToLateBoundPropertySet = new Dictionary<PropertyInfo, LateBoundPropertySet>(); private static readonly Dictionary<FieldInfo, LateBoundFieldSet> fieldInfoToLateBoundFieldSet = new Dictionary<FieldInfo, LateBoundFieldSet>(); [ThreadStatic] private static string defaultNameSpace; /// <summary> /// Used for serialization /// </summary> [ThreadStatic] private static IDictionary<string, string> namespacesToPrefix; /// <summary> /// Used for deserialization /// </summary> [ThreadStatic] private static IDictionary<string, string> prefixesToNamespaces; [ThreadStatic] private static List<Type> messageBaseTypes; [ThreadStatic] private static List<Type> namespacesToAdd; private static readonly ILog logger = LogManager.GetLogger(typeof(XmlMessageSerializer).Namespace); #endregion public XmlMessageSerializer(IMessageMapper mapper) { this.mapper = mapper; } /// <summary> /// Initialized the serializer with the given message types /// </summary> /// <param name="types"></param> public void Initialize(IEnumerable<Type> types) { messageTypes = types.ToList(); foreach (Type t in messageTypes) InitType(t); } string nameSpace = "http://tempuri.net"; } }
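// A minimal usage sketch for the serializer above. The OrderPlaced message type,
// the `mapper` parameter, and the using directive assumed to contain IMessageMapper
// are illustrative assumptions; the host application supplies its own IMessageMapper
// implementation and message contracts.
using System;
using System.IO;
using CWServiceBus.Reflection;
using CWServiceBus.Serializers.XML;

public class OrderPlaced
{
    public Guid OrderId { get; set; }
    public DateTime PlacedAt { get; set; }
}

internal static class XmlSerializerUsageSketch
{
    public static void RoundTrip(IMessageMapper mapper)
    {
        var serializer = new XmlMessageSerializer(mapper);
        serializer.Initialize(new[] { typeof(OrderPlaced) });   // pre-scan properties and fields

        var message = new OrderPlaced { OrderId = Guid.NewGuid(), PlacedAt = DateTime.UtcNow };

        using (var stream = new MemoryStream())
        {
            serializer.Serialize(new object[] { message }, stream);
            stream.Position = 0;
            object[] roundTripped = serializer.Deserialize(stream);
            Console.WriteLine(((OrderPlaced)roundTripped[0]).OrderId);
        }
    }
}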
#pragma warning disable 0618 using System; using System.Collections.Generic; using System.Linq; using UnityEngine; using UnityObject = UnityEngine.Object; #if UNITY_EDITOR using UnityEditor; #endif namespace Zios{ using Events; using Containers; [InitializeOnLoad] public static class Locate{ private static bool setup; private static bool cleanGameObjects = false; private static List<Type> cleanSceneComponents = new List<Type>(); private static List<GameObject> cleanSiblings = new List<GameObject>(); private static Dictionary<string,GameObject> searchCache = new Dictionary<string,GameObject>(); private static Dictionary<string,AssetImporter> importers = new Dictionary<string,AssetImporter>(); private static Dictionary<Type,UnityObject[]> assets = new Dictionary<Type,UnityObject[]>(); private static Dictionary<GameObject,GameObject[]> siblings = new Dictionary<GameObject,GameObject[]>(); private static Dictionary<GameObject,GameObject[]> enabledSiblings = new Dictionary<GameObject,GameObject[]>(); private static Dictionary<GameObject,GameObject[]> disabledSiblings = new Dictionary<GameObject,GameObject[]>(); private static GameObject[] rootObjects = new GameObject[0]; private static GameObject[] sceneObjects = new GameObject[0]; private static GameObject[] enabledObjects = new GameObject[0]; private static GameObject[] disabledObjects = new GameObject[0]; private static Component[] allComponents = new Component[0]; private static Dictionary<Type,Component[]> sceneComponents = new Dictionary<Type,Component[]>(); private static Dictionary<Type,Component[]> enabledComponents = new Dictionary<Type,Component[]>(); private static Dictionary<Type,Component[]> disabledComponents = new Dictionary<Type,Component[]>(); private static Hierarchy<GameObject,Type,Component[]> objectComponents = new Hierarchy<GameObject,Type,Component[]>(); static Locate(){ if(!Application.isPlaying){ //Event.Add("On Application Quit",Locate.SetDirty); Event.Add("On Level Was Loaded",Locate.SetDirty).SetPermanent(); Event.Add("On Hierarchy Changed",Locate.SetDirty).SetPermanent(); Event.Add("On Asset Changed",()=>Locate.assets.Clear()).SetPermanent(); } Event.Register("On Components Changed"); if(!Locate.setup){Locate.SetDirty();} } public static void CheckChanges(){ var components = Resources.FindObjectsOfTypeAll<Component>(); if(components.Length != Locate.allComponents.Count() && !Locate.allComponents.SequenceEqual(components)){ if(Locate.setup){Event.Call("On Components Changed");} Locate.allComponents = components; } } public static void SetDirty(){ Locate.CheckChanges(); Locate.cleanGameObjects = false; Locate.cleanSceneComponents.Clear(); Locate.cleanSiblings.Clear(); Locate.objectComponents.Clear(); Locate.searchCache.Clear(); Locate.setup = true; } public static void SetComponentsDirty<Type>() where Type : Component{Locate.cleanSceneComponents.Remove(typeof(Type));} public static void SetComponentsDirty<Type>(GameObject target) where Type : Component{Locate.objectComponents[target].Remove(typeof(Type));} public static void Build<Type>() where Type : Component{ List<GameObject> rootObjects = new List<GameObject>(); List<Type> enabled = new List<Type>(); List<Type> disabled = new List<Type>(); Type[] all = (Type[])Resources.FindObjectsOfTypeAll(typeof(Type)); foreach(Type current in all){ if(current.IsNull()){continue;} if(current.IsPrefab()){continue;} if(current.gameObject.IsNull()){continue;} if(current.gameObject.transform.parent == null){rootObjects.Add(current.gameObject);} 
if(current.gameObject.activeInHierarchy){enabled.Add(current);} else{disabled.Add(current);} } Locate.sceneComponents[typeof(Type)] = enabled.Extend(disabled).ToArray(); Locate.enabledComponents[typeof(Type)] = enabled.ToArray(); Locate.disabledComponents[typeof(Type)] = disabled.ToArray(); Locate.cleanSceneComponents.Add(typeof(Type)); if(typeof(Type) == typeof(Transform)){ List<GameObject> enabledObjects = enabled.Select(x=>x.gameObject).ToList(); List<GameObject> disabledObjects = disabled.Select(x=>x.gameObject).ToList(); Locate.sceneObjects = enabledObjects.Extend(disabledObjects).ToArray(); Locate.enabledObjects = enabledObjects.ToArray(); Locate.disabledObjects = disabledObjects.ToArray(); Locate.rootObjects = rootObjects.ToArray(); Locate.cleanGameObjects = true; } } //===================== // Gameobject //===================== public static bool HasDuplicate(GameObject target){ if(Application.isLoadingLevel){return false;} GameObject[] siblings = target.GetSiblings(true,true,false); foreach(GameObject current in siblings){ if(current.IsNull()){continue;} if(current.name == target.name){return true;} } return false; } public static GameObject[] GetSiblings(this GameObject current,bool includeEnabled=true,bool includeDisabled=true,bool includeSelf=true){ if(Application.isLoadingLevel){return new GameObject[0];} if(!Locate.cleanSiblings.Contains(current)){ GameObject parent = current.GetParent(); List<GameObject> siblings; if(parent.IsNull()){ Locate.GetSceneObjects(includeEnabled,includeDisabled); siblings = Locate.rootObjects.Remove(current).ToList(); } else{ siblings = parent.GetComponentsInChildren<Transform>(true).Select(x=>x.gameObject).ToList(); siblings.RemoveAll(x=>x.GetParent()!=parent); } Locate.siblings[current] = siblings.ToArray(); Locate.enabledSiblings[current] = Locate.siblings[current].Where(x=>!x.IsNull()&&x.gameObject.activeInHierarchy).Select(x=>x.gameObject).ToArray(); Locate.disabledSiblings[current] = Locate.siblings[current].Where(x=>!x.IsNull()&&!x.gameObject.activeInHierarchy).Select(x=>x.gameObject).ToArray(); Locate.cleanSiblings.Add(current); } GameObject[] results = Locate.enabledSiblings[current]; if(includeEnabled && includeDisabled){results = Locate.siblings[current];} if(!includeEnabled){results = Locate.disabledSiblings[current];} if(!includeSelf){results = results.Remove(current);} return results; } public static GameObject GetScenePath(string name,bool autocreate=true){ string[] parts = name.Split('/'); string path = ""; GameObject current = null; Transform parent = null; foreach(string part in parts){ path = path + "/" + part; current = Locate.Find(path); if(current.IsNull()){ if(!autocreate){ return null; } current = new GameObject(part); current.transform.parent = parent; Locate.SetDirty(); } parent = current.transform; } return current; } public static GameObject[] GetByName(string name){ if(Application.isLoadingLevel){return new GameObject[0];} if(!Locate.cleanGameObjects){Locate.Build<Transform>();} List<GameObject> matches = new List<GameObject>(); foreach(GameObject current in Locate.enabledObjects){ if(current.IsNull()){continue;} if(current.name == name){ matches.Add(current); } } return matches.ToArray(); } public static GameObject[] GetSceneObjects(bool includeEnabled=true,bool includeDisabled=true){ if(Application.isLoadingLevel){return new GameObject[0];} if(!Locate.cleanGameObjects){Locate.Build<Transform>();} if(includeEnabled && includeDisabled){return Locate.sceneObjects;} if(!includeEnabled){return Locate.disabledObjects;} 
return Locate.enabledObjects; } public static GameObject Find(string name,bool includeHidden=true){ if(Application.isLoadingLevel){return null;} if(!Locate.cleanGameObjects){Locate.Build<Transform>();} name = name.Trim("/"); if(Locate.searchCache.ContainsKey(name)){return Locate.searchCache[name];} GameObject[] all = includeHidden ? Locate.sceneObjects : Locate.enabledObjects; foreach(GameObject current in all){ if(current.IsNull()){continue;} string path = current.GetPath().Trim("/"); if(path == name){ Locate.searchCache[name] = current; return current; } } return null; } //===================== // Components //===================== public static Type[] GetSceneComponents<Type>(bool includeEnabled=true,bool includeDisabled=true) where Type : Component{ if(Application.isLoadingLevel){return new Type[0];} if(!Locate.cleanSceneComponents.Contains(typeof(Type))){Locate.Build<Type>();} if(includeEnabled && includeDisabled){return (Type[])Locate.sceneComponents[typeof(Type)];} if(!includeEnabled){return (Type[])Locate.disabledComponents[typeof(Type)];} return (Type[])Locate.enabledComponents[typeof(Type)]; } public static Type[] GetObjectComponents<Type>(GameObject target) where Type : Component{ if(Application.isLoadingLevel){return new Type[0];} if(!Locate.objectComponents.ContainsKey(target) || !Locate.objectComponents[target].ContainsKey(typeof(Type))){ Locate.objectComponents.AddNew(target); Locate.objectComponents[target][typeof(Type)] = target.GetComponents<Type>(true); } return (Type[])Locate.objectComponents[target][typeof(Type)]; } //===================== // Assets //===================== public static object[] GetAssets(Type type){ if(Application.isLoadingLevel){return new Type[0];} if(!Locate.assets.ContainsKey(type)){Locate.assets[type] = Resources.FindObjectsOfTypeAll(type);} return Locate.assets[type]; } public static Type[] GetAssets<Type>() where Type : UnityObject{ if(Application.isLoadingLevel){return new Type[0];} if(!Locate.assets.ContainsKey(typeof(Type))){Locate.assets[typeof(Type)] = Resources.FindObjectsOfTypeAll(typeof(Type));} return (Type[])Locate.assets[typeof(Type)]; } //===================== // Importers //===================== public static Type GetImporter<Type>(string path) where Type : AssetImporter{ if(Application.isLoadingLevel){return default(Type);} if(!Locate.importers.ContainsKey(path)){Locate.importers[path] = AssetImporter.GetAtPath(path);} return Locate.importers[path].As<Type>(); } } }
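// Illustrative usage sketch (not part of the Locate class): a minimal MonoBehaviour showing how the
// cached lookups above are typically called. "Player/Weapon", "Enemy" and the component types are
// placeholder names invented for this example.
using UnityEngine;
using Zios;

public class LocateUsageExample : MonoBehaviour
{
    void Start()
    {
        // Full-path lookup; results are cached in Locate.searchCache until SetDirty() runs.
        GameObject weapon = Locate.Find("Player/Weapon");

        // All enabled scene objects with a matching name.
        GameObject[] enemies = Locate.GetByName("Enemy");

        // Cached component queries, built lazily by Locate.Build<T>().
        Collider[] colliders = Locate.GetSceneComponents<Collider>();
        Transform[] ownTransforms = Locate.GetObjectComponents<Transform>(this.gameObject);

        // Siblings of this object, enabled or disabled, excluding itself.
        GameObject[] siblings = this.gameObject.GetSiblings(true, true, false);

        Debug.Log(weapon + " / " + enemies.Length + " / " + colliders.Length
                  + " / " + ownTransforms.Length + " / " + siblings.Length);
    }
}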
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // File System.Windows.Media.Media3D.MaterialCollection.cs // Automatically generated contract file. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics.Contracts; using System; // Disable the "this variable is not used" warning as every field would imply it. #pragma warning disable 0414 // Disable the "this variable is never assigned to". #pragma warning disable 0067 // Disable the "this event is never assigned to". #pragma warning disable 0649 // Disable the "this variable is never used". #pragma warning disable 0169 // Disable the "new keyword not required" warning. #pragma warning disable 0109 // Disable the "extern without DllImport" warning. #pragma warning disable 0626 // Disable the "could hide other member" warning, can happen on certain properties. 
#pragma warning disable 0108 namespace System.Windows.Media.Media3D { sealed public partial class MaterialCollection : System.Windows.Media.Animation.Animatable, System.Collections.IList, System.Collections.ICollection, IList<Material>, ICollection<Material>, IEnumerable<Material>, System.Collections.IEnumerable { #region Methods and constructors public void Add(Material value) { } public void Clear() { } public MaterialCollection Clone() { return default(MaterialCollection); } protected override void CloneCore(System.Windows.Freezable source) { } public MaterialCollection CloneCurrentValue() { return default(MaterialCollection); } protected override void CloneCurrentValueCore(System.Windows.Freezable source) { } public bool Contains(Material value) { return default(bool); } public void CopyTo(Material[] array, int index) { } protected override System.Windows.Freezable CreateInstanceCore() { return default(System.Windows.Freezable); } protected override bool FreezeCore(bool isChecking) { return default(bool); } protected override void GetAsFrozenCore(System.Windows.Freezable source) { } protected override void GetCurrentValueAsFrozenCore(System.Windows.Freezable source) { } public MaterialCollection.Enumerator GetEnumerator() { return default(MaterialCollection.Enumerator); } public int IndexOf(Material value) { return default(int); } public void Insert(int index, Material value) { } public MaterialCollection() { } public MaterialCollection(IEnumerable<Material> collection) { } public MaterialCollection(int capacity) { } public bool Remove(Material value) { return default(bool); } public void RemoveAt(int index) { } IEnumerator<Material> System.Collections.Generic.IEnumerable<System.Windows.Media.Media3D.Material>.GetEnumerator() { return default(IEnumerator<Material>); } void System.Collections.ICollection.CopyTo(Array array, int index) { } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return default(System.Collections.IEnumerator); } int System.Collections.IList.Add(Object value) { return default(int); } bool System.Collections.IList.Contains(Object value) { return default(bool); } int System.Collections.IList.IndexOf(Object value) { return default(int); } void System.Collections.IList.Insert(int index, Object value) { } void System.Collections.IList.Remove(Object value) { } #endregion #region Properties and indexers public int Count { get { return default(int); } } public Material this [int index] { get { return default(Material); } set { } } bool System.Collections.Generic.ICollection<System.Windows.Media.Media3D.Material>.IsReadOnly { get { return default(bool); } } bool System.Collections.ICollection.IsSynchronized { get { return default(bool); } } Object System.Collections.ICollection.SyncRoot { get { return default(Object); } } bool System.Collections.IList.IsFixedSize { get { return default(bool); } } bool System.Collections.IList.IsReadOnly { get { return default(bool); } } Object System.Collections.IList.this [int index] { get { return default(Object); } set { } } #endregion } }
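// Illustrative usage sketch (not part of the generated contract file above): building a
// MaterialCollection the way the real WPF type is consumed. The brush and material values are
// arbitrary example data.
using System.Windows.Media;
using System.Windows.Media.Media3D;

static class MaterialCollectionExample
{
    static MaterialGroup BuildMaterial()
    {
        // MaterialCollection implements IList<Material>, so collection initializers
        // and Add/Insert/Remove all behave like a normal generic list.
        var materials = new MaterialCollection
        {
            new DiffuseMaterial(new SolidColorBrush(Colors.CornflowerBlue)),
            new SpecularMaterial(Brushes.White, 20.0)
        };
        return new MaterialGroup { Children = materials };
    }
}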
using System; using System.Collections; using System.Collections.Generic; using System.Collections.Specialized; using System.Linq; using System.Net; using System.Runtime.InteropServices; using Mindscape.Raygun4Net.Messages; using System.Threading; using System.Reflection; using Android.Content; using Android.Views; using Android.Runtime; using Android.App; using Android.Net; using Java.IO; using Android.OS; using System.Text; using System.Threading.Tasks; using Android.Bluetooth; using Android.Provider; namespace Mindscape.Raygun4Net { public class RaygunClient : RaygunClientBase { private readonly string _apiKey; private readonly List<Type> _wrapperExceptions = new List<Type>(); /// <summary> /// Initializes a new instance of the <see cref="RaygunClient" /> class. /// </summary> /// <param name="apiKey">The API key.</param> public RaygunClient(string apiKey) { _apiKey = apiKey; _wrapperExceptions.Add(typeof(TargetInvocationException)); _wrapperExceptions.Add(typeof(System.AggregateException)); ThreadPool.QueueUserWorkItem(state => { SendStoredMessages(); }); } private bool ValidateApiKey() { if (string.IsNullOrEmpty(_apiKey)) { System.Diagnostics.Debug.WriteLine("ApiKey has not been provided, exception will not be logged"); return false; } return true; } /// <summary> /// Adds a list of outer exceptions that will be stripped, leaving only the valuable inner exception. /// This can be used when a wrapper exception, e.g. TargetInvocationException or AggregateException, /// contains the actual exception as the InnerException. The message and stack trace of the inner exception will then /// be used by Raygun for grouping and display. The above two do not need to be added manually, /// but if you have other wrapper exceptions that you want stripped you can pass them in here. /// </summary> /// <param name="wrapperExceptions">Exception types that you want removed and replaced with their inner exception.</param> public void AddWrapperExceptions(params Type[] wrapperExceptions) { foreach (Type wrapper in wrapperExceptions) { if (!_wrapperExceptions.Contains(wrapper)) { _wrapperExceptions.Add(wrapper); } } } /// <summary> /// Specifies types of wrapper exceptions that Raygun should send rather than stripping out and sending the inner exception. /// This can be used to remove the default wrapper exceptions (TargetInvocationException and AggregateException). /// </summary> /// <param name="wrapperExceptions">Exception types that should no longer be stripped away.</param> public void RemoveWrapperExceptions(params Type[] wrapperExceptions) { foreach (Type wrapper in wrapperExceptions) { _wrapperExceptions.Remove(wrapper); } } /// <summary> /// Transmits an exception to Raygun.io synchronously, using the version number of the originating assembly. /// </summary> /// <param name="exception">The exception to deliver.</param> public override void Send(Exception exception) { Send(exception, null, (IDictionary)null); } /// <summary> /// Transmits an exception to Raygun.io synchronously specifying a list of string tags associated /// with the message for identification. This uses the version number of the originating assembly. 
/// </summary> /// <param name="exception">The exception to deliver.</param> /// <param name="tags">A list of strings associated with the message.</param> public void Send(Exception exception, IList<string> tags) { Send(exception, tags, (IDictionary)null); } /// <summary> /// Transmits an exception to Raygun.io synchronously specifying a list of string tags associated /// with the message for identification, as well as sending a key-value collection of custom data. /// This uses the version number of the originating assembly. /// </summary> /// <param name="exception">The exception to deliver.</param> /// <param name="tags">A list of strings associated with the message.</param> /// <param name="userCustomData">A key-value collection of custom data that will be added to the payload.</param> public void Send(Exception exception, IList<string> tags, IDictionary userCustomData) { if (CanSend(exception)) { StripAndSend(exception, tags, userCustomData); FlagAsSent(exception); } } /// <summary> /// Asynchronously transmits a message to Raygun.io. /// </summary> /// <param name="exception">The exception to deliver.</param> public void SendInBackground(Exception exception) { SendInBackground(exception, null, (IDictionary)null); } /// <summary> /// Asynchronously transmits a message to Raygun.io. /// </summary> /// <param name="exception">The exception to deliver.</param> /// <param name="tags">A list of strings associated with the message.</param> public void SendInBackground(Exception exception, IList<string> tags) { SendInBackground(exception, tags, (IDictionary)null); } /// <summary> /// Asynchronously transmits a message to Raygun.io. /// </summary> /// <param name="exception">The exception to deliver.</param> /// <param name="tags">A list of strings associated with the message.</param> /// <param name="userCustomData">A key-value collection of custom data that will be added to the payload.</param> public void SendInBackground(Exception exception, IList<string> tags, IDictionary userCustomData) { if (CanSend(exception)) { ThreadPool.QueueUserWorkItem(c => StripAndSend(exception, tags, userCustomData)); FlagAsSent(exception); } } /// <summary> /// Asynchronously transmits a message to Raygun.io. /// </summary> /// <param name="raygunMessage">The RaygunMessage to send. This needs its OccurredOn property /// set to a valid DateTime and as much of the Details property as is available.</param> public void SendInBackground(RaygunMessage raygunMessage) { ThreadPool.QueueUserWorkItem(c => Send(raygunMessage)); } private string DeviceId { get { try { return Settings.Secure.GetString(Context.ContentResolver, Settings.Secure.AndroidId); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("Failed to get device id: {0}", ex.Message); } return null; } } private static RaygunClient _client; /// <summary> /// Gets the <see cref="RaygunClient"/> created by the Attach method. /// </summary> public static RaygunClient Current { get { return _client; } } /// <summary> /// Causes Raygun to listen to and send all unhandled exceptions and unobserved task exceptions. 
/// </summary> /// <param name="apiKey">Your app api key.</param> public static void Attach(string apiKey) { Detach(); _client = new RaygunClient(apiKey); AppDomain.CurrentDomain.UnhandledException += CurrentDomain_UnhandledException; TaskScheduler.UnobservedTaskException += TaskScheduler_UnobservedTaskException; AndroidEnvironment.UnhandledExceptionRaiser += AndroidEnvironment_UnhandledExceptionRaiser; } /// <summary> /// Causes Raygun to listen to and send all unhandled exceptions and unobserved task exceptions. /// </summary> /// <param name="apiKey">Your app api key.</param> /// <param name="user">An identity string for tracking affected users.</param> public static void Attach(string apiKey, string user) { Detach(); _client = new RaygunClient(apiKey) { User = user }; AppDomain.CurrentDomain.UnhandledException += CurrentDomain_UnhandledException; TaskScheduler.UnobservedTaskException += TaskScheduler_UnobservedTaskException; AndroidEnvironment.UnhandledExceptionRaiser += AndroidEnvironment_UnhandledExceptionRaiser; } /// <summary> /// Detaches Raygun from listening to unhandled exceptions and unobserved task exceptions. /// </summary> public static void Detach() { AppDomain.CurrentDomain.UnhandledException -= CurrentDomain_UnhandledException; TaskScheduler.UnobservedTaskException -= TaskScheduler_UnobservedTaskException; AndroidEnvironment.UnhandledExceptionRaiser -= AndroidEnvironment_UnhandledExceptionRaiser; } private static void TaskScheduler_UnobservedTaskException(object sender, UnobservedTaskExceptionEventArgs e) { if (e.Exception != null) { _client.Send(e.Exception); } } private static void CurrentDomain_UnhandledException(object sender, UnhandledExceptionEventArgs e) { if (e.ExceptionObject is Exception) { _client.Send(e.ExceptionObject as Exception); } } private static void AndroidEnvironment_UnhandledExceptionRaiser(object sender, RaiseThrowableEventArgs e) { if (e.Exception != null) { _client.Send(e.Exception); } } internal static Context Context { get { return Application.Context; } } protected RaygunMessage BuildMessage(Exception exception, IList<string> tags, IDictionary userCustomData) { JNIEnv.ExceptionClear(); var message = RaygunMessageBuilder.New .SetEnvironmentDetails() .SetMachineName("Unknown") .SetExceptionDetails(exception) .SetClientDetails() .SetVersion(ApplicationVersion) .SetTags(tags) .SetUserCustomData(userCustomData) .SetUser(UserInfo ?? (!String.IsNullOrEmpty(User) ? new RaygunIdentifierMessage(User) : BuildRaygunIdentifierMessage(null))) .Build(); return message; } private RaygunIdentifierMessage BuildRaygunIdentifierMessage(string machineName) { string deviceId = DeviceId; return !String.IsNullOrWhiteSpace (deviceId) ? 
new RaygunIdentifierMessage (deviceId) { IsAnonymous = true, FullName = machineName } : null; } private void StripAndSend(Exception exception, IList<string> tags, IDictionary userCustomData) { foreach (Exception e in StripWrapperExceptions(exception)) { Send(BuildMessage(e, tags, userCustomData)); } } protected IEnumerable<Exception> StripWrapperExceptions(Exception exception) { if (exception != null && _wrapperExceptions.Any(wrapperException => exception.GetType() == wrapperException && exception.InnerException != null)) { System.AggregateException aggregate = exception as System.AggregateException; if (aggregate != null) { foreach (Exception e in aggregate.InnerExceptions) { foreach (Exception ex in StripWrapperExceptions(e)) { yield return ex; } } } else { foreach (Exception e in StripWrapperExceptions(exception.InnerException)) { yield return e; } } } else { yield return exception; } } /// <summary> /// Posts a RaygunMessage to the Raygun.io api endpoint. /// </summary> /// <param name="raygunMessage">The RaygunMessage to send. This needs its OccurredOn property /// set to a valid DateTime and as much of the Details property as is available.</param> public override void Send(RaygunMessage raygunMessage) { if (ValidateApiKey()) { bool canSend = OnSendingMessage(raygunMessage); if (canSend) { if (HasInternetConnection) { using (var client = new WebClient()) { client.Headers.Add("X-ApiKey", _apiKey); client.Headers.Add("content-type", "application/json; charset=utf-8"); client.Encoding = System.Text.Encoding.UTF8; try { var message = SimpleJson.SerializeObject(raygunMessage); client.UploadString(RaygunSettings.Settings.ApiEndpoint, message); System.Diagnostics.Debug.WriteLine("Sending message to Raygun.io"); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(string.Format("Error Logging Exception to Raygun.io {0}", ex.Message)); try { SaveMessage(SimpleJson.SerializeObject(raygunMessage)); System.Diagnostics.Debug.WriteLine("Exception has been saved to the device to try again later."); } catch (Exception e) { System.Diagnostics.Debug.WriteLine(string.Format("Error saving Exception to device {0}", e.Message)); } } } } else { try { var message = SimpleJson.SerializeObject(raygunMessage); SaveMessage(message); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(string.Format("Error saving Exception to device {0}", ex.Message)); } } } } } private bool SendMessage(string message) { using (var client = new WebClient()) { client.Headers.Add("X-ApiKey", _apiKey); client.Headers.Add("content-type", "application/json; charset=utf-8"); client.Encoding = System.Text.Encoding.UTF8; try { client.UploadString(RaygunSettings.Settings.ApiEndpoint, message); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(string.Format("Error Logging Exception to Raygun.io {0}", ex.Message)); return false; } } return true; } private bool HasInternetConnection { get { if (Context != null) { ConnectivityManager connectivityManager = (ConnectivityManager)Context.GetSystemService(Context.ConnectivityService); if (connectivityManager != null) { NetworkInfo networkInfo = connectivityManager.ActiveNetworkInfo; return networkInfo != null && networkInfo.IsConnected; } } return false; } } private void SaveMessage(string message) { try { if (Context != null) { using (File dir = Context.GetDir("RaygunIO", FileCreationMode.Private)) { int number = 1; string[] files = dir.List(); while (true) { bool exists = FileExists(files, "RaygunErrorMessage" + number + ".txt"); if (!exists) { string nextFileName = 
"RaygunErrorMessage" + (number + 1) + ".txt"; exists = FileExists(files, nextFileName); if (exists) { DeleteFile(dir, nextFileName); } break; } number++; } if (number == 11) { string firstFileName = "RaygunErrorMessage1.txt"; if (FileExists(files, firstFileName)) { DeleteFile(dir, firstFileName); } } using (File file = new File(dir, "RaygunErrorMessage" + number + ".txt")) { using (FileOutputStream stream = new FileOutputStream(file)) { stream.Write(Encoding.ASCII.GetBytes(message)); stream.Flush(); stream.Close(); } } System.Diagnostics.Debug.WriteLine("Saved message: " + "RaygunErrorMessage" + number + ".txt"); System.Diagnostics.Debug.WriteLine("File Count: " + dir.List().Length); } } } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(string.Format("Error saving message to isolated storage {0}", ex.Message)); } } private void SendStoredMessages() { if (HasInternetConnection) { try { using (File dir = Context.GetDir("RaygunIO", FileCreationMode.Private)) { File[] files = dir.ListFiles(); foreach (File file in files) { if (file.Name.StartsWith("RaygunErrorMessage")) { using (FileInputStream stream = new FileInputStream(file)) { using (InputStreamInvoker isi = new InputStreamInvoker(stream)) { using (InputStreamReader streamReader = new Java.IO.InputStreamReader(isi)) { using (BufferedReader bufferedReader = new BufferedReader(streamReader)) { StringBuilder stringBuilder = new StringBuilder(); string line; while ((line = bufferedReader.ReadLine()) != null) { stringBuilder.Append(line); } bool success = SendMessage(stringBuilder.ToString()); // If just one message fails to send, then don't delete the message, and don't attempt sending anymore until later. if (!success) { return; } System.Diagnostics.Debug.WriteLine("Sent " + file.Name); } } } } file.Delete(); } } if (dir.List().Length == 0) { if (files.Length > 0) { System.Diagnostics.Debug.WriteLine("Successfully sent all pending messages"); } dir.Delete(); } } } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(string.Format("Error sending stored messages to Raygun.io {0}", ex.Message)); } } } private bool FileExists(string[] files, string fileName) { foreach (string str in files) { if (fileName.Equals(str)) { return true; } } return false; } private void DeleteFile(File dir, string fileName) { File[] files = dir.ListFiles(); foreach (File file in files) { if (fileName.Equals(file.Name)) { file.Delete(); return; } } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; namespace System.Xml { /// <devdoc> /// <para> /// XmlNameTable implemented as a simple hash table. /// </para> /// </devdoc> public class NameTable : XmlNameTable { // // Private types // private class Entry { internal string str; internal int hashCode; internal Entry next; internal Entry(string str, int hashCode, Entry next) { this.str = str; this.hashCode = hashCode; this.next = next; } } // // Fields // private Entry[] _entries; private int _count; private int _mask; private int _hashCodeRandomizer; // // Constructor /// <devdoc> /// Public constructor. /// </devdoc> public NameTable() { _mask = 31; _entries = new Entry[_mask + 1]; _hashCodeRandomizer = Environment.TickCount; } // // XmlNameTable public methods /// <devdoc> /// Add the given string to the NameTable or return /// the existing string if it is already in the NameTable. /// </devdoc> public override string Add(string key) { if (key == null) { throw new ArgumentNullException(nameof(key)); } int len = key.Length; if (len == 0) { return string.Empty; } int hashCode = len + _hashCodeRandomizer; // use key.Length to eliminate the range check for (int i = 0; i < key.Length; i++) { hashCode += (hashCode << 7) ^ key[i]; } // mix it a bit more hashCode -= hashCode >> 17; hashCode -= hashCode >> 11; hashCode -= hashCode >> 5; for (Entry e = _entries[hashCode & _mask]; e != null; e = e.next) { if (e.hashCode == hashCode && e.str.Equals(key)) { return e.str; } } return AddEntry(key, hashCode); } /// <devdoc> /// Add the given string to the NameTable or return /// the existing string if it is already in the NameTable. /// </devdoc> public override string Add(char[] key, int start, int len) { if (len == 0) { return string.Empty; } int hashCode = len + _hashCodeRandomizer; hashCode += (hashCode << 7) ^ key[start]; // this will throw IndexOutOfRangeException in case the start index is invalid int end = start + len; for (int i = start + 1; i < end; i++) { hashCode += (hashCode << 7) ^ key[i]; } // mix it a bit more hashCode -= hashCode >> 17; hashCode -= hashCode >> 11; hashCode -= hashCode >> 5; for (Entry e = _entries[hashCode & _mask]; e != null; e = e.next) { if (e.hashCode == hashCode && TextEquals(e.str, key, start, len)) { return e.str; } } return AddEntry(new string(key, start, len), hashCode); } /// <devdoc> /// Find the matching string in the NameTable. /// </devdoc> public override string Get(string value) { if (value == null) { throw new ArgumentNullException(nameof(value)); } if (value.Length == 0) { return string.Empty; } int len = value.Length + _hashCodeRandomizer; int hashCode = len; // use value.Length to eliminate the range check for (int i = 0; i < value.Length; i++) { hashCode += (hashCode << 7) ^ value[i]; } // mix it a bit more hashCode -= hashCode >> 17; hashCode -= hashCode >> 11; hashCode -= hashCode >> 5; for (Entry e = _entries[hashCode & _mask]; e != null; e = e.next) { if (e.hashCode == hashCode && e.str.Equals(value)) { return e.str; } } return null; } /// <devdoc> /// Find the matching string atom given a range of /// characters. 
/// </devdoc> public override string Get(char[] key, int start, int len) { if (len == 0) { return string.Empty; } int hashCode = len + _hashCodeRandomizer; hashCode += (hashCode << 7) ^ key[start]; // this will throw IndexOutOfRangeException in case the start index is invalid int end = start + len; for (int i = start + 1; i < end; i++) { hashCode += (hashCode << 7) ^ key[i]; } // mix it a bit more hashCode -= hashCode >> 17; hashCode -= hashCode >> 11; hashCode -= hashCode >> 5; for (Entry e = _entries[hashCode & _mask]; e != null; e = e.next) { if (e.hashCode == hashCode && TextEquals(e.str, key, start, len)) { return e.str; } } return null; } // // Private methods // private string AddEntry(string str, int hashCode) { int index = hashCode & _mask; Entry e = new Entry(str, hashCode, _entries[index]); _entries[index] = e; if (_count++ == _mask) { Grow(); } return e.str; } private void Grow() { int newMask = _mask * 2 + 1; Entry[] oldEntries = _entries; Entry[] newEntries = new Entry[newMask + 1]; // use oldEntries.Length to eliminate the range check for (int i = 0; i < oldEntries.Length; i++) { Entry e = oldEntries[i]; while (e != null) { int newIndex = e.hashCode & newMask; Entry tmp = e.next; e.next = newEntries[newIndex]; newEntries[newIndex] = e; e = tmp; } } _entries = newEntries; _mask = newMask; } private static bool TextEquals(string str1, char[] str2, int str2Start, int str2Length) { if (str1.Length != str2Length) { return false; } // use array.Length to eliminate the range check for (int i = 0; i < str1.Length; i++) { if (str1[i] != str2[str2Start + i]) { return false; } } return true; } } }
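// Illustrative sketch (not part of NameTable): demonstrating the atomization guarantee the table
// provides - equal strings added twice come back as the same instance, which is what lets XML
// readers compare names by reference instead of by value.
using System;
using System.Xml;

static class NameTableExample
{
    static void Main()
    {
        var table = new NameTable();

        string first = table.Add("book");
        string second = table.Add(new string(new[] { 'b', 'o', 'o', 'k' }));

        // Same content, same instance: Add returned the already-atomized string.
        Console.WriteLine(ReferenceEquals(first, second)); // True

        // Get only looks up; it returns null for strings that were never added.
        Console.WriteLine(table.Get("chapter") == null);              // True
        Console.WriteLine(ReferenceEquals(table.Get("book"), first)); // True
    }
}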
using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Linq.Expressions; using System.Reflection; using System.Threading.Tasks; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Metadata; namespace XlsToEfCore.Import { public class XlsxToTableImporter { private readonly DbContext _dbContext; private readonly IExcelIoWrapper _excelIoWrapper; public XlsxToTableImporter(DbContext dbContext, IExcelIoWrapper excelIoWrapper) { _dbContext = dbContext; _excelIoWrapper = excelIoWrapper; } public XlsxToTableImporter(DbContext dbContext) { _dbContext = dbContext; _excelIoWrapper = new ExcelIoWrapper(); } /// <summary> /// /// </summary> /// <typeparam name="TEntity">The type of EF Entity</typeparam> /// <param name="matchingData">Specification for how to match spreadsheet to entity</param> /// <param name="saveBehavior">Optional configuration to change the save behavior. See ImportSaveBehavior</param> /// <param name="fileLocation">Optional directory of source xlsx file. Defaults to temp dir</param> /// <returns></returns> public Task<ImportResult> ImportColumnData<TEntity>(DataMatchesForImport matchingData, ImportSaveBehavior saveBehavior = null, string fileLocation = null) where TEntity : class, new() { return ImportColumnData<TEntity, string>(matchingData, null, null, null, saveBehavior, fileLocation: fileLocation); } /// <summary> /// /// </summary> /// <typeparam name="TEntity">The type of EF Entity</typeparam> /// <typeparam name="TId">Type of the item to use for finding</typeparam> /// <param name="matchingData">Specification for how to match spreadsheet to entity</param> /// <param name="finder">A func to look for an existing copy of the entity id. default will use "find". Runs against the DB via EF.</param> /// <param name="idPropertyName">The unique identifier property. Leave blank unless you are using an overrider to use something other than the real key.</param> /// <param name="overridingMapper">A custom mapper for mapping between excel columns and an entity. </param> /// <param name="saveBehavior">Optional configuration to change the save behavior. See ImportSaveBehavior</param> /// <param name="validator">Optional method to run custom validation on the modified entity before saving</param> /// <param name="fileLocation">Directory of source xlsx file. 
Defaults to temp dir</param> /// <returns></returns> public async Task<ImportResult> ImportColumnData<TEntity, TId>(DataMatchesForImport matchingData, Func<TId, Expression<Func<TEntity, bool>>> finder = null, string idPropertyName = null, UpdatePropertyOverrider<TEntity> overridingMapper = null, ImportSaveBehavior saveBehavior = null, IEntityValidator<TEntity> validator = null, string fileLocation = null) where TEntity : class, new() { if (saveBehavior == null) { saveBehavior = new ImportSaveBehavior(); } var selctedDict = BuildDictionaryFromSelected(matchingData.Selected); var keyInfo = GetEntityKeys(typeof (TEntity)); EnsureImportingEntityHasSingleKey(keyInfo); var pk = keyInfo[0]; Type idType = pk.PropertyInfo.PropertyType; if (idPropertyName == null) { idPropertyName = pk.Name; } var isImportingEntityId = selctedDict.ContainsKey(idPropertyName); var isDbGeneratedId = IsIdDbGenerated(typeof(TEntity)); EnsureNoIdColumnIncludedWhenCreatingAutoIncrementEntites(saveBehavior.RecordMode, isDbGeneratedId, isImportingEntityId); var importResult = new ImportResult {RowErrorDetails = new Dictionary<string, string>()}; var excelRows = await GetExcelRows(matchingData, fileLocation); var foundErrors = false; for (var index = 0; index < excelRows.Count; index++) { var excelRow = excelRows[index]; var rowNumber = index + 2; // add 2 to reach the first data row because the first row is a header, excel row numbers start with 1 not 0 TEntity entityToUpdate = null; try { if (ExcelRowIsBlank(excelRow)) continue; string idValue = null; if (isImportingEntityId) { var xlsxIdColName = selctedDict[idPropertyName]; var idStringValue = excelRow[xlsxIdColName]; entityToUpdate = await GetMatchedDbObject(finder, idStringValue, idType); ValidateDbResult(entityToUpdate, saveBehavior.RecordMode, xlsxIdColName, idStringValue); } if (entityToUpdate == null) { EnsureNoEntityCreationWithIdWhenAutoIncrementIdType(idPropertyName, isDbGeneratedId, idValue); entityToUpdate = new TEntity(); await _dbContext.Set<TEntity>().AddAsync(entityToUpdate); } await MapIntoEntity(selctedDict, idPropertyName, overridingMapper, entityToUpdate, excelRow, isDbGeneratedId, saveBehavior.RecordMode); if (validator != null) { var errors = validator.GetValidationErrors(entityToUpdate); if(errors.Any()) throw new RowInvalidException(errors); } else { importResult.SuccessCount++; } } catch (RowParseException e) { HandleError(importResult.RowErrorDetails, rowNumber, entityToUpdate, "Error: " + e.Message); foundErrors = true; } catch (RowInvalidException e) { HandleError(importResult.RowErrorDetails, rowNumber, entityToUpdate, "Error: " + e.Message); foundErrors = true; } catch (Exception e) { HandleError(importResult.RowErrorDetails, rowNumber, entityToUpdate, "Cannot be updated - error importing"); foundErrors = true; } if (saveBehavior.CommitMode == CommitMode.AnySuccessfulOneAtATime) { await _dbContext.SaveChangesAsync(); } } if ((saveBehavior.CommitMode == CommitMode.AnySuccessfulAtEndAsBulk) || (saveBehavior.CommitMode == CommitMode.CommitAllAtEndIfAllGoodOrRejectAll && !foundErrors)) { await _dbContext.SaveChangesAsync(); } return importResult; } private Task<List<Dictionary<string, string>>> GetExcelRows(DataMatchesForImport matchingData, string fileLocation) { if(matchingData.FileStream != null) return _excelIoWrapper.GetRows(matchingData.FileStream, matchingData.Sheet); var filePath = Path.Combine((fileLocation ?? 
Path.GetTempPath()), matchingData.FileName); return _excelIoWrapper.GetRows(filePath, matchingData.Sheet); } private Dictionary<string, string> BuildDictionaryFromSelected(List<XlsToEfColumnPair> selected) { var hasDups = selected.GroupBy(x => x.EfName) .Select(group => new {Name = group.Key, Count = group.Count()}) .Any(x => x.Count > 1); if (hasDups) { throw new Exception("Destination targets must be unique"); } var dict = selected.ToDictionary(x => x.EfName, x => x.XlsName); return dict; } private void HandleError<TEntity>(IDictionary<string, string> rowErrorDetails, int rowNumber, TEntity entityToRollBack, string message) where TEntity : class, new() { rowErrorDetails.Add(rowNumber.ToString(), message); if (entityToRollBack != null) { MarkForNotSaving(entityToRollBack); } } private void MarkForNotSaving<TEntity>(TEntity entityToUpdate) where TEntity : class, new() { if (_dbContext.Entry(entityToUpdate).State == EntityState.Added) { _dbContext.Entry(entityToUpdate).State = EntityState.Detached; } else { _dbContext.Entry(entityToUpdate).State = EntityState.Unchanged; } } private async Task MapIntoEntity<TEntity>(Dictionary<string, string> matchingData, string idPropertyName, UpdatePropertyOverrider<TEntity> overridingMapper, TEntity entityToUpdate, Dictionary<string, string> excelRow, bool isAutoIncrementingId, RecordMode recordMode) where TEntity : class { if (overridingMapper != null) { await overridingMapper.UpdateProperties(entityToUpdate, matchingData, excelRow, recordMode); } else { UpdateProperties(entityToUpdate, matchingData, excelRow, idPropertyName, isAutoIncrementingId); } } // this condition might happen when Upserting. unlike the other check, this check is per-row. private static void EnsureNoEntityCreationWithIdWhenAutoIncrementIdType(string idPropertyName, bool isAutoIncrementingId, string idValue) { if (isAutoIncrementingId && !string.IsNullOrWhiteSpace(idValue)) { throw new RowParseException(idPropertyName + " value " + idValue + " cannot be added - you cannot import id for new items when underlying table id is autoincrementing"); } } private async Task<TEntity> GetMatchedDbObject<TEntity, TId>(Func<TId, Expression<Func<TEntity, bool>>> finder, string idStringValue, Type idType) where TEntity : class { if (string.IsNullOrWhiteSpace(idStringValue)) return null; TEntity matchedDbObject; if (finder != null) { var finderInputType = (TId) Convert.ChangeType(idStringValue,typeof(TId)); var getExp = finder(finderInputType); matchedDbObject = await _dbContext.Set<TEntity>().FirstOrDefaultAsync(getExp); } else { var idData = Convert.ChangeType(idStringValue, idType); matchedDbObject = await _dbContext.Set<TEntity>().FindAsync(idData); } return matchedDbObject; } private static void ValidateDbResult<TEntity>(TEntity matchedDbObject, RecordMode recordMode, string xlsxIdColName, string idValue) where TEntity : class { if (matchedDbObject == null && recordMode == RecordMode.UpdateOnly) { throw new RowParseException(xlsxIdColName + " value " + idValue + " cannot be updated - not found in database"); } if (matchedDbObject != null && recordMode == RecordMode.CreateOnly) { throw new RowParseException(xlsxIdColName + " value " + idValue + " cannot be added - already in database"); } } private static bool ExcelRowIsBlank(Dictionary<string, string> excelRow) { return excelRow.All(x => string.IsNullOrWhiteSpace(x.Value)); } private static void EnsureNoIdColumnIncludedWhenCreatingAutoIncrementEntites(RecordMode recordMode, bool isAutoIncrementingId, bool isImportingEntityId) { if 
(isAutoIncrementingId && isImportingEntityId && recordMode == RecordMode.CreateOnly) { throw new Exception("Id is created in the database. You cannot import an ID column when creating."); } } private void EnsureImportingEntityHasSingleKey(IReadOnlyList<IProperty> keyInfo) { if (keyInfo.Count > 1) { throw new Exception("XlsToEf only supports Single Column Key right now"); } } private bool IsIdDbGenerated(Type eType) { var key = GetMappedKeyInformation(eType); var keyProperty = key.Properties[0]; var idkeyAnnotation = keyProperty.FindAnnotation("SqlServer:ValueGenerationStrategy"); var isGeneratedOnAdd = key.Properties[0]?.ValueGenerated == ValueGenerated.OnAdd; return ((idkeyAnnotation?.Value != null) && (idkeyAnnotation.Value.ToString().Contains("Identity") || idkeyAnnotation.Value.ToString().Contains("Computed"))) || (isGeneratedOnAdd); } private IReadOnlyList<IProperty> GetEntityKeys(Type eType) { var keys = _dbContext.Model.FindEntityType(eType).FindPrimaryKey().Properties; return keys; } private IKey GetMappedKeyInformation(Type eType) { return _dbContext.Model.FindEntityType(eType).FindPrimaryKey(); } private void UpdateProperties<TSelector>(TSelector matchedObject, Dictionary<string, string> matches, Dictionary<string, string> excelRow, string selectorColName, bool shouldSkipIdInsert) where TSelector : class { foreach (var entityPropertyName in matches.Keys) { if ((entityPropertyName == selectorColName) && shouldSkipIdInsert) continue; var xlsxColumnName = matches[entityPropertyName]; var xlsxItemData = excelRow[xlsxColumnName]; Type matchedObjectType = matchedObject.GetType(); PropertyInfo propToSet = matchedObjectType.GetProperty(entityPropertyName); if (propToSet is null) { var shadow = _dbContext.Entry(matchedObject).Property(entityPropertyName); var shadowType = shadow.Metadata.ClrType; var converted = StringToTypeConverter.Convert(xlsxItemData,shadowType); shadow.CurrentValue = converted; } else { var converted = StringToTypeConverter.Convert(xlsxItemData, propToSet.PropertyType); propToSet.SetValue(matchedObject, converted, null); } } } } public static class StringToTypeConverter { public static object Convert(string xlsxItemData, Type propertyType) { if (propertyType == typeof (string)) { return xlsxItemData; } object converted; if (propertyType.IsGenericType && propertyType.GetGenericTypeDefinition() == typeof (Nullable<>)) { converted = String.IsNullOrEmpty(xlsxItemData) ? null : ConvertString(xlsxItemData, propertyType.GetGenericArguments()[0]); } else { converted = ConvertString(xlsxItemData, propertyType); } return converted; } private static object ConvertString(string xlsxItemData, Type propertyType) { if (propertyType == typeof(Guid)) { return new Guid(xlsxItemData); } if (propertyType == typeof (short)) return short.Parse(xlsxItemData, NumberStyles.AllowThousands); if (propertyType == typeof(int)) return int.Parse(xlsxItemData, NumberStyles.AllowThousands); if (propertyType == typeof(byte)) return byte.Parse(xlsxItemData, NumberStyles.AllowThousands); // if (propertyType == typeof (bool)) // return DisplayConversions.StringToBool(xlsxItemData); return System.Convert.ChangeType(xlsxItemData, propertyType, CultureInfo.CurrentCulture); } } }
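// Illustrative usage sketch (not part of the importer): driving ImportColumnData for a hypothetical
// Product entity. The object-initializer construction of DataMatchesForImport and XlsToEfColumnPair
// is an assumption made for this example (the library may provide factory helpers); the entity,
// column names and file name are placeholders.
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using XlsToEfCore.Import;

public class Product
{
    public int Id { get; set; }
    public string Name { get; set; }
    public decimal Price { get; set; }
}

public class ProductImportRunner
{
    private readonly DbContext _db;
    public ProductImportRunner(DbContext db) { _db = db; }

    public async Task<ImportResult> RunAsync()
    {
        var importer = new XlsxToTableImporter(_db);

        var matches = new DataMatchesForImport
        {
            FileName = "products.xlsx",   // assumed settable for the sketch
            Sheet = "Sheet1",
            // EfName is the entity property, XlsName is the spreadsheet column header.
            Selected = new List<XlsToEfColumnPair>
            {
                new XlsToEfColumnPair { EfName = "Id",    XlsName = "Product Id" },
                new XlsToEfColumnPair { EfName = "Name",  XlsName = "Product Name" },
                new XlsToEfColumnPair { EfName = "Price", XlsName = "Unit Price" },
            },
        };

        // Per-row failures come back keyed by spreadsheet row number in ImportResult.RowErrorDetails.
        return await importer.ImportColumnData<Product>(matches);
    }
}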
using NLog; using System; using System.Collections.Generic; using System.Collections.Specialized; using System.Linq; using System.Runtime.Caching; using Wox.Infrastructure.Logger; using static Wox.Infrastructure.StringMatcher; namespace Wox.Infrastructure { public class StringMatcher { public SearchPrecisionScore UserSettingSearchPrecision { get; set; } private readonly Alphabet _alphabet; private MemoryCache _cache; private static readonly NLog.Logger Logger = LogManager.GetCurrentClassLogger(); public StringMatcher() { _alphabet = new Alphabet(); _alphabet.Initialize(); NameValueCollection config = new NameValueCollection(); config.Add("pollingInterval", "00:05:00"); config.Add("physicalMemoryLimitPercentage", "1"); config.Add("cacheMemoryLimitMegabytes", "30"); _cache = new MemoryCache("StringMatcherCache", config); } public static StringMatcher Instance { get; internal set; } public static MatchResult FuzzySearch(string query, string stringToCompare) { return Instance.FuzzyMatch(query, stringToCompare); } public MatchResult FuzzyMatch(string query, string stringToCompare) { query = query.Trim(); if (string.IsNullOrEmpty(stringToCompare) || string.IsNullOrEmpty(query)) return new MatchResult(false, UserSettingSearchPrecision); var queryWithoutCase = query.ToLower(); string translated = _alphabet.Translate(stringToCompare); string key = $"{queryWithoutCase}|{translated}"; MatchResult match = _cache[key] as MatchResult; if (match == null) { match = FuzzyMatchRecurrsive( queryWithoutCase, translated, 0, 0, new List<int>() ); CacheItemPolicy policy = new CacheItemPolicy(); policy.SlidingExpiration = new TimeSpan(12, 0, 0); _cache.Set(key, match, policy); } return match; } public MatchResult FuzzyMatchRecurrsive( string query, string stringToCompare, int queryCurrentIndex, int stringCurrentIndex, List<int> sourceMatchData ) { if (queryCurrentIndex == query.Length || stringCurrentIndex == stringToCompare.Length) { return new MatchResult(false, UserSettingSearchPrecision); } bool recursiveMatch = false; List<int> bestRecursiveMatchData = new List<int>(); int bestRecursiveScore = 0; List<int> matchs = new List<int>(); if (sourceMatchData.Count > 0) { foreach (var data in sourceMatchData) { matchs.Add(data); } } while (queryCurrentIndex < query.Length && stringCurrentIndex < stringToCompare.Length) { char queryLower = char.ToLower(query[queryCurrentIndex]); char stringToCompareLower = char.ToLower(stringToCompare[stringCurrentIndex]); if (queryLower == stringToCompareLower) { MatchResult match = FuzzyMatchRecurrsive( query, stringToCompare, queryCurrentIndex, stringCurrentIndex + 1, matchs ); if (match.Success) { if (!recursiveMatch || match.RawScore > bestRecursiveScore) { bestRecursiveMatchData = new List<int>(); foreach (int data in match.MatchData) { bestRecursiveMatchData.Add(data); } bestRecursiveScore = match.Score; } recursiveMatch = true; } matchs.Add(stringCurrentIndex); queryCurrentIndex += 1; } stringCurrentIndex += 1; } bool matched = queryCurrentIndex == query.Length; int outScore; if (matched) { outScore = 100; int penality = 3 * matchs[0]; outScore = outScore - penality; int unmatched = stringToCompare.Length - matchs.Count; outScore = outScore - (5 * unmatched); int consecutiveMatch = 0; for (int i = 0; i < matchs.Count; i++) { int indexCurent = matchs[i]; if (i > 0) { int indexPrevious = matchs[i - 1]; if (indexCurent == indexPrevious + 1) { consecutiveMatch += 1; outScore += 10 * consecutiveMatch; } else { consecutiveMatch = 0; } } char current = stringToCompare[indexCurent]; 
bool currentUpper = char.IsUpper(current); if (indexCurent > 0) { char neighbor = stringToCompare[indexCurent - 1]; if (currentUpper && char.IsLower(neighbor)) { outScore += 30; } bool isNeighbourSeparator = neighbor == '_' || neighbor == ' '; if (isNeighbourSeparator) { outScore += 50; if (currentUpper) { outScore += 50; } } } else { outScore += 50; if (currentUpper) { outScore += 50; } } } } else { outScore = 0; } if (recursiveMatch && (!matched || bestRecursiveScore > outScore)) { matchs = new List<int>(); foreach (int data in bestRecursiveMatchData) { matchs.Add(data); } outScore = bestRecursiveScore; return new MatchResult(true, UserSettingSearchPrecision, matchs, outScore); } else if (matched) { return new MatchResult(true, UserSettingSearchPrecision, matchs, outScore); } else { return new MatchResult(false, UserSettingSearchPrecision); } } public enum SearchPrecisionScore { Regular = 50, Low = 20, None = 0 } } public class MatchResult { public MatchResult(bool success, SearchPrecisionScore searchPrecision) { Success = success; SearchPrecision = searchPrecision; } public MatchResult(bool success, SearchPrecisionScore searchPrecision, List<int> matchData, int rawScore) { Success = success; SearchPrecision = searchPrecision; MatchData = matchData; RawScore = rawScore; } public bool Success { get; set; } /// <summary> /// The final score of the match result with search precision filters applied. /// </summary> public int Score { get; private set; } /// <summary> /// The raw calculated search score without any search precision filtering applied. /// </summary> private int _rawScore; public int RawScore { get { return _rawScore; } set { _rawScore = value; Score = ScoreAfterSearchPrecisionFilter(_rawScore); } } /// <summary> /// Matched data to highlight. /// </summary> public List<int> MatchData { get; set; } public SearchPrecisionScore SearchPrecision { get; set; } public bool IsSearchPrecisionScoreMet() { return IsSearchPrecisionScoreMet(_rawScore); } private bool IsSearchPrecisionScoreMet(int rawScore) { return rawScore >= (int)SearchPrecision; } private int ScoreAfterSearchPrecisionFilter(int rawScore) { return IsSearchPrecisionScoreMet(rawScore) ? rawScore : 0; } } }
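// Illustrative sketch (not part of StringMatcher): exercising the fuzzy matcher directly. Scores and
// match indices follow the rules coded above - bonuses for consecutive hits, word starts and
// separator-adjacent characters; penalties for a late first hit and for unmatched characters.
using System;
using Wox.Infrastructure;

static class StringMatcherExample
{
    static void Main()
    {
        var matcher = new StringMatcher
        {
            UserSettingSearchPrecision = StringMatcher.SearchPrecisionScore.Regular
        };

        MatchResult result = matcher.FuzzyMatch("fmh", "FuzzyMatchHighlight");

        Console.WriteLine(result.Success);                      // true if every query char was found in order
        Console.WriteLine(result.Score);                        // 0 unless RawScore clears the precision threshold
        Console.WriteLine(string.Join(",", result.MatchData));  // indices to highlight in the compared string
        Console.WriteLine(result.IsSearchPrecisionScoreMet());
    }
}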
// Python Tools for Visual Studio // Copyright(c) Microsoft Corporation // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the License); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS // OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY // IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABLITY OR NON-INFRINGEMENT. // // See the Apache Version 2.0 License for specific language governing // permissions and limitations under the License. using System; using System.Collections.Generic; using System.ComponentModel.Composition; using System.Diagnostics; using System.IO; using System.Linq; using System.Threading.Tasks; using Microsoft.VisualStudio.InteractiveWindow; using Microsoft.VisualStudio.InteractiveWindow.Commands; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Classification; using Microsoft.VisualStudio.Text.Editor.OptionsExtensionMethods; using Microsoft.VisualStudio.Utilities; using Microsoft.VisualStudioTools; namespace Microsoft.PythonTools.Repl { [Export(typeof(IInteractiveWindowCommand))] [ContentType(PythonCoreConstants.ContentType)] class LoadReplCommand : IInteractiveWindowCommand { const string _commentPrefix = "%%"; public Task<ExecutionResult> Execute(IInteractiveWindow window, string arguments) { var finder = new FileFinder(arguments); var eval = window.Evaluator as BasePythonReplEvaluator; if (eval != null && eval.CurrentOptions != null) { finder.Search(eval.CurrentOptions.WorkingDirectory); finder.SearchAll(eval.CurrentOptions.SearchPaths, ';'); } finder.ThrowIfNotFound(); string commandPrefix = "$"; string lineBreak = window.TextView.Options.GetNewLineCharacter(); IEnumerable<string> lines = File.ReadLines(finder.Filename); IEnumerable<string> submissions; if (eval != null) { submissions = eval.JoinCode(lines).Where(CommentPrefixPredicate); } else { // v1 behavior, will probably never be hit, but if someone was developing their own IReplEvaluator // and using this class it would be hit. 
var submissionList = new List<string>(); var currentSubmission = new List<string>(); foreach (var line in lines) { if (line.StartsWith(_commentPrefix)) { continue; } if (line.StartsWith(commandPrefix)) { AddSubmission(submissionList, currentSubmission, lineBreak); submissionList.Add(line); currentSubmission.Clear(); } else { currentSubmission.Add(line); } } AddSubmission(submissionList, currentSubmission, lineBreak); submissions = submissionList; } window.Submit(submissions); return ExecutionResult.Succeeded; } private static bool CommentPrefixPredicate(string input) { return !input.StartsWith(_commentPrefix); } private static void AddSubmission(List<string> submissions, List<string> lines, string lineBreak) { string submission = String.Join(lineBreak, lines); // skip empty submissions: if (submission.Length > 0) { submissions.Add(submission); } } public string Description { get { return "Loads commands from file and executes until complete"; } } public string Command { get { return "load"; } } public IEnumerable<ClassificationSpan> ClassifyArguments(ITextSnapshot snapshot, Span argumentsSpan, Span spanToClassify) { yield break; } public string CommandLine { get { return ""; } } public IEnumerable<string> DetailedDescription { get { yield return Description; } } public IEnumerable<KeyValuePair<string, string>> ParametersDescription { get { yield break; } } public IEnumerable<string> Names { get { yield return Command; } } class FileFinder { private readonly string _baseName; public FileFinder(string baseName) { _baseName = (baseName ?? "").Trim(' ', '\"'); if (CommonUtils.IsValidPath(_baseName) && Path.IsPathRooted(_baseName) && File.Exists(_baseName)) { Found = true; Filename = _baseName; } } /// <summary> /// Searches the specified path and changes <see cref="Found"/> to /// true if the file exists. Returns true if the file was found in /// the provided path. /// </summary> public bool Search(string path) { if (Found) { // File was found, but not in this path return false; } if (!CommonUtils.IsValidPath(path) || !Path.IsPathRooted(path)) { return false; } var fullPath = CommonUtils.GetAbsoluteFilePath(path, _baseName); if (File.Exists(fullPath)) { Found = true; Filename = fullPath; return true; } return false; } /// <summary> /// Searches each path in the list of paths as if they had been /// passed to <see cref="Search"/> individually. /// </summary> public bool SearchAll(string paths, char separator) { if (Found) { // File was found, but not in this path return false; } if (string.IsNullOrEmpty(paths)) { return false; } return SearchAll(paths.Split(separator)); } /// <summary> /// Searches each path in the sequence as if they had been passed /// to <see cref="Search"/> individually. /// </summary> public bool SearchAll(IEnumerable<string> paths) { if (Found) { // File was found, but not in this path return false; } if (paths == null) { return false; } foreach (var path in paths) { if (Search(path)) { return true; } } return false; } [DebuggerStepThrough, DebuggerHidden] public void ThrowIfNotFound() { if (!Found) { throw new FileNotFoundException("Cannot find file.", _baseName); } } public bool Found { get; private set; } public string Filename { get; private set; } } } }
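// Illustrative, comment-only sketch (not part of LoadReplCommand): what a file consumed by the
// "$load" REPL command can look like. Lines starting with "%%" are stripped by
// CommentPrefixPredicate; in the fallback (v1) path, a line starting with "$" begins a submission
// of its own. The path and the "$reset" command line are placeholders for the example.
//
//   REPL> $load C:\scripts\setup.py
//
//   setup.py:
//     %% this line is a REPL-only comment and is never submitted
//     import math
//     print(math.pi)
//     $reset
//     print("fresh state after the command line above")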
//--------------------------------------------------------------------------- // // <copyright file="EllipseGeometry.cs" company="Microsoft"> // Copyright (C) Microsoft Corporation. All rights reserved. // </copyright> // // This file was generated, please do not edit it directly. // // Please see http://wiki/default.aspx/Microsoft.Projects.Avalon/MilCodeGen.html for more information. // //--------------------------------------------------------------------------- using MS.Internal; using MS.Internal.KnownBoxes; using MS.Internal.Collections; using MS.Internal.PresentationCore; using MS.Utility; using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Globalization; using System.Reflection; using System.Runtime.InteropServices; using System.ComponentModel.Design.Serialization; using System.Text; using System.Windows; using System.Windows.Media; using System.Windows.Media.Effects; using System.Windows.Media.Media3D; using System.Windows.Media.Animation; using System.Windows.Media.Composition; using System.Windows.Media.Imaging; using System.Windows.Markup; using System.Windows.Media.Converters; using System.Security; using System.Security.Permissions; using SR=MS.Internal.PresentationCore.SR; using SRID=MS.Internal.PresentationCore.SRID; // These types are aliased to match the unamanaged names used in interop using BOOL = System.UInt32; using WORD = System.UInt16; using Float = System.Single; namespace System.Windows.Media { sealed partial class EllipseGeometry : Geometry { //------------------------------------------------------ // // Public Methods // //------------------------------------------------------ #region Public Methods /// <summary> /// Shadows inherited Clone() with a strongly typed /// version for convenience. /// </summary> public new EllipseGeometry Clone() { return (EllipseGeometry)base.Clone(); } /// <summary> /// Shadows inherited CloneCurrentValue() with a strongly typed /// version for convenience. /// </summary> public new EllipseGeometry CloneCurrentValue() { return (EllipseGeometry)base.CloneCurrentValue(); } #endregion Public Methods //------------------------------------------------------ // // Public Properties // //------------------------------------------------------ private static void RadiusXPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { EllipseGeometry target = ((EllipseGeometry) d); target.PropertyChanged(RadiusXProperty); } private static void RadiusYPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { EllipseGeometry target = ((EllipseGeometry) d); target.PropertyChanged(RadiusYProperty); } private static void CenterPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { EllipseGeometry target = ((EllipseGeometry) d); target.PropertyChanged(CenterProperty); } #region Public Properties /// <summary> /// RadiusX - double. Default value is 0.0. /// </summary> public double RadiusX { get { return (double) GetValue(RadiusXProperty); } set { SetValueInternal(RadiusXProperty, value); } } /// <summary> /// RadiusY - double. Default value is 0.0. /// </summary> public double RadiusY { get { return (double) GetValue(RadiusYProperty); } set { SetValueInternal(RadiusYProperty, value); } } /// <summary> /// Center - Point. Default value is new Point(). 
/// </summary> public Point Center { get { return (Point) GetValue(CenterProperty); } set { SetValueInternal(CenterProperty, value); } } #endregion Public Properties //------------------------------------------------------ // // Protected Methods // //------------------------------------------------------ #region Protected Methods /// <summary> /// Implementation of <see cref="System.Windows.Freezable.CreateInstanceCore">Freezable.CreateInstanceCore</see>. /// </summary> /// <returns>The new Freezable.</returns> protected override Freezable CreateInstanceCore() { return new EllipseGeometry(); } #endregion ProtectedMethods //------------------------------------------------------ // // Internal Methods // //------------------------------------------------------ #region Internal Methods /// <SecurityNote> /// Critical: This code calls into an unsafe code block /// TreatAsSafe: This code does not return any critical data.It is ok to expose /// Channels are safe to call into and do not go cross domain and cross process /// </SecurityNote> [SecurityCritical,SecurityTreatAsSafe] internal override void UpdateResource(DUCE.Channel channel, bool skipOnChannelCheck) { // If we're told we can skip the channel check, then we must be on channel Debug.Assert(!skipOnChannelCheck || _duceResource.IsOnChannel(channel)); if (skipOnChannelCheck || _duceResource.IsOnChannel(channel)) { base.UpdateResource(channel, skipOnChannelCheck); // Read values of properties into local variables Transform vTransform = Transform; // Obtain handles for properties that implement DUCE.IResource DUCE.ResourceHandle hTransform; if (vTransform == null || Object.ReferenceEquals(vTransform, Transform.Identity) ) { hTransform = DUCE.ResourceHandle.Null; } else { hTransform = ((DUCE.IResource)vTransform).GetHandle(channel); } // Obtain handles for animated properties DUCE.ResourceHandle hRadiusXAnimations = GetAnimationResourceHandle(RadiusXProperty, channel); DUCE.ResourceHandle hRadiusYAnimations = GetAnimationResourceHandle(RadiusYProperty, channel); DUCE.ResourceHandle hCenterAnimations = GetAnimationResourceHandle(CenterProperty, channel); // Pack & send command packet DUCE.MILCMD_ELLIPSEGEOMETRY data; unsafe { data.Type = MILCMD.MilCmdEllipseGeometry; data.Handle = _duceResource.GetHandle(channel); data.hTransform = hTransform; if (hRadiusXAnimations.IsNull) { data.RadiusX = RadiusX; } data.hRadiusXAnimations = hRadiusXAnimations; if (hRadiusYAnimations.IsNull) { data.RadiusY = RadiusY; } data.hRadiusYAnimations = hRadiusYAnimations; if (hCenterAnimations.IsNull) { data.Center = Center; } data.hCenterAnimations = hCenterAnimations; // Send packed command structure channel.SendCommand( (byte*)&data, sizeof(DUCE.MILCMD_ELLIPSEGEOMETRY)); } } } internal override DUCE.ResourceHandle AddRefOnChannelCore(DUCE.Channel channel) { if (_duceResource.CreateOrAddRefOnChannel(this, channel, System.Windows.Media.Composition.DUCE.ResourceType.TYPE_ELLIPSEGEOMETRY)) { Transform vTransform = Transform; if (vTransform != null) ((DUCE.IResource)vTransform).AddRefOnChannel(channel); AddRefOnChannelAnimations(channel); UpdateResource(channel, true /* skip "on channel" check - we already know that we're on channel */ ); } return _duceResource.GetHandle(channel); } internal override void ReleaseOnChannelCore(DUCE.Channel channel) { Debug.Assert(_duceResource.IsOnChannel(channel)); if (_duceResource.ReleaseOnChannel(channel)) { Transform vTransform = Transform; if (vTransform != null) ((DUCE.IResource)vTransform).ReleaseOnChannel(channel); 
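                // Also release the per-property animation resources that were added in
                // AddRefOnChannelCore via AddRefOnChannelAnimations.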
ReleaseOnChannelAnimations(channel); } } internal override DUCE.ResourceHandle GetHandleCore(DUCE.Channel channel) { // Note that we are in a lock here already. return _duceResource.GetHandle(channel); } internal override int GetChannelCountCore() { // must already be in composition lock here return _duceResource.GetChannelCount(); } internal override DUCE.Channel GetChannelCore(int index) { // Note that we are in a lock here already. return _duceResource.GetChannel(index); } #endregion Internal Methods //------------------------------------------------------ // // Internal Properties // //------------------------------------------------------ #region Internal Properties // // This property finds the correct initial size for the _effectiveValues store on the // current DependencyObject as a performance optimization // // This includes: // RadiusX // RadiusY // Center // internal override int EffectiveValuesInitialSize { get { return 3; } } #endregion Internal Properties //------------------------------------------------------ // // Dependency Properties // //------------------------------------------------------ #region Dependency Properties /// <summary> /// The DependencyProperty for the EllipseGeometry.RadiusX property. /// </summary> public static readonly DependencyProperty RadiusXProperty; /// <summary> /// The DependencyProperty for the EllipseGeometry.RadiusY property. /// </summary> public static readonly DependencyProperty RadiusYProperty; /// <summary> /// The DependencyProperty for the EllipseGeometry.Center property. /// </summary> public static readonly DependencyProperty CenterProperty; #endregion Dependency Properties //------------------------------------------------------ // // Internal Fields // //------------------------------------------------------ #region Internal Fields internal System.Windows.Media.Composition.DUCE.MultiChannelResource _duceResource = new System.Windows.Media.Composition.DUCE.MultiChannelResource(); internal const double c_RadiusX = 0.0; internal const double c_RadiusY = 0.0; internal static Point s_Center = new Point(); #endregion Internal Fields #region Constructors //------------------------------------------------------ // // Constructors // //------------------------------------------------------ static EllipseGeometry() { // We check our static default fields which are of type Freezable // to make sure that they are not mutable, otherwise we will throw // if these get touched by more than one thread in the lifetime // of your app. (Windows OS Bug #947272) // // Initializations Type typeofThis = typeof(EllipseGeometry); RadiusXProperty = RegisterProperty("RadiusX", typeof(double), typeofThis, 0.0, new PropertyChangedCallback(RadiusXPropertyChanged), null, /* isIndependentlyAnimated = */ true, /* coerceValueCallback */ null); RadiusYProperty = RegisterProperty("RadiusY", typeof(double), typeofThis, 0.0, new PropertyChangedCallback(RadiusYPropertyChanged), null, /* isIndependentlyAnimated = */ true, /* coerceValueCallback */ null); CenterProperty = RegisterProperty("Center", typeof(Point), typeofThis, new Point(), new PropertyChangedCallback(CenterPropertyChanged), null, /* isIndependentlyAnimated = */ true, /* coerceValueCallback */ null); } #endregion Constructors } }
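// Illustrative sketch (not part of the generated file above): constructing an
// EllipseGeometry and setting the three dependency properties it declares
// (Center, RadiusX, RadiusY). The namespace and helper below are hypothetical.
using System.Windows;
using System.Windows.Media;

namespace EllipseGeometryUsageSketch
{
    internal static class Example
    {
        internal static Geometry UnitCircleAtOrigin()
        {
            // The CLR wrappers route through GetValue/SetValueInternal as defined above;
            // defaults are Center = new Point(), RadiusX = 0.0, RadiusY = 0.0.
            return new EllipseGeometry { Center = new Point(0, 0), RadiusX = 1.0, RadiusY = 1.0 };
        }
    }
}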
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using System.Collections.Generic; using System.Linq; using NUnit.Framework; using osu.Framework.Testing; using osu.Game.Rulesets.Objects; using osu.Game.Rulesets.Osu; using osu.Game.Rulesets.Osu.Beatmaps; using osu.Game.Rulesets.Osu.Objects; using osu.Game.Screens.Edit; using osu.Game.Tests.Visual; namespace osu.Game.Tests.Beatmaps { [HeadlessTest] public class TestSceneEditorBeatmap : EditorClockTestScene { /// <summary> /// Tests that the addition event is correctly invoked after a hitobject is added. /// </summary> [Test] public void TestHitObjectAddEvent() { var hitCircle = new HitCircle(); HitObject addedObject = null; EditorBeatmap editorBeatmap = null; AddStep("add beatmap", () => { Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, }); editorBeatmap.HitObjectAdded += h => addedObject = h; }); AddStep("add hitobject", () => editorBeatmap.Add(hitCircle)); AddAssert("received add event", () => addedObject == hitCircle); } /// <summary> /// Tests that the removal event is correctly invoked after a hitobject is removed. /// </summary> [Test] public void HitObjectRemoveEvent() { var hitCircle = new HitCircle(); HitObject removedObject = null; EditorBeatmap editorBeatmap = null; AddStep("add beatmap", () => { Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, HitObjects = { hitCircle } }); editorBeatmap.HitObjectRemoved += h => removedObject = h; }); AddStep("remove hitobject", () => editorBeatmap.Remove(editorBeatmap.HitObjects.First())); AddAssert("received remove event", () => removedObject == hitCircle); } /// <summary> /// Tests that the changed event is correctly invoked after the start time of a hitobject is changed. /// This tests for hitobjects which were already present before the editor beatmap was constructed. /// </summary> [Test] public void TestInitialHitObjectStartTimeChangeEvent() { var hitCircle = new HitCircle(); HitObject changedObject = null; AddStep("add beatmap", () => { EditorBeatmap editorBeatmap; Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, HitObjects = { hitCircle } }); editorBeatmap.HitObjectUpdated += h => changedObject = h; }); AddStep("change start time", () => hitCircle.StartTime = 1000); AddAssert("received change event", () => changedObject == hitCircle); } /// <summary> /// Tests that the changed event is correctly invoked after the start time of a hitobject is changed. /// This tests for hitobjects which were added to an existing editor beatmap. 
/// </summary> [Test] public void TestAddedHitObjectStartTimeChangeEvent() { EditorBeatmap editorBeatmap = null; HitObject changedObject = null; AddStep("add beatmap", () => { Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, }); editorBeatmap.HitObjectUpdated += h => changedObject = h; }); var hitCircle = new HitCircle(); AddStep("add object", () => editorBeatmap.Add(hitCircle)); AddAssert("event not received", () => changedObject == null); AddStep("change start time", () => hitCircle.StartTime = 1000); AddAssert("event received", () => changedObject == hitCircle); } /// <summary> /// Tests that the channged event is not invoked after a hitobject is removed from the beatmap/ /// </summary> [Test] public void TestRemovedHitObjectStartTimeChangeEvent() { var hitCircle = new HitCircle(); var editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, HitObjects = { hitCircle } }); HitObject changedObject = null; editorBeatmap.HitObjectUpdated += h => changedObject = h; editorBeatmap.Remove(hitCircle); Assert.That(changedObject, Is.Null); hitCircle.StartTime = 1000; Assert.That(changedObject, Is.Null); } /// <summary> /// Tests that an added hitobject is correctly inserted to preserve the sorting order of the beatmap. /// </summary> [Test] public void TestAddHitObjectInMiddle() { var editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, HitObjects = { new HitCircle(), new HitCircle { StartTime = 1000 }, new HitCircle { StartTime = 1000 }, new HitCircle { StartTime = 2000 }, } }); var hitCircle = new HitCircle { StartTime = 1000 }; editorBeatmap.Add(hitCircle); Assert.That(editorBeatmap.HitObjects.Count(h => h == hitCircle), Is.EqualTo(1)); Assert.That(Array.IndexOf(editorBeatmap.HitObjects.ToArray(), hitCircle), Is.EqualTo(3)); } /// <summary> /// Tests that the beatmap remains correctly sorted after the start time of a hitobject is changed. /// </summary> [Test] public void TestResortWhenStartTimeChanged() { var hitCircle = new HitCircle { StartTime = 1000 }; var editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, HitObjects = { new HitCircle(), new HitCircle { StartTime = 1000 }, new HitCircle { StartTime = 1000 }, hitCircle, new HitCircle { StartTime = 2000 }, } }); hitCircle.StartTime = 0; Assert.That(editorBeatmap.HitObjects.Count(h => h == hitCircle), Is.EqualTo(1)); Assert.That(Array.IndexOf(editorBeatmap.HitObjects.ToArray(), hitCircle), Is.EqualTo(1)); } /// <summary> /// Tests that multiple hitobjects are updated simultaneously. /// </summary> [Test] public void TestMultipleHitObjectUpdate() { var updatedObjects = new List<HitObject>(); var allHitObjects = new List<HitObject>(); EditorBeatmap editorBeatmap = null; AddStep("add beatmap", () => { updatedObjects.Clear(); Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, }); for (int i = 0; i < 10; i++) { var h = new HitCircle(); editorBeatmap.Add(h); allHitObjects.Add(h); } }); AddStep("change all start times", () => { editorBeatmap.HitObjectUpdated += h => updatedObjects.Add(h); for (int i = 0; i < 10; i++) allHitObjects[i].StartTime += 10; }); // Distinct ensures that all hitobjects have been updated once, debounce is tested below. 
AddAssert("all hitobjects updated", () => updatedObjects.Distinct().Count() == 10); } /// <summary> /// Tests that hitobject updates are debounced when they happen too soon. /// </summary> [Test] public void TestDebouncedUpdate() { var updatedObjects = new List<HitObject>(); EditorBeatmap editorBeatmap = null; AddStep("add beatmap", () => { updatedObjects.Clear(); Child = editorBeatmap = new EditorBeatmap(new OsuBeatmap { BeatmapInfo = { Ruleset = new OsuRuleset().RulesetInfo, }, }); editorBeatmap.Add(new HitCircle()); }); AddStep("change start time twice", () => { editorBeatmap.HitObjectUpdated += h => updatedObjects.Add(h); editorBeatmap.HitObjects[0].StartTime = 10; editorBeatmap.HitObjects[0].StartTime = 20; }); AddAssert("only updated once", () => updatedObjects.Count == 1); } } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.DocumentationCommentFormatting; using Microsoft.CodeAnalysis.Editor.SignatureHelp; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Text; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Editor.CSharp.SignatureHelp { [ExportSignatureHelpProvider("AttributeSignatureHelpProvider", LanguageNames.CSharp)] internal partial class AttributeSignatureHelpProvider : AbstractCSharpSignatureHelpProvider { public override bool IsTriggerCharacter(char ch) { return ch == '(' || ch == ','; } public override bool IsRetriggerCharacter(char ch) { return ch == ')'; } private bool TryGetAttributeExpression(SyntaxNode root, int position, ISyntaxFactsService syntaxFacts, SignatureHelpTriggerReason triggerReason, CancellationToken cancellationToken, out AttributeSyntax attribute) { if (!CommonSignatureHelpUtilities.TryGetSyntax(root, position, syntaxFacts, triggerReason, IsTriggerToken, IsArgumentListToken, cancellationToken, out attribute)) { return false; } return attribute.ArgumentList != null; } private bool IsTriggerToken(SyntaxToken token) { return !token.IsKind(SyntaxKind.None) && token.ValueText.Length == 1 && IsTriggerCharacter(token.ValueText[0]) && token.Parent is AttributeArgumentListSyntax && token.Parent.Parent is AttributeSyntax; } private static bool IsArgumentListToken(AttributeSyntax expression, SyntaxToken token) { return expression.ArgumentList != null && expression.ArgumentList.Span.Contains(token.SpanStart) && token != expression.ArgumentList.CloseParenToken; } protected override async Task<SignatureHelpItems> GetItemsWorkerAsync(Document document, int position, SignatureHelpTriggerInfo triggerInfo, CancellationToken cancellationToken) { var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); AttributeSyntax attribute; if (!TryGetAttributeExpression(root, position, document.GetLanguageService<ISyntaxFactsService>(), triggerInfo.TriggerReason, cancellationToken, out attribute)) { return null; } var semanticModel = await document.GetSemanticModelForNodeAsync(attribute, cancellationToken).ConfigureAwait(false); var attributeType = semanticModel.GetTypeInfo(attribute, cancellationToken).Type as INamedTypeSymbol; if (attributeType == null) { return null; } var within = semanticModel.GetEnclosingNamedTypeOrAssembly(position, cancellationToken); if (within == null) { return null; } var symbolDisplayService = document.Project.LanguageServices.GetService<ISymbolDisplayService>(); var accessibleConstructors = attributeType.InstanceConstructors .Where(c => c.IsAccessibleWithin(within)) .FilterToVisibleAndBrowsableSymbols(document.ShouldHideAdvancedMembers(), semanticModel.Compilation) .Sort(symbolDisplayService, semanticModel, attribute.SpanStart); if (!accessibleConstructors.Any()) { return null; } var anonymousTypeDisplayService = document.Project.LanguageServices.GetService<IAnonymousTypeDisplayService>(); var documentationCommentFormatter = 
document.Project.LanguageServices.GetService<IDocumentationCommentFormattingService>(); var textSpan = SignatureHelpUtilities.GetSignatureHelpSpan(attribute.ArgumentList); var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>(); return CreateSignatureHelpItems(accessibleConstructors.Select(c => Convert(c, within, attribute, semanticModel, symbolDisplayService, anonymousTypeDisplayService, documentationCommentFormatter, cancellationToken)), textSpan, GetCurrentArgumentState(root, position, syntaxFacts, textSpan, cancellationToken)); } public override SignatureHelpState GetCurrentArgumentState(SyntaxNode root, int position, ISyntaxFactsService syntaxFacts, TextSpan currentSpan, CancellationToken cancellationToken) { AttributeSyntax expression; if (TryGetAttributeExpression(root, position, syntaxFacts, SignatureHelpTriggerReason.InvokeSignatureHelpCommand, cancellationToken, out expression) && currentSpan.Start == SignatureHelpUtilities.GetSignatureHelpSpan(expression.ArgumentList).Start) { return SignatureHelpUtilities.GetSignatureHelpState(expression.ArgumentList, position); } return null; } private SignatureHelpItem Convert( IMethodSymbol constructor, ISymbol within, AttributeSyntax attribute, SemanticModel semanticModel, ISymbolDisplayService symbolDisplayService, IAnonymousTypeDisplayService anonymousTypeDisplayService, IDocumentationCommentFormattingService documentationCommentFormatter, CancellationToken cancellationToken) { var position = attribute.SpanStart; var namedParameters = constructor.ContainingType.GetAttributeNamedParameters(semanticModel.Compilation, within) .OrderBy(s => s.Name) .ToList(); var isVariadic = constructor.Parameters.Length > 0 && constructor.Parameters.Last().IsParams && namedParameters.Count == 0; var item = CreateItem( constructor, semanticModel, position, symbolDisplayService, anonymousTypeDisplayService, isVariadic, constructor.GetDocumentationParts(semanticModel, position, documentationCommentFormatter, cancellationToken), GetPreambleParts(constructor, semanticModel, position), GetSeparatorParts(), GetPostambleParts(constructor), GetParameters(constructor, semanticModel, position, namedParameters, documentationCommentFormatter, cancellationToken)); return item; } private IEnumerable<SignatureHelpParameter> GetParameters( IMethodSymbol constructor, SemanticModel semanticModel, int position, IList<ISymbol> namedParameters, IDocumentationCommentFormattingService documentationCommentFormatter, CancellationToken cancellationToken) { foreach (var parameter in constructor.Parameters) { yield return Convert(parameter, semanticModel, position, documentationCommentFormatter, cancellationToken); } for (int i = 0; i < namedParameters.Count; i++) { var namedParameter = namedParameters[i]; var type = namedParameter is IFieldSymbol ? ((IFieldSymbol)namedParameter).Type : ((IPropertySymbol)namedParameter).Type; var displayParts = new List<SymbolDisplayPart>(); displayParts.Add(new SymbolDisplayPart( namedParameter is IFieldSymbol ? 
SymbolDisplayPartKind.FieldName : SymbolDisplayPartKind.PropertyName, namedParameter, namedParameter.Name.ToIdentifierToken().ToString())); displayParts.Add(Space()); displayParts.Add(Punctuation(SyntaxKind.EqualsToken)); displayParts.Add(Space()); displayParts.AddRange(type.ToMinimalDisplayParts(semanticModel, position)); yield return new SignatureHelpParameter( namedParameter.Name, isOptional: true, documentation: namedParameter.GetDocumentationParts(semanticModel, position, documentationCommentFormatter, cancellationToken), displayParts: displayParts, prefixDisplayParts: GetParameterPrefixDisplayParts(i)); } } private static List<SymbolDisplayPart> GetParameterPrefixDisplayParts(int i) { if (i == 0) { return new List<SymbolDisplayPart> { new SymbolDisplayPart(SymbolDisplayPartKind.Text, null, CSharpEditorResources.Properties), Punctuation(SyntaxKind.ColonToken), Space() }; } return null; } private IEnumerable<SymbolDisplayPart> GetPreambleParts( IMethodSymbol method, SemanticModel semanticModel, int position) { var result = new List<SymbolDisplayPart>(); result.AddRange(method.ContainingType.ToMinimalDisplayParts(semanticModel, position)); result.Add(Punctuation(SyntaxKind.OpenParenToken)); return result; } private IEnumerable<SymbolDisplayPart> GetPostambleParts(IMethodSymbol method) { yield return Punctuation(SyntaxKind.CloseParenToken); } } }
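// Rendering sketch (assumption): for a hypothetical attribute usage such as
// [MyAttribute("text", Flag = true)], the items built above show the constructor
// parameters first, followed by the settable named parameters sorted by name, with the
// first named parameter carrying the "Properties:" prefix, roughly:
//     MyAttribute(string message, Properties: Flag = bool, Name = string)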
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Text; using System.Windows.Forms; using DevExpress.XtraEditors; using System.IO; using Trionic5Tools; namespace T5Suite2 { public partial class frmPartnumberLookup : DevExpress.XtraEditors.XtraForm { private bool m_open_File = false; private bool m_compare_File = false; public bool Compare_File { get { return m_compare_File; } set { m_compare_File = value; } } public bool Open_File { get { return m_open_File; } set { m_open_File = value; } } public frmPartnumberLookup() { InitializeComponent(); } private void ConvertPartNumber() { PartNumberConverter pnc = new PartNumberConverter(); ECUInformation ecuinfo = pnc.GetECUInfo(buttonEdit1.Text, ""); lblBaseBoost.Text = "---"; lblCarModel.Text = "---"; lblEngineType.Text = "---"; lblMaxBoostAUT.Text = "---"; lblMaxBoostManual.Text = "---"; lblPower.Text = "---"; lblStageI.Text = "---"; lblStageII.Text = "---"; lblStageIII.Text = "---"; lblTorque.Text = "---"; checkEdit1.Checked = false; checkEdit2.Checked = false; checkEdit3.Checked = false; checkEdit4.Checked = false; checkEdit5.Checked = false; checkEdit6.Checked = false; lblRegion.Text = "---"; lblMYs.Text = "---"; lblEcuType.Visible = false; lblEcuType.Text = "---"; if (ecuinfo.Valid) { lblBaseBoost.Text = ecuinfo.Baseboost.ToString() + " bar"; lblCarModel.Text = ecuinfo.Carmodel.ToString(); lblEngineType.Text = ecuinfo.Enginetype.ToString(); lblMaxBoostAUT.Text = ecuinfo.Max_stock_boost_automatic.ToString() + " bar"; lblMaxBoostManual.Text = ecuinfo.Max_stock_boost_manual.ToString() + " bar"; lblPower.Text = ecuinfo.Bhp.ToString() + " bhp"; lblStageI.Text = ecuinfo.Stage1boost.ToString() + " bar"; lblStageII.Text = ecuinfo.Stage2boost.ToString() + " bar"; lblStageIII.Text = ecuinfo.Stage3boost.ToString() + " bar"; lblEcuType.Text = ecuinfo.Ecutype; if (lblEcuType.Text == "T5.2") lblEcuType.Visible = true; if (ecuinfo.MakeYearFrom != ecuinfo.MakeYearUpto) { lblMYs.Text = ecuinfo.MakeYearFrom.ToString() + "-" + ecuinfo.MakeYearUpto.ToString(); } else { lblMYs.Text = ecuinfo.MakeYearFrom.ToString(); } lblRegion.Text = ecuinfo.Region; checkEdit6.Checked = ecuinfo.HighAltitude; if (ecuinfo.Is2point3liter) { checkEdit1.Checked = false; checkEdit2.Checked = true; } else { checkEdit1.Checked = true; checkEdit2.Checked = false; } if (ecuinfo.Isturbo) checkEdit4.Checked = true; if (ecuinfo.Isfpt) { checkEdit5.Checked = true; checkEdit4.Checked = true; } if (ecuinfo.Isaero) { checkEdit3.Checked = true; checkEdit4.Checked = true; checkEdit5.Checked = true; } lblTorque.Text = ecuinfo.Torque.ToString() + " Nm"; if (System.IO.File.Exists(Path.Combine(Application.StartupPath, "Binaries\\" + buttonEdit1.Text + ".BIN"))) { simpleButton2.Enabled = true; simpleButton3.Enabled = true; } else { simpleButton2.Enabled = false; simpleButton3.Enabled = false; } } else { MessageBox.Show("The entered partnumber was not recognized by T5Suite"); } } private void buttonEdit1_ButtonClick(object sender, DevExpress.XtraEditors.Controls.ButtonPressedEventArgs e) { //ConvertPartNumber(); frmPartNumberList pnl = new frmPartNumberList(); pnl.ShowDialog(); if (pnl.Selectedpartnumber != null) { if (pnl.Selectedpartnumber != string.Empty) { buttonEdit1.Text = pnl.Selectedpartnumber; } } if (buttonEdit1.Text != "") { ConvertPartNumber(); } else { simpleButton2.Enabled = false; simpleButton3.Enabled = false; } } private void simpleButton1_Click(object sender, EventArgs e) { this.Close(); } private void 
buttonEdit1_KeyDown(object sender, KeyEventArgs e) { if (e.KeyCode == Keys.Enter) { ConvertPartNumber(); } } internal void LookUpPartnumber(string p) { buttonEdit1.Text = p; ConvertPartNumber(); } public string GetFileToOpen() { string retval = string.Empty; if (buttonEdit1.Text != string.Empty) { if (System.IO.File.Exists(Path.Combine(Application.StartupPath, "Binaries\\" + buttonEdit1.Text + ".BIN"))) { retval = Path.Combine(Application.StartupPath, "Binaries\\" + buttonEdit1.Text + ".BIN"); } } return retval; } private void simpleButton2_Click(object sender, EventArgs e) { m_open_File = true; this.Close(); } private void simpleButton3_Click(object sender, EventArgs e) { m_compare_File = true; this.Close(); } internal void DisableOpenButtons() { // for lookup only simpleButton2.Visible = false; simpleButton3.Visible = false; } } }
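// Caller sketch (hypothetical, based on the public members above): the form is shown
// modally and the Open_File / Compare_File flags report which button closed it.
//     using (var dlg = new frmPartnumberLookup())
//     {
//         dlg.ShowDialog();
//         if (dlg.Open_File)
//             OpenBinaryFile(dlg.GetFileToOpen());      // OpenBinaryFile is an assumed helper
//         else if (dlg.Compare_File)
//             CompareBinaryFile(dlg.GetFileToOpen());   // CompareBinaryFile is an assumed helper
//     }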
using System; using System.Collections.Specialized; using System.IO; using System.IO.Compression; using System.Net.Mime; using System.Security.Cryptography; using System.Threading; using System.Data; using System.Data.Common; using System.Text; using System.Text.RegularExpressions; using System.Xml; //using Mono; //using FirebirdSql.Data.FirebirdClient; //using Mono.Data.Sqlite; //using MongoDB.Driver; //using MongoDB.Bson; using System.Configuration; using Kraken.Util; using Kraken.Core; using Kraken.Http; using Http; using WebDav; namespace Kraken.CommandLine { /// <summary> /// Main class. /// /// The command line version of kraken. /// /// The following are the available kraken commands. /// /// kraken init <root_name:required> /// ================================ /// /// This will start treating the folder hierarchy as a repo to be managed (i.e. the files saved here can be stored /// into the hierarchy). /// /// The first first time, a .kraken/ repo will be created under your home directory (future might be configurable). /// /// The root_name argument gives you a top-level identifier that you can use to identify the particular file. /// /// The following example clarifies a bit more of the usage /// /// $ cd ~/docs/hr /// $ kraken init hr # now ~/docs/hr is mapped to hr /// $ kraken save # all files within ~/docs/hr will now be committed into the repo /// $ cd # back to home directory /// $ kraken ls hr # list the files mapped under hr /// $ kraken restore hr ~/temp/hr # create a new copy of hr stored in kraken /// /// kraken save /// =========== /// /// This will pull the tree and push any differences into the kraken repo. /// /// Note - the kraken repo differs by default from repos like git, that files are managed independently /// (i.e. things are not snapshotted together). /// /// </summary> class MainClass { string rootPath; IniFile iniFile; PathStore pathStore; HttpServer server; MimeTypeRegistry mimeTypes = new MimeTypeRegistry(); protected MainClass() { ensureKrakenBase(); } public static void Main(string[] args) { MainClass app = new MainClass(); if (args.Length == 0) { app.showUsage(); } else { switch (args [0]) { case "init": app.initRepo(args); break; case "save": app.savePath(args); break; case "restore": app.restorePath(args); break; case "ls": app.listPaths(args); break; case "list": app.listPaths(args); break; case "li": app.listPaths(args); break; case "raw": app.getPath(args); break; case "checksum": app.checksum(args); break; case "http": app.callHttp(args); break; default: app.unknownCommand(args [0]); break; } } } void initRepo(string[] args) { if (args.Length == 1) { Console.WriteLine("krakan init <root_name> --> required"); return; } // okay - we are here now - we should start to track a list of mappings of the name against // the current path is... string rootName = args[1]; string dirPath = Directory.GetCurrentDirectory(); pathStore.RootPathMap.Add(rootName, dirPath); } void savePath(string[] args) { if (args.Length < 3) { Console.WriteLine("krakan save <from_local_path> <to_kraken_path> --> required"); return; } string fromPath = args [1]; string toPath = args [2]; if (File.Exists(fromPath)) { if (pathStore.IsDirectory(toPath)) { // let's append the fileName to the end of the toPath. 
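            // For example (assuming ChangePathDirectory keeps the file name and only swaps
            // the directory): "kraken save ./notes.txt hr/docs" stores the file as
            // "hr/docs/notes.txt" rather than replacing the directory entry itself.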
pathStore.SaveOnePath(fromPath, FileUtil.ChangePathDirectory(fromPath, toPath)); } else { pathStore.SaveOnePath(fromPath, toPath); } } else if (Directory.Exists(fromPath)) { if (pathStore.IsBlob(toPath)) { Console.WriteLine("Cannot save a folder into a file: {0} is a folder, and {1} is a file", fromPath, toPath); return; } else if (pathStore.IsDirectory(toPath)) { Console.Write("Folder {0} exists - do you want to [m]erge or [r]eplace? [m/r] ", toPath); string answer = Console.ReadLine().Trim().ToLower(); if (answer == "m") { pathStore.SaveFolder(fromPath, toPath); } else if (answer == "r") { Console.WriteLine("Replace is currently unsupported yet (soon!)"); } else { Console.WriteLine("Unknown response."); return; } } else { pathStore.SaveFolder(fromPath, toPath); } } else { Console.WriteLine("kraken save: path does not exist {0}", fromPath); } } void restorePath(string[] args) { if (args.Length < 3) { Console.WriteLine("kraken restore <from_kraken_path> <to_local_path> --> required"); return; } string fromPath = args [1]; string toPath = args [2]; // for now let's handle the single file case... // also - how do we know whether or not it's a path or a directory? // we also should remember the rules of the target. if (pathStore.IsBlob(fromPath)) { if (Directory.Exists(toPath)) { pathStore.RestoreOnePath(fromPath, FileUtil.ChangePathDirectory(fromPath, toPath)); return; } else if (File.Exists(toPath)) { // this means we are overwriting the file. Console.Write("File {0} exists - do you want to overwrite? [y/n] ", toPath); string answer = Console.ReadLine().Trim().ToLower(); if (answer == "y") { pathStore.RestoreOnePath(fromPath, toPath); } else { return; } } else { pathStore.RestoreOnePath(fromPath, toPath); } } else if (pathStore.IsDirectory(fromPath)) { // now we will need to restore path. if (File.Exists(toPath)) { Console.WriteLine("Cannot restore a directory into a file: {0} is a directory, and {1} is a file", fromPath, toPath); } else if (Directory.Exists(toPath)) { } else { // the simplest mode... pathStore.RestoreFolder(fromPath, toPath); } } else { // neither Console.WriteLine("Path {0} is not in the repo", fromPath); } } void listPaths(string[] args) { string listPath = "."; int depth = 0; if (args.Length > 1) { listPath = args [1]; if (args.Length > 2) { if (args[2].ToLower() == "infinity") { depth = int.MaxValue; } else { depth = int.Parse(args[2]); } } } foreach (string path in pathStore.ListPaths(listPath, depth)) { Console.WriteLine(" {0}", path); } } void getPath(string[] args) { if (args.Length == 1) { Console.WriteLine("kraken raw <kraken_repo_path> --> required"); return; } // once we have the path. string path = args [1]; if (pathStore.IsBlob(path)) { BlobStream blob = pathStore.GetBlob(path); blob.CopyTo(Console.OpenStandardOutput()); return; } else if (pathStore.IsDirectory(path)) { Console.WriteLine("{0} is a directory", path); return; } else { Console.WriteLine("{0} is not in the repo", path); return; } } void checksum(string[] args) { if (args.Length == 1) { Console.WriteLine("kraken checksum <local_path> --> required"); return; } } void callHttp(string[] args) { // ***THIS IS EXPERIMENTAL RIGHT NOW*** i.e. not hooked up. // we'll need to "block" the call. 
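            // The embedded HttpServer below registers GET/PUT/DELETE/PROPFIND handlers on
            // "/path..." and then blocks on Console.ReadKey() until a key is pressed.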
if (args.Length < 2) { Console.WriteLine("kraken http <start|stop> --> required"); return; } if (server == null) { server = new HttpServer("http://*:8080/"); server.AddRoute("get", "/path...", this.httpGetPath); server.AddRoute("put", "/path...", this.httpPutPath2); server.AddRoute("delete", "/path...", this.httpDeletePath); server.AddRoute("propfind", "/path...", this.httpPropfindPath); } server.Start(); Console.WriteLine("kraken http is being developed - this is experimental"); Console.WriteLine("Server Listening - Press any key to stop..."); Console.ReadKey(); } void httpGetPath(HttpContext context) { var path = context.UrlParams ["path"]; Console.WriteLine("GET: {0}", path); if (pathStore.IsBlob(path)) { using (BlobStream blob = pathStore.GetBlob(path)) { if (context.Request.Headers["If-None-Match"] == blob.Envelope.Checksum) { context.Response.StatusCode = 304; context.Response.Headers["ETag"] = blob.Envelope.Checksum; context.Response.Headers["Server"] = "Kraken/0.1"; string mimeType = mimeTypes.PathToMimeType(path); if (!string.IsNullOrEmpty(mimeType)) context.Response.ContentType = mimeType; Console.WriteLine("ContentType: {0} -> {1} => {2}", path, mimeType, context.Response.ContentType); context.Response.SetOutput(""); blob.Close(); Console.WriteLine("got here"); } else { context.Response.StatusCode = 200; context.Response.Headers["ETag"] = blob.Envelope.Checksum; context.Response.Headers["Server"] = "Kraken/0.1"; string mimeType = mimeTypes.PathToMimeType(path); if (!string.IsNullOrEmpty(mimeType)) context.Response.ContentType = mimeType; Console.WriteLine("ContentType: {0} -> {1} => {2}", path, mimeType, context.Response.ContentType); context.Response.SetOutput(blob); } } } else { throw new HttpException(404); } } void httpPutPath2(HttpContext context) { string path = context.UrlParams ["path"]; // let's convert the path into something that can be viewed. Regex slashRE = new Regex(@"\/"); string normalized = path.Replace("/", "_"); Directory.CreateDirectory(System.IO.Path.Combine(rootPath, "uploads")); string tempFilePath = FileUtil.TempFilePath(System.IO.Path.Combine(rootPath, "uploads", normalized)); using (FileStream fs = File.Open(tempFilePath, FileMode.CreateNew, FileAccess.Write, FileShare.None)) { context.Request.InputStream.CopyTo(fs); // it does look like that we should handle the input stream. // also it looks like chunked is automatically processed by HttpListenerRequest } context.Response.Respond(201); } void httpPutPath(HttpContext context) { // in this particular case we'll put up one file... // and we'll store the file into the location pinpoint at the place. // if successful return 201 (with ETag set). string path = context.UrlParams ["path"]; try { pathStore.SaveStream(context.Request.InputStream, path); context.Response.Respond(201); } catch (Exception e) { Console.WriteLine("PUT: {0} ERROR: {1}", path, e); throw new HttpException(500, "PUT FAILED: {0}", e); } } void httpDeletePath(HttpContext context) { string path = context.UrlParams ["path"]; if (pathStore.IsDirectory(path)) { pathStore.DeleteFolder(path); context.Response.Respond(204); } else if (pathStore.IsBlob(path)) { pathStore.DeletePath(path); context.Response.Respond(204); } else { // doesn't exist - it's a NO OP. context.Response.Respond(204); } } void httpPropfindPath(HttpContext context) { string path = context.UrlParams ["path"]; // we need to be able to parse the XML for manipulation... // if there are no XML - do we consider this a BAD request? 
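            // Current behavior: any payload that is not "application/xml" is rejected with
            // 400 (payload_not_xml); a recognised request is answered with a canned 207
            // multi-status body read from testpropfind.xml.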
if (context.Request.ContentType != "application/xml") { throw new HttpException(400, "payload_not_xml"); } WebDav.Factory factory = new Factory(); WebDav.Request req = factory.ParseRequest(context.Request); // propname -> return a list of available propnames. // we can now parse the reqest -> we'll need to further determine whether or not we are looking for // a particular depth. string resp = File.ReadAllText("testpropfind.xml", Encoding.UTF8); context.Response.ContentType = "application/xml"; context.Response.Respond(207, resp); } void httpMakeCollection(HttpContext context) { string path = context.UrlParams["path"]; } void ensureKrakenBase() { rootPath = System.IO.Path.Combine(FileUtil.GetHomeDirectory(), ".kraken"); string iniPath = System.IO.Path.Combine(rootPath, "kraken.ini"); if (File.Exists(iniPath)) { iniFile = new IniFile(iniPath); pathStore = new PathStore(iniFile.GetSection("base")); } else { iniFile = new IniFile("./kraken.ini"); if (!iniFile.Contains("base", "rootPath")) iniFile.Add("base", "rootPath", rootPath); pathStore = new PathStore(iniFile.GetSection("base")); // how do we ensure that things are under the home directory? iniFile.Save(iniPath); // this is for managing multiple repos... at the end it simply maps to a single path // directory... but things won't start right underneath the pathRoot. // when we do a save - we wouldn't know what has changed by default... do we? // i.e. how can we do this part fast? } } void unknownCommand(string command) { Console.WriteLine("Unknown command: {0}", command); showUsage(); } void showUsage() { Console.WriteLine("Usage: kraken <command> <args>"); } } }
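// Additional command examples (derived from the usage strings above; "hr" is the root
// name used in the class comment):
//     kraken save ./notes.txt hr/notes.txt      # save one file into the repo path
//     kraken restore hr/notes.txt ./notes.txt   # restore it back to the local disk
//     kraken ls hr 2                            # list paths under "hr" to depth 2 ("infinity" for all)
//     kraken raw hr/notes.txt                   # stream the stored blob to stdout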
using System; using System.Collections.Generic; using System.Drawing; using System.Drawing.Drawing2D; using System.Linq; using System.Threading; using System.Windows.Forms; using Ai2dShooter.Common; using Ai2dShooter.Controller; using Ai2dShooter.Map; using Ai2dShooter.Properties; using Ai2dShooter.View; namespace Ai2dShooter.Model { /// <summary> /// Represents an agent on the map. /// </summary> public abstract class Player { #region Events public delegate void OnHealthChanged(); public event OnHealthChanged HealthChanged; public delegate void OnLocationChanged(); public event OnLocationChanged LocationChanged; public delegate void OnDeath(); public event OnDeath Death; public delegate void OnKillsChanged(); public event OnKillsChanged KillsChanged; public delegate void OnAmmoChanged(); public event OnAmmoChanged AmmoChanged; #endregion #region Public Fields public bool UsesKnife { get { return Ammo == 0; } } public const int MaxAmmo = 3; public int Ammo { get { return _ammo; } set { if (_ammo == value) return; _ammo = value; if (AmmoChanged != null) AmmoChanged(); } } public int Kills { get { return _kills; } private set { if (_kills == value) return; _kills = value; if (KillsChanged != null) KillsChanged(); } } public double ShootingAccuracy { get; private set; } public int Slowness { get; private set; } public bool IsAlive { get { return Health > 0; } } public int Health { get { return _health; } private set { if (_health == value || !PlayerExists) return; // update value _health = value; // trigger event if (HealthChanged != null) HealthChanged(); if (Health > 0 || Death == null) return; Death(); if (MainForm.Instance.PlaySoundEffects) MainForm.Instance.Invoke((MethodInvoker) (() => Constants.DeathSound.Play())); } } public int HealthyThreshold { get; private set; } public int FrontDamage { get; private set; } public int BackDamage { get; private set; } public double HeadshotChance { get; private set; } public string Name { get; private set; } public Cell Location { get { return _location; } set { if (_location == value || !IsAlive || !PlayerExists) return; lock (Constants.MovementLock) { // update value _location = value; } // trigger event if (LocationChanged != null) LocationChanged(); } } public Color Color { get; private set; } public Teams Team { get; private set; } public PlayerController Controller { get; private set; } public Direction Orientation { get { return _orientation; } private set { if (!IsAlive) return; _orientation = value; } } /// <summary> /// Returns all cells that are within the visibility range. /// </summary> public IEnumerable<Cell> VisibleReachableCells { get { var testedCells = new List<Cell> {Location}; var cells = new List<Cell> {Location}; var stack = new Stack<Cell>(); stack.Push(Location); var distances = new Dictionary<Cell, int> {{Location, 0}}; while (stack.Count > 0) { var currentCell = stack.Pop(); foreach ( var s in currentCell.Neighbors.Where( s => s != null && s.IsClear && distances[currentCell] + 1 <= Constants.Visibility && !testedCells.Contains(s))) { stack.Push(s); cells.Add(s); testedCells.Add(s); distances[s] = distances[currentCell] + 1; } } return cells.ToArray(); } } /// <summary> /// Another (friendly) player that is followed by the current player. 
/// </summary> public Player FollowedPlayer { get; protected set; } #endregion #region Private/Protected Fields private static readonly string[] PlayerNames = Resources.names.Split('\r'); private int _health; private Cell _location; protected PointF LocationOffset; private Direction _orientation; private int _kills; protected bool PlayerExists = true; private int _ammo; protected bool IsMoving { get; set; } #endregion #region Constructor protected Player(Cell initialLocation, PlayerController controller, Teams team) { // initialize values from parameters _location = initialLocation; Controller = controller; Team = team; // initialize fixed values Health = 100; Color = Utils.GetTeamColor(team); Ammo = MaxAmmo; // initialize random values ShootingAccuracy = ((double) Constants.Rnd.Next(3) + 17)/20; // 85-95% Slowness = Constants.Rnd.Next(200, 350); HealthyThreshold = Constants.Rnd.Next(10, 50); BackDamage = Constants.Rnd.Next(35, 75); FrontDamage = Constants.Rnd.Next(35, BackDamage); HeadshotChance = ((double) Constants.Rnd.Next(2, 5))/20; // 10-20% Name = PlayerNames[Constants.Rnd.Next(PlayerNames.Length)].Substring(1); Orientation = (Direction) Constants.Rnd.Next((int) Direction.Count); // start movement thread new Thread(MovementWorker).Start(); } #endregion #region Implemented Methods /// <summary> /// After stopping the game, notify the players' worker threads to stop working. /// </summary> public void RemovePlayer() { PlayerExists = false; } private void MovementWorker() { // loop while application is running while (MainForm.Instance.ApplicationRunning && PlayerExists) { // zzzzzzzzZZZZZZZZZZzzzzzz if (!IsMoving) { Thread.Sleep(Constants.Framelength); continue; } // calculate step number var stepCount = (float)Slowness / Constants.Framelength; // get offset in right direction PointF stepOffset = Utils.GetDirectionPoint(Orientation); // divide offset to get offset for single step stepOffset = new PointF(stepOffset.X / stepCount, stepOffset.Y / stepCount); // do the steps int i; for (i = 0; i < stepCount && IsMoving && MainForm.Instance.ApplicationRunning && IsAlive && PlayerExists; i++) { LocationOffset.X += stepOffset.X; LocationOffset.Y += stepOffset.Y; Thread.Sleep(Constants.Framelength); } if (!IsMoving) { for (; i >= 0 && MainForm.Instance.ApplicationRunning && IsAlive && PlayerExists; i -= 2) { LocationOffset.X -= 2 * stepOffset.X; LocationOffset.Y -= 2 * stepOffset.Y; Thread.Sleep(Constants.Framelength); } // clear offset LocationOffset = Point.Empty; //Console.WriteLine(this + " had his movement aborted"); continue; } // clear offset LocationOffset = Point.Empty; // stop moving IsMoving = false; //Console.WriteLine("Changing location"); Location = Location.GetNeighbor(Orientation); } } protected void AbortMovement() { IsMoving = false; } public override string ToString() { return Name + " " + Location; } public void DrawPlayer(Graphics graphics, int scaleFactor) { // box in which to draw the player var box = new Rectangle((int) ((Location.X + LocationOffset.X)*scaleFactor) - 1, (int) ((Location.Y + LocationOffset.Y)*scaleFactor) - 1, scaleFactor + 1, scaleFactor + 1); // draw opponent circle graphics.FillEllipse(new SolidBrush(Color.FromArgb(IsAlive ? 
255 : Constants.DeadAlpha, Color)), box); // start of the orientation line var orientationStart = new Point(box.Left + box.Width/2, box.Top + box.Height/2); // get end of the orientation line (depending on orientation) Point orientationEnd; switch (Orientation) { case Direction.North: orientationEnd = new Point((box.Left + box.Right)/2, box.Top); break; case Direction.East: orientationEnd = new Point(box.Right, (box.Bottom + box.Top)/2); break; case Direction.South: orientationEnd = new Point((box.Left + box.Right)/2, box.Bottom); break; case Direction.West: orientationEnd = new Point(box.Left, (box.Bottom + box.Top)/2); break; default: throw new ArgumentOutOfRangeException(); } // draw orientation line graphics.DrawLine(new Pen(Color.FromArgb(IsAlive ? 255 : Constants.DeadAlpha, Color.Black), 4), orientationStart, orientationEnd); // draw opponent visibility range if (Controller != PlayerController.Human && IsAlive && !MainForm.Instance.HasLivingHumanPlayer) { foreach (var c in VisibleReachableCells) graphics.FillRectangle( new HatchBrush(HatchStyle.DiagonalCross, Color.FromArgb(127, Color), Color.FromArgb(0)), new Rectangle(c.X*Constants.ScaleFactor, c.Y*Constants.ScaleFactor, Constants.ScaleFactor, Constants.ScaleFactor)); } if (!MainForm.Instance.HasLivingHumanPlayer || Controller == PlayerController.Human) DrawPlayerImplementation(graphics, scaleFactor, box); } /// <summary> /// Determines whether the player can move in a specific direction. /// </summary> /// <param name="direction">Direction to potentially move to</param> /// <returns>True if the neighbor in the direction is part of the maze and is no wall</returns> public bool CanMove(Direction direction) { var c = Location.GetNeighbor(direction); return c != null && c.IsClear; } /// <summary> /// Move in a specific direction. /// </summary> /// <param name="direction">Direction to move to</param> public void Move(Direction direction) { if (!CanMove(direction)) throw new ArgumentException("Illegal move in direction " + direction); if (GameController.Instance == null) return; GameController.Instance.CheckForOpponents(this); // abort if we're already moving if (IsMoving) return; // assign to backing field because locationchanged will be triggered when updating location Orientation = direction; // tell movement thread to start moving IsMoving = true; //Console.WriteLine(this + " is moving towards " + Location.GetNeighbor(direction)); } /// <summary> /// Handle damage received by the player. /// </summary> /// <param name="opponent">Opponent that dealt the damage</param> /// <param name="damage">Amount of damage</param> /// <param name="frontalAttack">True if the damage was dealt from the front</param> /// <param name="headshot">True if the damage was to the head</param> /// <param name="knife">True if the damage was dealth with a knife</param> public void Damage(Player opponent, int damage, bool frontalAttack, bool headshot, bool knife) { if (!PlayerExists) return; // ensure we're still allowed to shoot (the game isn't paused) lock (Constants.ShootingLock) { // abort if the game was terminated if (GameController.Instance == null || !GameController.Instance.GameRunning) return; } // reduce life Health -= damage <= Health ? damage : Health; Console.WriteLine(this + " has taken " + damage + " damage from " + opponent + " by " + (knife ? "knife" : "gun") + " from " + (frontalAttack ? "the front" : "the back") + (headshot ? ", it was a HEADSHOT!" 
: "")); // play appropriate sound if (MainForm.Instance.PlaySoundEffects) { MainForm.Instance.Invoke((MethodInvoker) (() => { if (knife) { if (damage == 0) Constants.KnifeMissSound.Play(); else Constants.KnifeHitSound.Play(); } else { // play sounds if (headshot) Constants.HeadshotSound.Play(); if (damage == 0) Constants.MissSound.Play(); else if (damage > 55) Constants.HardHitSound.Play(); else if (damage > 45) Constants.MediumHitSound.Play(); else Constants.LowHitSound.Play(); } })); } // retaliate! if (!IsAlive) { Console.WriteLine(this + " has died!"); // notify opponent of death opponent.KilledEnemy(); return; } // turn towards opponent Orientation = Location.GetDirection(opponent.Location); // zzz Thread.Sleep(Constants.ShootingTimeout); // prepare own hit var hit = Constants.Rnd.NextDouble() < ShootingAccuracy; var hs = hit && (Constants.Rnd.NextDouble() < HeadshotChance); var knifeHit = UsesKnife; if (!UsesKnife) Ammo--; opponent.Damage(this, (int)((hit ? 1 : 0) * FrontDamage * (hs ? 2 : 1) * (knifeHit ? 0.5 : 1)), true, hs, knifeHit); } #endregion #region Abstract Methods /// <summary> /// Prepare implementation at the beginning of the game. /// </summary> public abstract void StartGame(); /// <summary> /// Notify the player that he's spotted an enemy. /// </summary> public abstract void EnemySpotted(); /// <summary> /// Notify the player that he's been spotted. /// </summary> public abstract void SpottedByEnemy(); /// <summary> /// Draw implementation-specific information. /// </summary> /// <param name="graphics">Graphics used to draw</param> /// <param name="scaleFactor">How big it should be</param> /// <param name="box">Box around the player</param> protected abstract void DrawPlayerImplementation(Graphics graphics, int scaleFactor, Rectangle box); /// <summary> /// The player has killed an opponent. /// </summary> public virtual void KilledEnemy() { Kills++; if (MainForm.Instance.PlaySoundEffects && Kills == 3) MainForm.Instance.Invoke((MethodInvoker) (() => Constants.TripleKillSound.Play())); } #endregion } }
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Globalization; using System.Linq; using Roslyn.Test.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.UnitTests { public class CommonCommandLineParserTests : TestBase { private const int EN_US = 1033; private void VerifyCommandLineSplitter(string commandLine, string[] expected) { string[] actual = CommandLineSplitter.SplitCommandLine(commandLine); Assert.Equal(expected.Length, actual.Length); for (int i = 0; i < actual.Length; ++i) { Assert.Equal(expected[i], actual[i]); } } private RuleSet ParseRuleSet(string source, params string[] otherSources) { var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); for (int i = 1; i <= otherSources.Length; i++) { var newFile = dir.CreateFile("file" + i + ".ruleset"); newFile.WriteAllText(otherSources[i - 1]); } if (otherSources.Length != 0) { return RuleSet.LoadEffectiveRuleSetFromFile(file.Path); } return RuleSetProcessor.LoadFromFile(file.Path); } private void VerifyRuleSetError(string source, string message, bool locSpecific = true, string locMessage = "", params string[] otherSources) { try { ParseRuleSet(source, otherSources); } catch (Exception e) { if (CultureInfo.CurrentCulture.LCID == EN_US || CultureInfo.CurrentUICulture.LCID == EN_US || CultureInfo.CurrentCulture == CultureInfo.InvariantCulture || CultureInfo.CurrentUICulture == CultureInfo.InvariantCulture) { Assert.Equal(message, e.Message); } else if (locSpecific) { if (locMessage != "") Assert.Contains(locMessage, e.Message); else Assert.Equal(message, e.Message); } return; } Assert.True(false, "Didn't return an error"); } [Fact] public void TestCommandLineSplitter() { VerifyCommandLineSplitter("", new string[0]); VerifyCommandLineSplitter(" \t ", new string[0]); VerifyCommandLineSplitter(" abc\tdef baz quuz ", new string[] {"abc", "def", "baz", "quuz"}); VerifyCommandLineSplitter(@" ""abc def"" fi""ddle dee de""e ""hi there ""dude he""llo there"" ", new string[] { @"abc def", @"fi""ddle dee de""e", @"""hi there ""dude", @"he""llo there""" }); VerifyCommandLineSplitter(@" ""abc def \"" baz quuz"" ""\""straw berry"" fi\""zz \""buzz fizzbuzz", new string[] { @"abc def "" baz quuz", @"""straw berry", @"fi""zz", @"""buzz", @"fizzbuzz"}); VerifyCommandLineSplitter(@" \\""abc def"" \\\""abc def"" ", new string[] { @"\""abc def""", @"\""abc", @"def""" }); VerifyCommandLineSplitter(@" \\\\""abc def"" \\\\\""abc def"" ", new string[] { @"\\""abc def""", @"\\""abc", @"def""" }); } [Fact] public void TestRuleSetParsingDuplicateRule() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1012"" Action=""Warning"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> </RuleSet>"; string paranment = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "There is a duplicate key sequence 'CA1012' for the 'UniqueRuleName' key or unique identity constraint."); VerifyRuleSetError(source, () => parameter); } [Fact] public void TestRuleSetParsingDuplicateRule2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet 
Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet>"; string paranment = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "There is a duplicate key sequence 'CA1012' for the 'UniqueRuleName' key or unique identity constraint."); VerifyRuleSetError(source, () => parameter); } [Fact] public void TestRuleSetParsingDuplicateRule3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet>"; var ruleSet = ParseRuleSet(source); Assert.Equal(expected: ReportDiagnostic.Error, actual: ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetParsingDuplicateRuleSet() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> <RuleSet Name=""Ruleset2"" Description=""Test""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; VerifyRuleSetError(source, "There are multiple root elements. 
Line 8, position 2.", false); } [Fact] public void TestRuleSetParsingIncludeAll1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Equal(ReportDiagnostic.Warn, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetParsingIncludeAll2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetParsingWithIncludeOfSameFile() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""a.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, new string[] { "" }); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(1, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingWithMutualIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""a.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(2, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingWithSiblingIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Warning"" /> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include 
Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(3, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingIncludeAll3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; VerifyRuleSetError(source, () => string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The 'Action' attribute is invalid - The value 'Default' is invalid according to its datatype 'TIncludeAllAction' - The Enumeration constraint failed."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Id' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Action' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'AnalyzerId' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute4() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'RuleNamespace' is missing."), locMessage: locMessage); } [Fact] 
public void TestRuleSetParsingRulesMissingAttribute5() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'ToolsVersion' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute6() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Name' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRules() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> <Rule Id=""CA1015"" Action=""Info"" /> <Rule Id=""CA1016"" Action=""Hidden"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1012"], ReportDiagnostic.Error); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1013"], ReportDiagnostic.Warn); Assert.Contains("CA1014", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1014"], ReportDiagnostic.Suppress); Assert.Contains("CA1015", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1015"], ReportDiagnostic.Info); Assert.Contains("CA1016", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1016"], ReportDiagnostic.Hidden); } [Fact] public void TestRuleSetParsingRules2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Default"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The 'Action' attribute is invalid - The value 'Default' is invalid according to its datatype 'TRuleAction' - The Enumeration constraint failed."), locMessage: locMessage); } [Fact] public void TestRuleSetInclude() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" 
ToolsVersion=""12.0"" > <Include Path=""foo.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.True(ruleSet.Includes.Count() == 1); Assert.Equal(ruleSet.Includes.First().Action, ReportDiagnostic.Default); Assert.Equal(ruleSet.Includes.First().IncludePath, "foo.ruleset"); } [WorkItem(156)] [Fact(Skip = "156")] public void TestRuleSetInclude1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""foo.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; VerifyRuleSetError(source, string.Format(CodeAnalysisResources.InvalidRuleSetInclude, "foo.ruleset", string.Format(CodeAnalysisResources.FailedToResolveRuleSetName, "foo.ruleset")), otherSources: new string[] {""}); } [Fact] public void TestRuleSetInclude2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Hidden"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Hidden, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" 
Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Info"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Info, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", 
ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeRecursiveIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1014"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); Assert.Contains("CA1014", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1014"]); } [Fact] public void TestRuleSetIncludeSpecificStrict1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); // CA1012's value in source wins. 
Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetIncludeSpecificStrict2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); // CA1012's value in source still wins. Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetIncludeSpecificStrict3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); // CA1013's value in source2 wins. 
Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeEffectiveAction() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""None"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.DoesNotContain("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); } [Fact] public void TestRuleSetIncludeEffectiveAction1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionGlobal1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionGlobal2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" 
Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Warn, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionSpecific1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeEffectiveActionSpecific2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestAllCombinations() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml 
version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set3"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> <Rule Id=""CA2119"" Action=""None"" /> <Rule Id=""CA2104"" Action=""Error"" /> <Rule Id=""CA2105"" Action=""Warning"" /> </Rules> </RuleSet>"; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1000"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1001"]); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA2100"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA2104"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA2105"]); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA2111"]); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA2119"]); } [Fact] public void TestRuleSetIncludeError() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Default"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); var newFile = dir.CreateFile("file1.ruleset"); newFile.WriteAllText(source1); try { RuleSet.LoadEffectiveRuleSetFromFile(file.Path); Assert.True(false, "Didn't throw an exception"); } catch (InvalidRuleSetException e) { Assert.Contains(string.Format(CodeAnalysisResources.InvalidRuleSetInclude, newFile.Path, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "")), e.Message); } } [Fact] public void GetEffectiveIncludes_NoIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 1, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); } [Fact] public void GetEffectiveIncludes_OneLevel() { string ruleSetSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string includeSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" 
ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(ruleSetSource); var include = dir.CreateFile("file1.ruleset"); include.WriteAllText(includeSource); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 2, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); Assert.Equal(expected: include.Path, actual: includePaths[1]); } [Fact] public void GetEffectiveIncludes_TwoLevels() { string ruleSetSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string includeSource1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; string includeSource2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set3"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> <Rule Id=""CA2119"" Action=""None"" /> <Rule Id=""CA2104"" Action=""Error"" /> <Rule Id=""CA2105"" Action=""Warning"" /> </Rules> </RuleSet>"; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(ruleSetSource); var include1 = dir.CreateFile("file1.ruleset"); include1.WriteAllText(includeSource1); var include2 = dir.CreateFile("file2.ruleset"); include2.WriteAllText(includeSource2); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 3, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); Assert.Equal(expected: include1.Path, actual: includePaths[1]); Assert.Equal(expected: include2.Path, actual: includePaths[2]); } } }
using System; using System.Text; using System.Data; using System.Data.SqlClient; using System.Data.Common; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Xml; using System.Xml.Serialization; using SubSonic; using SubSonic.Utilities; namespace DalSic { /// <summary> /// Strongly-typed collection for the GuardiaTurnero class. /// </summary> [Serializable] public partial class GuardiaTurneroCollection : ActiveList<GuardiaTurnero, GuardiaTurneroCollection> { public GuardiaTurneroCollection() {} /// <summary> /// Filters an existing collection based on the set criteria. This is an in-memory filter /// Thanks to developingchris for this! /// </summary> /// <returns>GuardiaTurneroCollection</returns> public GuardiaTurneroCollection Filter() { for (int i = this.Count - 1; i > -1; i--) { GuardiaTurnero o = this[i]; foreach (SubSonic.Where w in this.wheres) { bool remove = false; System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName); if (pi.CanRead) { object val = pi.GetValue(o, null); switch (w.Comparison) { case SubSonic.Comparison.Equals: if (!val.Equals(w.ParameterValue)) { remove = true; } break; } } if (remove) { this.Remove(o); break; } } } return this; } } /// <summary> /// This is an ActiveRecord class which wraps the Guardia_Turnero table. /// </summary> [Serializable] public partial class GuardiaTurnero : ActiveRecord<GuardiaTurnero>, IActiveRecord { #region .ctors and Default Settings public GuardiaTurnero() { SetSQLProps(); InitSetDefaults(); MarkNew(); } private void InitSetDefaults() { SetDefaults(); } public GuardiaTurnero(bool useDatabaseDefaults) { SetSQLProps(); if(useDatabaseDefaults) ForceDefaults(); MarkNew(); } public GuardiaTurnero(object keyID) { SetSQLProps(); InitSetDefaults(); LoadByKey(keyID); } public GuardiaTurnero(string columnName, object columnValue) { SetSQLProps(); InitSetDefaults(); LoadByParam(columnName,columnValue); } protected static void SetSQLProps() { GetTableSchema(); } #endregion #region Schema and Query Accessor public static Query CreateQuery() { return new Query(Schema); } public static TableSchema.Table Schema { get { if (BaseSchema == null) SetSQLProps(); return BaseSchema; } } private static void GetTableSchema() { if(!IsSchemaInitialized) { //Schema declaration TableSchema.Table schema = new TableSchema.Table("Guardia_Turnero", TableType.Table, DataService.GetInstance("sicProvider")); schema.Columns = new TableSchema.TableColumnCollection(); schema.SchemaName = @"dbo"; //columns TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema); colvarNombre.ColumnName = "nombre"; colvarNombre.DataType = DbType.AnsiString; colvarNombre.MaxLength = 50; colvarNombre.AutoIncrement = false; colvarNombre.IsNullable = false; colvarNombre.IsPrimaryKey = true; colvarNombre.IsForeignKey = false; colvarNombre.IsReadOnly = false; colvarNombre.DefaultSetting = @""; colvarNombre.ForeignKeyTableName = ""; schema.Columns.Add(colvarNombre); TableSchema.TableColumn colvarValorInt = new TableSchema.TableColumn(schema); colvarValorInt.ColumnName = "valorInt"; colvarValorInt.DataType = DbType.Int32; colvarValorInt.MaxLength = 0; colvarValorInt.AutoIncrement = false; colvarValorInt.IsNullable = true; colvarValorInt.IsPrimaryKey = false; colvarValorInt.IsForeignKey = false; colvarValorInt.IsReadOnly = false; colvarValorInt.DefaultSetting = @""; colvarValorInt.ForeignKeyTableName = ""; schema.Columns.Add(colvarValorInt); TableSchema.TableColumn 
colvarValorString = new TableSchema.TableColumn(schema); colvarValorString.ColumnName = "valorString"; colvarValorString.DataType = DbType.String; colvarValorString.MaxLength = 500; colvarValorString.AutoIncrement = false; colvarValorString.IsNullable = true; colvarValorString.IsPrimaryKey = false; colvarValorString.IsForeignKey = false; colvarValorString.IsReadOnly = false; colvarValorString.DefaultSetting = @""; colvarValorString.ForeignKeyTableName = ""; schema.Columns.Add(colvarValorString); BaseSchema = schema; //add this schema to the provider //so we can query it later DataService.Providers["sicProvider"].AddSchema("Guardia_Turnero",schema); } } #endregion #region Props [XmlAttribute("Nombre")] [Bindable(true)] public string Nombre { get { return GetColumnValue<string>(Columns.Nombre); } set { SetColumnValue(Columns.Nombre, value); } } [XmlAttribute("ValorInt")] [Bindable(true)] public int? ValorInt { get { return GetColumnValue<int?>(Columns.ValorInt); } set { SetColumnValue(Columns.ValorInt, value); } } [XmlAttribute("ValorString")] [Bindable(true)] public string ValorString { get { return GetColumnValue<string>(Columns.ValorString); } set { SetColumnValue(Columns.ValorString, value); } } #endregion //no foreign key tables defined (0) //no ManyToMany tables defined (0) #region ObjectDataSource support /// <summary> /// Inserts a record, can be used with the Object Data Source /// </summary> public static void Insert(string varNombre,int? varValorInt,string varValorString) { GuardiaTurnero item = new GuardiaTurnero(); item.Nombre = varNombre; item.ValorInt = varValorInt; item.ValorString = varValorString; if (System.Web.HttpContext.Current != null) item.Save(System.Web.HttpContext.Current.User.Identity.Name); else item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name); } /// <summary> /// Updates a record, can be used with the Object Data Source /// </summary> public static void Update(string varNombre,int? varValorInt,string varValorString) { GuardiaTurnero item = new GuardiaTurnero(); item.Nombre = varNombre; item.ValorInt = varValorInt; item.ValorString = varValorString; item.IsNew = false; if (System.Web.HttpContext.Current != null) item.Save(System.Web.HttpContext.Current.User.Identity.Name); else item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name); } #endregion #region Typed Columns public static TableSchema.TableColumn NombreColumn { get { return Schema.Columns[0]; } } public static TableSchema.TableColumn ValorIntColumn { get { return Schema.Columns[1]; } } public static TableSchema.TableColumn ValorStringColumn { get { return Schema.Columns[2]; } } #endregion #region Columns Struct public struct Columns { public static string Nombre = @"nombre"; public static string ValorInt = @"valorInt"; public static string ValorString = @"valorString"; } #endregion #region Update PK Collections #endregion #region Deep Save #endregion } }
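// --- Illustrative usage sketch added by the editor; not part of the SubSonic-generated code. ---
// It shows how the GuardiaTurnero ActiveRecord wrapper above is typically used: insert a row via the
// ObjectDataSource helper, then load it by its primary key column and save a change. Only members
// defined in the generated class are used; the literal values are hypothetical examples.
namespace DalSic
{
    public static class GuardiaTurneroUsageSketch
    {
        public static void Demo()
        {
            // Insert a new configuration row ("nombre" is the primary key).
            GuardiaTurnero.Insert("turneroActivo", 1, "valor de ejemplo");

            // Load the row back by column value, modify it, and persist the change.
            var item = new GuardiaTurnero(GuardiaTurnero.Columns.Nombre, "turneroActivo");
            item.ValorInt = 2;
            item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
        }
    }
}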
namespace WebBaseSystem.Web.Areas.HelpPage { using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; using System.Net.Http; using System.Net.Http.Formatting; using System.Net.Http.Headers; using System.Web.Http.Description; using System.Xml.Linq; using Newtonsoft.Json; /// <summary> /// This class will generate the samples for the help page. /// </summary> public class HelpPageSampleGenerator { /// <summary> /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class. /// </summary> public HelpPageSampleGenerator() { this.ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>(); this.ActionSamples = new Dictionary<HelpPageSampleKey, object>(); this.SampleObjects = new Dictionary<Type, object>(); this.SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>> { DefaultSampleObjectFactory, }; } /// <summary> /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>. /// </summary> public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; } /// <summary> /// Gets the objects that are used directly as samples for certain actions. /// </summary> public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; } /// <summary> /// Gets the objects that are serialized as samples by the supported formatters. /// </summary> public IDictionary<Type, object> SampleObjects { get; internal set; } /// <summary> /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order, /// stopping when the factory successfully returns a non-<see langref="null"/> object. /// </summary> /// <remarks> /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks> [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Justification = "This is an appropriate nesting of generic types")] public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; } /// <summary> /// Gets the request body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api) { return this.GetSample(api, SampleDirection.Request); } /// <summary> /// Gets the response body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api) { return this.GetSample(api, SampleDirection.Response); } /// <summary> /// Gets the request or response body samples. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The samples keyed by media type.</returns> public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection) { if (api == null) { throw new ArgumentNullException("api"); } string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; Type type = this.ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters); var samples = new Dictionary<MediaTypeHeaderValue, object>(); // Use the samples provided directly for actions var actionSamples = this.GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection); foreach (var actionSample in actionSamples) { samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value)); } // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage. // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters. if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type)) { object sampleObject = this.GetSampleObject(type); foreach (var formatter in formatters) { foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes) { if (!samples.ContainsKey(mediaType)) { object sample = this.GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection); // If no sample found, try generate sample using formatter and sample object if (sample == null && sampleObject != null) { sample = this.WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType); } samples.Add(mediaType, WrapSampleIfString(sample)); } } } } return samples; } /// <summary> /// Search for samples that are provided directly through <see cref="ActionSamples"/>. /// </summary> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="type">The CLR type.</param> /// <param name="formatter">The formatter.</param> /// <param name="mediaType">The media type.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The sample that matches the parameters.</returns> public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection) { object sample; // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames. // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames. // If still not found, try to get the sample provided for the specified mediaType and type. // Finally, try to get the sample provided for the specified mediaType. 
if (this.ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) || this.ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) || this.ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) || this.ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample)) { return sample; } return null; } /// <summary> /// Gets the sample object that will be serialized by the formatters. /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other /// factories in <see cref="SampleObjectFactories"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>The sample object.</returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")] public virtual object GetSampleObject(Type type) { object sampleObject; if (!this.SampleObjects.TryGetValue(type, out sampleObject)) { // No specific object available, try our factories. foreach (Func<HelpPageSampleGenerator, Type, object> factory in this.SampleObjectFactories) { if (factory == null) { continue; } try { sampleObject = factory(this, type); if (sampleObject != null) { break; } } catch { // Ignore any problems encountered in the factory; go on to the next one (if any). } } } return sampleObject; } /// <summary> /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The type.</returns> public virtual Type ResolveHttpRequestMessageType(ApiDescription api) { string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; return this.ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters); } /// <summary> /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param> /// <param name="formatters">The formatters.</param> [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")] public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters) { if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection)) { throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection)); } if (api == null) { throw new ArgumentNullException("api"); } Type type; if (this.ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) || this.ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type)) { // Re-compute the supported formatters based on type Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>(); foreach (var formatter in api.ActionDescriptor.Configuration.Formatters) { if (IsFormatSupported(sampleDirection, formatter, type)) { newFormatters.Add(formatter); } } formatters = newFormatters; } else { switch (sampleDirection) { case SampleDirection.Request: ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody); type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType; formatters = api.SupportedRequestBodyFormatters; break; case SampleDirection.Response: default: type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType; formatters = api.SupportedResponseFormatters; break; } } return type; } /// <summary> /// Writes the sample object using formatter. 
/// </summary> /// <param name="formatter">The formatter.</param> /// <param name="value">The value.</param> /// <param name="type">The type.</param> /// <param name="mediaType">Type of the media.</param> /// <returns></returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")] public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType) { if (formatter == null) { throw new ArgumentNullException("formatter"); } if (mediaType == null) { throw new ArgumentNullException("mediaType"); } object sample = string.Empty; MemoryStream ms = null; HttpContent content = null; try { if (formatter.CanWriteType(type)) { ms = new MemoryStream(); content = new ObjectContent(type, value, formatter, mediaType); formatter.WriteToStreamAsync(type, value, ms, content, null).Wait(); ms.Position = 0; StreamReader reader = new StreamReader(ms); string serializedSampleString = reader.ReadToEnd(); if (mediaType.MediaType.ToUpperInvariant().Contains("XML")) { serializedSampleString = TryFormatXml(serializedSampleString); } else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON")) { serializedSampleString = TryFormatJson(serializedSampleString); } sample = new TextSample(serializedSampleString); } else { sample = new InvalidSample(string.Format( CultureInfo.CurrentCulture, "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.", mediaType, formatter.GetType().Name, type.Name)); } } catch (Exception e) { sample = new InvalidSample(string.Format( CultureInfo.CurrentCulture, "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. 
Exception message: {2}", formatter.GetType().Name, mediaType.MediaType, UnwrapException(e).Message)); } finally { if (ms != null) { ms.Dispose(); } if (content != null) { content.Dispose(); } } return sample; } internal static Exception UnwrapException(Exception exception) { AggregateException aggregateException = exception as AggregateException; if (aggregateException != null) { return aggregateException.Flatten().InnerException; } return exception; } private static object WrapSampleIfString(object sample) { string stringSample = sample as string; if (stringSample != null) { return new TextSample(stringSample); } return sample; } // Default factory for sample objects private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type) { // Try to create a default sample object ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatJson(string str) { try { object parsedJson = JsonConvert.DeserializeObject(str); return JsonConvert.SerializeObject(parsedJson, Formatting.Indented); } catch { // can't parse JSON, return the original string return str; } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatXml(string str) { try { XDocument xml = XDocument.Parse(str); return xml.ToString(); } catch { // can't parse XML, return the original string return str; } } private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type) { switch (sampleDirection) { case SampleDirection.Request: return formatter.CanReadType(type); case SampleDirection.Response: return formatter.CanWriteType(type); } return false; } private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection) { HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase); foreach (var sample in this.ActionSamples) { HelpPageSampleKey sampleKey = sample.Key; if (string.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) && string.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) && (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) && sampleDirection == sampleKey.SampleDirection) { yield return sample; } } } } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Text; using System.Windows.Automation; using EnvDTE; using EnvDTE80; using Microsoft.VisualStudio; using Microsoft.VisualStudio.ComponentModelHost; using Microsoft.VisualStudio.Shell; using Microsoft.VisualStudio.Shell.Interop; using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudioTools.VSTestHost; using IOleServiceProvider = Microsoft.VisualStudio.OLE.Interop.IServiceProvider; using Task = System.Threading.Tasks.Task; namespace TestUtilities.UI { /// <summary> /// Provides wrappers for automating the VisualStudio UI. /// </summary> public class VisualStudioApp : AutomationWrapper, IDisposable { private SolutionExplorerTree _solutionExplorerTreeView; private ObjectBrowser _objectBrowser, _resourceView; private AzureCloudServiceActivityLog _azureActivityLog; private IntPtr _mainWindowHandle; private readonly DTE _dte; private IServiceProvider _provider; private List<Action> _onDispose; private bool _isDisposed, _skipCloseAll; public VisualStudioApp(DTE dte = null) : this(new IntPtr((dte ?? VSTestContext.DTE).MainWindow.HWnd)) { _dte = dte ?? VSTestContext.DTE; } private VisualStudioApp(IntPtr windowHandle) : base(AutomationElement.FromHandle(windowHandle)) { _mainWindowHandle = windowHandle; } public bool IsDisposed { get { return _isDisposed; } } public void OnDispose(Action action) { Debug.Assert(action != null); if (_onDispose == null) { _onDispose = new List<Action> { action }; } else { _onDispose.Add(action); } } protected virtual void Dispose(bool disposing) { if (!_isDisposed) { _isDisposed = true; try { if (_onDispose != null) { foreach (var action in _onDispose) { action(); } } if (_dte != null && _dte.Debugger.CurrentMode != dbgDebugMode.dbgDesignMode) { _dte.Debugger.TerminateAll(); _dte.Debugger.Stop(); } DismissAllDialogs(); for (int i = 0; i < 100 && !_skipCloseAll; i++) { try { _dte.Solution.Close(false); break; } catch { _dte.Documents.CloseAll(EnvDTE.vsSaveChanges.vsSaveChangesNo); System.Threading.Thread.Sleep(200); } } } catch (Exception ex) { Debug.WriteLine("Exception disposing VisualStudioApp: {0}", ex); } } } public void Dispose() { Dispose(true); } public void SuppressCloseAllOnDispose() { _skipCloseAll = true; } public IComponentModel ComponentModel { get { return GetService<IComponentModel>(typeof(SComponentModel)); } } public IServiceProvider ServiceProvider { get { if (_provider == null) { if (_dte == null) { _provider = VSTestContext.ServiceProvider; } else { _provider = new ServiceProvider((IOleServiceProvider)_dte); OnDispose(() => ((ServiceProvider)_provider).Dispose()); } } return _provider; } } public T GetService<T>(Type type = null) { return (T)ServiceProvider.GetService(type ?? typeof(T)); } /// <summary> /// File->Save /// </summary> public void SaveSelection() { Dte.ExecuteCommand("File.SaveSelectedItems"); } /// <summary> /// Opens and activates the solution explorer window. /// </summary> public SolutionExplorerTree OpenSolutionExplorer() { _solutionExplorerTreeView = null; Dte.ExecuteCommand("View.SolutionExplorer"); return SolutionExplorerTreeView; } /// <summary> /// Opens and activates the object browser window. 
/// </summary> public void OpenObjectBrowser() { Dte.ExecuteCommand("View.ObjectBrowser"); } /// <summary> /// Opens and activates the Resource View window. /// </summary> public void OpenResourceView() { Dte.ExecuteCommand("View.ResourceView"); } public IntPtr OpenDialogWithDteExecuteCommand(string commandName, string commandArgs = "") { Task task = Task.Factory.StartNew(() => { Dte.ExecuteCommand(commandName, commandArgs); Console.WriteLine("Successfully executed command {0} {1}", commandName, commandArgs); }); IntPtr dialog = IntPtr.Zero; try { dialog = WaitForDialog(task); } finally { if (dialog == IntPtr.Zero) { if (task.IsFaulted && task.Exception != null) { Assert.Fail("Unexpected Exception - VSTestContext.DTE.ExecuteCommand({0},{1}){2}{3}", commandName, commandArgs, Environment.NewLine, task.Exception.ToString()); } Assert.Fail("Task failed - VSTestContext.DTE.ExecuteCommand({0},{1})", commandName, commandArgs); } } return dialog; } public void ExecuteCommand(string commandName, string commandArgs = "", int timeout = 25000) { Task task = Task.Factory.StartNew(() => { Console.WriteLine("Executing command {0} {1}", commandName, commandArgs); Dte.ExecuteCommand(commandName, commandArgs); Console.WriteLine("Successfully executed command {0} {1}", commandName, commandArgs); }); bool timedOut = false; try { timedOut = !task.Wait(timeout); } catch (AggregateException ae) { foreach (var ex in ae.InnerExceptions) { Console.WriteLine(ex.ToString()); } throw ae.InnerException; } if (timedOut) { string msg = String.Format("Command {0} failed to execute in specified timeout", commandName); Console.WriteLine(msg); DumpVS(); Assert.Fail(msg); } } /// <summary> /// Opens and activates the Navigate To window. /// </summary> public NavigateToDialog OpenNavigateTo() { Task task = Task.Factory.StartNew(() => { Dte.ExecuteCommand("Edit.NavigateTo"); Console.WriteLine("Successfully executed Edit.NavigateTo"); }); for (int retries = 10; retries > 0; --retries) { #if DEV12_OR_LATER foreach (var element in Element.FindAll( TreeScope.Descendants, new PropertyCondition(AutomationElement.ClassNameProperty, "Window") ).OfType<AutomationElement>()) { if (element.FindAll(TreeScope.Children, new OrCondition( new PropertyCondition(AutomationElement.AutomationIdProperty, "PART_SearchHost"), new PropertyCondition(AutomationElement.AutomationIdProperty, "PART_ResultList") )).Count == 2) { return new NavigateToDialog(element); } } #else var element = Element.FindFirst( TreeScope.Descendants, new AndCondition( new PropertyCondition(AutomationElement.ControlTypeProperty, ControlType.Window), new PropertyCondition(AutomationElement.NameProperty, "Navigate To") ) ); if (element != null) { return new NavigateToDialog(element); } #endif System.Threading.Thread.Sleep(500); } Assert.Fail("Could not find Navigate To window"); return null; } public SaveDialog SaveAs() { return SaveDialog.FromDte(this); } /// <summary> /// Gets the specified document. Filename should be fully qualified filename. 
/// </summary> public EditorWindow GetDocument(string filename) { Debug.Assert(Path.IsPathRooted(filename)); string windowName = Path.GetFileName(filename); var elem = GetDocumentTab(windowName); elem = elem.FindFirst(TreeScope.Descendants, new PropertyCondition( AutomationElement.ClassNameProperty, "WpfTextView" ) ); return new EditorWindow(filename, elem); } public AutomationElement GetDocumentTab(string windowName) { var elem = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ClassNameProperty, "TabItem" ), new PropertyCondition( AutomationElement.NameProperty, windowName ) ) ); if (elem == null) { // maybe the file has been modified, try again with a * elem = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ClassNameProperty, "TabItem" ), new PropertyCondition( AutomationElement.NameProperty, windowName + "*" ) ) ); } return elem; } /// <summary> /// Selects the given source control provider. Name merely needs to be /// enough text to disambiguate from other source control providers. /// </summary> public void SelectSourceControlProvider(string providerName) { Element.SetFocus(); using (var dialog = ToolsOptionsDialog.FromDte(this)) { dialog.SelectedView = "Source Control/Plug-in Selection"; var currentSourceControl = new ComboBox( dialog.FindByAutomationId("2001") // Current source control plug-in ); currentSourceControl.SelectItem(providerName); dialog.OK(); } } public NewProjectDialog FileNewProject() { var dialog = OpenDialogWithDteExecuteCommand("File.NewProject"); return new NewProjectDialog(this, AutomationElement.FromHandle(dialog)); } public AttachToProcessDialog OpenDebugAttach() { var dialog = OpenDialogWithDteExecuteCommand("Debug.AttachtoProcess"); return new AttachToProcessDialog(dialog); } public OutputWindowPane GetOutputWindow(string name) { return ((DTE2)VSTestContext.DTE).ToolWindows.OutputWindow.OutputWindowPanes.Item(name); } public IEnumerable<Window> OpenDocumentWindows { get { return Dte.Windows.OfType<Window>().Where(w => w.Document != null); } } public void WaitForBuildComplete(int timeout) { for (int i = 0; i < timeout; i += 500) { if (Dte.Solution.SolutionBuild.BuildState == vsBuildState.vsBuildStateDone) { return; } System.Threading.Thread.Sleep(500); } throw new TimeoutException("Timeout waiting for build to complete"); } public string GetOutputWindowText(string name) { var window = GetOutputWindow(name); var doc = window.TextDocument; doc.Selection.SelectAll(); return doc.Selection.Text; } public void WaitForOutputWindowText(string name, string containsText, int timeout = 5000) { for (int i = 0; i < timeout; i += 500) { var text = GetOutputWindowText(name); if (text.Contains(containsText)) { return; } System.Threading.Thread.Sleep(500); } Assert.Fail("Failed to find {0} in output window {1}, found:\r\n{2}", containsText, name, GetOutputWindowText(name)); } public void DismissAllDialogs() { int foundWindow = 2; while (foundWindow != 0) { IVsUIShell uiShell = GetService<IVsUIShell>(typeof(IVsUIShell)); if (uiShell == null) { return; } IntPtr hwnd; uiShell.GetDialogOwnerHwnd(out hwnd); for (int j = 0; j < 10 && hwnd == _mainWindowHandle; j++) { System.Threading.Thread.Sleep(100); uiShell.GetDialogOwnerHwnd(out hwnd); } //We didn't see any dialogs if (hwnd == IntPtr.Zero || hwnd == _mainWindowHandle) { foundWindow--; continue; } //MessageBoxButton.Abort //MessageBoxButton.Cancel //MessageBoxButton.No //MessageBoxButton.Ok //MessageBoxButton.Yes //The second 
parameter is going to be the value returned... We always send Ok Debug.WriteLine("Dismissing dialog"); AutomationWrapper.DumpElement(AutomationElement.FromHandle(hwnd)); NativeMethods.EndDialog(hwnd, new IntPtr(1)); } } /// <summary> /// Waits for a modal dialog to take over VS's main window and returns the HWND for the dialog. /// </summary> /// <returns></returns> public IntPtr WaitForDialog(Task task) { return WaitForDialogToReplace(_mainWindowHandle, task); } public IntPtr WaitForDialog() { return WaitForDialogToReplace(_mainWindowHandle, null); } public ExceptionHelperDialog WaitForException() { var window = FindByName("Exception Helper Indicator Window"); if (window != null) { var innerPane = window.FindFirst(TreeScope.Descendants, new PropertyCondition( AutomationElement.ControlTypeProperty, ControlType.Pane ) ); Assert.IsNotNull(innerPane); return new ExceptionHelperDialog(innerPane); } Assert.Fail("Failed to find exception helper window"); return null; } /// <summary> /// Waits for a modal dialog to take over a given window and returns the HWND for the new dialog. /// </summary> /// <returns>An IntPtr which should be interpreted as an HWND</returns> public IntPtr WaitForDialogToReplace(IntPtr originalHwnd) { return WaitForDialogToReplace(originalHwnd, null); } /// <summary> /// Waits for a modal dialog to take over a given window and returns the HWND for the new dialog. /// </summary> /// <returns>An IntPtr which should be interpreted as an HWND</returns> public IntPtr WaitForDialogToReplace(AutomationElement element) { return WaitForDialogToReplace(new IntPtr(element.Current.NativeWindowHandle), null); } private IntPtr WaitForDialogToReplace(IntPtr originalHwnd, Task task) { IVsUIShell uiShell = GetService<IVsUIShell>(typeof(IVsUIShell)); IntPtr hwnd; uiShell.GetDialogOwnerHwnd(out hwnd); int timeout = task == null ? 10000 : 60000; while (timeout > 0 && hwnd == originalHwnd && (task == null || !(task.IsFaulted || task.IsCanceled))) { timeout -= 500; System.Threading.Thread.Sleep(500); uiShell.GetDialogOwnerHwnd(out hwnd); } if (task != null && (task.IsFaulted || task.IsCanceled)) { return IntPtr.Zero; } if (hwnd == originalHwnd) { DumpElement(AutomationElement.FromHandle(hwnd)); } Assert.AreNotEqual(IntPtr.Zero, hwnd); Assert.AreNotEqual(originalHwnd, hwnd, "Main window still has focus"); return hwnd; } /// <summary> /// Waits for the VS main window to receive the focus. /// </summary> /// <returns> /// True if the main window has the focus. Otherwise, false. /// </returns> public bool WaitForDialogDismissed(bool assertIfFailed = true, int timeout = 100000) { IVsUIShell uiShell = GetService<IVsUIShell>(typeof(IVsUIShell)); IntPtr hwnd; uiShell.GetDialogOwnerHwnd(out hwnd); for (int i = 0; i < (timeout / 100) && hwnd != _mainWindowHandle; i++) { System.Threading.Thread.Sleep(100); uiShell.GetDialogOwnerHwnd(out hwnd); } if (assertIfFailed) { Assert.AreEqual(_mainWindowHandle, hwnd); return true; } return _mainWindowHandle == hwnd; } /// <summary> /// Waits for no dialog. If a dialog appears before the timeout expires /// then the test fails and the dialog is closed. 
/// </summary> public void WaitForNoDialog(TimeSpan timeout) { IVsUIShell uiShell = GetService<IVsUIShell>(typeof(IVsUIShell)); IntPtr hwnd; uiShell.GetDialogOwnerHwnd(out hwnd); for (int i = 0; i < 100 && hwnd == _mainWindowHandle; i++) { System.Threading.Thread.Sleep((int)timeout.TotalMilliseconds / 100); uiShell.GetDialogOwnerHwnd(out hwnd); } if (hwnd != (IntPtr)_mainWindowHandle) { AutomationWrapper.DumpElement(AutomationElement.FromHandle(hwnd)); NativeMethods.EndDialog(hwnd, (IntPtr)(int)MessageBoxButton.Cancel); Assert.Fail("Dialog appeared - see output for details"); } } public static void CheckMessageBox(params string[] text) { CheckMessageBox(MessageBoxButton.Cancel, text); } public static void CheckMessageBox(MessageBoxButton button, params string[] text) { CheckAndDismissDialog(text, 65535, new IntPtr((int)button)); } /// <summary> /// Checks the text of a dialog and dismisses it. /// /// dlgField is the field to check the text of. /// buttonId is the button to press to dismiss. /// </summary> private static void CheckAndDismissDialog(string[] text, int dlgField, IntPtr buttonId) { var handle = new IntPtr(VSTestContext.DTE.MainWindow.HWnd); IVsUIShell uiShell = VSTestContext.ServiceProvider.GetService(typeof(IVsUIShell)) as IVsUIShell; IntPtr hwnd; uiShell.GetDialogOwnerHwnd(out hwnd); for (int i = 0; i < 20 && hwnd == handle; i++) { System.Threading.Thread.Sleep(500); uiShell.GetDialogOwnerHwnd(out hwnd); } Assert.AreNotEqual(IntPtr.Zero, hwnd, "hwnd is null, We failed to get the dialog"); Assert.AreNotEqual(handle, hwnd, "hwnd is Dte.MainWindow, We failed to get the dialog"); Console.WriteLine("Ending dialog: "); AutomationWrapper.DumpElement(AutomationElement.FromHandle(hwnd)); Console.WriteLine("--------"); try { StringBuilder title = new StringBuilder(4096); Assert.AreNotEqual(NativeMethods.GetDlgItemText(hwnd, dlgField, title, title.Capacity), (uint)0); string t = title.ToString(); AssertUtil.Contains(t, text); } finally { NativeMethods.EndDialog(hwnd, buttonId); } } /// <summary> /// Provides access to Visual Studio's solution explorer tree view. /// </summary> public SolutionExplorerTree SolutionExplorerTreeView { get { if (_solutionExplorerTreeView == null) { AutomationElement element = null; for (int i = 0; i < 20 && element == null; i++) { element = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ControlTypeProperty, ControlType.Pane ), new PropertyCondition( AutomationElement.NameProperty, "Solution Explorer" ) ) ); if (element == null) { System.Threading.Thread.Sleep(500); } } AutomationElement treeElement = null; if (element != null) { for (int i = 0; i < 20 && treeElement == null; i++) { treeElement = element.FindFirst(TreeScope.Descendants, new PropertyCondition( AutomationElement.ControlTypeProperty, ControlType.Tree ) ); if (treeElement == null) { System.Threading.Thread.Sleep(500); } } } _solutionExplorerTreeView = new SolutionExplorerTree(treeElement); } return _solutionExplorerTreeView; } } /// <summary> /// Provides access to Visual Studio's object browser. 
/// </summary> public ObjectBrowser ObjectBrowser { get { if (_objectBrowser == null) { AutomationElement element = null; for (int i = 0; i < 10 && element == null; i++) { element = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ClassNameProperty, "ViewPresenter" ), new PropertyCondition( AutomationElement.NameProperty, "Object Browser" ) ) ); if (element == null) { System.Threading.Thread.Sleep(500); } } _objectBrowser = new ObjectBrowser(element); } return _objectBrowser; } } /// <summary> /// Provides access to Visual Studio's resource view. /// </summary> public ObjectBrowser ResourceView { get { if (_resourceView == null) { AutomationElement element = null; for (int i = 0; i < 10 && element == null; i++) { element = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ClassNameProperty, "ViewPresenter" ), new PropertyCondition( AutomationElement.NameProperty, "Resource View" ) ) ); if (element == null) { System.Threading.Thread.Sleep(500); } } _resourceView = new ObjectBrowser(element); } return _resourceView; } } /// <summary> /// Provides access to Azure's VS Activity Log window. /// </summary> public AzureCloudServiceActivityLog AzureActivityLog { get { if (_azureActivityLog == null) { AutomationElement element = null; for (int i = 0; i < 10 && element == null; i++) { element = Element.FindFirst(TreeScope.Descendants, new AndCondition( new PropertyCondition( AutomationElement.ClassNameProperty, "GenericPane" ), new OrCondition( new PropertyCondition( AutomationElement.NameProperty, "Microsoft Azure Activity Log" ), new PropertyCondition( AutomationElement.NameProperty, "Windows Azure Activity Log" ) ) ) ); if (element == null) { System.Threading.Thread.Sleep(500); } } _azureActivityLog = new AzureCloudServiceActivityLog(element); } return _azureActivityLog; } } /// <summary> /// Produces a name which is compatible with x:Name requirements (starts with a letter/underscore, contains /// only letter, numbers, or underscores). 
/// </summary> public static string GetName(string title) { if (title.Length == 0) { return "InteractiveWindowHost"; } StringBuilder res = new StringBuilder(); if (!Char.IsLetter(title[0])) { res.Append('_'); } foreach (char c in title) { if (Char.IsLetter(c) || Char.IsDigit(c) || c == '_') { res.Append(c); } } res.Append("Host"); return res.ToString(); } public DTE Dte { get { return _dte; } } public void WaitForMode(dbgDebugMode mode) { for (int i = 0; i < 60 && Dte.Debugger.CurrentMode != mode; i++) { System.Threading.Thread.Sleep(500); } Assert.AreEqual(mode, VSTestContext.DTE.Debugger.CurrentMode); } public virtual Project CreateProject( string languageName, string templateName, string createLocation, string projectName, bool newSolution = true, bool suppressUI = true ) { var sln = (Solution2)Dte.Solution; var templatePath = sln.GetProjectTemplate(templateName, languageName); Assert.IsTrue(File.Exists(templatePath) || Directory.Exists(templatePath), string.Format("Cannot find template '{0}' for language '{1}'", templateName, languageName)); var origName = projectName; var projectDir = Path.Combine(createLocation, projectName); for (int i = 1; Directory.Exists(projectDir); ++i) { projectName = string.Format("{0}{1}", origName, i); projectDir = Path.Combine(createLocation, projectName); } var previousSuppressUI = Dte.SuppressUI; try { Dte.SuppressUI = suppressUI; sln.AddFromTemplate(templatePath, projectDir, projectName, newSolution); } finally { Dte.SuppressUI = previousSuppressUI; } return sln.Projects.Cast<Project>().FirstOrDefault(p => p.Name == projectName); } public Project OpenProject(string projName, string startItem = null, int? expectedProjects = null, string projectName = null, bool setStartupItem = true) { string fullPath = TestData.GetPath(projName); Assert.IsTrue(File.Exists(fullPath), "Cannot find " + fullPath); Console.WriteLine("Opening {0}", fullPath); // If there is a .suo file, delete that so that there is no state carried over from another test. for (int i = 10; i <= 12; ++i) { string suoPath = Path.ChangeExtension(fullPath, ".v" + i + ".suo"); if (File.Exists(suoPath)) { File.Delete(suoPath); } } Dte.Solution.Open(fullPath); Assert.IsTrue(Dte.Solution.IsOpen, "The solution is not open"); // Force all projects to load before running any tests. var solution = GetService<IVsSolution4>(typeof(SVsSolution)); Assert.IsNotNull(solution, "Failed to obtain SVsSolution service"); solution.EnsureSolutionIsLoaded((uint)__VSBSLFLAGS.VSBSLFLAGS_None); int count = Dte.Solution.Projects.Count; if (expectedProjects != null && expectedProjects.Value != count) { // if we have other files open we can end up with a bonus project... 
int i = 0; foreach (EnvDTE.Project proj in Dte.Solution.Projects) { if (proj.Name != "Miscellaneous Files") { i++; } } Assert.AreEqual(expectedProjects, i, "Wrong number of loaded projects"); } Project project = GetProject(projectName); string outputText = "(unable to get Solution output)"; try { outputText = GetOutputWindowText("Solution"); } catch (Exception) { } Assert.IsNotNull(project, "No project loaded: " + outputText); // HACK: Testing whether Properties is just slow to initialize for (int retries = 10; retries > 0 && project.Properties == null; --retries) { Trace.TraceWarning("Waiting for project.Properties to become non-null"); System.Threading.Thread.Sleep(250); } Assert.IsNotNull(project.Properties, "No project properties: " + outputText); Assert.IsTrue(project.Properties.GetEnumerator().MoveNext(), "No items in project properties: " + outputText); if (startItem != null && setStartupItem) { project.SetStartupFile(startItem); for (var i = 0; i < 20; i++) { //Wait for the startupItem to be set before returning from the project creation try { if (((string)project.Properties.Item("StartupFile").Value) == startItem) { break; } } catch { } System.Threading.Thread.Sleep(250); } } DeleteAllBreakPoints(); return project; } public Project GetProject(string projectName) { var iter = Dte.Solution.Projects.GetEnumerator(); if (!iter.MoveNext()) { return null; } Project project = (Project)iter.Current; if (projectName != null) { while (project.Name != projectName) { Assert.IsTrue(iter.MoveNext(), "Failed to find project named " + projectName); project = (Project)iter.Current; } } return project; } public void DeleteAllBreakPoints() { var debug3 = (EnvDTE90.Debugger3)Dte.Debugger; if (debug3.Breakpoints != null) { foreach (var bp in debug3.Breakpoints) { ((EnvDTE90a.Breakpoint3)bp).Delete(); } } } public Uri PublishToAzureCloudService(string serviceName, string subscriptionPublishSettingsFilePath) { using (var publishDialog = AzureCloudServicePublishDialog.FromDte(this)) { using (var manageSubscriptionsDialog = publishDialog.SelectManageSubscriptions()) { LoadPublishSettings(manageSubscriptionsDialog, subscriptionPublishSettingsFilePath); manageSubscriptionsDialog.Close(); } publishDialog.ClickNext(); using (var createServiceDialog = publishDialog.SelectCreateNewService()) { createServiceDialog.ServiceName = serviceName; createServiceDialog.Location = "West US"; createServiceDialog.ClickCreate(); } publishDialog.ClickPublish(); } return new Uri(string.Format("http://{0}.cloudapp.net", serviceName)); } public Uri PublishToAzureWebSite(string siteName, string subscriptionPublishSettingsFilePath) { using (var publishDialog = AzureWebSitePublishDialog.FromDte(this)) { using (var importSettingsDialog = publishDialog.ClickImportSettings()) { importSettingsDialog.ClickImportFromWindowsAzureWebSite(); using (var manageSubscriptionsDialog = importSettingsDialog.ClickImportOrManageSubscriptions()) { LoadPublishSettings(manageSubscriptionsDialog, subscriptionPublishSettingsFilePath); manageSubscriptionsDialog.Close(); } using (var createSiteDialog = importSettingsDialog.ClickNew()) { createSiteDialog.SiteName = siteName; createSiteDialog.ClickCreate(); } importSettingsDialog.ClickOK(); } publishDialog.ClickPublish(); } return new Uri(string.Format("http://{0}.azurewebsites.net", siteName)); } private void LoadPublishSettings(AzureManageSubscriptionsDialog manageSubscriptionsDialog, string publishSettingsFilePath) { manageSubscriptionsDialog.ClickCertificates(); while 
(manageSubscriptionsDialog.SubscriptionsListBox.Count > 0) { manageSubscriptionsDialog.SubscriptionsListBox[0].Select(); manageSubscriptionsDialog.ClickRemove(); WaitForDialogToReplace(manageSubscriptionsDialog.Element); VisualStudioApp.CheckMessageBox(TestUtilities.MessageBoxButton.Yes); } using (var importSubscriptionDialog = manageSubscriptionsDialog.ClickImport()) { importSubscriptionDialog.FileName = publishSettingsFilePath; importSubscriptionDialog.ClickImport(); } } public List<IVsTaskItem> WaitForErrorListItems(int expectedCount) { return WaitForTaskListItems(typeof(SVsErrorList), expectedCount, exactMatch: false); } public List<IVsTaskItem> WaitForTaskListItems(Type taskListService, int expectedCount, bool exactMatch = true) { Console.Write("Waiting for {0} items on {1} ... ", expectedCount, taskListService.Name); var errorList = GetService<IVsTaskList>(taskListService); var allItems = new List<IVsTaskItem>(); if (expectedCount == 0) { // Allow time for errors to appear. Otherwise when we expect 0 // errors we will get a false pass. System.Threading.Thread.Sleep(5000); } for (int retries = 10; retries > 0; --retries) { allItems.Clear(); IVsEnumTaskItems items; ErrorHandler.ThrowOnFailure(errorList.EnumTaskItems(out items)); IVsTaskItem[] taskItems = new IVsTaskItem[1]; uint[] itemCnt = new uint[1]; while (ErrorHandler.Succeeded(items.Next(1, taskItems, itemCnt)) && itemCnt[0] == 1) { allItems.Add(taskItems[0]); } if (allItems.Count >= expectedCount) { break; } // give time for errors to process... System.Threading.Thread.Sleep(1000); } if (exactMatch) { Assert.AreEqual(expectedCount, allItems.Count); } return allItems; } internal ProjectItem AddItem(Project project, string language, string template, string filename) { var fullTemplate = ((Solution2)project.DTE.Solution).GetProjectItemTemplate(template, language); return project.ProjectItems.AddFromTemplate(fullTemplate, filename); } } }
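// A minimal sketch of how a UI test might drive the VisualStudioApp wrapper above: open a
// solution, show Solution Explorer, run a command, and wait for the build to finish. The
// solution path and timeout values are illustrative only.
using Microsoft.VisualStudio.TestTools.UnitTesting;
using TestUtilities.UI;

internal static class VisualStudioAppUsageSketch
{
    internal static void Demo()
    {
        // Dispose() terminates any debug session, dismisses stray dialogs, and closes the solution.
        using (var app = new VisualStudioApp())
        {
            var project = app.OpenProject(@"TestData\HelloWorld.sln");
            Assert.IsNotNull(project, "Project failed to load");

            app.OpenSolutionExplorer();
            app.ExecuteCommand("Build.BuildSolution");
            app.WaitForBuildComplete(60000);
        }
    }
}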
using System; using System.Collections; using System.Collections.Generic; using Microsoft.Xna.Framework; using Microsoft.Xna.Framework.Graphics; namespace MonoGame.Extended.TextureAtlases { /// <summary> /// Defines a texture atlas which stores a source image and contains regions specifying its sub-images. /// </summary> /// <remarks> /// <para> /// A texture atlas (also called a tile map, tile engine, or sprite sheet) is a large image containing a collection, /// or "atlas", of sub-images, each of which is a texture map for some part of a 2D or 3D model. /// The sub-textures can be rendered by modifying the texture coordinates of the object's UV map on the atlas, /// essentially telling it which part of the image its texture is in. /// In an application where many small textures are used frequently, it is often more efficient to store the /// textures in a texture atlas which is treated as a single unit by the graphics hardware. /// This saves memory, and because binding one large texture once causes fewer rendering state changes than /// binding many smaller textures as they are drawn, it can also be faster. /// Careful alignment may be needed to avoid bleeding between sub-textures when used with mipmapping, and artefacts /// between tiles for texture compression. /// </para> /// </remarks> public class TextureAtlas : IEnumerable<TextureRegion2D> { /// <summary> /// Initializes a new texture atlas with an empty list of regions. /// </summary> /// <param name="name">The asset name of this texture atlas.</param> /// <param name="texture">Source <see cref="Texture2D" /> image used to draw on screen.</param> public TextureAtlas(string name, Texture2D texture) { Name = name; Texture = texture; _regions = new List<TextureRegion2D>(); _regionMap = new Dictionary<string, int>(); } /// <summary> /// Initializes a new texture atlas and populates it with regions. /// </summary> /// <param name="name">The asset name of this texture atlas.</param> /// <param name="texture">Source <see cref="Texture2D" /> image used to draw on screen.</param> /// <param name="regions">A collection of regions to populate the atlas with.</param> public TextureAtlas(string name, Texture2D texture, Dictionary<string, Rectangle> regions) : this(name, texture) { foreach (var region in regions) CreateRegion(region.Key, region.Value.X, region.Value.Y, region.Value.Width, region.Value.Height); } private readonly Dictionary<string, int> _regionMap; private readonly List<TextureRegion2D> _regions; public string Name { get; } /// <summary> /// Gets a source <see cref="Texture2D" /> image. /// </summary> public Texture2D Texture { get; } /// <summary> /// Gets a list of regions in the <see cref="TextureAtlas" />. /// </summary> public IEnumerable<TextureRegion2D> Regions => _regions; /// <summary> /// Gets the number of regions in the <see cref="TextureAtlas" />. /// </summary> public int RegionCount => _regions.Count; public TextureRegion2D this[string name] => GetRegion(name); public TextureRegion2D this[int index] => GetRegion(index); /// <summary> /// Gets the enumerator of the <see cref="TextureAtlas" />' list of regions. /// </summary> /// <returns>The <see cref="IEnumerator" /> of regions.</returns> public IEnumerator<TextureRegion2D> GetEnumerator() { return _regions.GetEnumerator(); } /// <summary> /// Gets the enumerator of the <see cref="TextureAtlas" />' list of regions.
/// </summary> /// <returns>The <see cref="IEnumerator" /> of regions</returns> IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } /// <summary> /// Creates a new texture region and adds it to the list of the <see cref="TextureAtlas" />' regions. /// </summary> /// <param name="name">Name of the texture region.</param> /// <param name="x">X coordinate of the region's top left corner.</param> /// <param name="y">Y coordinate of the region's top left corner.</param> /// <param name="width">Width of the texture region.</param> /// <param name="height">Height of the texture region.</param> /// <returns>Created texture region.</returns> public TextureRegion2D CreateRegion(string name, int x, int y, int width, int height) { if (_regionMap.ContainsKey(name)) throw new InvalidOperationException($"Region {name} already exists in the texture atlas"); var region = new TextureRegion2D(name, Texture, x, y, width, height); var index = _regions.Count; _regions.Add(region); _regionMap.Add(name, index); return region; } /// <summary> /// Removes a texture region from the <see cref="TextureAtlas" /> /// </summary> /// <param name="index">An index of the <see cref="TextureRegion2D" /> in <see cref="Region" /> to remove</param> public void RemoveRegion(int index) { _regions.RemoveAt(index); } /// <summary> /// Removes a texture region from the <see cref="TextureAtlas" /> /// </summary> /// <param name="name">Name of the <see cref="TextureRegion2D" /> to remove</param> public void RemoveRegion(string name) { int index; if (_regionMap.TryGetValue(name, out index)) { RemoveRegion(index); _regionMap.Remove(name); } } /// <summary> /// Gets a <see cref="TextureRegion2D" /> from the <see cref="TextureAtlas" />' list. /// </summary> /// <param name="index">An index of the <see cref="TextureRegion2D" /> in <see cref="Region" /> to get.</param> /// <returns>The <see cref="TextureRegion2D" />.</returns> public TextureRegion2D GetRegion(int index) { if ((index < 0) || (index >= _regions.Count)) throw new IndexOutOfRangeException(); return _regions[index]; } /// <summary> /// Gets a <see cref="TextureRegion2D" /> from the <see cref="TextureAtlas" />' list. /// </summary> /// <param name="name">Name of the <see cref="TextureRegion2D" /> to get.</param> /// <returns>The <see cref="TextureRegion2D" />.</returns> public TextureRegion2D GetRegion(string name) { int index; if (_regionMap.TryGetValue(name, out index)) return GetRegion(index); throw new KeyNotFoundException(name); } /// <summary> /// Creates a new <see cref="TextureAtlas" /> and populates it with a grid of <see cref="TextureRegion2D" />. 
/// </summary> /// <param name="name">The name of this texture atlas</param> /// <param name="texture">Source <see cref="Texture2D" /> image used to draw on screen</param> /// <param name="regionWidth">Width of the <see cref="TextureRegion2D" />.</param> /// <param name="regionHeight">Height of the <see cref="TextureRegion2D" />.</param> /// <param name="maxRegionCount">The number of <see cref="TextureRegion2D" /> to create.</param> /// <param name="margin">Minimum distance of the regions from the border of the source <see cref="Texture2D" /> image.</param> /// <param name="spacing">Horizontal and vertical space between regions.</param> /// <returns>A created and populated <see cref="TextureAtlas" />.</returns> public static TextureAtlas Create(string name, Texture2D texture, int regionWidth, int regionHeight, int maxRegionCount = int.MaxValue, int margin = 0, int spacing = 0) { var textureAtlas = new TextureAtlas(name, texture); var count = 0; var width = texture.Width - margin; var height = texture.Height - margin; var xIncrement = regionWidth + spacing; var yIncrement = regionHeight + spacing; for (var y = margin; y < height; y += yIncrement) { for (var x = margin; x < width; x += xIncrement) { var regionName = $"{texture.Name ?? "region"}{count}"; textureAtlas.CreateRegion(regionName, x, y, regionWidth, regionHeight); count++; if (count >= maxRegionCount) return textureAtlas; } } return textureAtlas; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core.Tests.Dataload { using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading; using Apache.Ignite.Core.Binary; using Apache.Ignite.Core.Cache; using Apache.Ignite.Core.Datastream; using NUnit.Framework; /// <summary> /// Data streamer tests. /// </summary> public sealed class DataStreamerTest { /** Cache name. */ private const string CacheName = "partitioned"; /** Node. */ private IIgnite _grid; /** Node 2. */ private IIgnite _grid2; /** Cache. */ private ICache<int, int?> _cache; /// <summary> /// Initialization routine. /// </summary> [TestFixtureSetUp] public void InitClient() { _grid = Ignition.Start(TestUtils.GetTestConfiguration()); _grid2 = Ignition.Start(new IgniteConfiguration(TestUtils.GetTestConfiguration()) { IgniteInstanceName = "grid1" }); _cache = _grid.CreateCache<int, int?>(CacheName); } /// <summary> /// Fixture teardown. /// </summary> [TestFixtureTearDown] public void StopGrids() { Ignition.StopAll(true); } /// <summary> /// /// </summary> [SetUp] public void BeforeTest() { Console.WriteLine("Test started: " + TestContext.CurrentContext.Test.Name); for (int i = 0; i < 100; i++) _cache.Remove(i); } [TearDown] public void AfterTest() { TestUtils.AssertHandleRegistryIsEmpty(1000, _grid); } /// <summary> /// Test data streamer property configuration. Ensures that at least no exceptions are thrown. 
/// </summary> [Test] public void TestPropertyPropagation() { using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { Assert.AreEqual(CacheName, ldr.CacheName); Assert.AreEqual(0, ldr.AutoFlushFrequency); Assert.IsFalse(ldr.AllowOverwrite); ldr.AllowOverwrite = true; Assert.IsTrue(ldr.AllowOverwrite); ldr.AllowOverwrite = false; Assert.IsFalse(ldr.AllowOverwrite); Assert.IsFalse(ldr.SkipStore); ldr.SkipStore = true; Assert.IsTrue(ldr.SkipStore); ldr.SkipStore = false; Assert.IsFalse(ldr.SkipStore); Assert.AreEqual(DataStreamerDefaults.DefaultPerNodeBufferSize, ldr.PerNodeBufferSize); ldr.PerNodeBufferSize = 1; Assert.AreEqual(1, ldr.PerNodeBufferSize); ldr.PerNodeBufferSize = 2; Assert.AreEqual(2, ldr.PerNodeBufferSize); Assert.AreEqual(DataStreamerDefaults.DefaultPerThreadBufferSize, ldr.PerThreadBufferSize); ldr.PerThreadBufferSize = 1; Assert.AreEqual(1, ldr.PerThreadBufferSize); ldr.PerThreadBufferSize = 2; Assert.AreEqual(2, ldr.PerThreadBufferSize); Assert.AreEqual(0, ldr.PerNodeParallelOperations); var ops = DataStreamerDefaults.DefaultParallelOperationsMultiplier * IgniteConfiguration.DefaultThreadPoolSize; ldr.PerNodeParallelOperations = ops; Assert.AreEqual(ops, ldr.PerNodeParallelOperations); ldr.PerNodeParallelOperations = 2; Assert.AreEqual(2, ldr.PerNodeParallelOperations); Assert.AreEqual(DataStreamerDefaults.DefaultTimeout, ldr.Timeout); ldr.Timeout = TimeSpan.MaxValue; Assert.AreEqual(TimeSpan.MaxValue, ldr.Timeout); ldr.Timeout = TimeSpan.FromSeconds(1.5); Assert.AreEqual(1.5, ldr.Timeout.TotalSeconds); } } /// <summary> /// Test data add/remove. /// </summary> [Test] public void TestAddRemove() { IDataStreamer<int, int> ldr; using (ldr = _grid.GetDataStreamer<int, int>(CacheName)) { Assert.IsFalse(ldr.Task.IsCompleted); ldr.AllowOverwrite = true; // Additions. var task = ldr.AddData(1, 1); ldr.Flush(); Assert.AreEqual(1, _cache.Get(1)); Assert.IsTrue(task.IsCompleted); Assert.IsFalse(ldr.Task.IsCompleted); task = ldr.AddData(new KeyValuePair<int, int>(2, 2)); ldr.Flush(); Assert.AreEqual(2, _cache.Get(2)); Assert.IsTrue(task.IsCompleted); task = ldr.AddData(new [] { new KeyValuePair<int, int>(3, 3), new KeyValuePair<int, int>(4, 4) }); ldr.Flush(); Assert.AreEqual(3, _cache.Get(3)); Assert.AreEqual(4, _cache.Get(4)); Assert.IsTrue(task.IsCompleted); // Removal. task = ldr.RemoveData(1); ldr.Flush(); Assert.IsFalse(_cache.ContainsKey(1)); Assert.IsTrue(task.IsCompleted); // Mixed. ldr.AddData(5, 5); ldr.RemoveData(2); ldr.AddData(new KeyValuePair<int, int>(7, 7)); ldr.AddData(6, 6); ldr.RemoveData(4); ldr.AddData(new List<KeyValuePair<int, int>> { new KeyValuePair<int, int>(9, 9), new KeyValuePair<int, int>(10, 10) }); ldr.AddData(new KeyValuePair<int, int>(8, 8)); ldr.RemoveData(3); ldr.AddData(new List<KeyValuePair<int, int>> { new KeyValuePair<int, int>(11, 11), new KeyValuePair<int, int>(12, 12) }); ldr.Flush(); for (int i = 2; i < 5; i++) Assert.IsFalse(_cache.ContainsKey(i)); for (int i = 5; i < 13; i++) Assert.AreEqual(i, _cache.Get(i)); } Assert.IsTrue(ldr.Task.IsCompleted); } /// <summary> /// Tests object graphs with loops. 
/// </summary> [Test] public void TestObjectGraphs() { var obj1 = new Container(); var obj2 = new Container(); var obj3 = new Container(); var obj4 = new Container(); obj1.Inner = obj2; obj2.Inner = obj1; obj3.Inner = obj1; obj4.Inner = new Container(); using (var ldr = _grid.GetDataStreamer<int, Container>(CacheName)) { ldr.AllowOverwrite = true; ldr.AddData(1, obj1); ldr.AddData(2, obj2); ldr.AddData(3, obj3); ldr.AddData(4, obj4); } var cache = _grid.GetCache<int, Container>(CacheName); var res = cache[1]; Assert.AreEqual(res, res.Inner.Inner); Assert.IsNotNull(cache[2].Inner); Assert.IsNotNull(cache[2].Inner.Inner); Assert.IsNotNull(cache[3].Inner); Assert.IsNotNull(cache[3].Inner.Inner); Assert.IsNotNull(cache[4].Inner); Assert.IsNull(cache[4].Inner.Inner); } /// <summary> /// Test "tryFlush". /// </summary> [Test] public void TestTryFlush() { using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { var fut = ldr.AddData(1, 1); ldr.TryFlush(); fut.Wait(); Assert.AreEqual(1, _cache.Get(1)); } } /// <summary> /// Test buffer size adjustments. /// </summary> [Test] public void TestBufferSize() { using (var ldr = _grid.GetDataStreamer<int, int>(CacheName)) { const int timeout = 5000; var part1 = GetPrimaryPartitionKeys(_grid, 4); var part2 = GetPrimaryPartitionKeys(_grid2, 4); var task = ldr.AddData(part1[0], part1[0]); Thread.Sleep(100); Assert.IsFalse(task.IsCompleted); ldr.PerNodeBufferSize = 2; ldr.PerThreadBufferSize = 1; ldr.AddData(part2[0], part2[0]); ldr.AddData(part1[1], part1[1]); Assert.IsTrue(ldr.AddData(part2[1], part2[1]).Wait(timeout)); Assert.IsTrue(task.Wait(timeout)); Assert.AreEqual(part1[0], _cache.Get(part1[0])); Assert.AreEqual(part1[1], _cache.Get(part1[1])); Assert.AreEqual(part2[0], _cache.Get(part2[0])); Assert.AreEqual(part2[1], _cache.Get(part2[1])); Assert.IsTrue(ldr.AddData(new[] { new KeyValuePair<int, int>(part1[2], part1[2]), new KeyValuePair<int, int>(part1[3], part1[3]), new KeyValuePair<int, int>(part2[2], part2[2]), new KeyValuePair<int, int>(part2[3], part2[3]) }).Wait(timeout)); Assert.AreEqual(part1[2], _cache.Get(part1[2])); Assert.AreEqual(part1[3], _cache.Get(part1[3])); Assert.AreEqual(part2[2], _cache.Get(part2[2])); Assert.AreEqual(part2[3], _cache.Get(part2[3])); } } /// <summary> /// Gets the primary partition keys. /// </summary> private static int[] GetPrimaryPartitionKeys(IIgnite ignite, int count) { var affinity = ignite.GetAffinity(CacheName); var localNode = ignite.GetCluster().GetLocalNode(); var part = affinity.GetPrimaryPartitions(localNode).First(); return Enumerable.Range(0, int.MaxValue) .Where(k => affinity.GetPartition(k) == part) .Take(count) .ToArray(); } /// <summary> /// Test close. /// </summary> [Test] public void TestClose() { using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { var fut = ldr.AddData(1, 1); ldr.Close(false); fut.Wait(); Assert.AreEqual(1, _cache.Get(1)); } } /// <summary> /// Test close with cancellation. /// </summary> [Test] public void TestCancel() { using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { var fut = ldr.AddData(1, 1); ldr.Close(true); fut.Wait(); Assert.IsFalse(_cache.ContainsKey(1)); } } /// <summary> /// Tests that streamer gets collected when there are no references to it. 
/// </summary> [Test] public void TestFinalizer() { Assert.Fail("https://issues.apache.org/jira/browse/IGNITE-8731"); var streamer = _grid.GetDataStreamer<int, int>(CacheName); var streamerRef = new WeakReference(streamer); Assert.IsNotNull(streamerRef.Target); // ReSharper disable once RedundantAssignment streamer = null; GC.Collect(); GC.WaitForPendingFinalizers(); Assert.IsNull(streamerRef.Target); } /// <summary> /// Test auto-flush feature. /// </summary> [Test] public void TestAutoFlush() { using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { // Test auto flush turning on. var fut = ldr.AddData(1, 1); Thread.Sleep(100); Assert.IsFalse(fut.IsCompleted); ldr.AutoFlushFrequency = 1000; fut.Wait(); // Test forced flush after frequency change. fut = ldr.AddData(2, 2); ldr.AutoFlushFrequency = long.MaxValue; fut.Wait(); // Test another forced flush after frequency change. fut = ldr.AddData(3, 3); ldr.AutoFlushFrequency = 1000; fut.Wait(); // Test flush before stop. fut = ldr.AddData(4, 4); ldr.AutoFlushFrequency = 0; fut.Wait(); // Test flush after second turn on. fut = ldr.AddData(5, 5); ldr.AutoFlushFrequency = 1000; fut.Wait(); Assert.AreEqual(1, _cache.Get(1)); Assert.AreEqual(2, _cache.Get(2)); Assert.AreEqual(3, _cache.Get(3)); Assert.AreEqual(4, _cache.Get(4)); Assert.AreEqual(5, _cache.Get(5)); } } /// <summary> /// Test multithreaded behavior. /// </summary> [Test] [Category(TestUtils.CategoryIntensive)] public void TestMultithreaded() { int entriesPerThread = 100000; int threadCnt = 8; for (int i = 0; i < 5; i++) { _cache.Clear(); Assert.AreEqual(0, _cache.GetSize()); Stopwatch watch = new Stopwatch(); watch.Start(); using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName)) { ldr.PerNodeBufferSize = 1024; int ctr = 0; TestUtils.RunMultiThreaded(() => { int threadIdx = Interlocked.Increment(ref ctr); int startIdx = (threadIdx - 1) * entriesPerThread; int endIdx = startIdx + entriesPerThread; for (int j = startIdx; j < endIdx; j++) { // ReSharper disable once AccessToDisposedClosure ldr.AddData(j, j); if (j % 100000 == 0) Console.WriteLine("Put [thread=" + threadIdx + ", cnt=" + j + ']'); } }, threadCnt); } Console.WriteLine("Iteration " + i + ": " + watch.ElapsedMilliseconds); watch.Reset(); for (int j = 0; j < threadCnt * entriesPerThread; j++) Assert.AreEqual(j, j); } } /// <summary> /// Tests custom receiver. /// </summary> [Test] public void TestStreamReceiver() { TestStreamReceiver(new StreamReceiverBinarizable()); TestStreamReceiver(new StreamReceiverSerializable()); } /// <summary> /// Tests StreamVisitor. /// </summary> [Test] public void TestStreamVisitor() { TestStreamReceiver(new StreamVisitor<int, int>((c, e) => c.Put(e.Key, e.Value + 1))); } /// <summary> /// Tests StreamTransformer. /// </summary> [Test] public void TestStreamTransformer() { TestStreamReceiver(new StreamTransformer<int, int, int, int>(new EntryProcessorSerializable())); TestStreamReceiver(new StreamTransformer<int, int, int, int>(new EntryProcessorBinarizable())); } /// <summary> /// Tests specified receiver. 
/// </summary> private void TestStreamReceiver(IStreamReceiver<int, int> receiver) { using (var ldr = _grid.GetDataStreamer<int, int>(CacheName)) { ldr.AllowOverwrite = true; ldr.Receiver = new StreamReceiverBinarizable(); ldr.Receiver = receiver; // check double assignment Assert.AreEqual(ldr.Receiver, receiver); for (var i = 0; i < 100; i++) ldr.AddData(i, i); ldr.Flush(); for (var i = 0; i < 100; i++) Assert.AreEqual(i + 1, _cache.Get(i)); } } /// <summary> /// Tests the stream receiver in keepBinary mode. /// </summary> [Test] public void TestStreamReceiverKeepBinary() { // ReSharper disable once LocalVariableHidesMember var cache = _grid.GetCache<int, BinarizableEntry>(CacheName); using (var ldr0 = _grid.GetDataStreamer<int, int>(CacheName)) using (var ldr = ldr0.WithKeepBinary<int, IBinaryObject>()) { ldr.Receiver = new StreamReceiverKeepBinary(); ldr.AllowOverwrite = true; for (var i = 0; i < 100; i++) ldr.AddData(i, _grid.GetBinary().ToBinary<IBinaryObject>(new BinarizableEntry {Val = i})); ldr.Flush(); for (var i = 0; i < 100; i++) Assert.AreEqual(i + 1, cache.Get(i).Val); // Repeating WithKeepBinary call: valid args. Assert.AreSame(ldr, ldr.WithKeepBinary<int, IBinaryObject>()); // Invalid type args. var ex = Assert.Throws<InvalidOperationException>(() => ldr.WithKeepBinary<string, IBinaryObject>()); Assert.AreEqual( "Can't change type of binary streamer. WithKeepBinary has been called on an instance of " + "binary streamer with incompatible generic arguments.", ex.Message); } } /// <summary> /// Test binarizable receiver. /// </summary> private class StreamReceiverBinarizable : IStreamReceiver<int, int> { /** <inheritdoc /> */ public void Receive(ICache<int, int> cache, ICollection<ICacheEntry<int, int>> entries) { cache.PutAll(entries.ToDictionary(x => x.Key, x => x.Value + 1)); } } /// <summary> /// Test binary receiver. /// </summary> [Serializable] private class StreamReceiverKeepBinary : IStreamReceiver<int, IBinaryObject> { /** <inheritdoc /> */ public void Receive(ICache<int, IBinaryObject> cache, ICollection<ICacheEntry<int, IBinaryObject>> entries) { var binary = cache.Ignite.GetBinary(); cache.PutAll(entries.ToDictionary(x => x.Key, x => binary.ToBinary<IBinaryObject>(new BinarizableEntry { Val = x.Value.Deserialize<BinarizableEntry>().Val + 1 }))); } } /// <summary> /// Test serializable receiver. /// </summary> [Serializable] private class StreamReceiverSerializable : IStreamReceiver<int, int> { /** <inheritdoc /> */ public void Receive(ICache<int, int> cache, ICollection<ICacheEntry<int, int>> entries) { cache.PutAll(entries.ToDictionary(x => x.Key, x => x.Value + 1)); } } /// <summary> /// Test entry processor. /// </summary> [Serializable] private class EntryProcessorSerializable : ICacheEntryProcessor<int, int, int, int> { /** <inheritdoc /> */ public int Process(IMutableCacheEntry<int, int> entry, int arg) { entry.Value = entry.Key + 1; return 0; } } /// <summary> /// Test entry processor. /// </summary> private class EntryProcessorBinarizable : ICacheEntryProcessor<int, int, int, int>, IBinarizable { /** <inheritdoc /> */ public int Process(IMutableCacheEntry<int, int> entry, int arg) { entry.Value = entry.Key + 1; return 0; } /** <inheritdoc /> */ public void WriteBinary(IBinaryWriter writer) { // No-op. } /** <inheritdoc /> */ public void ReadBinary(IBinaryReader reader) { // No-op. } } /// <summary> /// Binarizable entry. /// </summary> private class BinarizableEntry { public int Val { get; set; } } /// <summary> /// Container class. 
/// </summary> private class Container { public Container Inner; } } }
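// A minimal sketch of typical data streamer usage mirroring the tests above: stream entries
// into a cache, flush, and let Dispose close the streamer. Cache name and key ranges are illustrative.
using Apache.Ignite.Core;
using Apache.Ignite.Core.Datastream;

internal static class DataStreamerUsageSketch
{
    internal static void Demo(IIgnite ignite)
    {
        var cache = ignite.GetOrCreateCache<int, int>("partitioned");

        using (IDataStreamer<int, int> streamer = ignite.GetDataStreamer<int, int>(cache.Name))
        {
            streamer.AllowOverwrite = true;    // needed to update existing keys or to use RemoveData
            streamer.PerNodeBufferSize = 1024; // entries buffered per node before a batch is sent

            for (var i = 0; i < 1000; i++)
                streamer.AddData(i, i * 2);

            streamer.Flush(); // push any buffered entries now
        } // Dispose() closes the streamer and completes streamer.Task
    }
}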
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void SubtractInt32() { var test = new SimpleBinaryOpTest__SubtractInt32(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); // Validates passing the field of a local works test.RunLclFldScenario(); // Validates passing an instance member works test.RunFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__SubtractInt32 { private const int VectorSize = 32; private const int ElementCount = VectorSize / sizeof(Int32); private static Int32[] _data1 = new Int32[ElementCount]; private static Int32[] _data2 = new Int32[ElementCount]; private static Vector256<Int32> _clsVar1; private static Vector256<Int32> _clsVar2; private Vector256<Int32> _fld1; private Vector256<Int32> _fld2; private SimpleBinaryOpTest__DataTable<Int32> _dataTable; static SimpleBinaryOpTest__SubtractInt32() { var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _clsVar2), ref Unsafe.As<Int32, byte>(ref _data1[0]), VectorSize); } public SimpleBinaryOpTest__SubtractInt32() { Succeeded = true; var random = new Random(); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _fld1), ref Unsafe.As<Int32, 
byte>(ref _data1[0]), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int32>, byte>(ref _fld2), ref Unsafe.As<Int32, byte>(ref _data2[0]), VectorSize); for (var i = 0; i < ElementCount; i++) { _data1[i] = (int)(random.Next(int.MinValue, int.MaxValue)); _data2[i] = (int)(random.Next(int.MinValue, int.MaxValue)); } _dataTable = new SimpleBinaryOpTest__DataTable<Int32>(_data1, _data2, new Int32[ElementCount], VectorSize); } public bool IsSupported => Avx2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { var result = Avx2.Subtract( Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { var result = Avx2.Subtract( Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { var result = Avx2.Subtract( Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { var result = typeof(Avx2).GetMethod(nameof(Avx2.Subtract), new Type[] { typeof(Vector256<Int32>), typeof(Vector256<Int32>) }) .Invoke(null, new object[] { Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)), Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int32>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { var result = Avx2.Subtract( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunLclVarScenario_UnsafeRead() { var left = Unsafe.Read<Vector256<Int32>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector256<Int32>>(_dataTable.inArray2Ptr); var result = Avx2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { var left = Avx.LoadVector256((Int32*)(_dataTable.inArray1Ptr)); var right = 
Avx.LoadVector256((Int32*)(_dataTable.inArray2Ptr)); var result = Avx2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { var left = Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray1Ptr)); var right = Avx.LoadAlignedVector256((Int32*)(_dataTable.inArray2Ptr)); var result = Avx2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclFldScenario() { var test = new SimpleBinaryOpTest__SubtractInt32(); var result = Avx2.Subtract(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunFldScenario() { var result = Avx2.Subtract(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunUnsupportedScenario() { Succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { Succeeded = true; } } private void ValidateResult(Vector256<Int32> left, Vector256<Int32> right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left); Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { Int32[] inArray1 = new Int32[ElementCount]; Int32[] inArray2 = new Int32[ElementCount]; Int32[] outArray = new Int32[ElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(Int32[] left, Int32[] right, Int32[] result, [CallerMemberName] string method = "") { if ((int)(left[0] - right[0]) != result[0]) { Succeeded = false; } else { for (var i = 1; i < left.Length; i++) { if ((int)(left[i] - right[i]) != result[i]) { Succeeded = false; break; } } } if (!Succeeded) { Console.WriteLine($"{nameof(Avx2)}.{nameof(Avx2.Subtract)}<Int32>: {method} failed:"); Console.WriteLine($" left: ({string.Join(", ", left)})"); Console.WriteLine($" right: ({string.Join(", ", right)})"); Console.WriteLine($" result: ({string.Join(", ", result)})"); Console.WriteLine(); } } } }
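// A minimal sketch of the operation the test above validates: element-wise 32-bit subtraction of
// two 256-bit vectors via Avx2.Subtract, guarded by the same IsSupported check. It assumes the
// released System.Runtime.Intrinsics surface (.NET Core 3.0+), including the Vector256.GetElement
// helper; the input values are illustrative.
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

internal static class SubtractInt32UsageSketch
{
    internal static unsafe void Demo()
    {
        if (!Avx2.IsSupported)
        {
            Console.WriteLine("AVX2 is not available on this machine.");
            return;
        }

        int* left = stackalloc int[8] { 10, 20, 30, 40, 50, 60, 70, 80 };
        int* right = stackalloc int[8] { 1, 2, 3, 4, 5, 6, 7, 8 };

        Vector256<int> result = Avx2.Subtract(
            Avx.LoadVector256(left),
            Avx.LoadVector256(right));

        // Each lane holds left[i] - right[i], the same scalar check ValidateResult performs.
        for (var i = 0; i < 8; i++)
            Console.WriteLine(result.GetElement(i)); // 9, 18, 27, ..., 72
    }
}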
using System; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Umbraco.Cms.Core; using Umbraco.Cms.Core.Events; using Umbraco.Cms.Core.Models; using Umbraco.Cms.Core.Models.Entities; using Umbraco.Cms.Core.Notifications; using Umbraco.Cms.Core.Trees; using Umbraco.Cms.Web.BackOffice.Controllers; using Umbraco.Cms.Web.Common.Filters; using Umbraco.Cms.Web.Common.ModelBinders; using Umbraco.Extensions; namespace Umbraco.Cms.Web.BackOffice.Trees { /// <summary> /// A base controller reference for non-attributed trees (un-registered). /// </summary> /// <remarks> /// Developers should generally inherit from TreeController. /// </remarks> [AngularJsonOnlyConfiguration] public abstract class TreeControllerBase : UmbracoAuthorizedApiController, ITree { // TODO: Need to set this, but from where? // Presumably not injecting as this will be a base controller for package/solution developers. private readonly UmbracoApiControllerTypeCollection _apiControllers; private readonly IEventAggregator _eventAggregator; protected TreeControllerBase(UmbracoApiControllerTypeCollection apiControllers, IEventAggregator eventAggregator) { _apiControllers = apiControllers; _eventAggregator = eventAggregator; } /// <summary> /// The method called to render the contents of the tree structure /// </summary> /// <param name="id"></param> /// <param name="queryStrings"> /// All of the query string parameters passed from jsTree /// </param> /// <remarks> /// We are allowing an arbitrary number of query strings to be passed in so that developers are able to persist custom data from the front-end /// to the back end to be used in the query for model data. /// </remarks> protected abstract ActionResult<TreeNodeCollection> GetTreeNodes(string id, [ModelBinder(typeof(HttpQueryStringModelBinder))]FormCollection queryStrings); /// <summary> /// Returns the menu structure for the node /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> protected abstract ActionResult<MenuItemCollection> GetMenuForNode(string id, [ModelBinder(typeof(HttpQueryStringModelBinder))]FormCollection queryStrings); /// <summary> /// The name to display on the root node /// </summary> public abstract string RootNodeDisplayName { get; } /// <inheritdoc /> public abstract string TreeGroup { get; } /// <inheritdoc /> public abstract string TreeAlias { get; } /// <inheritdoc /> public abstract string TreeTitle { get; } /// <inheritdoc /> public abstract TreeUse TreeUse { get; } /// <inheritdoc /> public abstract string SectionAlias { get; } /// <inheritdoc /> public abstract int SortOrder { get; } /// <inheritdoc /> public abstract bool IsSingleNodeTree { get; } /// <summary> /// Returns the root node for the tree /// </summary> /// <param name="queryStrings"></param> /// <returns></returns> public async Task<ActionResult<TreeNode>> GetRootNode([ModelBinder(typeof(HttpQueryStringModelBinder))]FormCollection queryStrings) { if (queryStrings == null) queryStrings = FormCollection.Empty; var nodeResult = CreateRootNode(queryStrings); if (!(nodeResult.Result is null)) { return nodeResult.Result; } var node = nodeResult.Value; //add the tree alias to the root node.AdditionalData["treeAlias"] = TreeAlias; AddQueryStringsToAdditionalData(node, queryStrings); //check if the tree is searchable and add that to the meta data as well if (this is ISearchableTree) node.AdditionalData.Add("searchable", "true"); //now update all data based on some of 
the query strings, like if we are running in dialog mode if (IsDialog(queryStrings)) node.RoutePath = "#"; await _eventAggregator.PublishAsync(new RootNodeRenderingNotification(node, queryStrings, TreeAlias)); return node; } /// <summary> /// The action called to render the contents of the tree structure /// </summary> /// <param name="id"></param> /// <param name="queryStrings"> /// All of the query string parameters passed from jsTree /// </param> /// <returns>JSON markup for jsTree</returns> /// <remarks> /// We are allowing an arbitrary number of query strings to be passed in so that developers are able to persist custom data from the front-end /// to the back end to be used in the query for model data. /// </remarks> public async Task<ActionResult<TreeNodeCollection>> GetNodes(string id, [ModelBinder(typeof(HttpQueryStringModelBinder))]FormCollection queryStrings) { if (queryStrings == null) queryStrings = FormCollection.Empty; var nodesResult = GetTreeNodes(id, queryStrings); if (!(nodesResult.Result is null)) { return nodesResult.Result; } var nodes = nodesResult.Value; foreach (var node in nodes) AddQueryStringsToAdditionalData(node, queryStrings); //now update all data based on some of the query strings, like if we are running in dialog mode if (IsDialog(queryStrings)) foreach (var node in nodes) node.RoutePath = "#"; //raise the event await _eventAggregator.PublishAsync(new TreeNodesRenderingNotification(nodes, queryStrings, TreeAlias)); return nodes; } /// <summary> /// The action called to render the menu for a tree node /// </summary> /// <param name="id"></param> /// <param name="queryStrings"></param> /// <returns></returns> public async Task<ActionResult<MenuItemCollection>> GetMenu(string id, [ModelBinder(typeof(HttpQueryStringModelBinder))]FormCollection queryStrings) { if (queryStrings == null) queryStrings = FormCollection.Empty; var menuResult = GetMenuForNode(id, queryStrings); if (!(menuResult.Result is null)) { return menuResult.Result; } var menu = menuResult.Value; //raise the event await _eventAggregator.PublishAsync(new MenuRenderingNotification(id, menu, queryStrings, TreeAlias)); return menu; } /// <summary> /// Helper method to create a root model for a tree /// </summary> /// <returns></returns> protected virtual ActionResult<TreeNode> CreateRootNode(FormCollection queryStrings) { var rootNodeAsString = Constants.System.RootString; queryStrings.TryGetValue(TreeQueryStringParameters.Application, out var currApp); var node = new TreeNode( rootNodeAsString, null, //this is a root node, there is no parent Url.GetTreeUrl(_apiControllers, GetType(), rootNodeAsString, queryStrings), Url.GetMenuUrl(_apiControllers, GetType(), rootNodeAsString, queryStrings)) { HasChildren = true, RoutePath = currApp, Name = RootNodeDisplayName }; return node; } #region Create TreeNode methods /// <summary> /// Helper method to create tree nodes /// </summary> /// <param name="id"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title) { var jsonUrl = Url.GetTreeUrl(_apiControllers, GetType(), id, queryStrings); var menuUrl = Url.GetMenuUrl(_apiControllers, GetType(), id, queryStrings); var node = new TreeNode(id, parentId, jsonUrl, menuUrl) { Name = title }; return node; } /// <summary> /// Helper method to create tree nodes /// </summary> /// <param name="id"></param> /// <param 
name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <param name="icon"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title, string icon) { var jsonUrl = Url.GetTreeUrl(_apiControllers, GetType(), id, queryStrings); var menuUrl = Url.GetMenuUrl(_apiControllers, GetType(), id, queryStrings); var node = new TreeNode(id, parentId, jsonUrl, menuUrl) { Name = title, Icon = icon, NodeType = TreeAlias }; return node; } /// <summary> /// Helper method to create tree nodes /// </summary> /// <param name="id"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <param name="routePath"></param> /// <param name="icon"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title, string icon, string routePath) { var jsonUrl = Url.GetTreeUrl(_apiControllers, GetType(), id, queryStrings); var menuUrl = Url.GetMenuUrl(_apiControllers, GetType(), id, queryStrings); var node = new TreeNode(id, parentId, jsonUrl, menuUrl) { Name = title, RoutePath = routePath, Icon = icon }; return node; } /// <summary> /// Helper method to create tree nodes and automatically generate the json URL + UDI /// </summary> /// <param name="entity"></param> /// <param name="entityObjectType"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="hasChildren"></param> /// <returns></returns> public TreeNode CreateTreeNode(IEntitySlim entity, Guid entityObjectType, string parentId, FormCollection queryStrings, bool hasChildren) { var contentTypeIcon = entity is IContentEntitySlim contentEntity ? 
contentEntity.ContentTypeIcon : null; var treeNode = CreateTreeNode(entity.Id.ToInvariantString(), parentId, queryStrings, entity.Name, contentTypeIcon); treeNode.Path = entity.Path; treeNode.Udi = Udi.Create(ObjectTypes.GetUdiType(entityObjectType), entity.Key); treeNode.HasChildren = hasChildren; treeNode.Trashed = entity.Trashed; return treeNode; } /// <summary> /// Helper method to create tree nodes and automatically generate the json URL + UDI /// </summary> /// <param name="entity"></param> /// <param name="entityObjectType"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="icon"></param> /// <param name="hasChildren"></param> /// <returns></returns> public TreeNode CreateTreeNode(IUmbracoEntity entity, Guid entityObjectType, string parentId, FormCollection queryStrings, string icon, bool hasChildren) { var treeNode = CreateTreeNode(entity.Id.ToInvariantString(), parentId, queryStrings, entity.Name, icon); treeNode.Udi = Udi.Create(ObjectTypes.GetUdiType(entityObjectType), entity.Key); treeNode.Path = entity.Path; treeNode.HasChildren = hasChildren; return treeNode; } /// <summary> /// Helper method to create tree nodes and automatically generate the json URL /// </summary> /// <param name="id"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <param name="icon"></param> /// <param name="hasChildren"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title, string icon, bool hasChildren) { var treeNode = CreateTreeNode(id, parentId, queryStrings, title, icon); treeNode.HasChildren = hasChildren; return treeNode; } /// <summary> /// Helper method to create tree nodes and automatically generate the json URL /// </summary> /// <param name="id"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <param name="routePath"></param> /// <param name="hasChildren"></param> /// <param name="icon"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title, string icon, bool hasChildren, string routePath) { var treeNode = CreateTreeNode(id, parentId, queryStrings, title, icon); treeNode.HasChildren = hasChildren; treeNode.RoutePath = routePath; return treeNode; } /// <summary> /// Helper method to create tree nodes and automatically generate the json URL + UDI /// </summary> /// <param name="id"></param> /// <param name="parentId"></param> /// <param name="queryStrings"></param> /// <param name="title"></param> /// <param name="routePath"></param> /// <param name="hasChildren"></param> /// <param name="icon"></param> /// <param name="udi"></param> /// <returns></returns> public TreeNode CreateTreeNode(string id, string parentId, FormCollection queryStrings, string title, string icon, bool hasChildren, string routePath, Udi udi) { var treeNode = CreateTreeNode(id, parentId, queryStrings, title, icon); treeNode.HasChildren = hasChildren; treeNode.RoutePath = routePath; treeNode.Udi = udi; return treeNode; } #endregion /// <summary> /// The AdditionalData of a node is always populated with the query string data, this method performs this /// operation and ensures that special values are not inserted or that duplicate keys are not added. 
/// </summary> /// <param name="node"></param> /// <param name="queryStrings"></param> protected void AddQueryStringsToAdditionalData(TreeNode node, FormCollection queryStrings) { foreach (var q in queryStrings.Where(x => node.AdditionalData.ContainsKey(x.Key) == false)) node.AdditionalData.Add(q.Key, q.Value); } /// <summary> /// If the request is for a dialog mode tree /// </summary> /// <param name="queryStrings"></param> /// <returns></returns> protected bool IsDialog(FormCollection queryStrings) { queryStrings.TryGetValue(TreeQueryStringParameters.Use, out var use); return use == "dialog"; } } }
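// Hypothetical usage sketch: a minimal tree built on the base class above. Everything here
// (class name, aliases, node ids, icon name) is invented for illustration; real trees usually
// derive from TreeController and are registered with a [Tree] attribute, but the overrides
// below show which members TreeControllerBase requires. Usings are omitted: the namespaces
// imported by the controller file above suffice.
public class ExampleTreeController : TreeControllerBase
{
    public ExampleTreeController(UmbracoApiControllerTypeCollection apiControllers, IEventAggregator eventAggregator)
        : base(apiControllers, eventAggregator)
    {
    }

    public override string RootNodeDisplayName => "Examples";
    public override string TreeGroup => "settings";        // illustrative group alias
    public override string TreeAlias => "exampleTree";
    public override string TreeTitle => "Example Tree";
    public override TreeUse TreeUse => TreeUse.Main;
    public override string SectionAlias => "settings";     // illustrative section alias
    public override int SortOrder => 10;
    public override bool IsSingleNodeTree => false;

    protected override ActionResult<TreeNodeCollection> GetTreeNodes(string id, FormCollection queryStrings)
    {
        var nodes = new TreeNodeCollection();
        if (id == Constants.System.RootString)
        {
            // CreateTreeNode(id, parentId, queryStrings, title, icon, hasChildren) from the base class.
            nodes.Add(CreateTreeNode("example-1", id, queryStrings, "First item", "icon-document", false));
        }
        return nodes;
    }

    protected override ActionResult<MenuItemCollection> GetMenuForNode(string id, FormCollection queryStrings)
    {
        // A real tree would typically build a menu via an injected menu item collection factory;
        // this sketch exposes no context menu.
        return new NotFoundResult();
    }
}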
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== // <OWNER>[....]</OWNER> // using Microsoft.Win32; using Microsoft.Win32.SafeHandles; using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Security.Permissions; using System.Text; using System.Runtime.Versioning; namespace System.Security.Principal { using BOOL = System.Int32; using DWORD = System.UInt32; using System.Globalization; using System.Diagnostics.Contracts; [Flags] internal enum PolicyRights { POLICY_VIEW_LOCAL_INFORMATION = 0x00000001, POLICY_VIEW_AUDIT_INFORMATION = 0x00000002, POLICY_GET_PRIVATE_INFORMATION = 0x00000004, POLICY_TRUST_ADMIN = 0x00000008, POLICY_CREATE_ACCOUNT = 0x00000010, POLICY_CREATE_SECRET = 0x00000020, POLICY_CREATE_PRIVILEGE = 0x00000040, POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080, POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100, POLICY_AUDIT_LOG_ADMIN = 0x00000200, POLICY_SERVER_ADMIN = 0x00000400, POLICY_LOOKUP_NAMES = 0x00000800, POLICY_NOTIFICATION = 0x00001000, } internal static class Win32 { internal const BOOL FALSE = 0; internal const BOOL TRUE = 1; private static bool _LsaLookupNames2Supported; private static bool _WellKnownSidApisSupported; [System.Security.SecuritySafeCritical] // auto-generated static Win32() { Win32Native.OSVERSIONINFO osvi = new Win32Native.OSVERSIONINFO(); bool r = Environment.GetVersion(osvi); if ( !r ) { Contract.Assert( r, "OSVersion native call failed." ); throw new SystemException( Environment.GetResourceString( "InvalidOperation_GetVersion" )); } if (osvi.MajorVersion > 5 || osvi.MinorVersion > 0 ) // Windows XP/2003 and above { // // LsaLookupNames2 supported only on XP and Windows 2003 and above // _LsaLookupNames2Supported = true; _WellKnownSidApisSupported = true; } else { // Win2000 _LsaLookupNames2Supported = false; // // WellKnownSid apis are only supported on Windows 2000 SP3 and above // (so we need sp info) // Win32Native.OSVERSIONINFOEX osviex = new Win32Native.OSVERSIONINFOEX(); r = Environment.GetVersionEx(osviex); if ( !r ) { Contract.Assert( r, "OSVersion native call failed"); throw new SystemException( Environment.GetResourceString( "InvalidOperation_GetVersion" )); } if (osviex.ServicePackMajor < 3) { _WellKnownSidApisSupported = false; } else { _WellKnownSidApisSupported = true; } } } internal static bool LsaLookupNames2Supported { get { return _LsaLookupNames2Supported; } } internal static bool WellKnownSidApisSupported { get { return _WellKnownSidApisSupported; } } // // Wrapper around advapi32.LsaOpenPolicy // [System.Security.SecurityCritical] // auto-generated internal static SafeLsaPolicyHandle LsaOpenPolicy( string systemName, PolicyRights rights ) { uint ReturnCode; SafeLsaPolicyHandle Result; Win32Native.LSA_OBJECT_ATTRIBUTES Loa; Loa.Length = Marshal.SizeOf( typeof( Win32Native.LSA_OBJECT_ATTRIBUTES )); Loa.RootDirectory = IntPtr.Zero; Loa.ObjectName = IntPtr.Zero; Loa.Attributes = 0; Loa.SecurityDescriptor = IntPtr.Zero; Loa.SecurityQualityOfService = IntPtr.Zero; if ( 0 == ( ReturnCode = Win32Native.LsaOpenPolicy( systemName, ref Loa, ( int )rights, out Result ))) { return Result; } else if ( ReturnCode == Win32Native.STATUS_ACCESS_DENIED ) { throw new UnauthorizedAccessException(); } else if ( ReturnCode == Win32Native.STATUS_INSUFFICIENT_RESOURCES || ReturnCode == Win32Native.STATUS_NO_MEMORY ) { throw new OutOfMemoryException(); } else { int win32ErrorCode = Win32Native.LsaNtStatusToWinError(unchecked((int) ReturnCode)); throw new 
SystemException(Win32Native.GetMessage(win32ErrorCode)); } } [System.Security.SecurityCritical] // auto-generated internal static byte[] ConvertIntPtrSidToByteArraySid( IntPtr binaryForm ) { byte[] ResultSid; // // Verify the revision (just sanity, should never fail to be 1) // byte Revision = Marshal.ReadByte( binaryForm, 0 ); if ( Revision != SecurityIdentifier.Revision ) { throw new ArgumentException(Environment.GetResourceString( "IdentityReference_InvalidSidRevision" ), "binaryForm"); } // // Need the subauthority count in order to figure out how many bytes to read // byte SubAuthorityCount = Marshal.ReadByte( binaryForm, 1 ); if ( SubAuthorityCount < 0 || SubAuthorityCount > SecurityIdentifier.MaxSubAuthorities ) { throw new ArgumentException(Environment.GetResourceString( "IdentityReference_InvalidNumberOfSubauthorities", SecurityIdentifier.MaxSubAuthorities), "binaryForm"); } // // Compute the size of the binary form of this SID and allocate the memory // int BinaryLength = 1 + 1 + 6 + SubAuthorityCount * 4; ResultSid = new byte[ BinaryLength ]; // // Extract the data from the returned pointer // Marshal.Copy( binaryForm, ResultSid, 0, BinaryLength ); return ResultSid; } // // Wrapper around advapi32.ConvertStringSidToSidW // [System.Security.SecurityCritical] // auto-generated internal static int CreateSidFromString( string stringSid, out byte[] resultSid ) { int ErrorCode; IntPtr ByteArray = IntPtr.Zero; try { if ( TRUE != Win32Native.ConvertStringSidToSid( stringSid, out ByteArray )) { ErrorCode = Marshal.GetLastWin32Error(); goto Error; } resultSid = ConvertIntPtrSidToByteArraySid( ByteArray ); } finally { // // Now is a good time to get rid of the returned pointer // Win32Native.LocalFree( ByteArray ); } // // Now invoke the SecurityIdentifier factory method to create the result // return Win32Native.ERROR_SUCCESS; Error: resultSid = null; return ErrorCode; } // // Wrapper around advapi32.CreateWellKnownSid // [System.Security.SecurityCritical] // auto-generated internal static int CreateWellKnownSid( WellKnownSidType sidType, SecurityIdentifier domainSid, out byte[] resultSid ) { // // Check if the api is supported // if (!WellKnownSidApisSupported) { throw new PlatformNotSupportedException( Environment.GetResourceString( "PlatformNotSupported_RequiresW2kSP3" )); } // // Passing an array as big as it can ever be is a small price to pay for // not having to P/Invoke twice (once to get the buffer, once to get the data) // uint length = ( uint )SecurityIdentifier.MaxBinaryLength; resultSid = new byte[ length ]; if ( FALSE != Win32Native.CreateWellKnownSid(( int )sidType, domainSid == null ? null : domainSid.BinaryForm, resultSid, ref length )) { return Win32Native.ERROR_SUCCESS; } else { resultSid = null; return Marshal.GetLastWin32Error(); } } // // Wrapper around advapi32.EqualDomainSid // [System.Security.SecurityCritical] // auto-generated internal static bool IsEqualDomainSid( SecurityIdentifier sid1, SecurityIdentifier sid2 ) { // // Check if the api is supported // if (!WellKnownSidApisSupported) { throw new PlatformNotSupportedException( Environment.GetResourceString( "PlatformNotSupported_RequiresW2kSP3" )); } if ( sid1 == null || sid2 == null ) { return false; } else { bool result; byte[] BinaryForm1 = new Byte[sid1.BinaryLength]; sid1.GetBinaryForm( BinaryForm1, 0 ); byte[] BinaryForm2 = new Byte[sid2.BinaryLength]; sid2.GetBinaryForm( BinaryForm2, 0 ); return ( Win32Native.IsEqualDomainSid( BinaryForm1, BinaryForm2, out result ) == FALSE ? 
false : result ); } } /// <summary> /// Setup the size of the buffer Windows provides for an LSA_REFERENCED_DOMAIN_LIST /// </summary> [System.Security.SecurityCritical] // auto-generated internal static void InitializeReferencedDomainsPointer(SafeLsaMemoryHandle referencedDomains) { Contract.Assert(referencedDomains != null, "referencedDomains != null"); // We don't know the real size of the referenced domains yet, so we need to set an initial // size based on the LSA_REFERENCED_DOMAIN_LIST structure, then resize it to include all of // the domains. referencedDomains.Initialize((uint)Marshal.SizeOf(typeof(Win32Native.LSA_REFERENCED_DOMAIN_LIST))); Win32Native.LSA_REFERENCED_DOMAIN_LIST domainList = referencedDomains.Read<Win32Native.LSA_REFERENCED_DOMAIN_LIST>(0); unsafe { byte* pRdl = null; RuntimeHelpers.PrepareConstrainedRegions(); try { referencedDomains.AcquirePointer(ref pRdl); // If there is a trust information list, then the buffer size is the end of that list minus // the beginning of the domain list. Otherwise, then the buffer is just the size of the // referenced domain list structure, which is what we defaulted to. if (!domainList.Domains.IsNull()) { Win32Native.LSA_TRUST_INFORMATION* pTrustInformation = (Win32Native.LSA_TRUST_INFORMATION*)domainList.Domains; pTrustInformation = pTrustInformation + domainList.Entries; long bufferSize = (byte*)pTrustInformation - pRdl; Contract.Assert(bufferSize > 0, "bufferSize > 0"); referencedDomains.Initialize((ulong)bufferSize); } } finally { if (pRdl != null) referencedDomains.ReleasePointer(); } } } // // Wrapper around avdapi32.GetWindowsAccountDomainSid // [System.Security.SecurityCritical] // auto-generated internal static int GetWindowsAccountDomainSid( SecurityIdentifier sid, out SecurityIdentifier resultSid ) { // // Check if the api is supported // if (!WellKnownSidApisSupported) { throw new PlatformNotSupportedException( Environment.GetResourceString( "PlatformNotSupported_RequiresW2kSP3" )); } // // Passing an array as big as it can ever be is a small price to pay for // not having to P/Invoke twice (once to get the buffer, once to get the data) // byte[] BinaryForm = new Byte[sid.BinaryLength]; sid.GetBinaryForm( BinaryForm, 0 ); uint sidLength = ( uint )SecurityIdentifier.MaxBinaryLength; byte[] resultSidBinary = new byte[ sidLength ]; if ( FALSE != Win32Native.GetWindowsAccountDomainSid( BinaryForm, resultSidBinary, ref sidLength )) { resultSid = new SecurityIdentifier( resultSidBinary, 0 ); return Win32Native.ERROR_SUCCESS; } else { resultSid = null; return Marshal.GetLastWin32Error(); } } // // Wrapper around advapi32.IsWellKnownSid // [System.Security.SecurityCritical] // auto-generated internal static bool IsWellKnownSid( SecurityIdentifier sid, WellKnownSidType type ) { // // Check if the api is supported // if (!WellKnownSidApisSupported) { throw new PlatformNotSupportedException( Environment.GetResourceString( "PlatformNotSupported_RequiresW2kSP3" )); } byte[] BinaryForm = new byte[sid.BinaryLength]; sid.GetBinaryForm( BinaryForm, 0 ); if ( FALSE == Win32Native.IsWellKnownSid( BinaryForm, ( int )type )) { return false; } else { return true; } } // When the CLR is hosted, the host gets to implement these calls, // otherwise, we call down into the Win32 APIs. 
#if FEATURE_IMPERSONATION [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.Process)] [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode), SuppressUnmanagedCodeSecurity] internal static extern int ImpersonateLoggedOnUser (SafeTokenHandle hToken); [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.Process)] [MethodImplAttribute(MethodImplOptions.InternalCall)] internal static extern int OpenThreadToken (TokenAccessLevels dwDesiredAccess, WinSecurityContext OpenAs, out SafeTokenHandle phThreadToken); [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.None)] [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode), SuppressUnmanagedCodeSecurity] internal static extern int RevertToSelf (); [System.Security.SecurityCritical] // auto-generated [ResourceExposure(ResourceScope.None)] [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode), SuppressUnmanagedCodeSecurity] internal static extern int SetThreadToken(SafeTokenHandle hToken); #endif } }
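// Illustrative sketch (separate from the internal wrapper above): the public
// System.Security.Principal surface that ultimately drives these calls. Parsing an SDDL
// string goes through ConvertStringSidToSid (Win32.CreateSidFromString), and the
// well-known-SID constructor and checks go through Win32.CreateWellKnownSid / IsWellKnownSid.
using System;
using System.Security.Principal;

internal static class SidExample
{
    private static void Main()
    {
        // "S-1-5-32-544" is BUILTIN\Administrators.
        var admins = new SecurityIdentifier("S-1-5-32-544");
        Console.WriteLine(admins.IsWellKnownSid(WellKnownSidType.BuiltinAdministratorsSid)); // True

        // Build a well-known SID directly; WorldSid is the Everyone SID.
        var everyone = new SecurityIdentifier(WellKnownSidType.WorldSid, null);
        Console.WriteLine(everyone.Value); // S-1-1-0
    }
}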
using System; using Csla; using ProjectsVendors.DataAccess; namespace ProjectsVendors.Business { /// <summary> /// VendorItem (editable child object).<br/> /// This is a generated <see cref="VendorItem"/> business object. /// </summary> /// <remarks> /// This class is an item of <see cref="VendorCollection"/> collection. /// </remarks> [Serializable] public partial class VendorItem : BusinessBase<VendorItem> { #region Static Fields private static int _lastId; #endregion #region Business Properties /// <summary> /// Maintains metadata about <see cref="VendorId"/> property. /// </summary> [NotUndoable] public static readonly PropertyInfo<int> VendorIdProperty = RegisterProperty<int>(p => p.VendorId, "Vendor Id"); /// <summary> /// Gets the Vendor Id. /// </summary> /// <value>The Vendor Id.</value> public int VendorId { get { return GetProperty(VendorIdProperty); } } /// <summary> /// Maintains metadata about <see cref="VendorName"/> property. /// </summary> public static readonly PropertyInfo<string> VendorNameProperty = RegisterProperty<string>(p => p.VendorName, "Vendor Name"); /// <summary> /// Gets or sets the Vendor Name. /// </summary> /// <value>The Vendor Name.</value> public string VendorName { get { return GetProperty(VendorNameProperty); } set { SetProperty(VendorNameProperty, value); } } /// <summary> /// Maintains metadata about <see cref="VendorContact"/> property. /// </summary> public static readonly PropertyInfo<string> VendorContactProperty = RegisterProperty<string>(p => p.VendorContact, "Vendor Contact"); /// <summary> /// Gets or sets the Vendor Contact. /// </summary> /// <value>The Vendor Contact.</value> public string VendorContact { get { return GetProperty(VendorContactProperty); } set { SetProperty(VendorContactProperty, value); } } /// <summary> /// Maintains metadata about <see cref="VendorPhone"/> property. /// </summary> public static readonly PropertyInfo<string> VendorPhoneProperty = RegisterProperty<string>(p => p.VendorPhone, "Vendor Phone"); /// <summary> /// Gets or sets the Vendor Phone. /// </summary> /// <value>The Vendor Phone.</value> public string VendorPhone { get { return GetProperty(VendorPhoneProperty); } set { SetProperty(VendorPhoneProperty, value); } } /// <summary> /// Maintains metadata about <see cref="VendorEmail"/> property. /// </summary> public static readonly PropertyInfo<string> VendorEmailProperty = RegisterProperty<string>(p => p.VendorEmail, "Vendor Email"); /// <summary> /// Gets or sets the Vendor Email. /// </summary> /// <value>The Vendor Email.</value> public string VendorEmail { get { return GetProperty(VendorEmailProperty); } set { SetProperty(VendorEmailProperty, value); } } /// <summary> /// Maintains metadata about <see cref="IsPrimaryVendor"/> property. /// </summary> public static readonly PropertyInfo<bool> IsPrimaryVendorProperty = RegisterProperty<bool>(p => p.IsPrimaryVendor, "Is Primary Vendor"); /// <summary> /// Gets or sets the Is Primary Vendor. /// </summary> /// <value><c>true</c> if Is Primary Vendor; otherwise, <c>false</c>.</value> public bool IsPrimaryVendor { get { return GetProperty(IsPrimaryVendorProperty); } set { SetProperty(IsPrimaryVendorProperty, value); } } /// <summary> /// Maintains metadata about <see cref="LastUpdated"/> property. /// </summary> [NotUndoable] public static readonly PropertyInfo<SmartDate> LastUpdatedProperty = RegisterProperty<SmartDate>(p => p.LastUpdated, "Last Updated"); /// <summary> /// Gets the Last Updated. 
/// </summary> /// <value>The Last Updated.</value> public SmartDate LastUpdated { get { return GetProperty(LastUpdatedProperty); } } #endregion #region Constructor /// <summary> /// Initializes a new instance of the <see cref="VendorItem"/> class. /// </summary> /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks> [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] public VendorItem() { // Use factory methods and do not use direct creation. // show the framework that this is a child object MarkAsChild(); } #endregion #region Data Access /// <summary> /// Loads default values for the <see cref="VendorItem"/> object properties. /// </summary> [RunLocal] protected override void Child_Create() { LoadProperty(VendorIdProperty, System.Threading.Interlocked.Decrement(ref _lastId)); LoadProperty(LastUpdatedProperty, new SmartDate(DateTime.Now)); var args = new DataPortalHookArgs(); OnCreate(args); base.Child_Create(); } /// <summary> /// Loads a <see cref="VendorItem"/> object from the given <see cref="VendorItemDto"/>. /// </summary> /// <param name="data">The VendorItemDto to use.</param> private void Child_Fetch(VendorItemDto data) { // Value properties LoadProperty(VendorIdProperty, data.VendorId); LoadProperty(VendorNameProperty, data.VendorName); LoadProperty(VendorContactProperty, data.VendorContact); LoadProperty(VendorPhoneProperty, data.VendorPhone); LoadProperty(VendorEmailProperty, data.VendorEmail); LoadProperty(IsPrimaryVendorProperty, data.IsPrimaryVendor); LoadProperty(LastUpdatedProperty, data.LastUpdated); var args = new DataPortalHookArgs(data); OnFetchRead(args); // check all object rules and property rules BusinessRules.CheckRules(); } /// <summary> /// Inserts a new <see cref="VendorItem"/> object in the database. /// </summary> /// <param name="parent">The parent object.</param> [Transactional(TransactionalTypes.TransactionScope)] private void Child_Insert(ProjectEdit parent) { SimpleAuditTrail(); var dto = new VendorItemDto(); dto.Parent_ProjectId = parent.ProjectId; dto.VendorName = VendorName; dto.VendorContact = VendorContact; dto.VendorPhone = VendorPhone; dto.VendorEmail = VendorEmail; dto.IsPrimaryVendor = IsPrimaryVendor; dto.LastUpdated = LastUpdated; using (var dalManager = DalFactoryProjectsVendors.GetManager()) { var args = new DataPortalHookArgs(dto); OnInsertPre(args); var dal = dalManager.GetProvider<IVendorItemDal>(); using (BypassPropertyChecks) { var resultDto = dal.Insert(dto); LoadProperty(VendorIdProperty, resultDto.VendorId); args = new DataPortalHookArgs(resultDto); } OnInsertPost(args); } } /// <summary> /// Updates in the database all changes made to the <see cref="VendorItem"/> object. 
/// </summary> [Transactional(TransactionalTypes.TransactionScope)] private void Child_Update() { if (!IsDirty) return; SimpleAuditTrail(); var dto = new VendorItemDto(); dto.VendorId = VendorId; dto.VendorName = VendorName; dto.VendorContact = VendorContact; dto.VendorPhone = VendorPhone; dto.VendorEmail = VendorEmail; dto.IsPrimaryVendor = IsPrimaryVendor; dto.LastUpdated = LastUpdated; using (var dalManager = DalFactoryProjectsVendors.GetManager()) { var args = new DataPortalHookArgs(dto); OnUpdatePre(args); var dal = dalManager.GetProvider<IVendorItemDal>(); using (BypassPropertyChecks) { var resultDto = dal.Update(dto); args = new DataPortalHookArgs(resultDto); } OnUpdatePost(args); } } private void SimpleAuditTrail() { LoadProperty(LastUpdatedProperty, DateTime.Now); } /// <summary> /// Self deletes the <see cref="VendorItem"/> object from database. /// </summary> [Transactional(TransactionalTypes.TransactionScope)] private void Child_DeleteSelf() { // audit the object, just in case soft delete is used on this object SimpleAuditTrail(); using (var dalManager = DalFactoryProjectsVendors.GetManager()) { var args = new DataPortalHookArgs(); OnDeletePre(args); var dal = dalManager.GetProvider<IVendorItemDal>(); using (BypassPropertyChecks) { dal.Delete(ReadProperty(VendorIdProperty)); } OnDeletePost(args); } } #endregion #region DataPortal Hooks /// <summary> /// Occurs after setting all defaults for object creation. /// </summary> partial void OnCreate(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation. /// </summary> partial void OnDeletePre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after the delete operation, before Commit(). /// </summary> partial void OnDeletePost(DataPortalHookArgs args); /// <summary> /// Occurs after setting query parameters and before the fetch operation. /// </summary> partial void OnFetchPre(DataPortalHookArgs args); /// <summary> /// Occurs after the fetch operation (object or collection is fully loaded and set up). /// </summary> partial void OnFetchPost(DataPortalHookArgs args); /// <summary> /// Occurs after the low level fetch operation, before the data reader is destroyed. /// </summary> partial void OnFetchRead(DataPortalHookArgs args); /// <summary> /// Occurs after setting query parameters and before the update operation. /// </summary> partial void OnUpdatePre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit(). /// </summary> partial void OnUpdatePost(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation. /// </summary> partial void OnInsertPre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit(). /// </summary> partial void OnInsertPost(DataPortalHookArgs args); #endregion } }
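// Hypothetical usage sketch: how an editable child like VendorItem is normally handled in
// CSLA - through its parent, never directly. The root factory method (ProjectEdit.GetProjectEdit)
// and the child collection property (project.Vendors, with AddNew()) are assumptions based on
// the generated object model this class belongs to.
using ProjectsVendors.Business;

internal static class VendorItemUsage
{
    internal static void AddVendor(int projectId)
    {
        var project = ProjectEdit.GetProjectEdit(projectId);   // assumed factory method

        var vendor = project.Vendors.AddNew();                 // assumed child collection
        vendor.VendorName = "Acme Corp";
        vendor.VendorContact = "Jane Doe";
        vendor.VendorEmail = "sales@acme.example";
        vendor.IsPrimaryVendor = true;

        // Saving the root cascades to the Child_Insert/Child_Update/Child_DeleteSelf methods
        // defined above; VendorId and LastUpdated are loaded back through the DAL hooks.
        project = project.Save();
    }
}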
using UnityEngine; using System.Collections.Generic; /// <summary> /// Game Jolt API Helper Trophies Window. Inherit from <see cref="GJHWindow"/> /// </summary> public class GJHTrophiesWindow : GJHWindow { /// <summary> /// The trophies scroll view position. /// </summary> Vector2 trophiesScrollViewPosition; /// <summary> /// The trophies. /// </summary> GJTrophy[] trophies = null; /// <summary> /// The trophies icons. /// </summary> Texture2D[] trophiesIcons = null; /// <summary> /// The window states. /// </summary> enum TrophiesWindowStates { TrophiesList } /// <summary> /// GUI styles. /// </summary> GUIStyle trophyTitleStyle = null, trophyDescriptionStyle = null; /// <summary> /// The secret trophies ids. /// </summary> public uint[] secretTrophies = null; /// <summary> /// <c>true</c> to show secret trophies with ???; <c>false</c> to not show secret trophies at all. /// </summary> public bool showSecretTrophies = true; /// <summary> /// Initializes a new instance of the <see cref="GJHTrophiesMethodsWindow"/> class. /// </summary> public GJHTrophiesWindow () : base () { Title = "Trophies"; float w = Screen.width * .9f; w = w > 500 ? 500 : w; float h = Screen.height * .9f; Position = new Rect (Screen.width / 2 - w / 2, Screen.height / 2 - h / 2, w, h); drawWindowDelegates.Add (TrophiesWindowStates.TrophiesList.ToString (), DrawTrophiesList); trophyTitleStyle = GJAPIHelper.Skin.FindStyle ("TrophyTitle") ?? GJAPIHelper.Skin.label; trophyDescriptionStyle = GJAPIHelper.Skin.FindStyle ("TrophyDescription") ?? GJAPIHelper.Skin.label; } /// <summary> /// Releases unmanaged resources and performs other cleanup operations before the <see cref="GJHTrophiesWindow"/> is /// reclaimed by garbage collection. /// </summary> ~GJHTrophiesWindow () { trophies = null; trophiesIcons = null; trophyTitleStyle = null; trophyDescriptionStyle = null; secretTrophies = null; } /// <summary> /// Show this window. /// </summary> public override bool Show () { bool success = base.Show (); if (success) { GetTrophies (); } return success; } /// <summary> /// Dismiss this window. /// </summary> public override bool Dismiss () { return base.Dismiss (); } /// <summary> /// Gets the trophies. /// </summary> void GetTrophies () { SetWindowMessage ("Loading trophies"); ChangeState (BaseWindowStates.Process.ToString ()); GJAPI.Trophies.GetAllCallback += OnGetTrophies; GJAPI.Trophies.GetAll (); } /// <summary> /// GetTrophies callback. /// </summary> /// <param name='t'> /// The trophies. /// </param> void OnGetTrophies (GJTrophy[] t) { GJAPI.Trophies.GetAllCallback -= OnGetTrophies; if (t == null) { SetWindowMessage ("Error loading trophies."); ChangeState (BaseWindowStates.Error.ToString ()); return; } trophies = t; int count = trophies.Length; trophiesIcons = new Texture2D[count]; for (int i = 0; i < count; i++) { trophiesIcons[i] = (Texture2D) Resources.Load ("Images/TrophyIcon", typeof (Texture2D)) ?? new Texture2D (75,75); int index = i; // If we pass i directly, it passes a reference and will be out of range. GJAPIHelper.Trophies.DownloadTrophyIcon ( trophies[i], icon => { trophiesIcons[index] = icon; }); } ChangeState (TrophiesWindowStates.TrophiesList.ToString ()); } /// <summary> /// Draws the trophies list. 
/// </summary> void DrawTrophiesList () { trophiesScrollViewPosition = GUILayout.BeginScrollView (trophiesScrollViewPosition); int count = trophies.Length; for (int i = 0 ; i < count ; i++) { if (secretTrophies != null && secretTrophies.Length > 0 && ((IList<uint>)secretTrophies).Contains(trophies[i].Id) && !trophies[i].Achieved) { if (!showSecretTrophies) { continue; } DrawTrophy (i, false); } else { DrawTrophy (i, true); } if (i != count - 1) { GUILayout.Space (10); } } GUILayout.EndScrollView (); GUILayout.Space (10); GUILayout.BeginHorizontal (); GUILayout.FlexibleSpace (); if (GUILayout.Button ("Close")) { Dismiss (); } GUILayout.EndHorizontal (); } /// <summary> /// Draws the trophy. /// </summary> /// <param name='t'> /// The trophy. /// </param> void DrawTrophy (int t, bool show) { GUILayout.BeginHorizontal (); GUI.enabled = trophies[t].Achieved ? true : false; GUILayout.Label (trophiesIcons[t]); GUI.enabled = true; GUILayout.Space (10); GUILayout.BeginVertical ("box", GUILayout.Height (75)); GUILayout.FlexibleSpace (); GUILayout.Label (show ? trophies[t].Title : "???", trophyTitleStyle); GUILayout.Space (5); GUILayout.Label (show ? trophies[t].Description : "???", trophyDescriptionStyle); GUILayout.FlexibleSpace (); GUILayout.EndVertical (); GUILayout.FlexibleSpace (); GUILayout.EndHorizontal (); } }
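// Hypothetical usage sketch: opening the trophies window from game code. Only the public
// surface defined above (constructor, Show/Dismiss, secretTrophies, showSecretTrophies) is
// used; initializing GJAPI and wiring GJHWindow into OnGUI is assumed to be handled by the
// helper library elsewhere.
using UnityEngine;

public class TrophiesMenu : MonoBehaviour
{
    GJHTrophiesWindow trophiesWindow;

    void Awake ()
    {
        trophiesWindow = new GJHTrophiesWindow ();
        trophiesWindow.secretTrophies = new uint[] { 42 }; // listed as "???" until achieved
        trophiesWindow.showSecretTrophies = true;
    }

    void OnGUI ()
    {
        if (GUILayout.Button ("Trophies"))
        {
            trophiesWindow.Show (); // kicks off GetTrophies() and the loading state
        }
    }
}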
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using Xunit; namespace Microsoft.Win32.RegistryTests { public class Registry_SetValue_str_str_obj_valueKind : TestSubKey { private const string TestKey = "CM3001_TEST"; public Registry_SetValue_str_str_obj_valueKind() : base(TestKey) { } public static IEnumerable<object[]> TestObjects { get { return TestData.TestObjects; } } [Theory] [MemberData("TestObjects")] public void SetValueWithUnknownValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { string valueName = "Testing_" + testIndex.ToString(); Registry.SetValue(_testRegistryKey.Name, valueName, testValue, RegistryValueKind.Unknown); Assert.Equal(testValue.ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); _testRegistryKey.DeleteValue(valueName); } [Theory] [MemberData("TestObjects")] public void SetValueWithStringValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { string valueName = "Testing_" + testIndex.ToString(); expectedValueKind = RegistryValueKind.String; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(testValue.ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); _testRegistryKey.DeleteValue(valueName); } [Theory] [MemberData("TestObjects")] public void SetValueWithExpandStringValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { string valueName = "Testing_" + testIndex.ToString(); expectedValueKind = RegistryValueKind.ExpandString; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(testValue.ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); _testRegistryKey.DeleteValue(valueName); } [Theory] [MemberData("TestObjects")] public void SetValueWithMultiStringValeKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { try { string valueName = "Testing_" + testIndex.ToString(); expectedValueKind = RegistryValueKind.MultiString; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(testValue.ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); _testRegistryKey.DeleteValue(valueName); } catch (ArgumentException) { Assert.IsNotType<string[]>(testValue); } } [Theory] [MemberData("TestObjects")] public void SetValueWithBinaryValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { try { string valueName = "Testing_" + testIndex.ToString(); expectedValueKind = RegistryValueKind.Binary; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(testValue.ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); _testRegistryKey.DeleteValue(valueName); } catch (ArgumentException) { Assert.IsNotType<byte[]>(testValue); } } [Theory] [MemberData("TestObjects")] public void SetValueWithDWordValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { try { string valueName = "Testing_" + testIndex.ToString(); 
expectedValueKind = RegistryValueKind.DWord; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(Convert.ToInt32(testValue).ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); Assert.True(testIndex <= 15); _testRegistryKey.DeleteValue(valueName); } catch (ArgumentException ioe) { Assert.False(testIndex <= 15, ioe.ToString()); } } [Theory] [MemberData("TestObjects")] public void SetValueWithQWordValueKind(int testIndex, object testValue, RegistryValueKind expectedValueKind) { try { string valueName = "Testing_" + testIndex.ToString(); expectedValueKind = RegistryValueKind.QWord; Registry.SetValue(_testRegistryKey.Name, valueName, testValue, expectedValueKind); Assert.Equal(Convert.ToInt64(testValue).ToString(), _testRegistryKey.GetValue(valueName).ToString()); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); Assert.True(testIndex <= 25); _testRegistryKey.DeleteValue(valueName); } catch (ArgumentException ioe) { Assert.False(testIndex <= 25, ioe.ToString()); } } [Fact] public void SetValueForNonExistingKey() { const string valueName = "FooBar"; const int expectedValue1 = int.MaxValue; const long expectedValue2 = long.MinValue; const RegistryValueKind expectedValueKind1 = RegistryValueKind.DWord; const RegistryValueKind expectedValueKind2 = RegistryValueKind.QWord; Assert.True(_testRegistryKey.GetValue(valueName) == null, "Registry key already exists"); Registry.SetValue(_testRegistryKey.Name, valueName, expectedValue1, expectedValueKind1); Assert.True(_testRegistryKey.GetValue(valueName) != null, "Registry key doesn't exists"); Assert.Equal(expectedValue1, (int)_testRegistryKey.GetValue(valueName)); Assert.Equal(expectedValueKind1, _testRegistryKey.GetValueKind(valueName)); Registry.SetValue(_testRegistryKey.Name, valueName, expectedValue2, expectedValueKind2); Assert.Equal(expectedValue2, (long)_testRegistryKey.GetValue(valueName)); Assert.Equal(expectedValueKind2, _testRegistryKey.GetValueKind(valueName)); } public IEnumerable<object[]> TestValueNames { get { return TestData.TestValueNames; } } [Theory] [InlineData("TestValueNames")] public void SetValueWithNameTest(string valueName) { const long expectedValue = long.MaxValue; const RegistryValueKind expectedValueKind = RegistryValueKind.QWord; Registry.SetValue(_testRegistryKey.Name, valueName, expectedValue, expectedValueKind); Assert.Equal(expectedValue, (long)_testRegistryKey.GetValue(valueName)); Assert.Equal(expectedValueKind, _testRegistryKey.GetValueKind(valueName)); } [Fact] public void NegativeTests() { // Should throw if key length above 255 characters but prior to V4 the limit is 16383 const int maxValueNameLength = 16383; var valueName = new string('a', maxValueNameLength + 1); Assert.Throws<ArgumentException>(() => Registry.SetValue(_testRegistryKey.Name, valueName, ulong.MaxValue, RegistryValueKind.String)); valueName = "FooBar"; // Should throw if passed value is null Assert.Throws<ArgumentNullException>(() => Registry.SetValue(_testRegistryKey.Name, valueName, null, RegistryValueKind.QWord)); // Should throw because valueKind is equal to -2 which is not an acceptable value Assert.Throws<ArgumentException>(() => Registry.SetValue(_testRegistryKey.Name, valueName, int.MinValue, (RegistryValueKind)(-2))); // Should throw because passed array contains null string[] strArr = { "one", "two", null, "three" }; Assert.Throws<ArgumentException>(() => 
Registry.SetValue(_testRegistryKey.Name, valueName, strArr, RegistryValueKind.MultiString)); // Should throw because passed array has wrong type Assert.Throws<ArgumentException>(() => Registry.SetValue(_testRegistryKey.Name, valueName, new[] { new object() }, RegistryValueKind.MultiString)); // Should throw because passed array has wrong type object[] objTemp = { "my string", "your string", "Any once string" }; Assert.Throws<ArgumentException>(() => Registry.SetValue(_testRegistryKey.Name, valueName, objTemp, RegistryValueKind.Unknown)); } } }
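// Illustrative sketch of the behavior the tests above exercise: an explicit RegistryValueKind
// coerces the stored value, while RegistryValueKind.Unknown lets the kind be inferred from the
// object's type. The subkey name is arbitrary; Windows only.
using System;
using Microsoft.Win32;

internal static class RegistryKindDemo
{
    private static void Main()
    {
        const string keyName = @"HKEY_CURRENT_USER\Software\RegistryKindDemo";

        Registry.SetValue(keyName, "AsString", 42, RegistryValueKind.String);  // stored as "42" (REG_SZ)
        Registry.SetValue(keyName, "AsDWord", 42, RegistryValueKind.DWord);    // stored as REG_DWORD
        Registry.SetValue(keyName, "Inferred", 42, RegistryValueKind.Unknown); // kind inferred from int

        using (RegistryKey key = Registry.CurrentUser.OpenSubKey(@"Software\RegistryKindDemo"))
        {
            Console.WriteLine(key.GetValueKind("AsString")); // String
            Console.WriteLine(key.GetValueKind("AsDWord"));  // DWord
            Console.WriteLine(key.GetValueKind("Inferred")); // DWord
        }

        Registry.CurrentUser.DeleteSubKey(@"Software\RegistryKindDemo");
    }
}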
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Net; using System.Reflection; using System.Threading; using OpenMetaverse; using OpenMetaverse.StructuredData; using log4net; using OpenSim.Framework; using OpenSim.Framework.Client; using OpenSim.Framework.Communications; using OpenSim.Framework.Communications.Cache; using OpenSim.Framework.Capabilities; using OpenSim.Region.Framework.Interfaces; using OpenSim.Services.Interfaces; using OSD = OpenMetaverse.StructuredData.OSD; using GridRegion = OpenSim.Services.Interfaces.GridRegion; namespace OpenSim.Region.Framework.Scenes { public delegate void KiPrimitiveDelegate(uint localID); public delegate void RemoveKnownRegionsFromAvatarList(UUID avatarID, List<ulong> regionlst); /// <summary> /// Class that Region communications runs through /// </summary> public class SceneCommunicationService //one instance per region { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); protected CommunicationsManager m_commsProvider; protected IInterregionCommsOut m_interregionCommsOut; protected RegionInfo m_regionInfo; protected Scene m_scene; protected RegionCommsListener regionCommsHost; protected List<UUID> m_agentsInTransit; /// <summary> /// An agent is crossing into this region /// </summary> public event AgentCrossing OnAvatarCrossingIntoRegion; /// <summary> /// A user will arrive shortly, set up appropriate credentials so it can connect /// </summary> public event ExpectUserDelegate OnExpectUser; /// <summary> /// A Prim will arrive shortly /// </summary> public event CloseAgentConnection OnCloseAgentConnection; /// <summary> /// A new prim has arrived /// </summary> public event PrimCrossing OnPrimCrossingIntoRegion; ///// <summary> ///// A New Region is up and available ///// </summary> //public event RegionUp OnRegionUp; /// <summary> /// We have a child agent for this avatar and we're getting a status update about it /// </summary> public event ChildAgentUpdate 
OnChildAgentUpdate; //public event RemoveKnownRegionsFromAvatarList OnRemoveKnownRegionFromAvatar; /// <summary> /// Time to log one of our users off. Grid Service sends this mostly /// </summary> public event LogOffUser OnLogOffUser; /// <summary> /// A region wants land data from us! /// </summary> public event GetLandData OnGetLandData; private AgentCrossing handlerAvatarCrossingIntoRegion = null; // OnAvatarCrossingIntoRegion; private ExpectUserDelegate handlerExpectUser = null; // OnExpectUser; private CloseAgentConnection handlerCloseAgentConnection = null; // OnCloseAgentConnection; private PrimCrossing handlerPrimCrossingIntoRegion = null; // OnPrimCrossingIntoRegion; //private RegionUp handlerRegionUp = null; // OnRegionUp; private ChildAgentUpdate handlerChildAgentUpdate = null; // OnChildAgentUpdate; //private RemoveKnownRegionsFromAvatarList handlerRemoveKnownRegionFromAvatar = null; // OnRemoveKnownRegionFromAvatar; private LogOffUser handlerLogOffUser = null; private GetLandData handlerGetLandData = null; // OnGetLandData public KiPrimitiveDelegate KiPrimitive; public SceneCommunicationService(CommunicationsManager commsMan) { m_commsProvider = commsMan; m_agentsInTransit = new List<UUID>(); } public void SetScene(Scene s) { m_scene = s; m_regionInfo = s.RegionInfo; m_interregionCommsOut = m_scene.RequestModuleInterface<IInterregionCommsOut>(); } /// <summary> /// Register a region with the grid /// </summary> /// <param name="regionInfos"></param> /// <exception cref="System.Exception">Thrown if region registration fails.</exception> public void RegisterRegion(IInterregionCommsOut comms_out, RegionInfo regionInfos) { } /// <summary> /// This region is shutting down, de-register all events! /// De-Register region from Grid! /// </summary> public void Close() { } #region CommsManager Event handlers /// <summary> /// A New User will arrive shortly, Informs the scene that there's a new user on the way /// </summary> /// <param name="agent">Data we need to ensure that the agent can connect</param> /// protected void NewUserConnection(AgentCircuitData agent) { handlerExpectUser = OnExpectUser; if (handlerExpectUser != null) { //m_log.Info("[INTER]: " + debugRegionName + ": SceneCommunicationService: OnExpectUser Fired for User:" + agent.firstname + " " + agent.lastname); handlerExpectUser(agent); } } /// <summary> /// The Grid has requested us to log-off the user /// </summary> /// <param name="AgentID">Unique ID of agent to log-off</param> /// <param name="RegionSecret">The secret string that the region establishes with the grid when registering</param> /// <param name="message">The message to send to the user that tells them why they were logged off</param> protected void GridLogOffUser(UUID AgentID, UUID RegionSecret, string message) { handlerLogOffUser = OnLogOffUser; if (handlerLogOffUser != null) { handlerLogOffUser(AgentID, RegionSecret, message); } } /// <summary> /// Inform the scene that we've got an update about a child agent that we have /// </summary> /// <param name="cAgentData"></param> /// <returns></returns> protected bool ChildAgentUpdate(ChildAgentDataUpdate cAgentData) { handlerChildAgentUpdate = OnChildAgentUpdate; if (handlerChildAgentUpdate != null) handlerChildAgentUpdate(cAgentData); return true; } protected void AgentCrossing(UUID agentID, Vector3 position, bool isFlying) { handlerAvatarCrossingIntoRegion = OnAvatarCrossingIntoRegion; if (handlerAvatarCrossingIntoRegion != null) { handlerAvatarCrossingIntoRegion(agentID, position, isFlying); } } protected 
void PrimCrossing(UUID primID, Vector3 position, bool isPhysical) { handlerPrimCrossingIntoRegion = OnPrimCrossingIntoRegion; if (handlerPrimCrossingIntoRegion != null) { handlerPrimCrossingIntoRegion(primID, position, isPhysical); } } protected bool CloseConnection(UUID agentID) { m_log.Debug("[INTERREGION]: Incoming Agent Close Request for agent: " + agentID); handlerCloseAgentConnection = OnCloseAgentConnection; if (handlerCloseAgentConnection != null) { return handlerCloseAgentConnection(agentID); } return false; } protected LandData FetchLandData(uint x, uint y) { handlerGetLandData = OnGetLandData; if (handlerGetLandData != null) { return handlerGetLandData(x, y); } return null; } #endregion #region Inform Client of Neighbours private delegate void InformClientOfNeighbourDelegate( ScenePresence avatar, AgentCircuitData a, GridRegion reg, IPEndPoint endPoint, bool newAgent); private void InformClientOfNeighbourCompleted(IAsyncResult iar) { InformClientOfNeighbourDelegate icon = (InformClientOfNeighbourDelegate) iar.AsyncState; icon.EndInvoke(iar); } /// <summary> /// Async component for informing client of which neighbours exist /// </summary> /// <remarks> /// This needs to run asynchronously, as a network timeout may block the thread for a long while /// </remarks> /// <param name="remoteClient"></param> /// <param name="a"></param> /// <param name="regionHandle"></param> /// <param name="endPoint"></param> private void InformClientOfNeighbourAsync(ScenePresence avatar, AgentCircuitData a, GridRegion reg, IPEndPoint endPoint, bool newAgent) { // Let's wait just a little to give time to originating regions to catch up with closing child agents // after a cross here Thread.Sleep(500); uint x, y; Utils.LongToUInts(reg.RegionHandle, out x, out y); x = x / Constants.RegionSize; y = y / Constants.RegionSize; m_log.Info("[INTERGRID]: Starting to inform client about neighbour " + x + ", " + y + "(" + endPoint.ToString() + ")"); string capsPath = "http://" + reg.ExternalHostName + ":" + reg.HttpPort + "/CAPS/" + a.CapsPath + "0000/"; string reason = String.Empty; bool regionAccepted = m_interregionCommsOut.SendCreateChildAgent(reg.RegionHandle, a, 0, out reason); if (regionAccepted && newAgent) { IEventQueue eq = avatar.Scene.RequestModuleInterface<IEventQueue>(); if (eq != null) { #region IP Translation for NAT IClientIPEndpoint ipepClient; if (avatar.ClientView.TryGet(out ipepClient)) { endPoint.Address = NetworkUtil.GetIPFor(ipepClient.EndPoint, endPoint.Address); } #endregion eq.EnableSimulator(reg.RegionHandle, endPoint, avatar.UUID); eq.EstablishAgentCommunication(avatar.UUID, endPoint, capsPath); m_log.DebugFormat("[CAPS]: Sending new CAPS seed url {0} to client {1} in region {2}", capsPath, avatar.UUID, avatar.Scene.RegionInfo.RegionName); } else { avatar.ControllingClient.InformClientOfNeighbour(reg.RegionHandle, endPoint); // TODO: make Event Queue disablable! } m_log.Info("[INTERGRID]: Completed inform client about neighbour " + endPoint.ToString()); } } public List<GridRegion> RequestNeighbours(Scene pScene, uint pRegionLocX, uint pRegionLocY) { Border[] northBorders = pScene.NorthBorders.ToArray(); Border[] southBorders = pScene.SouthBorders.ToArray(); Border[] eastBorders = pScene.EastBorders.ToArray(); Border[] westBorders = pScene.WestBorders.ToArray(); // Legacy one region. Provided for simplicity while testing the all inclusive method in the else statement. 
if (northBorders.Length <= 1 && southBorders.Length <= 1 && eastBorders.Length <= 1 && westBorders.Length <= 1) { return m_scene.GridService.GetNeighbours(m_regionInfo.ScopeID, m_regionInfo.RegionID); } else { Vector2 extent = Vector2.Zero; for (int i = 0; i < eastBorders.Length; i++) { extent.X = (eastBorders[i].BorderLine.Z > extent.X) ? eastBorders[i].BorderLine.Z : extent.X; } for (int i = 0; i < northBorders.Length; i++) { extent.Y = (northBorders[i].BorderLine.Z > extent.Y) ? northBorders[i].BorderLine.Z : extent.Y; } // Loss of fraction on purpose extent.X = ((int)extent.X / (int)Constants.RegionSize) + 1; extent.Y = ((int)extent.Y / (int)Constants.RegionSize) + 1; int startX = (int)(pRegionLocX - 1) * (int)Constants.RegionSize; int startY = (int)(pRegionLocY - 1) * (int)Constants.RegionSize; int endX = ((int)pRegionLocX + (int)extent.X) * (int)Constants.RegionSize; int endY = ((int)pRegionLocY + (int)extent.Y) * (int)Constants.RegionSize; List<GridRegion> neighbours = m_scene.GridService.GetRegionRange(m_regionInfo.ScopeID, startX, endX, startY, endY); neighbours.RemoveAll(delegate(GridRegion r) { return r.RegionID == m_regionInfo.RegionID; }); return neighbours; } } /// <summary> /// This informs all neighboring regions about agent "avatar". /// Calls an asynchronous method to do so.. so it doesn't lag the sim. /// </summary> public void EnableNeighbourChildAgents(ScenePresence avatar, List<RegionInfo> lstneighbours) { List<GridRegion> neighbours = new List<GridRegion>(); if (m_regionInfo != null) { neighbours = RequestNeighbours(avatar.Scene,m_regionInfo.RegionLocX, m_regionInfo.RegionLocY); } else { m_log.Debug("[ENABLENEIGHBOURCHILDAGENTS]: m_regionInfo was null in EnableNeighbourChildAgents, is this a NPC?"); } /// We need to find the difference between the new regions where there are no child agents /// and the regions where there are already child agents. We only send notification to the former. List<ulong> neighbourHandles = NeighbourHandles(neighbours); // on this region neighbourHandles.Add(avatar.Scene.RegionInfo.RegionHandle); // add this region too List<ulong> previousRegionNeighbourHandles ; if (avatar.Scene.CapsModule != null) { previousRegionNeighbourHandles = new List<ulong>(avatar.Scene.CapsModule.GetChildrenSeeds(avatar.UUID).Keys); } else { previousRegionNeighbourHandles = new List<ulong>(); } List<ulong> newRegions = NewNeighbours(neighbourHandles, previousRegionNeighbourHandles); List<ulong> oldRegions = OldNeighbours(neighbourHandles, previousRegionNeighbourHandles); //Dump("Current Neighbors", neighbourHandles); //Dump("Previous Neighbours", previousRegionNeighbourHandles); //Dump("New Neighbours", newRegions); //Dump("Old Neighbours", oldRegions); /// Update the scene presence's known regions here on this region avatar.DropOldNeighbours(oldRegions); /// Collect as many seeds as possible Dictionary<ulong, string> seeds; if (avatar.Scene.CapsModule != null) seeds = new Dictionary<ulong, string>(avatar.Scene.CapsModule.GetChildrenSeeds(avatar.UUID)); else seeds = new Dictionary<ulong, string>(); //m_log.Debug(" !!! No. 
of seeds: " + seeds.Count); if (!seeds.ContainsKey(avatar.Scene.RegionInfo.RegionHandle)) seeds.Add(avatar.Scene.RegionInfo.RegionHandle, avatar.ControllingClient.RequestClientInfo().CapsPath); /// Create the necessary child agents List<AgentCircuitData> cagents = new List<AgentCircuitData>(); foreach (GridRegion neighbour in neighbours) { if (neighbour.RegionHandle != avatar.Scene.RegionInfo.RegionHandle) { AgentCircuitData agent = avatar.ControllingClient.RequestClientInfo(); agent.BaseFolder = UUID.Zero; agent.InventoryFolder = UUID.Zero; agent.startpos = new Vector3(128, 128, 70); agent.child = true; if (newRegions.Contains(neighbour.RegionHandle)) { agent.CapsPath = CapsUtil.GetRandomCapsObjectPath(); avatar.AddNeighbourRegion(neighbour.RegionHandle, agent.CapsPath); seeds.Add(neighbour.RegionHandle, agent.CapsPath); } else agent.CapsPath = avatar.Scene.CapsModule.GetChildSeed(avatar.UUID, neighbour.RegionHandle); cagents.Add(agent); } } /// Update all child agent with everyone's seeds foreach (AgentCircuitData a in cagents) { a.ChildrenCapSeeds = new Dictionary<ulong, string>(seeds); } if (avatar.Scene.CapsModule != null) { // These two are the same thing! avatar.Scene.CapsModule.SetChildrenSeed(avatar.UUID, seeds); } avatar.KnownRegions = seeds; //avatar.Scene.DumpChildrenSeeds(avatar.UUID); //avatar.DumpKnownRegions(); bool newAgent = false; int count = 0; foreach (GridRegion neighbour in neighbours) { // Don't do it if there's already an agent in that region if (newRegions.Contains(neighbour.RegionHandle)) newAgent = true; else newAgent = false; if (neighbour.RegionHandle != avatar.Scene.RegionInfo.RegionHandle) { InformClientOfNeighbourDelegate d = InformClientOfNeighbourAsync; try { d.BeginInvoke(avatar, cagents[count], neighbour, neighbour.ExternalEndPoint, newAgent, InformClientOfNeighbourCompleted, d); } catch (ArgumentOutOfRangeException) { m_log.ErrorFormat( "[REGIONINFO]: Neighbour Regions response included the current region in the neighbor list. The following region will not display to the client: {0} for region {1} ({2}, {3}).", neighbour.ExternalHostName, neighbour.RegionHandle, neighbour.RegionLocX, neighbour.RegionLocY); } catch (Exception e) { m_log.ErrorFormat( "[REGIONINFO]: Could not resolve external hostname {0} for region {1} ({2}, {3}). {4}", neighbour.ExternalHostName, neighbour.RegionHandle, neighbour.RegionLocX, neighbour.RegionLocY, e); // FIXME: Okay, even though we've failed, we're still going to throw the exception on, // since I don't know what will happen if we just let the client continue // XXX: Well, decided to swallow the exception instead for now. Let us see how that goes. // throw e; } } count++; } } /// <summary> /// This informs a single neighboring region about agent "avatar". /// Calls an asynchronous method to do so.. so it doesn't lag the sim. 
/// </summary> public void InformNeighborChildAgent(ScenePresence avatar, GridRegion region) { AgentCircuitData agent = avatar.ControllingClient.RequestClientInfo(); agent.BaseFolder = UUID.Zero; agent.InventoryFolder = UUID.Zero; agent.startpos = new Vector3(128, 128, 70); agent.child = true; InformClientOfNeighbourDelegate d = InformClientOfNeighbourAsync; d.BeginInvoke(avatar, agent, region, region.ExternalEndPoint, true, InformClientOfNeighbourCompleted, d); } #endregion public delegate void InformNeighbourThatRegionUpDelegate(INeighbourService nService, RegionInfo region, ulong regionhandle); private void InformNeighborsThatRegionisUpCompleted(IAsyncResult iar) { InformNeighbourThatRegionUpDelegate icon = (InformNeighbourThatRegionUpDelegate) iar.AsyncState; icon.EndInvoke(iar); } /// <summary> /// Asynchronous call to information neighbouring regions that this region is up /// </summary> /// <param name="region"></param> /// <param name="regionhandle"></param> private void InformNeighboursThatRegionIsUpAsync(INeighbourService neighbourService, RegionInfo region, ulong regionhandle) { uint x = 0, y = 0; Utils.LongToUInts(regionhandle, out x, out y); GridRegion neighbour = null; if (neighbourService != null) neighbour = neighbourService.HelloNeighbour(regionhandle, region); else m_log.DebugFormat("[SCS]: No neighbour service provided for informing neigbhours of this region"); if (neighbour != null) { m_log.DebugFormat("[INTERGRID]: Successfully informed neighbour {0}-{1} that I'm here", x / Constants.RegionSize, y / Constants.RegionSize); m_scene.EventManager.TriggerOnRegionUp(neighbour); } else { m_log.WarnFormat("[INTERGRID]: Failed to inform neighbour {0}-{1} that I'm here.", x / Constants.RegionSize, y / Constants.RegionSize); } } public void InformNeighborsThatRegionisUp(INeighbourService neighbourService, RegionInfo region) { //m_log.Info("[INTER]: " + debugRegionName + ": SceneCommunicationService: Sending InterRegion Notification that region is up " + region.RegionName); for (int x = (int)region.RegionLocX - 1; x <= region.RegionLocX + 1; x++) { for (int y = (int)region.RegionLocY - 1; y <= region.RegionLocY + 1; y++) { if (!((x == region.RegionLocX) && (y == region.RegionLocY))) // skip this region { ulong handle = Utils.UIntsToLong((uint)x * Constants.RegionSize, (uint)y * Constants.RegionSize); InformNeighbourThatRegionUpDelegate d = InformNeighboursThatRegionIsUpAsync; d.BeginInvoke(neighbourService, region, handle, InformNeighborsThatRegionisUpCompleted, d); } } } } public delegate void SendChildAgentDataUpdateDelegate(AgentPosition cAgentData, ulong regionHandle); /// <summary> /// This informs all neighboring regions about the settings of it's child agent. /// Calls an asynchronous method to do so.. so it doesn't lag the sim. /// /// This contains information, such as, Draw Distance, Camera location, Current Position, Current throttle settings, etc. 
/// /// </summary> private void SendChildAgentDataUpdateAsync(AgentPosition cAgentData, ulong regionHandle) { //m_log.Info("[INTERGRID]: Informing neighbors about my agent in " + m_regionInfo.RegionName); try { //m_commsProvider.InterRegion.ChildAgentUpdate(regionHandle, cAgentData); m_interregionCommsOut.SendChildAgentUpdate(regionHandle, cAgentData); } catch { // Ignore; we did our best } //if (regionAccepted) //{ // //m_log.Info("[INTERGRID]: Completed sending a neighbor an update about my agent"); //} //else //{ // //m_log.Info("[INTERGRID]: Failed sending a neighbor an update about my agent"); //} } private void SendChildAgentDataUpdateCompleted(IAsyncResult iar) { SendChildAgentDataUpdateDelegate icon = (SendChildAgentDataUpdateDelegate) iar.AsyncState; icon.EndInvoke(iar); } public void SendChildAgentDataUpdate(AgentPosition cAgentData, ScenePresence presence) { // This assumes that we know what our neighbors are. try { foreach (ulong regionHandle in presence.KnownChildRegionHandles) { if (regionHandle != m_regionInfo.RegionHandle) { SendChildAgentDataUpdateDelegate d = SendChildAgentDataUpdateAsync; d.BeginInvoke(cAgentData, regionHandle, SendChildAgentDataUpdateCompleted, d); } } } catch (InvalidOperationException) { // We're ignoring a collection was modified error because this data gets old and outdated fast. } } public delegate void SendCloseChildAgentDelegate(UUID agentID, ulong regionHandle); /// <summary> /// This Closes child agents on neighboring regions /// Calls an asynchronous method to do so.. so it doesn't lag the sim. /// </summary> protected void SendCloseChildAgentAsync(UUID agentID, ulong regionHandle) { m_log.Debug("[INTERGRID]: Sending close agent to " + regionHandle); // let's do our best, but there's not much we can do if the neighbour doesn't accept. //m_commsProvider.InterRegion.TellRegionToCloseChildConnection(regionHandle, agentID); m_interregionCommsOut.SendCloseAgent(regionHandle, agentID); } private void SendCloseChildAgentCompleted(IAsyncResult iar) { SendCloseChildAgentDelegate icon = (SendCloseChildAgentDelegate)iar.AsyncState; icon.EndInvoke(iar); } public void SendCloseChildAgentConnections(UUID agentID, List<ulong> regionslst) { foreach (ulong handle in regionslst) { SendCloseChildAgentDelegate d = SendCloseChildAgentAsync; d.BeginInvoke(agentID, handle, SendCloseChildAgentCompleted, d); } } /// <summary> /// Try to teleport an agent to a new region. /// </summary> /// <param name="remoteClient"></param> /// <param name="RegionHandle"></param> /// <param name="position"></param> /// <param name="lookAt"></param> /// <param name="flags"></param> public virtual void RequestTeleportToLocation(ScenePresence avatar, ulong regionHandle, Vector3 position, Vector3 lookAt, uint teleportFlags) { if (!avatar.Scene.Permissions.CanTeleport(avatar.UUID)) return; bool destRegionUp = true; IEventQueue eq = avatar.Scene.RequestModuleInterface<IEventQueue>(); // Reset animations; the viewer does that in teleports. avatar.Animator.ResetAnimations(); if (regionHandle == m_regionInfo.RegionHandle) { m_log.DebugFormat( "[SCENE COMMUNICATION SERVICE]: RequestTeleportToLocation {0} within {1}", position, m_regionInfo.RegionName); // Teleport within the same region if (IsOutsideRegion(avatar.Scene, position) || position.Z < 0) { Vector3 emergencyPos = new Vector3(128, 128, 128); m_log.WarnFormat( "[SCENE COMMUNICATION SERVICE]: RequestTeleportToLocation() was given an illegal position of {0} for avatar {1}, {2}. 
Substituting {3}", position, avatar.Name, avatar.UUID, emergencyPos); position = emergencyPos; } // TODO: Get proper AVG Height float localAVHeight = 1.56f; float posZLimit = 22; // TODO: Check other Scene HeightField if (position.X > 0 && position.X <= (int)Constants.RegionSize && position.Y > 0 && position.Y <=(int)Constants.RegionSize) { posZLimit = (float) avatar.Scene.Heightmap[(int) position.X, (int) position.Y]; } float newPosZ = posZLimit + localAVHeight; if (posZLimit >= (position.Z - (localAVHeight / 2)) && !(Single.IsInfinity(newPosZ) || Single.IsNaN(newPosZ))) { position.Z = newPosZ; } // Only send this if the event queue is null if (eq == null) avatar.ControllingClient.SendTeleportLocationStart(); avatar.ControllingClient.SendLocalTeleport(position, lookAt, teleportFlags); avatar.Teleport(position); } else { uint x = 0, y = 0; Utils.LongToUInts(regionHandle, out x, out y); GridRegion reg = m_scene.GridService.GetRegionByPosition(m_scene.RegionInfo.ScopeID, (int)x, (int)y); if (reg != null) { m_log.DebugFormat( "[SCENE COMMUNICATION SERVICE]: RequestTeleportToLocation to {0} in {1}", position, reg.RegionName); if (eq == null) avatar.ControllingClient.SendTeleportLocationStart(); // Let's do DNS resolution only once in this process, please! // This may be a costly operation. The reg.ExternalEndPoint field is not a passive field, // it's actually doing a lot of work. IPEndPoint endPoint = reg.ExternalEndPoint; if (endPoint.Address == null) { // Couldn't resolve the name. Can't TP, because the viewer wants IP addresses. destRegionUp = false; } if (destRegionUp) { uint newRegionX = (uint)(reg.RegionHandle >> 40); uint newRegionY = (((uint)(reg.RegionHandle)) >> 8); uint oldRegionX = (uint)(m_regionInfo.RegionHandle >> 40); uint oldRegionY = (((uint)(m_regionInfo.RegionHandle)) >> 8); // Fixing a bug where teleporting while sitting results in the avatar ending up removed from // both regions if (avatar.ParentID != (uint)0) avatar.StandUp(); if (!avatar.ValidateAttachments()) { avatar.ControllingClient.SendTeleportFailed("Inconsistent attachment state"); return; } // the avatar.Close below will clear the child region list. We need this below for (possibly) // closing the child agents, so save it here (we need a copy as it is Clear()-ed). //List<ulong> childRegions = new List<ulong>(avatar.GetKnownRegionList()); // Compared to ScenePresence.CrossToNewRegion(), there's no obvious code to handle a teleport // failure at this point (unlike a border crossing failure). So perhaps this can never fail // once we reach here... //avatar.Scene.RemoveCapsHandler(avatar.UUID); string capsPath = String.Empty; AgentCircuitData agentCircuit = avatar.ControllingClient.RequestClientInfo(); agentCircuit.BaseFolder = UUID.Zero; agentCircuit.InventoryFolder = UUID.Zero; agentCircuit.startpos = position; agentCircuit.child = true; if (Util.IsOutsideView(oldRegionX, newRegionX, oldRegionY, newRegionY)) { // brand new agent, let's create a new caps seed agentCircuit.CapsPath = CapsUtil.GetRandomCapsObjectPath(); } string reason = String.Empty; // Let's create an agent there if one doesn't exist yet. //if (!m_commsProvider.InterRegion.InformRegionOfChildAgent(reg.RegionHandle, agentCircuit)) if (!m_interregionCommsOut.SendCreateChildAgent(reg.RegionHandle, agentCircuit, teleportFlags, out reason)) { avatar.ControllingClient.SendTeleportFailed(String.Format("Destination is not accepting teleports: {0}", reason)); return; } // OK, it got this agent. 
Let's close some child agents avatar.CloseChildAgents(newRegionX, newRegionY); if (Util.IsOutsideView(oldRegionX, newRegionX, oldRegionY, newRegionY)) { #region IP Translation for NAT IClientIPEndpoint ipepClient; if (avatar.ClientView.TryGet(out ipepClient)) { capsPath = "http://" + NetworkUtil.GetHostFor(ipepClient.EndPoint, reg.ExternalHostName) + ":" + reg.HttpPort + CapsUtil.GetCapsSeedPath(agentCircuit.CapsPath); } else { capsPath = "http://" + reg.ExternalHostName + ":" + reg.HttpPort + CapsUtil.GetCapsSeedPath(agentCircuit.CapsPath); } #endregion if (eq != null) { #region IP Translation for NAT // Uses ipepClient above if (avatar.ClientView.TryGet(out ipepClient)) { endPoint.Address = NetworkUtil.GetIPFor(ipepClient.EndPoint, endPoint.Address); } #endregion eq.EnableSimulator(reg.RegionHandle, endPoint, avatar.UUID); // ES makes the client send a UseCircuitCode message to the destination, // which triggers a bunch of things there. // So let's wait Thread.Sleep(2000); eq.EstablishAgentCommunication(avatar.UUID, endPoint, capsPath); } else { avatar.ControllingClient.InformClientOfNeighbour(reg.RegionHandle, endPoint); } } else { agentCircuit.CapsPath = avatar.Scene.CapsModule.GetChildSeed(avatar.UUID, reg.RegionHandle); capsPath = "http://" + reg.ExternalHostName + ":" + reg.HttpPort + "/CAPS/" + agentCircuit.CapsPath + "0000/"; } // Expect avatar crossing is a heavy-duty function at the destination. // That is where MakeRoot is called, which fetches appearance and inventory. // Plus triggers OnMakeRoot, which spawns a series of asynchronous updates. //m_commsProvider.InterRegion.ExpectAvatarCrossing(reg.RegionHandle, avatar.ControllingClient.AgentId, // position, false); //{ // avatar.ControllingClient.SendTeleportFailed("Problem with destination."); // // We should close that agent we just created over at destination... // List<ulong> lst = new List<ulong>(); // lst.Add(reg.RegionHandle); // SendCloseChildAgentAsync(avatar.UUID, lst); // return; //} SetInTransit(avatar.UUID); // Let's send a full update of the agent. This is a synchronous call. AgentData agent = new AgentData(); avatar.CopyTo(agent); agent.Position = position; agent.CallbackURI = "http://" + m_regionInfo.ExternalHostName + ":" + m_regionInfo.HttpPort + "/agent/" + avatar.UUID.ToString() + "/" + avatar.Scene.RegionInfo.RegionHandle.ToString() + "/release/"; m_interregionCommsOut.SendChildAgentUpdate(reg.RegionHandle, agent); m_log.DebugFormat( "[CAPS]: Sending new CAPS seed url {0} to client {1}", capsPath, avatar.UUID); if (eq != null) { eq.TeleportFinishEvent(reg.RegionHandle, 13, endPoint, 0, teleportFlags, capsPath, avatar.UUID); } else { avatar.ControllingClient.SendRegionTeleport(reg.RegionHandle, 13, endPoint, 4, teleportFlags, capsPath); } // TeleportFinish makes the client send CompleteMovementIntoRegion (at the destination), which // trigers a whole shebang of things there, including MakeRoot. So let's wait for confirmation // that the client contacted the destination before we send the attachments and close things here. if (!WaitForCallback(avatar.UUID)) { // Client never contacted destination. Let's restore everything back avatar.ControllingClient.SendTeleportFailed("Problems connecting to destination."); ResetFromTransit(avatar.UUID); // Yikes! We should just have a ref to scene here. avatar.Scene.InformClientOfNeighbours(avatar); // Finally, kill the agent we just created at the destination. 
m_interregionCommsOut.SendCloseAgent(reg.RegionHandle, avatar.UUID); return; } // Can't go back from here if (KiPrimitive != null) { KiPrimitive(avatar.LocalId); } avatar.MakeChildAgent(); // CrossAttachmentsIntoNewRegion is a synchronous call. We shouldn't need to wait after it avatar.CrossAttachmentsIntoNewRegion(reg.RegionHandle, true); // Finally, let's close this previously-known-as-root agent, when the jump is outside the view zone if (Util.IsOutsideView(oldRegionX, newRegionX, oldRegionY, newRegionY)) { Thread.Sleep(5000); avatar.Close(); CloseConnection(avatar.UUID); } else // now we have a child agent in this region. avatar.Reset(); // if (teleport success) // seems to be always success here // the user may change their profile information in other region, // so the userinfo in UserProfileCache is not reliable any more, delete it if (avatar.Scene.NeedSceneCacheClear(avatar.UUID)) { m_commsProvider.UserProfileCacheService.RemoveUser(avatar.UUID); m_log.DebugFormat( "[SCENE COMMUNICATION SERVICE]: User {0} is going to another region, profile cache removed", avatar.UUID); } } else { avatar.ControllingClient.SendTeleportFailed("Remote Region appears to be down"); } } else { // TP to a place that doesn't exist (anymore) // Inform the viewer about that avatar.ControllingClient.SendTeleportFailed("The region you tried to teleport to doesn't exist anymore"); // and set the map-tile to '(Offline)' uint regX, regY; Utils.LongToUInts(regionHandle, out regX, out regY); MapBlockData block = new MapBlockData(); block.X = (ushort)(regX / Constants.RegionSize); block.Y = (ushort)(regY / Constants.RegionSize); block.Access = 254; // == not there List<MapBlockData> blocks = new List<MapBlockData>(); blocks.Add(block); avatar.ControllingClient.SendMapBlock(blocks, 0); } } } protected bool IsOutsideRegion(Scene s, Vector3 pos) { if (s.TestBorderCross(pos,Cardinals.N)) return true; if (s.TestBorderCross(pos, Cardinals.S)) return true; if (s.TestBorderCross(pos, Cardinals.E)) return true; if (s.TestBorderCross(pos, Cardinals.W)) return true; return false; } public bool WaitForCallback(UUID id) { int count = 200; while (m_agentsInTransit.Contains(id) && count-- > 0) { //m_log.Debug(" >>> Waiting... 
" + count); Thread.Sleep(100); } if (count > 0) return true; else return false; } public bool ReleaseAgent(UUID id) { //m_log.Debug(" >>> ReleaseAgent called <<< "); return ResetFromTransit(id); } public void SetInTransit(UUID id) { lock (m_agentsInTransit) { if (!m_agentsInTransit.Contains(id)) m_agentsInTransit.Add(id); } } protected bool ResetFromTransit(UUID id) { lock (m_agentsInTransit) { if (m_agentsInTransit.Contains(id)) { m_agentsInTransit.Remove(id); return true; } } return false; } private List<ulong> NeighbourHandles(List<GridRegion> neighbours) { List<ulong> handles = new List<ulong>(); foreach (GridRegion reg in neighbours) { handles.Add(reg.RegionHandle); } return handles; } private List<ulong> NewNeighbours(List<ulong> currentNeighbours, List<ulong> previousNeighbours) { return currentNeighbours.FindAll(delegate(ulong handle) { return !previousNeighbours.Contains(handle); }); } // private List<ulong> CommonNeighbours(List<ulong> currentNeighbours, List<ulong> previousNeighbours) // { // return currentNeighbours.FindAll(delegate(ulong handle) { return previousNeighbours.Contains(handle); }); // } private List<ulong> OldNeighbours(List<ulong> currentNeighbours, List<ulong> previousNeighbours) { return previousNeighbours.FindAll(delegate(ulong handle) { return !currentNeighbours.Contains(handle); }); } public void CrossAgentToNewRegion(Scene scene, ScenePresence agent, bool isFlying) { Vector3 pos = agent.AbsolutePosition; Vector3 newpos = new Vector3(pos.X, pos.Y, pos.Z); uint neighbourx = m_regionInfo.RegionLocX; uint neighboury = m_regionInfo.RegionLocY; const float boundaryDistance = 1.7f; Vector3 northCross = new Vector3(0,boundaryDistance, 0); Vector3 southCross = new Vector3(0, -1 * boundaryDistance, 0); Vector3 eastCross = new Vector3(boundaryDistance, 0, 0); Vector3 westCross = new Vector3(-1 * boundaryDistance, 0, 0); // distance to edge that will trigger crossing // distance into new region to place avatar const float enterDistance = 0.5f; if (scene.TestBorderCross(pos + westCross, Cardinals.W)) { if (scene.TestBorderCross(pos + northCross, Cardinals.N)) { Border b = scene.GetCrossedBorder(pos + northCross, Cardinals.N); neighboury += (uint)(int)(b.BorderLine.Z / (int)Constants.RegionSize); } else if (scene.TestBorderCross(pos + southCross, Cardinals.S)) { Border b = scene.GetCrossedBorder(pos + southCross, Cardinals.S); if (b.TriggerRegionX == 0 && b.TriggerRegionY == 0) { neighboury--; newpos.Y = Constants.RegionSize - enterDistance; } else { neighboury = b.TriggerRegionY; neighbourx = b.TriggerRegionX; Vector3 newposition = pos; newposition.X += (scene.RegionInfo.RegionLocX - neighbourx) * Constants.RegionSize; newposition.Y += (scene.RegionInfo.RegionLocY - neighboury) * Constants.RegionSize; agent.ControllingClient.SendAgentAlertMessage( String.Format("Moving you to region {0},{1}", neighbourx, neighboury), false); InformClientToInitateTeleportToLocation(agent, neighbourx, neighboury, newposition, scene); return; } } Border ba = scene.GetCrossedBorder(pos + westCross, Cardinals.W); if (ba.TriggerRegionX == 0 && ba.TriggerRegionY == 0) { neighbourx--; newpos.X = Constants.RegionSize - enterDistance; } else { neighboury = ba.TriggerRegionY; neighbourx = ba.TriggerRegionX; Vector3 newposition = pos; newposition.X += (scene.RegionInfo.RegionLocX - neighbourx) * Constants.RegionSize; newposition.Y += (scene.RegionInfo.RegionLocY - neighboury) * Constants.RegionSize; agent.ControllingClient.SendAgentAlertMessage( String.Format("Moving you to region {0},{1}", 
neighbourx, neighboury), false); InformClientToInitateTeleportToLocation(agent, neighbourx, neighboury, newposition, scene); return; } } else if (scene.TestBorderCross(pos + eastCross, Cardinals.E)) { Border b = scene.GetCrossedBorder(pos + eastCross, Cardinals.E); neighbourx += (uint)(int)(b.BorderLine.Z / (int)Constants.RegionSize); newpos.X = enterDistance; if (scene.TestBorderCross(pos + southCross, Cardinals.S)) { Border ba = scene.GetCrossedBorder(pos + southCross, Cardinals.S); if (ba.TriggerRegionX == 0 && ba.TriggerRegionY == 0) { neighboury--; newpos.Y = Constants.RegionSize - enterDistance; } else { neighboury = ba.TriggerRegionY; neighbourx = ba.TriggerRegionX; Vector3 newposition = pos; newposition.X += (scene.RegionInfo.RegionLocX - neighbourx) * Constants.RegionSize; newposition.Y += (scene.RegionInfo.RegionLocY - neighboury) * Constants.RegionSize; agent.ControllingClient.SendAgentAlertMessage( String.Format("Moving you to region {0},{1}", neighbourx, neighboury), false); InformClientToInitateTeleportToLocation(agent, neighbourx, neighboury, newposition, scene); return; } } else if (scene.TestBorderCross(pos + northCross, Cardinals.N)) { Border c = scene.GetCrossedBorder(pos + northCross, Cardinals.N); neighboury += (uint)(int)(c.BorderLine.Z / (int)Constants.RegionSize); newpos.Y = enterDistance; } } else if (scene.TestBorderCross(pos + southCross, Cardinals.S)) { Border b = scene.GetCrossedBorder(pos + southCross, Cardinals.S); if (b.TriggerRegionX == 0 && b.TriggerRegionY == 0) { neighboury--; newpos.Y = Constants.RegionSize - enterDistance; } else { neighboury = b.TriggerRegionY; neighbourx = b.TriggerRegionX; Vector3 newposition = pos; newposition.X += (scene.RegionInfo.RegionLocX - neighbourx) * Constants.RegionSize; newposition.Y += (scene.RegionInfo.RegionLocY - neighboury) * Constants.RegionSize; agent.ControllingClient.SendAgentAlertMessage( String.Format("Moving you to region {0},{1}", neighbourx, neighboury), false); InformClientToInitateTeleportToLocation(agent, neighbourx, neighboury, newposition, scene); return; } } else if (scene.TestBorderCross(pos + northCross, Cardinals.N)) { Border b = scene.GetCrossedBorder(pos + northCross, Cardinals.N); neighboury += (uint)(int)(b.BorderLine.Z / (int)Constants.RegionSize); newpos.Y = enterDistance; } /* if (pos.X < boundaryDistance) //West { neighbourx--; newpos.X = Constants.RegionSize - enterDistance; } else if (pos.X > Constants.RegionSize - boundaryDistance) // East { neighbourx++; newpos.X = enterDistance; } if (pos.Y < boundaryDistance) // South { neighboury--; newpos.Y = Constants.RegionSize - enterDistance; } else if (pos.Y > Constants.RegionSize - boundaryDistance) // North { neighboury++; newpos.Y = enterDistance; } */ CrossAgentToNewRegionDelegate d = CrossAgentToNewRegionAsync; d.BeginInvoke(agent, newpos, neighbourx, neighboury, isFlying, CrossAgentToNewRegionCompleted, d); } public delegate void InformClientToInitateTeleportToLocationDelegate(ScenePresence agent, uint regionX, uint regionY, Vector3 position, Scene initiatingScene); public void InformClientToInitateTeleportToLocation(ScenePresence agent, uint regionX, uint regionY, Vector3 position, Scene initiatingScene) { // This assumes that we know what our neighbors are. 
InformClientToInitateTeleportToLocationDelegate d = InformClientToInitiateTeleportToLocationAsync; d.BeginInvoke(agent,regionX,regionY,position,initiatingScene, InformClientToInitiateTeleportToLocationCompleted, d); } public void InformClientToInitiateTeleportToLocationAsync(ScenePresence agent, uint regionX, uint regionY, Vector3 position, Scene initiatingScene) { Thread.Sleep(10000); IMessageTransferModule im = initiatingScene.RequestModuleInterface<IMessageTransferModule>(); if (im != null) { UUID gotoLocation = Util.BuildFakeParcelID( Util.UIntsToLong( (regionX * (uint)Constants.RegionSize), (regionY * (uint)Constants.RegionSize)), (uint)(int)position.X, (uint)(int)position.Y, (uint)(int)position.Z); GridInstantMessage m = new GridInstantMessage(initiatingScene, UUID.Zero, "Region", agent.UUID, (byte)InstantMessageDialog.GodLikeRequestTeleport, false, "", gotoLocation, false, new Vector3(127, 0, 0), new Byte[0]); im.SendInstantMessage(m, delegate(bool success) { m_log.DebugFormat("[CLIENT]: Client Initiating Teleport sending IM success = {0}", success); }); } } private void InformClientToInitiateTeleportToLocationCompleted(IAsyncResult iar) { InformClientToInitateTeleportToLocationDelegate icon = (InformClientToInitateTeleportToLocationDelegate) iar.AsyncState; icon.EndInvoke(iar); } public delegate ScenePresence CrossAgentToNewRegionDelegate(ScenePresence agent, Vector3 pos, uint neighbourx, uint neighboury, bool isFlying); /// <summary> /// This Closes child agents on neighboring regions /// Calls an asynchronous method to do so.. so it doesn't lag the sim. /// </summary> protected ScenePresence CrossAgentToNewRegionAsync(ScenePresence agent, Vector3 pos, uint neighbourx, uint neighboury, bool isFlying) { m_log.DebugFormat("[SCENE COMM]: Crossing agent {0} {1} to {2}-{3}", agent.Firstname, agent.Lastname, neighbourx, neighboury); ulong neighbourHandle = Utils.UIntsToLong((uint)(neighbourx * Constants.RegionSize), (uint)(neighboury * Constants.RegionSize)); int x = (int)(neighbourx * Constants.RegionSize), y = (int)(neighboury * Constants.RegionSize); GridRegion neighbourRegion = m_scene.GridService.GetRegionByPosition(m_scene.RegionInfo.ScopeID, (int)x, (int)y); if (neighbourRegion != null && agent.ValidateAttachments()) { pos = pos + (agent.Velocity); //CachedUserInfo userInfo = m_commsProvider.UserProfileCacheService.GetUserDetails(agent.UUID); //if (userInfo != null) //{ // userInfo.DropInventory(); //} //else //{ // m_log.WarnFormat("[SCENE COMM]: No cached user info found for {0} {1} on leaving region {2}", // agent.Name, agent.UUID, agent.Scene.RegionInfo.RegionName); //} //bool crossingSuccessful = // CrossToNeighbouringRegion(neighbourHandle, agent.ControllingClient.AgentId, pos, //isFlying); SetInTransit(agent.UUID); AgentData cAgent = new AgentData(); agent.CopyTo(cAgent); cAgent.Position = pos; if (isFlying) cAgent.ControlFlags |= (uint)AgentManager.ControlFlags.AGENT_CONTROL_FLY; cAgent.CallbackURI = "http://" + m_regionInfo.ExternalHostName + ":" + m_regionInfo.HttpPort + "/agent/" + agent.UUID.ToString() + "/" + agent.Scene.RegionInfo.RegionHandle.ToString() + "/release/"; m_interregionCommsOut.SendChildAgentUpdate(neighbourHandle, cAgent); // Next, let's close the child agent connections that are too far away. 
agent.CloseChildAgents(neighbourx, neighboury); //AgentCircuitData circuitdata = m_controllingClient.RequestClientInfo(); agent.ControllingClient.RequestClientInfo(); //m_log.Debug("BEFORE CROSS"); //Scene.DumpChildrenSeeds(UUID); //DumpKnownRegions(); string agentcaps; if (!agent.KnownRegions.TryGetValue(neighbourRegion.RegionHandle, out agentcaps)) { m_log.ErrorFormat("[SCENE COMM]: No CAPS information for region handle {0}, exiting CrossToNewRegion.", neighbourRegion.RegionHandle); return agent; } // TODO Should construct this behind a method string capsPath = "http://" + neighbourRegion.ExternalHostName + ":" + neighbourRegion.HttpPort + "/CAPS/" + agentcaps /*circuitdata.CapsPath*/ + "0000/"; m_log.DebugFormat("[CAPS]: Sending new CAPS seed url {0} to client {1}", capsPath, agent.UUID); IEventQueue eq = agent.Scene.RequestModuleInterface<IEventQueue>(); if (eq != null) { eq.CrossRegion(neighbourHandle, pos, agent.Velocity, neighbourRegion.ExternalEndPoint, capsPath, agent.UUID, agent.ControllingClient.SessionId); } else { agent.ControllingClient.CrossRegion(neighbourHandle, pos, agent.Velocity, neighbourRegion.ExternalEndPoint, capsPath); } if (!WaitForCallback(agent.UUID)) { ResetFromTransit(agent.UUID); // Yikes! We should just have a ref to scene here. agent.Scene.InformClientOfNeighbours(agent); return agent; } agent.MakeChildAgent(); // now we have a child agent in this region. Request all interesting data about other (root) agents agent.SendInitialFullUpdateToAllClients(); // Console.WriteLine("SCS 1"); agent.CrossAttachmentsIntoNewRegion(neighbourHandle, true); // Console.WriteLine("SCS 2"); // m_scene.SendKillObject(m_localId); agent.Scene.NotifyMyCoarseLocationChange(); // the user may change their profile information in other region, // so the userinfo in UserProfileCache is not reliable any more, delete it if (agent.Scene.NeedSceneCacheClear(agent.UUID)) { agent.Scene.CommsManager.UserProfileCacheService.RemoveUser(agent.UUID); m_log.DebugFormat( "[SCENE COMM]: User {0} is going to another region, profile cache removed", agent.UUID); } } //m_log.Debug("AFTER CROSS"); //Scene.DumpChildrenSeeds(UUID); //DumpKnownRegions(); return agent; } private void CrossAgentToNewRegionCompleted(IAsyncResult iar) { CrossAgentToNewRegionDelegate icon = (CrossAgentToNewRegionDelegate)iar.AsyncState; ScenePresence agent = icon.EndInvoke(iar); // If the cross was successful, this agent is a child agent if (agent.IsChildAgent) { agent.Reset(); } else // Not successful { //CachedUserInfo userInfo = m_commsProvider.UserProfileCacheService.GetUserDetails(agent.UUID); //if (userInfo != null) //{ // userInfo.FetchInventory(); //} agent.RestoreInCurrentScene(); } // In any case agent.NotInTransit(); //m_log.DebugFormat("[SCENE COMM]: Crossing agent {0} {1} completed.", agent.Firstname, agent.Lastname); } public void LogOffUser(UUID userid, UUID regionid, ulong regionhandle, Vector3 position, Vector3 lookat) { m_commsProvider.LogOffUser(userid, regionid, regionhandle, position, lookat); } // deprecated as of 2008-08-27 public void LogOffUser(UUID userid, UUID regionid, ulong regionhandle, float posx, float posy, float posz) { m_commsProvider.LogOffUser(userid, regionid, regionhandle, posx, posy, posz); } public void ClearUserAgent(UUID avatarID) { m_commsProvider.UserService.ClearUserAgent(avatarID); } public void AddNewUserFriend(UUID friendlistowner, UUID friend, uint perms) { m_commsProvider.AddNewUserFriend(friendlistowner, friend, perms); } public void UpdateUserFriendPerms(UUID friendlistowner, 
UUID friend, uint perms) { m_commsProvider.UpdateUserFriendPerms(friendlistowner, friend, perms); } public void RemoveUserFriend(UUID friendlistowner, UUID friend) { m_commsProvider.RemoveUserFriend(friendlistowner, friend); } public List<FriendListItem> GetUserFriendList(UUID friendlistowner) { return m_commsProvider.GetUserFriendList(friendlistowner); } public List<AvatarPickerAvatar> GenerateAgentPickerRequestResponse(UUID queryID, string query) { return m_commsProvider.GenerateAgentPickerRequestResponse(queryID, query); } public List<GridRegion> RequestNamedRegions(string name, int maxNumber) { return m_scene.GridService.GetRegionsByName(UUID.Zero, name, maxNumber); } //private void Dump(string msg, List<ulong> handles) //{ // m_log.InfoFormat("-------------- HANDLE DUMP ({0}) ---------", msg); // foreach (ulong handle in handles) // { // uint x, y; // Utils.LongToUInts(handle, out x, out y); // x = x / Constants.RegionSize; // y = y / Constants.RegionSize; // m_log.InfoFormat("({0}, {1})", x, y); // } //} } }
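// --- Illustrative sketch, not part of the original sources ---
// The SceneCommunicationService code above repeatedly converts between a packed
// 64-bit region handle and grid coordinates (Utils.UIntsToLong / Utils.LongToUInts,
// followed by multiplication or division by Constants.RegionSize). The standalone
// helper below only makes that packing explicit. It assumes a 256 m region size
// (the value of Constants.RegionSize in this code base); the type and method names
// are hypothetical and are not an OpenSim API.
internal static class RegionHandleSketch
{
    private const uint RegionSize = 256;

    // Pack grid coordinates into a handle: the high 32 bits hold world X, the low
    // 32 bits world Y, where world X/Y are the grid location times the region size.
    public static ulong FromGridLocation(uint regionLocX, uint regionLocY)
    {
        return ((ulong)(regionLocX * RegionSize) << 32) | (ulong)(regionLocY * RegionSize);
    }

    // Unpack a handle back into grid coordinates, mirroring the
    // "Utils.LongToUInts(handle, out x, out y); x /= Constants.RegionSize;" pattern above.
    public static void ToGridLocation(ulong handle, out uint regionLocX, out uint regionLocY)
    {
        regionLocX = (uint)(handle >> 32) / RegionSize;
        regionLocY = (uint)(handle & 0xFFFFFFFFUL) / RegionSize;
    }
}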
using System; using UnityEngine; namespace UnityStandardAssets.Vehicles.Car { internal enum CarDriveType { FrontWheelDrive, RearWheelDrive, FourWheelDrive } internal enum SpeedType { MPH, KPH } public class CarController : MonoBehaviour { [SerializeField] private CarDriveType m_CarDriveType = CarDriveType.FourWheelDrive; [SerializeField] private WheelCollider[] m_WheelColliders = new WheelCollider[4]; [SerializeField] private GameObject[] m_WheelMeshes = new GameObject[4]; [SerializeField] private Vector3 m_CentreOfMassOffset; [SerializeField] private float m_MaximumSteerAngle; [Range(0, 1)] [SerializeField] private float m_SteerHelper; // 0 is raw physics , 1 the car will grip in the direction it is facing [Range(0, 1)] [SerializeField] private float m_TractionControl; // 0 is no traction control, 1 is full interference [SerializeField] private float m_FullTorqueOverAllWheels; [SerializeField] private float m_ReverseTorque; [SerializeField] private float m_MaxHandbrakeTorque; [SerializeField] private float m_Downforce = 100f; [SerializeField] private SpeedType m_SpeedType; [SerializeField] private float m_Topspeed = 200; [SerializeField] private static int NoOfGears = 5; [SerializeField] private float m_RevRangeBoundary = 1f; [SerializeField] private float m_SlipLimit; [SerializeField] private float m_BrakeTorque; private Quaternion[] m_WheelMeshLocalRotations; private Vector3 m_Prevpos, m_Pos; private float m_SteerAngle; private int m_GearNum; private float m_GearFactor; private float m_OldRotation; private float m_CurrentTorque; private Rigidbody m_Rigidbody; private const float k_ReversingThreshold = 0.01f; public bool Skidding { get; private set; } public float BrakeInput { get; private set; } public float CurrentSteerAngle{ get { return m_SteerAngle; }} public float CurrentSpeed{ get { return m_Rigidbody.velocity.magnitude*2.23693629f; }} public float MaxSpeed{get { return m_Topspeed; }} public float Revs { get; private set; } public float AccelInput { get; private set; } // Use this for initialization private void Start() { m_WheelMeshLocalRotations = new Quaternion[4]; for (int i = 0; i < 4; i++) { m_WheelMeshLocalRotations[i] = m_WheelMeshes[i].transform.localRotation; } m_WheelColliders[0].attachedRigidbody.centerOfMass = m_CentreOfMassOffset; m_MaxHandbrakeTorque = float.MaxValue; m_Rigidbody = GetComponent<Rigidbody>(); m_CurrentTorque = m_FullTorqueOverAllWheels - (m_TractionControl*m_FullTorqueOverAllWheels); } private void GearChanging() { float f = Mathf.Abs(CurrentSpeed/MaxSpeed); float upgearlimit = (1/(float) NoOfGears)*(m_GearNum + 1); float downgearlimit = (1/(float) NoOfGears)*m_GearNum; if (m_GearNum > 0 && f < downgearlimit) { m_GearNum--; } if (f > upgearlimit && (m_GearNum < (NoOfGears - 1))) { m_GearNum++; } } // simple function to add a curved bias towards 1 for a value in the 0-1 range private static float CurveFactor(float factor) { return 1 - (1 - factor)*(1 - factor); } // unclamped version of Lerp, to allow value to exceed the from-to range private static float ULerp(float from, float to, float value) { return (1.0f - value)*from + value*to; } private void CalculateGearFactor() { float f = (1/(float) NoOfGears); // gear factor is a normalised representation of the current speed within the current gear's range of speeds. // We smooth towards the 'target' gear factor, so that revs don't instantly snap up or down when changing gear. 
var targetGearFactor = Mathf.InverseLerp(f*m_GearNum, f*(m_GearNum + 1), Mathf.Abs(CurrentSpeed/MaxSpeed)); m_GearFactor = Mathf.Lerp(m_GearFactor, targetGearFactor, Time.deltaTime*5f); } private void CalculateRevs() { // calculate engine revs (for display / sound) // (this is done in retrospect - revs are not used in force/power calculations) CalculateGearFactor(); var gearNumFactor = m_GearNum/(float) NoOfGears; var revsRangeMin = ULerp(0f, m_RevRangeBoundary, CurveFactor(gearNumFactor)); var revsRangeMax = ULerp(m_RevRangeBoundary, 1f, gearNumFactor); Revs = ULerp(revsRangeMin, revsRangeMax, m_GearFactor); } public void Move(float steering, float accel, float footbrake, float handbrake) { for (int i = 0; i < 4; i++) { Quaternion quat; Vector3 position; m_WheelColliders[i].GetWorldPose(out position, out quat); m_WheelMeshes[i].transform.position = position; m_WheelMeshes[i].transform.rotation = quat; } //clamp input values steering = Mathf.Clamp(steering, -1, 1); AccelInput = accel = Mathf.Clamp(accel, 0, 1); BrakeInput = footbrake = -1*Mathf.Clamp(footbrake, -1, 0); handbrake = Mathf.Clamp(handbrake, 0, 1); //Set the steer on the front wheels. //Assuming that wheels 0 and 1 are the front wheels. m_SteerAngle = steering*m_MaximumSteerAngle; m_WheelColliders[0].steerAngle = m_SteerAngle; m_WheelColliders[1].steerAngle = m_SteerAngle; SteerHelper(); ApplyDrive(accel, footbrake); CapSpeed(); //Set the handbrake. //Assuming that wheels 2 and 3 are the rear wheels. if (handbrake > 0f) { var hbTorque = handbrake*m_MaxHandbrakeTorque; m_WheelColliders[2].brakeTorque = hbTorque; m_WheelColliders[3].brakeTorque = hbTorque; } CalculateRevs(); GearChanging(); AddDownForce(); TractionControl(); } private void CapSpeed() { float speed = m_Rigidbody.velocity.magnitude; switch (m_SpeedType) { case SpeedType.MPH: speed *= 2.23693629f; if (speed > m_Topspeed) m_Rigidbody.velocity = (m_Topspeed/2.23693629f) * m_Rigidbody.velocity.normalized; break; case SpeedType.KPH: speed *= 3.6f; if (speed > m_Topspeed) m_Rigidbody.velocity = (m_Topspeed/3.6f) * m_Rigidbody.velocity.normalized; break; } } private void ApplyDrive(float accel, float footbrake) { float thrustTorque; switch (m_CarDriveType) { case CarDriveType.FourWheelDrive: thrustTorque = accel * (m_CurrentTorque / 4f); for (int i = 0; i < 4; i++) { m_WheelColliders[i].motorTorque = thrustTorque; } break; case CarDriveType.FrontWheelDrive: thrustTorque = accel * (m_CurrentTorque / 2f); m_WheelColliders[0].motorTorque = m_WheelColliders[1].motorTorque = thrustTorque; break; case CarDriveType.RearWheelDrive: thrustTorque = accel * (m_CurrentTorque / 2f); m_WheelColliders[2].motorTorque = m_WheelColliders[3].motorTorque = thrustTorque; break; } for (int i = 0; i < 4; i++) { if (CurrentSpeed > 5 && Vector3.Angle(transform.forward, m_Rigidbody.velocity) < 50f) { m_WheelColliders[i].brakeTorque = m_BrakeTorque*footbrake; } else if (footbrake > 0) { m_WheelColliders[i].brakeTorque = 0f; m_WheelColliders[i].motorTorque = -m_ReverseTorque*footbrake; } } } private void SteerHelper() { for (int i = 0; i < 4; i++) { WheelHit wheelhit; m_WheelColliders[i].GetGroundHit(out wheelhit); if (wheelhit.normal == Vector3.zero) return; // wheels arent on the ground so dont realign the rigidbody velocity } // this if is needed to avoid gimbal lock problems that will make the car suddenly shift direction if (Mathf.Abs(m_OldRotation - transform.eulerAngles.y) < 10f) { var turnadjust = (transform.eulerAngles.y - m_OldRotation) * m_SteerHelper; Quaternion velRotation = 
Quaternion.AngleAxis(turnadjust, Vector3.up); m_Rigidbody.velocity = velRotation * m_Rigidbody.velocity; } m_OldRotation = transform.eulerAngles.y; } // this is used to add more grip in relation to speed private void AddDownForce() { m_WheelColliders[0].attachedRigidbody.AddForce(-transform.up*m_Downforce* m_WheelColliders[0].attachedRigidbody.velocity.magnitude); } // crude traction control that reduces the power to wheel if the car is wheel spinning too much private void TractionControl() { WheelHit wheelHit; switch (m_CarDriveType) { case CarDriveType.FourWheelDrive: // loop through all wheels for (int i = 0; i < 4; i++) { m_WheelColliders[i].GetGroundHit(out wheelHit); AdjustTorque(wheelHit.forwardSlip); } break; case CarDriveType.RearWheelDrive: m_WheelColliders[2].GetGroundHit(out wheelHit); AdjustTorque(wheelHit.forwardSlip); m_WheelColliders[3].GetGroundHit(out wheelHit); AdjustTorque(wheelHit.forwardSlip); break; case CarDriveType.FrontWheelDrive: m_WheelColliders[0].GetGroundHit(out wheelHit); AdjustTorque(wheelHit.forwardSlip); m_WheelColliders[1].GetGroundHit(out wheelHit); AdjustTorque(wheelHit.forwardSlip); break; } } private void AdjustTorque(float forwardSlip) { if (forwardSlip >= m_SlipLimit && m_CurrentTorque >= 0) { m_CurrentTorque -= 10 * m_TractionControl; } else { m_CurrentTorque += 10 * m_TractionControl; if (m_CurrentTorque > m_FullTorqueOverAllWheels) { m_CurrentTorque = m_FullTorqueOverAllWheels; } } } } }
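// --- Illustrative sketch, not part of the original sources ---
// A minimal driver script showing how CarController.Move() is typically fed each
// physics step. The axis names ("Horizontal", "Vertical", "Jump") are assumptions
// based on Unity's default Input Manager settings; any input source works, since
// Move() itself clamps steering to [-1,1], accel to [0,1], footbrake to [-1,0] and
// handbrake to [0,1]. Passing the vertical axis for both accel and footbrake relies
// on that clamping: positive values accelerate, negative values brake or reverse.
using UnityEngine;

namespace UnityStandardAssets.Vehicles.Car
{
    [RequireComponent(typeof(CarController))]
    public class SimpleCarUserControl : MonoBehaviour
    {
        private CarController m_Car; // the controller defined above

        private void Awake()
        {
            m_Car = GetComponent<CarController>();
        }

        private void FixedUpdate()
        {
            float h = Input.GetAxis("Horizontal"); // steering
            float v = Input.GetAxis("Vertical");   // throttle / brake
            float handbrake = Input.GetAxis("Jump");
            m_Car.Move(h, v, v, handbrake);
        }
    }
}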
using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Speech.Recognition; using System.Diagnostics; using System.Speech.Recognition.SrgsGrammar; using System.IO; using System.Xml; using eDocumentReader.Hubs.activities.system.lightweight; using eDocumentReader.Hubs.structure; namespace eDocumentReader.Hubs { /* * Generate text for the given page. Produce highlighting for the recognized text. */ public class TextProcessor { private System.Object lockThis = new System.Object(); private readonly string GRAMMAR_TMP_DIR = "tmp"; //a directory store the temporary grammar files private readonly float PLAY_ANIMATION_CONFIDENT_THRESHOLD = 0.2f; //play the animation only when the intermidiate confidence is greater than this private readonly float HYP_THRESHOLD = 0.2f; //display the intermidiate result only when the conficence is greater than this private readonly double maxLag = 750; //milliseconds maximum possible lag time in a speech private readonly bool enableCache = true; //cache page's text and annotation in memory, fast loading time for the same page private readonly int INITIAL_LOOK_AHEAD = 4; //the minimal syllables to look ahead //indicate how many words a speech can skip, and the prototype considers it is a continuation //for example, consider this sentence "I am working on the protoype, and I am writting some comments." //if a reader say "I am working on the", and pause for a second, and say "and I am writing some comments" //The prototype will treat it as the reader say "I am working on the prototype, and I am writting some comments." //value in the SPEECH_JUMP_GAP_THRESHOLD is 2 in above example private readonly int SPEECH_JUMP_GAP_THRESHOLD = 2; private Dictionary<int,string[]> pageTextCached = new Dictionary<int,string[]>(); private Dictionary<int, string[]> pageTextHTMLCached = new Dictionary<int, string[]>(); private Dictionary<int, string[]> annotationArrayCached = new Dictionary<int, string[]>(); private Dictionary<int, int[]> syllableArrayCached = new Dictionary<int, int[]>(); private List<int> defaultStartIndexes = new List<int>(); private Page currentPage; private string[] allSpeechText; private string[] allSpeechTextHTML; private string[] annotationArray; private int[] syllableArray; private int confirmedStartIndex; private int confirmedEndIndex; private int confirmedLastSegPosition; private int hypothesisEndIndex; private int startIndex; private int endIndex; private int lastSegPosition; //for test only //private string intermediateSpeechText; //private string allConfirmedSpeechText; private bool guessAhead = true; private int numOfHypothsis; private string storyDirectory; private int lastConfirmIndexRM = -1;//to keep track the position where the last confirmed speech in record mode private int maxSyllableAhead; private Mode mode; private List<string> playedAnimationList = new List<string>();//keep track which animation already played in the current speech private int speechState; //value=1 means in speech, value=0 means silent private double timePerSyllable = 150; //continuously estimate the time to spoke one syllable, in ms. default value = 250ms public TextProcessor() { } /* * set current story's direction */ public void SetStoryDirectory(string storyDirectory){ this.storyDirectory = storyDirectory; } /* * Withdraw the last uncomfirmed hightlighting for the speech. * This method is called when reader click on reject button to remove the previous recorded speed * in record my voice mode. 
*/ public void rollBackHighlight() { confirmedLastSegPosition = lastConfirmIndexRM+1; lastSegPosition = confirmedLastSegPosition; if (lastConfirmIndexRM >= 0) { endIndex = lastConfirmIndexRM; } else { endIndex = 0; } confirmedEndIndex = endIndex; if (lastConfirmIndexRM < 0) { string pageTextStr = constructPageText(); ActivityExecutor.add(new InternalUpdatePageTextActivity(pageTextStr, currentPage.GetPageNumber())); } else { constructTextAndDisplay(); } string cfgPath = storyDirectory + "\\" + GRAMMAR_TMP_DIR + "\\" + currentPage.GetPageNumber() + "_" + (endIndex + 1) + ".cfg"; if (!defaultStartIndexes.Contains(endIndex + 1)) { Grammar g = new Grammar(cfgPath); g.Weight = EBookConstant.NEXT_WORD_WEIGHT; g.Priority = EBookConstant.NEXT_WORD_PRIORITY; ActivityExecutor.add(new InternalReloadOnGoingGrammarActivity(g)); Debug.WriteLine("Load onGoing grammar " + cfgPath); } else { ActivityExecutor.add(new InternalChangeGrammarPriorityActivity(endIndex+1)); } } /* * Save the unconfirm speech. * This method is alled when user click on the accept button in record my voice mode */ public void confirmHighlight() { lastConfirmIndexRM = confirmedEndIndex; if (confirmedEndIndex == allSpeechText.Length - 1) { ActivityExecutor.add(new InternalFinishPageActivity()); } } /* * clean cached data */ private void resetParameters() { defaultStartIndexes.Clear(); confirmedStartIndex = 0; confirmedEndIndex = 0; confirmedLastSegPosition = 0; startIndex = -1; endIndex = -1; lastSegPosition = -1; numOfHypothsis = 0; lastConfirmIndexRM = -1; playedAnimationList.Clear(); } /* * If Mode == REAL TIME, enable jump to different sentence. otherwise, disable jump to different sentence */ public void process(Page page, Mode mode) { this.mode = mode; currentPage = page; //reset parameters when process a new page resetParameters(); //retrieve data from cache if (enableCache && pageTextCached.ContainsKey(page.GetPageNumber())) { pageTextCached.TryGetValue(page.GetPageNumber(), out allSpeechText); pageTextHTMLCached.TryGetValue(page.GetPageNumber(), out allSpeechTextHTML); annotationArrayCached.TryGetValue(page.GetPageNumber(), out annotationArray); syllableArrayCached.TryGetValue(page.GetPageNumber(), out syllableArray); } else { List<string[]> pageText = page.GetListOfTextArray(); List<string> annotation = page.GetListOfAnnotations(); List<string> annArray = new List<string>(); //cover list to array string whole = ""; string wholeHTML = ""; //obtain the text and the text in HTML format for the current page for(int i=0;i<pageText.Count;i++) { if (pageText.ElementAt(i) == null) { wholeHTML = wholeHTML.TrimEnd() + "<br> "; } else { foreach (string str in pageText.ElementAt(i)) { whole += str + " "; if (str.Trim().Length == 0) { wholeHTML = wholeHTML.TrimEnd() + " "; } else { wholeHTML += str + " "; } annArray.Add(annotation.ElementAt(i)); } //wholeHTML = wholeHTML.TrimEnd() + "<br> "; } } whole = whole.Replace("\"", ""); allSpeechText = whole.Split(default(string[]), StringSplitOptions.RemoveEmptyEntries); allSpeechTextHTML = wholeHTML.Split(default(string[]), StringSplitOptions.RemoveEmptyEntries); annotationArray = annArray.ToArray(); syllableArray = EBookUtil.CountSyllables(allSpeechText); //save the data to hash map, we can simply retrieve the page data when //the page get revisit again if (enableCache) { pageTextCached.Add(page.GetPageNumber(), allSpeechText); pageTextHTMLCached.Add(page.GetPageNumber(), allSpeechTextHTML); annotationArrayCached.Add(page.GetPageNumber(), annotationArray); 
syllableArrayCached.Add(page.GetPageNumber(), syllableArray); } } if (mode != Mode.REPLAY) { defaultStartIndexes.Clear(); List<SrgsDocument> srgsDocs = EBookUtil.GenerateGrammars(allSpeechText, annotationArray); Debug.WriteLine("generated " + srgsDocs.Count + " grammar files"); //load grammars List<Grammar> gs = new List<Grammar>(); //loop from srgsDocs.Count to 0 will give the early sentence the priority. for (int i = srgsDocs.Count; --i >= 0; ) { string cfgPath = storyDirectory + "\\" + GRAMMAR_TMP_DIR + "\\" + page.GetPageNumber() + "_" + i + ".cfg"; Directory.CreateDirectory(storyDirectory + "\\" + GRAMMAR_TMP_DIR); CompileGrammar(srgsDocs.ElementAt(i), cfgPath); if (mode == Mode.REALTIME) { if (i == 0 || (i > 0 && (allSpeechText[i - 1].Contains("?") || allSpeechText[i - 1].Contains(".") || allSpeechText[i - 1].Contains("!")))) { defaultStartIndexes.Add(i); Debug.WriteLine("loading grammar:" + cfgPath); Grammar storyG = new Grammar(cfgPath); storyG.Weight = EBookConstant.DEFAULT_WEIGHT; storyG.Priority = EBookConstant.DEFAULT_PRIORITY; gs.Add(storyG); } } else { if (i == 0) { defaultStartIndexes.Add(i); Debug.WriteLine("loading grammar:" + cfgPath); Grammar storyG = new Grammar(cfgPath); storyG.Weight = EBookConstant.DEFAULT_WEIGHT; storyG.Priority = EBookConstant.DEFAULT_PRIORITY; ActivityExecutor.add(new InternalReloadOnGoingGrammarActivity(storyG)); } } } if (gs.Count > 0) { ActivityExecutor.add(new InternalReloadStoryGrammarActivity(gs)); } } string pageTextStr = constructPageText(); ActivityExecutor.add(new InternalUpdatePageTextActivity(pageTextStr, page.pageNumber)); } /* * Compile the given grammar to a file */ private void CompileGrammar(SrgsDocument srgDoc, string cfgPath) { FileStream fs = new FileStream(cfgPath, FileMode.Create); SrgsGrammarCompiler.Compile(srgDoc, (Stream)fs); fs.Close(); } /* * return 1 if the result is a complete speech and it is the end of the page. 
*/ private int processSpeechResult(string result, int startInd, bool isH, float confidence, double duration) { //lock this method, in case the this method will take more time than the SR lock (lockThis) { int ret = 0; string[] arr = result.Split(default(string[]), StringSplitOptions.RemoveEmptyEntries); if (isH) { int increment = 0; if (guessAhead && EBookInteractiveSystem.lookAheadDivider > 0 && syllableArray.Length >= arr.Length+startInd) { //numOfHypothsis++; int syCount = 0; for (int i = startInd; i < arr.Length + startInd; i++) { syCount += syllableArray[i]; } int syIn = syCount / EBookInteractiveSystem.lookAheadDivider; //one syllable forward for every lookAheadDivide if (syIn < INITIAL_LOOK_AHEAD) { //The highlighting seems slow in the first second of a speech, let's highlight 2 syllables //ahead in the beginning of a speech syIn = INITIAL_LOOK_AHEAD; } int enInc = startInd + arr.Length; if (maxSyllableAhead > 0) { //Debug.WriteLine("Time pier syllable=" + timePerSyllable); if (syIn > maxSyllableAhead) { syIn = (int)maxSyllableAhead; } } string currentEndWord = ""; if (enInc > 0 && enInc <= allSpeechText.Length) { currentEndWord = allSpeechText[enInc - 1]; } Boolean cont = true; while (syIn > 0 && cont) { if (enInc < syllableArray.Length) { string guessWord = allSpeechText[enInc]; //if the current end word has pause punctuation, stop look ahead if (EBookUtil.containPausePunctuation(currentEndWord)) { Debug.WriteLine("currentEndWord \"" + currentEndWord + "\" contains pause, stop look ahead"); break; } else if (EBookUtil.containPausePunctuation(guessWord)) { //reduce 4 syllables from enInc syIn -= 3; //no guess ahead if there is any possible pause in guess ahead word. //Debug.WriteLine("guessWord \"" + guessWord + "\" contains pause, stop look ahead"); //cont = false; } if (syIn >= syllableArray[enInc]) { increment++; } syIn -= syllableArray[enInc]; enInc++; } else { break; } } Debug.WriteLine("guess " + increment + " word(s) ahead"); } //ONLY DISPLAY INTERMIDIATE RESULT WITH CONFIDENCE SCORE GREATER THAN HYP_THRESHOLD if (confidence > HYP_THRESHOLD) { if (startInd == lastSegPosition) //continue with last segment { startIndex = confirmedStartIndex; hypothesisEndIndex = arr.Length + lastSegPosition - 1; endIndex = hypothesisEndIndex + increment; } else if (arr.Length > 1) //jumping to onther sentence when at least two word in the speech { int gap = startInd - confirmedEndIndex; if (gap > 0 && gap <= SPEECH_JUMP_GAP_THRESHOLD) { //if reader skip maximum of SPEECH_JUMP_GAP_THRESHOLD, consider //the continue highlight the sentence } else { startIndex = startInd; } hypothesisEndIndex = arr.Length + startInd - 1; endIndex = hypothesisEndIndex + increment; } } } else { //duration != -1 only when !isH, so we need to estimate the time //per syllable for the next scentence using the previous speech int syCount = 0; for (int i = startInd; i < arr.Length + startInd; i++) { syCount += syllableArray[i]; } timePerSyllable = duration / syCount; Trace.WriteLine("timePerSyllable: " + timePerSyllable); maxSyllableAhead = (int)(maxLag / timePerSyllable); // numOfHypothsis = 0; if (startInd == lastSegPosition) { //if number of confirmed word in the complete speech is one word less than imcomplete speech, //we can treat the last imcomplete speech as complete int completeEndIndex = arr.Length + lastSegPosition - 1; if (endIndex - completeEndIndex != 1) { endIndex = completeEndIndex; } lastSegPosition = endIndex + 1; } else { int gap = startInd - lastSegPosition; if (gap > 0 && gap <= 
SPEECH_JUMP_GAP_THRESHOLD) { //if reader skip maximum of SPEECH_JUMP_GAP_THRESHOLD, consider //the continue highlight the sentence } else { startIndex = startInd; } endIndex = arr.Length + startInd - 1; lastSegPosition = endIndex + 1; } confirmedStartIndex = startIndex; confirmedEndIndex = endIndex; confirmedLastSegPosition = lastSegPosition; if (mode != Mode.REPLAY) { //EBookSRDevice rec = EBookSRDevice.GetInstance(); //reach the end of the page if (confirmedEndIndex == allSpeechText.Length - 1) { //stop SR when transitioning to the next page //rec.enableSR(false); //ret = 1; //AbstractEBookEvent.raise(new FinishPageEvent()); } else { string cfgPath = storyDirectory + "\\" + GRAMMAR_TMP_DIR + "\\" + currentPage.GetPageNumber() + "_" + (endIndex + 1) + ".cfg"; if (!defaultStartIndexes.Contains(endIndex + 1)) { //rec.enableSR(false); //rec.UnloadOnGoingGrammar(); Grammar g = new Grammar(cfgPath); g.Weight = EBookConstant.NEXT_WORD_WEIGHT; g.Priority = EBookConstant.NEXT_WORD_PRIORITY; //rec.LoadOnGoingGrammar(g); ActivityExecutor.add(new InternalReloadOnGoingGrammarActivity(g)); Debug.WriteLine("Load onGoing grammar " + cfgPath); //rec.GenerateAndLoadOnGoingGrammars(allSpeechText,endIndex+1); //rec.enableSR(true); } else { //the next sentence has higher priority //rec.ReloadAndChangeGrammarPriority(endIndex + 1); ActivityExecutor.add(new InternalChangeGrammarPriorityActivity(endIndex + 1)); } } } if (mode != Mode.RECORD && confirmedEndIndex == allSpeechText.Length - 1) { //the complete recognized result reaches the end of the page ret = 1; } //Detect speech and complete //AbstractEBookEvent.raise(new CompleteSpeechEvent()); } Debug.WriteLine("startIndex=" + startIndex); Debug.WriteLine("endIndex=" + endIndex); Debug.WriteLine("lastSegPosition=" + lastSegPosition); Debug.WriteLine("confirmedStartIndex=" + confirmedStartIndex); Debug.WriteLine("confirmedEndIndex=" + confirmedEndIndex); Debug.WriteLine("confirmedLastSegPosition=" + confirmedLastSegPosition); return ret; } } /* * construct the highlight text and raise in a updateSpeechTextEvent */ private void constructTextAndDisplay() { //if startIndex not equals to confirmedStartIndex, the hypothesis result //starts from different position. 
string displayText = "<span class='storytext'>"; for (int i = 0; i < allSpeechTextHTML.Length; i++) { //the on going speech is not yet confirmed if (startIndex == confirmedStartIndex && endIndex > confirmedEndIndex){ //if it is the beginning (no confirmed speech) if (confirmedEndIndex == 0) { if (startIndex == i && endIndex == i) { if (endIndex > hypothesisEndIndex) { displayText += "<span class='hypothesis'>" + allSpeechTextHTML[i] + " </span> <span class='lookAhead'>" ; } else { displayText += "<span class='hypothesis'>" + allSpeechTextHTML[i] + " </span> "; } } else if (startIndex == i) { displayText += " <span class='hypothesis'>" + allSpeechTextHTML[i] + " "; } else if (hypothesisEndIndex == i && hypothesisEndIndex < endIndex) { displayText += allSpeechTextHTML[i] + " </span> <span class='lookAhead'>"; } else if (endIndex == i) { displayText += allSpeechTextHTML[i] + " </span> "; } else { displayText += allSpeechTextHTML[i] + " "; } } else { if (startIndex == i && endIndex == i) { if (endIndex > hypothesisEndIndex) { displayText += "<span class='hypothesis'>" + allSpeechTextHTML[i] + " </span> <span class='lookAhead'>" ; } else { displayText += "<span class='hypothesis'>" + allSpeechTextHTML[i] + " </span> "; } } else if (startIndex == i) { displayText += "<span class='highlight'>" + allSpeechTextHTML[i] + " "; } else if (confirmedEndIndex == i) { displayText += allSpeechTextHTML[i] + " </span> <span class='hypothesis'>"; } else if (hypothesisEndIndex == i && hypothesisEndIndex < endIndex) { displayText += allSpeechTextHTML[i] + " </span> <span class='lookAhead'>"; } else if (endIndex == i) { displayText += allSpeechTextHTML[i] + " </span> "; } else { displayText += allSpeechTextHTML[i] + " "; } } } //the on going speech is not yet confirmed and the hypothesis result starts from different position else if (startIndex != confirmedStartIndex && startIndex != -1) { //display the confimred words if (confirmedStartIndex == i && confirmedEndIndex == i) { displayText += "<span class='highlight'>" + allSpeechTextHTML[i] + " </span> "; } else if (confirmedStartIndex == i) { displayText += "<span class='highlight'>" + allSpeechTextHTML[i] + " "; } else if (confirmedEndIndex == i) { displayText += allSpeechTextHTML[i] + " </span> "; } else if (startIndex == i) { displayText += "<span class='hypothesis'>" + allSpeechTextHTML[i] + " "; } else if (endIndex == i) { displayText += allSpeechTextHTML[i] + " </span> "; } else { displayText += allSpeechTextHTML[i] + " "; } } //all spoken speech are confirmed else { if (startIndex == i && endIndex == i) { displayText += "<span class='highlight'>" + allSpeechTextHTML[i] + "</span> "; } else if (startIndex == i) { displayText += "<span class='highlight'>" + allSpeechTextHTML[i] + " "; } else if (endIndex == i) { displayText += allSpeechTextHTML[i] + "</span> "; } else { displayText += allSpeechTextHTML[i] + " "; } } } displayText += "</span>"; Debug.WriteLine(displayText); //AbstractEBookEvent.raise(new UpdateSpeechTextEvent(displayText)); ActivityExecutor.add(new InternalUpdateTextHighLightActivity(displayText)); } /* * Construct the html text for the current page with no highlight */ private string constructPageText() { string displayText = "<span class='storytext'>"; for (int i = 0; i < allSpeechTextHTML.Length; i++) { displayText += allSpeechTextHTML[i] + " "; } displayText += "</span>"; return displayText; } /* * play the animation for a completed recognition, do nothing if the * animation already been played in the hypothesis result. 
* */ private void processAnimation(int reverseIndex) { //preserve the last animation in the list string lastAnim = ""; if (playedAnimationList.Count > 0) { lastAnim = playedAnimationList.ElementAt(playedAnimationList.Count-1); playedAnimationList.Clear(); } int maxLen = annotationArray.Length; for (int i = reverseIndex; i >= 0 && i < maxLen; i--) { if (annotationArray[i].Length > 0) { string[] annX = annotationArray[i].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries); foreach (string each in annX) { if (each.StartsWith("action=")) { string action = each.Substring(7); if (currentPage != null) { if (!lastAnim.Equals(action)) { Trace.WriteLine("Processing animation:" + action); currentPage.processAction(action); //playedAnimationList.Add(action); return; } else { //the latest animation was just played in the hypothesis result, //so there is nothing more to do return; } } } } } } } /* * play the animation within the hypothesis text. */ private void processHypothesisAnimation(int textLength, int start) { //just play the last animation within the result int endIndex = textLength + start; endIndex = Math.Min(endIndex, annotationArray.Length - 1); for (int i = endIndex; i >= start; i--) { if (annotationArray[i].Length > 0) { string[] annX = annotationArray[i].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries); foreach (string each in annX) { if (each.StartsWith("action=")) { string action = each.Substring(7); if (currentPage != null && !playedAnimationList.Contains(action)) { Trace.WriteLine("Processing animation:" + action); currentPage.processAction(action); playedAnimationList.Add(action); return; } } } } } } /* * take out the hypothesis highlighting if SR returns a recognition result or * if the audio contains no speech. */ private void withdrawAudioEnergyHypothesizedHighlight() { Trace.WriteLine("withdraw highlight from " + endIndex + " to " + confirmedEndIndex); startIndex = confirmedStartIndex; endIndex = confirmedEndIndex; if (startIndex == endIndex && endIndex == 0) { //display no highlight string pageTextStr = constructPageText(); //AbstractEBookEvent.raise(new UpdatePageTextEvent(pageTextStr, currentPage.GetPageNumber())); ActivityExecutor.add(new InternalUpdatePageTextActivity(pageTextStr, currentPage.GetPageNumber())); } else { //display the previous highlight constructTextAndDisplay(); } } /* * The SR detects that the audio energy has jumped to a significant level, but has not yet declared * it to be speech. The audio can be the beginning * of a speech, or any other noise. In either case, we want to start highlighting * the next word to reduce the delay.
*/ private void processAudioEnergyHypothesizedHighlight(double audioStartTime) { if (EBookInteractiveSystem.initialLookAhead > 0 ) { double elapsedTime = GetUnixTime() - audioStartTime; int syIn = (int)(elapsedTime / timePerSyllable); int MAX_INITIAL_HIGHLIGHT = EBookInteractiveSystem.initialLookAhead; int forLoopMax = confirmedEndIndex + 1 + MAX_INITIAL_HIGHLIGHT; int syCount = 0; for (int i = confirmedEndIndex+1; i < forLoopMax && i < syllableArray.Length; i++) { syCount += syllableArray[i]; } //int syIn = syCount / EBookDialogueSystem.lookAheadDivider; //one syllable forward for every lookAheadDivider //make sure we can highlight at least one word if (confirmedEndIndex +1 < syllableArray.Length && syIn < syllableArray[confirmedEndIndex + 1]) { syIn = syllableArray[confirmedEndIndex + 1]; } //the upper bound if (syIn > syCount) { syIn = syCount; } int enInc = confirmedEndIndex+1; Boolean cont = true; int increment = 0; while (syIn > 0 && cont) { if (enInc < syllableArray.Length) { if (syIn >= syllableArray[enInc]) { increment++; } syIn -= syllableArray[enInc]; enInc++; } else { break; } } Trace.WriteLine("(audio level)guess " + increment + " word(s) ahead"); startIndex = confirmedStartIndex; endIndex = confirmedEndIndex + increment; } constructTextAndDisplay(); } /* * Interpret the meaning of a command speech. * Caution: the string comparisons for the commands in this function * rely on the script in command.grxml. If you change something in the command.grxml file * you may need to edit the command strings in this function, and vice versa. */ private void processCommandSemantics(KeyValuePair<string, SemanticValue>[] semantics) { foreach (KeyValuePair<string, SemanticValue> each in semantics) { if (each.Key.CompareTo("NavigatePage") == 0) { if (each.Value.Value.ToString().CompareTo("[next]") == 0) { //AbstractEBookEvent.raise(new ChangePageEvent(PageAction.NEXT)); ActivityExecutor.add(new InternalSpeechNavigatePageActivity(PageAction.NEXT)); } else if (each.Value.Value.ToString().CompareTo("[previous]") == 0) { //AbstractEBookEvent.raise(new ChangePageEvent(PageAction.PREVIOUS)); ActivityExecutor.add(new InternalSpeechNavigatePageActivity(PageAction.PREVIOUS)); } else { int pageN = Convert.ToInt32(each.Value.Value); //AbstractEBookEvent.raise(new ChangePageEvent(PageAction.GO_PAGE_X, pageN)); ActivityExecutor.add(new InternalSpeechNavigatePageActivity(PageAction.GO_PAGE_X, pageN)); } } } } private void processRecognitionResult(RecognitionResult result) { processRecognitionResult(result.confidence, result.textResult, result.isHypothesis, result.semanticResult, result.grammarName, result.ruleName, result.audioDuration, result.wavPath); } /* * process the recognition result from SR; the recognition result can be the * hypothesis result or the complete result.
*/ public void processRecognitionResult(float confidence, string textResult, bool isHypothesis, KeyValuePair<string, SemanticValue>[] semantics, string grammarName, string ruleName, double audioDuration, string wavPath) { //handle result if the recognized speech is a command if (grammarName.CompareTo("command") == 0) { if (!isHypothesis && confidence*100 > EBookInteractiveSystem.commandConfidenceThreshold) { processCommandSemantics(semantics); } } //handle result if this is story text else { int start = -1; //the index of the first word of the recognized text in the current page //process the story annotations if (semantics != null && semantics.Length > 0) { foreach (KeyValuePair<string, SemanticValue> each in semantics) { if (each.Key.CompareTo("startIndex") == 0) { start = Convert.ToInt32(each.Value.Value); } } } if (start == -1) { string rule = ruleName; if (rule.StartsWith("index_")) { string startIndex = rule.Substring(6); if (startIndex.Length > 0) { start = Convert.ToInt32(startIndex); } } } //process the highlighting before the animation (try to underline the text as fast as possible) Debug.WriteLine(textResult); Trace.WriteLine("start process Text time: " + DateTime.Now.ToString("yyyyMMddHHmmssfff")); int isEndOfPage = processSpeechResult(textResult, start, isHypothesis, confidence, audioDuration); constructTextAndDisplay(); Trace.WriteLine("end process Text time: " + DateTime.Now.ToString("yyyyMMddHHmmssfff")); //for some reason, the hypothesis results do not contain any semantic results, so //we have to find the start index manually if (isHypothesis) { string[] tmp = textResult.Split(default(string[]), StringSplitOptions.RemoveEmptyEntries); //the first word in a speech is often misrecognized, //so only generate an animation when the hypothesis text is longer than one word if (tmp.Length > 1) { processHypothesisAnimation(tmp.Length, start); } } else { string[] tmp = textResult.Split(default(string[]), StringSplitOptions.RemoveEmptyEntries); processAnimation(tmp.Length+start); //keep the last action in the list, remove the rest of them //string lastAction = playedAnimationList.Last(); //playedAnimationList.Clear(); //playedAnimationList.Add(lastAction); if (isEndOfPage == 1) { //Debug.WriteLine("generating a finishpageactivity "+isHypothesis); //AbstractEBookEvent.raise(new FinishPageEvent()); ActivityExecutor.add(new InternalFinishPageActivity()); } } } } public double GetUnixTime() { return (DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalMilliseconds; } public void setSpeechState(SpeechState state) { if (state == SpeechState.SPEECH_START) { speechState = 1; } else if (state == SpeechState.SPEECH_END) { speechState = 0; } } public void processAcousticHypothesisHighlight(int audioState, double startTime) { if (speechState == 0 && audioState >= 0) { processAudioEnergyHypothesizedHighlight(startTime); } else if (speechState == 0 && audioState < 0) { withdrawAudioEnergyHypothesizedHighlight(); } } //SR rejected the recent hypothesis result; roll the highlighting back to the previously detected speech public void rollBackText() { numOfHypothsis = 0; lastSegPosition = confirmedLastSegPosition; startIndex = confirmedStartIndex; endIndex = confirmedEndIndex; processAnimation(endIndex); //construct the text without highlighting if the rejected recognition is the first sentence/word of the page if (confirmedLastSegPosition == confirmedStartIndex && confirmedStartIndex == confirmedEndIndex && confirmedEndIndex == 0) { string pageTextStr = constructPageText(); //AbstractEBookEvent.raise(new UpdatePageTextEvent(pageTextStr,
currentPage.GetPageNumber())); ActivityExecutor.add(new InternalUpdatePageTextActivity(pageTextStr, currentPage.GetPageNumber())); } else { //construct the HTML text with highlighting constructTextAndDisplay(); } } } }
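// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original sources above): the audio-energy look-ahead in
// processAudioEnergyHypothesizedHighlight turns elapsed audio time into a syllable budget and
// then into a number of words to pre-highlight. The standalone helper below sketches that
// estimate under simplified assumptions; the names (LookAheadSketch, EstimateLookAheadWords,
// syllablesPerWord) are hypothetical, and the real code additionally caps the window by
// EBookInteractiveSystem.initialLookAhead and keeps its state on the reader object.
using System;

internal static class LookAheadSketch
{
    // Returns how many words past the last confirmed word should be tentatively highlighted.
    internal static int EstimateLookAheadWords(double elapsedMs, double msPerSyllable, int[] syllablesPerWord, int firstUnconfirmedWord, int maxWords)
    {
        if (firstUnconfirmedWord < 0 || firstUnconfirmedWord >= syllablesPerWord.Length || maxWords <= 0) return 0;

        int syllableBudget = (int)(elapsedMs / msPerSyllable);
        // always allow at least the first unconfirmed word to be highlighted
        syllableBudget = Math.Max(syllableBudget, syllablesPerWord[firstUnconfirmedWord]);

        int words = 0;
        for (int i = firstUnconfirmedWord; i < syllablesPerWord.Length && words < maxWords; i++)
        {
            if (syllableBudget < syllablesPerWord[i]) break;
            syllableBudget -= syllablesPerWord[i];
            words++;
        }
        return words;
    }
}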
// Copyright(c) 2016, Michal Skalsky // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors // may be used to endorse or promote products derived from this software without // specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT // SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT // OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR // TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. using UnityEngine; using System.Collections; using UnityEngine.Rendering; using System; [RequireComponent(typeof(Camera))] public class VolumetricLightRenderer : MonoBehaviour { public enum VolumtericResolution { Full, Half, Quarter }; public static event Action<VolumetricLightRenderer, Matrix4x4> PreRenderEvent; private static Mesh _pointLightMesh; private static Mesh _spotLightMesh; private static Material _lightMaterial; private Camera _camera; private CommandBuffer _preLightPass; private Matrix4x4 _viewProj; private Material _blitAddMaterial; private Material _bilateralBlurMaterial; private RenderTexture _volumeLightTexture; private RenderTexture _halfVolumeLightTexture; private RenderTexture _quarterVolumeLightTexture; private static Texture _defaultSpotCookie; private RenderTexture _halfDepthBuffer; private RenderTexture _quarterDepthBuffer; private VolumtericResolution _currentResolution = VolumtericResolution.Half; private Texture2D _ditheringTexture; private Texture3D _noiseTexture; public VolumtericResolution Resolution = VolumtericResolution.Half; public Texture DefaultSpotCookie; public CommandBuffer GlobalCommandBuffer { get { return _preLightPass; } } /// <summary> /// /// </summary> /// <returns></returns> public static Material GetLightMaterial() { return _lightMaterial; } /// <summary> /// /// </summary> /// <returns></returns> public static Mesh GetPointLightMesh() { return _pointLightMesh; } /// <summary> /// /// </summary> /// <returns></returns> public static Mesh GetSpotLightMesh() { return _spotLightMesh; } /// <summary> /// /// </summary> /// <returns></returns> public RenderTexture GetVolumeLightBuffer() { if (Resolution == VolumtericResolution.Quarter) return _quarterVolumeLightTexture; else if (Resolution == VolumtericResolution.Half) return _halfVolumeLightTexture; else return _volumeLightTexture; } /// <summary> /// /// </summary> /// <returns></returns> public RenderTexture GetVolumeLightDepthBuffer() { if (Resolution == 
VolumtericResolution.Quarter) return _quarterDepthBuffer; else if (Resolution == VolumtericResolution.Half) return _halfDepthBuffer; else return null; } /// <summary> /// /// </summary> /// <returns></returns> public static Texture GetDefaultSpotCookie() { return _defaultSpotCookie; } /// <summary> /// /// </summary> void Awake() { _camera = GetComponent<Camera>(); if (_camera.actualRenderingPath == RenderingPath.Forward) _camera.depthTextureMode = DepthTextureMode.Depth; _currentResolution = Resolution; Shader shader = Shader.Find("Hidden/BlitAdd"); if (shader == null) throw new Exception("Critical Error: \"Hidden/BlitAdd\" shader is missing. Make sure it is included in \"Always Included Shaders\" in ProjectSettings/Graphics."); _blitAddMaterial = new Material(shader); shader = Shader.Find("Hidden/BilateralBlur"); if (shader == null) throw new Exception("Critical Error: \"Hidden/BilateralBlur\" shader is missing. Make sure it is included in \"Always Included Shaders\" in ProjectSettings/Graphics."); _bilateralBlurMaterial = new Material(shader); _preLightPass = new CommandBuffer(); _preLightPass.name = "PreLight"; ChangeResolution(); if (_pointLightMesh == null) { GameObject go = GameObject.CreatePrimitive(PrimitiveType.Sphere); _pointLightMesh = go.GetComponent<MeshFilter>().sharedMesh; Destroy(go); } if (_spotLightMesh == null) { _spotLightMesh = CreateSpotLightMesh(); } if (_lightMaterial == null) { shader = Shader.Find("Sandbox/VolumetricLight"); if (shader == null) throw new Exception("Critical Error: \"Sandbox/VolumetricLight\" shader is missing. Make sure it is included in \"Always Included Shaders\" in ProjectSettings/Graphics."); _lightMaterial = new Material(shader); } if (_defaultSpotCookie == null) { _defaultSpotCookie = DefaultSpotCookie; } LoadNoise3dTexture(); GenerateDitherTexture(); } /// <summary> /// /// </summary> void OnEnable() { //_camera.RemoveAllCommandBuffers(); if(_camera.actualRenderingPath == RenderingPath.Forward) _camera.AddCommandBuffer(CameraEvent.AfterDepthTexture, _preLightPass); else _camera.AddCommandBuffer(CameraEvent.BeforeLighting, _preLightPass); } /// <summary> /// /// </summary> void OnDisable() { //_camera.RemoveAllCommandBuffers(); if(_camera.actualRenderingPath == RenderingPath.Forward) _camera.RemoveCommandBuffer(CameraEvent.AfterDepthTexture, _preLightPass); else _camera.RemoveCommandBuffer(CameraEvent.BeforeLighting, _preLightPass); } /// <summary> /// /// </summary> void ChangeResolution() { int width = _camera.pixelWidth; int height = _camera.pixelHeight; if (_volumeLightTexture != null) Destroy(_volumeLightTexture); _volumeLightTexture = new RenderTexture(width, height, 0, RenderTextureFormat.ARGBHalf); _volumeLightTexture.name = "VolumeLightBuffer"; _volumeLightTexture.filterMode = FilterMode.Bilinear; if (_halfDepthBuffer != null) Destroy(_halfDepthBuffer); if (_halfVolumeLightTexture != null) Destroy(_halfVolumeLightTexture); if (Resolution == VolumtericResolution.Half || Resolution == VolumtericResolution.Quarter) { _halfVolumeLightTexture = new RenderTexture(width / 2, height / 2, 0, RenderTextureFormat.ARGBHalf); _halfVolumeLightTexture.name = "VolumeLightBufferHalf"; _halfVolumeLightTexture.filterMode = FilterMode.Bilinear; _halfDepthBuffer = new RenderTexture(width / 2, height / 2, 0, RenderTextureFormat.RFloat); _halfDepthBuffer.name = "VolumeLightHalfDepth"; _halfDepthBuffer.Create(); _halfDepthBuffer.filterMode = FilterMode.Point; } if (_quarterVolumeLightTexture != null) Destroy(_quarterVolumeLightTexture); if 
(_quarterDepthBuffer != null) Destroy(_quarterDepthBuffer); if (Resolution == VolumtericResolution.Quarter) { _quarterVolumeLightTexture = new RenderTexture(width / 4, height / 4, 0, RenderTextureFormat.ARGBHalf); _quarterVolumeLightTexture.name = "VolumeLightBufferQuarter"; _quarterVolumeLightTexture.filterMode = FilterMode.Bilinear; _quarterDepthBuffer = new RenderTexture(width / 4, height / 4, 0, RenderTextureFormat.RFloat); _quarterDepthBuffer.name = "VolumeLightQuarterDepth"; _quarterDepthBuffer.Create(); _quarterDepthBuffer.filterMode = FilterMode.Point; } } /// <summary> /// /// </summary> public void OnPreRender() { // use very low value for near clip plane to simplify cone/frustum intersection Matrix4x4 proj = Matrix4x4.Perspective(_camera.fieldOfView, _camera.aspect, 0.01f, _camera.farClipPlane); proj = GL.GetGPUProjectionMatrix(proj, true); _viewProj = proj * _camera.worldToCameraMatrix; _preLightPass.Clear(); bool dx11 = SystemInfo.graphicsShaderLevel > 40; if (Resolution == VolumtericResolution.Quarter) { Texture nullTexture = null; // down sample depth to half res _preLightPass.Blit(nullTexture, _halfDepthBuffer, _bilateralBlurMaterial, dx11 ? 4 : 10); // down sample depth to quarter res _preLightPass.Blit(nullTexture, _quarterDepthBuffer, _bilateralBlurMaterial, dx11 ? 6 : 11); _preLightPass.SetRenderTarget(_quarterVolumeLightTexture); } else if (Resolution == VolumtericResolution.Half) { Texture nullTexture = null; // down sample depth to half res _preLightPass.Blit(nullTexture, _halfDepthBuffer, _bilateralBlurMaterial, dx11 ? 4 : 10); _preLightPass.SetRenderTarget(_halfVolumeLightTexture); } else { _preLightPass.SetRenderTarget(_volumeLightTexture); } _preLightPass.ClearRenderTarget(false, true, new Color(0, 0, 0, 1)); UpdateMaterialParameters(); if (PreRenderEvent != null) PreRenderEvent(this, _viewProj); } [ImageEffectOpaque] public void OnRenderImage(RenderTexture source, RenderTexture destination) { if (Resolution == VolumtericResolution.Quarter) { RenderTexture temp = RenderTexture.GetTemporary(_quarterDepthBuffer.width, _quarterDepthBuffer.height, 0, RenderTextureFormat.ARGBHalf); temp.filterMode = FilterMode.Bilinear; // horizontal bilateral blur at quarter res Graphics.Blit(_quarterVolumeLightTexture, temp, _bilateralBlurMaterial, 8); // vertical bilateral blur at quarter res Graphics.Blit(temp, _quarterVolumeLightTexture, _bilateralBlurMaterial, 9); // upscale to full res Graphics.Blit(_quarterVolumeLightTexture, _volumeLightTexture, _bilateralBlurMaterial, 7); RenderTexture.ReleaseTemporary(temp); } else if (Resolution == VolumtericResolution.Half) { RenderTexture temp = RenderTexture.GetTemporary(_halfVolumeLightTexture.width, _halfVolumeLightTexture.height, 0, RenderTextureFormat.ARGBHalf); temp.filterMode = FilterMode.Bilinear; // horizontal bilateral blur at half res Graphics.Blit(_halfVolumeLightTexture, temp, _bilateralBlurMaterial, 2); // vertical bilateral blur at half res Graphics.Blit(temp, _halfVolumeLightTexture, _bilateralBlurMaterial, 3); // upscale to full res Graphics.Blit(_halfVolumeLightTexture, _volumeLightTexture, _bilateralBlurMaterial, 5); RenderTexture.ReleaseTemporary(temp); } else { RenderTexture temp = RenderTexture.GetTemporary(_volumeLightTexture.width, _volumeLightTexture.height, 0, RenderTextureFormat.ARGBHalf); temp.filterMode = FilterMode.Bilinear; // horizontal bilateral blur at full res Graphics.Blit(_volumeLightTexture, temp, _bilateralBlurMaterial, 0); // vertical bilateral blur at full res Graphics.Blit(temp, 
_volumeLightTexture, _bilateralBlurMaterial, 1); RenderTexture.ReleaseTemporary(temp); } // add volume light buffer to rendered scene _blitAddMaterial.SetTexture("_Source", source); Graphics.Blit(_volumeLightTexture, destination, _blitAddMaterial, 0); } private void UpdateMaterialParameters() { _bilateralBlurMaterial.SetTexture("_HalfResDepthBuffer", _halfDepthBuffer); _bilateralBlurMaterial.SetTexture("_HalfResColor", _halfVolumeLightTexture); _bilateralBlurMaterial.SetTexture("_QuarterResDepthBuffer", _quarterDepthBuffer); _bilateralBlurMaterial.SetTexture("_QuarterResColor", _quarterVolumeLightTexture); Shader.SetGlobalTexture("_DitherTexture", _ditheringTexture); Shader.SetGlobalTexture("_NoiseTexture", _noiseTexture); } /// <summary> /// /// </summary> void Update() { //#if UNITY_EDITOR if (_currentResolution != Resolution) { _currentResolution = Resolution; ChangeResolution(); } if ((_volumeLightTexture.width != _camera.pixelWidth || _volumeLightTexture.height != _camera.pixelHeight)) ChangeResolution(); //#endif } /// <summary> /// /// </summary> void LoadNoise3dTexture() { // basic dds loader for 3d texture - !not very robust! TextAsset data = Resources.Load("NoiseVolume") as TextAsset; byte[] bytes = data.bytes; uint height = BitConverter.ToUInt32(data.bytes, 12); uint width = BitConverter.ToUInt32(data.bytes, 16); uint pitch = BitConverter.ToUInt32(data.bytes, 20); uint depth = BitConverter.ToUInt32(data.bytes, 24); uint formatFlags = BitConverter.ToUInt32(data.bytes, 20 * 4); uint fourCC = BitConverter.ToUInt32(data.bytes, 21 * 4); uint bitdepth = BitConverter.ToUInt32(data.bytes, 22 * 4); if (bitdepth == 0) bitdepth = pitch / width * 8; // doesn't work with TextureFormat.Alpha8 for some reason _noiseTexture = new Texture3D((int)width, (int)height, (int)depth, TextureFormat.RGBA32, false); _noiseTexture.name = "3D Noise"; Color[] c = new Color[width * height * depth]; uint index = 128; if (data.bytes[21 * 4] == 'D' && data.bytes[21 * 4 + 1] == 'X' && data.bytes[21 * 4 + 2] == '1' && data.bytes[21 * 4 + 3] == '0' && (formatFlags & 0x4) != 0) { uint format = BitConverter.ToUInt32(data.bytes, (int)index); if (format >= 60 && format <= 65) bitdepth = 8; else if (format >= 48 && format <= 52) bitdepth = 16; else if (format >= 27 && format <= 32) bitdepth = 32; //Debug.Log("DXGI format: " + format); // dx10 format, skip dx10 header //Debug.Log("DX10 format"); index += 20; } uint byteDepth = bitdepth / 8; pitch = (width * bitdepth + 7) / 8; for (int d = 0; d < depth; ++d) { //index = 128; for (int h = 0; h < height; ++h) { for (int w = 0; w < width; ++w) { float v = (bytes[index + w * byteDepth] / 255.0f); c[w + h * width + d * width * height] = new Color(v, v, v, v); } index += pitch; } } _noiseTexture.SetPixels(c); _noiseTexture.Apply(); } /// <summary> /// /// </summary> private void GenerateDitherTexture() { if (_ditheringTexture != null) { return; } int size = 8; #if DITHER_4_4 size = 4; #endif // again, I couldn't make it work with Alpha8 _ditheringTexture = new Texture2D(size, size, TextureFormat.Alpha8, false, true); _ditheringTexture.filterMode = FilterMode.Point; Color32[] c = new Color32[size * size]; byte b; #if DITHER_4_4 b = (byte)(0.0f / 16.0f * 255); c[0] = new Color32(b, b, b, b); b = (byte)(8.0f / 16.0f * 255); c[1] = new Color32(b, b, b, b); b = (byte)(2.0f / 16.0f * 255); c[2] = new Color32(b, b, b, b); b = (byte)(10.0f / 16.0f * 255); c[3] = new Color32(b, b, b, b); b = (byte)(12.0f / 16.0f * 255); c[4] = new Color32(b, b, b, b); b = (byte)(4.0f / 16.0f * 255); 
c[5] = new Color32(b, b, b, b); b = (byte)(14.0f / 16.0f * 255); c[6] = new Color32(b, b, b, b); b = (byte)(6.0f / 16.0f * 255); c[7] = new Color32(b, b, b, b); b = (byte)(3.0f / 16.0f * 255); c[8] = new Color32(b, b, b, b); b = (byte)(11.0f / 16.0f * 255); c[9] = new Color32(b, b, b, b); b = (byte)(1.0f / 16.0f * 255); c[10] = new Color32(b, b, b, b); b = (byte)(9.0f / 16.0f * 255); c[11] = new Color32(b, b, b, b); b = (byte)(15.0f / 16.0f * 255); c[12] = new Color32(b, b, b, b); b = (byte)(7.0f / 16.0f * 255); c[13] = new Color32(b, b, b, b); b = (byte)(13.0f / 16.0f * 255); c[14] = new Color32(b, b, b, b); b = (byte)(5.0f / 16.0f * 255); c[15] = new Color32(b, b, b, b); #else int i = 0; b = (byte)(1.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(49.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(13.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(61.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(4.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(52.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(16.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(64.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(33.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(17.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(45.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(29.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(36.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(20.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(48.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(32.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(9.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(57.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(5.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(53.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(12.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(60.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(8.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(56.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(41.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(25.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(37.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(21.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(44.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(28.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(40.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(24.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(3.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(51.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(15.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(63.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(2.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(50.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(14.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(62.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(35.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(19.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(47.0f / 65.0f * 255); c[i++] = new 
Color32(b, b, b, b); b = (byte)(31.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(34.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(18.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(46.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(30.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(11.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(59.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(7.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(55.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(10.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(58.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(6.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(54.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(43.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(27.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(39.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(23.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(42.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(26.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(38.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); b = (byte)(22.0f / 65.0f * 255); c[i++] = new Color32(b, b, b, b); #endif _ditheringTexture.SetPixels32(c); _ditheringTexture.Apply(); } /// <summary> /// /// </summary> /// <returns></returns> private Mesh CreateSpotLightMesh() { // copy & pasted from other project, the geometry is too complex, should be simplified Mesh mesh = new Mesh(); const int segmentCount = 16; Vector3[] vertices = new Vector3[2 + segmentCount * 3]; Color32[] colors = new Color32[2 + segmentCount * 3]; vertices[0] = new Vector3(0, 0, 0); vertices[1] = new Vector3(0, 0, 1); float angle = 0; float step = Mathf.PI * 2.0f / segmentCount; float ratio = 0.9f; for (int i = 0; i < segmentCount; ++i) { vertices[i + 2] = new Vector3(-Mathf.Cos(angle) * ratio, Mathf.Sin(angle) * ratio, ratio); colors[i + 2] = new Color32(255, 255, 255, 255); vertices[i + 2 + segmentCount] = new Vector3(-Mathf.Cos(angle), Mathf.Sin(angle), 1); colors[i + 2 + segmentCount] = new Color32(255, 255, 255, 0); vertices[i + 2 + segmentCount * 2] = new Vector3(-Mathf.Cos(angle) * ratio, Mathf.Sin(angle) * ratio, 1); colors[i + 2 + segmentCount * 2] = new Color32(255, 255, 255, 255); angle += step; } mesh.vertices = vertices; mesh.colors32 = colors; int[] indices = new int[segmentCount * 3 * 2 + segmentCount * 6 * 2]; int index = 0; for (int i = 2; i < segmentCount + 1; ++i) { indices[index++] = 0; indices[index++] = i; indices[index++] = i + 1; } indices[index++] = 0; indices[index++] = segmentCount + 1; indices[index++] = 2; for (int i = 2; i < segmentCount + 1; ++i) { indices[index++] = i; indices[index++] = i + segmentCount; indices[index++] = i + 1; indices[index++] = i + 1; indices[index++] = i + segmentCount; indices[index++] = i + segmentCount + 1; } indices[index++] = 2; indices[index++] = 1 + segmentCount; indices[index++] = 2 + segmentCount; indices[index++] = 2 + segmentCount; indices[index++] = 1 + segmentCount; indices[index++] = 1 + segmentCount + segmentCount; //------------ for (int i = 2 + segmentCount; i < segmentCount + 1 + segmentCount; ++i) { indices[index++] = i; indices[index++] = i + segmentCount; indices[index++] = i + 1; indices[index++] = i + 1; indices[index++] = i + segmentCount; 
indices[index++] = i + segmentCount + 1; } indices[index++] = 2 + segmentCount; indices[index++] = 1 + segmentCount * 2; indices[index++] = 2 + segmentCount * 2; indices[index++] = 2 + segmentCount * 2; indices[index++] = 1 + segmentCount * 2; indices[index++] = 1 + segmentCount * 3; ////------------------------------------- for (int i = 2 + segmentCount * 2; i < segmentCount * 3 + 1; ++i) { indices[index++] = 1; indices[index++] = i + 1; indices[index++] = i; } indices[index++] = 1; indices[index++] = 2 + segmentCount * 2; indices[index++] = segmentCount * 3 + 1; mesh.triangles = indices; mesh.RecalculateBounds(); return mesh; } }
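// ---------------------------------------------------------------------------
// Illustrative sketch (not part of VolumetricLightRenderer.cs above): the renderer exposes
// PreRenderEvent, GetLightMaterial()/GetPointLightMesh() and GlobalCommandBuffer so that
// per-light components can queue their volume geometry into the per-camera pre-light command
// buffer. The project's real VolumetricLight component is not included in this document; the
// minimal subscriber below only illustrates the hand-off, and the class name
// SimpleVolumetricPointLight is hypothetical. A real component would also push the light's
// parameters (color, range, scattering settings, the view-projection matrix) into the material
// and choose the correct shader pass before drawing.
using UnityEngine;

[RequireComponent(typeof(Light))]
public class SimpleVolumetricPointLight : MonoBehaviour
{
    private Light _light;

    void Awake() { _light = GetComponent<Light>(); }

    void OnEnable() { VolumetricLightRenderer.PreRenderEvent += OnVolumePreRender; }

    void OnDisable() { VolumetricLightRenderer.PreRenderEvent -= OnVolumePreRender; }

    private void OnVolumePreRender(VolumetricLightRenderer renderer, Matrix4x4 viewProj)
    {
        Material material = VolumetricLightRenderer.GetLightMaterial();
        Mesh sphere = VolumetricLightRenderer.GetPointLightMesh();
        if (material == null || sphere == null || _light == null) return;

        // Scale the shared unit sphere to the light's range and draw it into the pre-light
        // command buffer (shader pass 0 is a placeholder here).
        Matrix4x4 world = Matrix4x4.TRS(transform.position, Quaternion.identity, Vector3.one * _light.range * 2.0f);
        renderer.GlobalCommandBuffer.DrawMesh(sphere, world, material, 0, 0);
    }
}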
/** * _____ _____ _____ _____ __ _____ _____ _____ _____ * | __| | | | | | | | | __| | | * |__ | | | | | | | | | |__| | | | |- -| --| * |_____|_____|_|_|_|_____| |_____|_____|_____|_____|_____| * * UNICORNS AT WARP SPEED SINCE 2010 * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ namespace SumoLogic.Logging.Serilog.Tests { using System; using System.Globalization; using System.Threading; using global::Serilog; using global::Serilog.Core; using global::Serilog.Formatting.Display; using SumoLogic.Logging.Common.Log; using SumoLogic.Logging.Common.Sender; using SumoLogic.Logging.Common.Tests; using SumoLogic.Logging.Serilog.Config; using Xunit; /// <summary> /// Buffered Sumo Logic target test implementation. /// </summary> [Collection("Serilog tests")] public class BufferedSumoLogicSinkTests : IDisposable { /// <summary> /// The HTTP messages handler mock. /// </summary> private MockHttpMessageHandler _messagesHandler; /// <summary> /// The buffered SumoLogic sink. /// </summary> private BufferedSumoLogicSink sink; /// <summary> /// The Serilog logger. /// </summary> private Logger logger; /// <summary> /// Test logging of a single message. /// </summary> [Fact] public void TestSingleMessage() { SetUpLogger(1, 10000, 10); logger.Information("This is a message"); Assert.Equal(0, _messagesHandler.ReceivedRequests.Count); TestHelper.Eventually(() => { Assert.Equal(1, _messagesHandler.ReceivedRequests.Count); Assert.Equal($"INFORMATION: This is a message{Environment.NewLine}", _messagesHandler.LastReceivedRequest.Content.ReadAsStringAsync().Result); }); } /// <summary> /// Test logging of multiple messages. /// </summary> [Fact] public void TestMultipleMessages() { SetUpLogger(1, 10000, 10); var numMessages = 20; for (var i = 0; i < numMessages; i++) { logger.Information(i.ToString()); Thread.Sleep(TimeSpan.FromMilliseconds(100)); } TestHelper.Eventually(() => { Assert.Equal(numMessages, _messagesHandler.ReceivedRequests.Count); }); } /// <summary> /// Test batching of multiple messages based on messages per request setting. /// </summary> [Fact] public void TestBatchingBySize() { // Huge time window, ensure all messages get batched into one SetUpLogger(100, 10000, 10); var numMessages = 100; for (var i = 0; i < numMessages; i++) { logger.Information(i.ToString()); } Assert.Equal(0, _messagesHandler.ReceivedRequests.Count); TestHelper.Eventually(() => { Assert.Equal(1, _messagesHandler.ReceivedRequests.Count); }); } /// <summary> /// Test batching of multiple messages based on max flush interval setting. 
/// </summary> [Fact] public void TestBatchingByWindow() { // Small time window, ensure all messages get batched by time SetUpLogger(10000, 500, 10); for (var i = 1; i <= 5; ++i) { logger.Information(i.ToString()); } Assert.Equal(0, _messagesHandler.ReceivedRequests.Count); TestHelper.Eventually(() => { Assert.Equal(1, _messagesHandler.ReceivedRequests.Count); }); for (var i = 6; i <= 10; ++i) { logger.Information(i.ToString()); } Assert.Equal(1, _messagesHandler.ReceivedRequests.Count); TestHelper.Eventually(() => { Assert.Equal(2, _messagesHandler.ReceivedRequests.Count); }); } /// <summary> /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// Releases the unmanaged resources used and optionally disposes of the managed resources. /// </summary> /// <param name="disposing">true to release both managed and unmanaged resources; false to releases only unmanaged resources.</param> protected virtual void Dispose(bool disposing) { if (disposing) { sink.Dispose(); _messagesHandler.Dispose(); } } /// <summary> /// Setups the logger with the <see cref="SumoLogicSink"/> based on the given settings. /// </summary> /// <param name="messagesPerRequest">The maximum messages per request.</param> /// <param name="maxFlushInterval">The maximum flush interval, in milliseconds.</param> /// <param name="flushingAccuracy">The flushing accuracy, in milliseconds.</param> /// <param name="retryInterval">The retry interval, in milliseconds.</param> private void SetUpLogger(long messagesPerRequest, long maxFlushInterval, long flushingAccuracy, long retryInterval = 10000) { _messagesHandler = new MockHttpMessageHandler(); sink = new BufferedSumoLogicSink( new ConsoleLog(), _messagesHandler, new SumoLogicConnection { Uri = new Uri("http://www.fakeadress.com"), ClientName = "BufferedSumoLogicSinkTest", MessagesPerRequest = messagesPerRequest, MaxFlushInterval = TimeSpan.FromMilliseconds(maxFlushInterval), FlushingAccuracy = TimeSpan.FromMilliseconds(flushingAccuracy), RetryInterval = TimeSpan.FromMilliseconds(retryInterval), }, new SumoLogicSource { SourceName = "BufferedSumoLogicSinkTest", SourceCategory = "BufferedSumoLogicSinkSourceCategory", SourceHost = "BufferedSumoLogicSinkSourceHost", }, new MessageTemplateTextFormatter("{Level:u}: {Message}", CultureInfo.InvariantCulture)); logger = new LoggerConfiguration() .MinimumLevel.Information() .WriteTo.Sink(sink) .CreateLogger(); } } }
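// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the test project above): the tests poll with
// TestHelper.Eventually because BufferedSumoLogicSink flushes on a background timer, so the
// received-request count only converges after the flush interval elapses. The real helper lives
// in SumoLogic.Logging.Common.Tests and may differ; the snippet below is just the usual
// retry-until-timeout pattern such a helper implements.
using System;
using System.Threading;

internal static class EventuallySketch
{
    // Re-runs the assertion until it stops throwing or the timeout expires, then rethrows.
    internal static void Eventually(Action assertion, int timeoutMs = 5000, int pollMs = 50)
    {
        DateTime deadline = DateTime.UtcNow.AddMilliseconds(timeoutMs);
        while (true)
        {
            try
            {
                assertion();
                return;
            }
            catch (Exception)
            {
                if (DateTime.UtcNow >= deadline) throw;
                Thread.Sleep(pollMs);
            }
        }
    }
}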
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. using System.Management.Automation.Internal; using System.Runtime.Serialization; using System.Security.Permissions; namespace System.Management.Automation { /// <summary> /// Defines the exception thrown for all Extended type system related errors. /// </summary> [Serializable] public class ExtendedTypeSystemException : RuntimeException { #region ctor /// <summary> /// Initializes a new instance of ExtendedTypeSystemException with the message set /// to typeof(ExtendedTypeSystemException).FullName. /// </summary> public ExtendedTypeSystemException() : base(typeof(ExtendedTypeSystemException).FullName) { } /// <summary> /// Initializes a new instance of ExtendedTypeSystemException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public ExtendedTypeSystemException(string message) : base(message) { } /// <summary> /// Initializes a new instance of ExtendedTypeSystemException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public ExtendedTypeSystemException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception, null for none.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal ExtendedTypeSystemException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base( StringUtil.Format(resourceString, arguments), innerException) { SetErrorId(errorId); } #region Serialization /// <summary> /// Initializes a new instance of ExtendedTypeSystemException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected ExtendedTypeSystemException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for Method related errors. /// </summary> [Serializable] public class MethodException : ExtendedTypeSystemException { internal const string MethodArgumentCountExceptionMsg = "MethodArgumentCountException"; internal const string MethodAmbiguousExceptionMsg = "MethodAmbiguousException"; internal const string MethodArgumentConversionExceptionMsg = "MethodArgumentConversionException"; internal const string NonRefArgumentToRefParameterMsg = "NonRefArgumentToRefParameter"; internal const string RefArgumentToNonRefParameterMsg = "RefArgumentToNonRefParameter"; #region ctor /// <summary> /// Initializes a new instance of MethodException with the message set /// to typeof(MethodException).FullName. /// </summary> public MethodException() : base(typeof(MethodException).FullName) { } /// <summary> /// Initializes a new instance of MethodException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public MethodException(string message) : base(message) { } /// <summary> /// Initializes a new instance of MethodException setting the message and innerException. 
/// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public MethodException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal MethodException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of MethodException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected MethodException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for Method invocation exceptions. /// </summary> [Serializable] public class MethodInvocationException : MethodException { internal const string MethodInvocationExceptionMsg = "MethodInvocationException"; internal const string CopyToInvocationExceptionMsg = "CopyToInvocationException"; internal const string WMIMethodInvocationException = "WMIMethodInvocationException"; #region ctor /// <summary> /// Initializes a new instance of MethodInvocationException with the message set /// to typeof(MethodInvocationException).FullName. /// </summary> public MethodInvocationException() : base(typeof(MethodInvocationException).FullName) { } /// <summary> /// Initializes a new instance of MethodInvocationException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public MethodInvocationException(string message) : base(message) { } /// <summary> /// Initializes a new instance of MethodInvocationException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public MethodInvocationException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal MethodInvocationException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of MethodInvocationException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected MethodInvocationException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for errors getting the value of properties. 
/// </summary> [Serializable] public class GetValueException : ExtendedTypeSystemException { internal const string GetWithoutGetterExceptionMsg = "GetWithoutGetterException"; internal const string WriteOnlyProperty = "WriteOnlyProperty"; #region ctor /// <summary> /// Initializes a new instance of GetValueException with the message set /// to typeof(GetValueException).FullName. /// </summary> public GetValueException() : base(typeof(GetValueException).FullName) { } /// <summary> /// Initializes a new instance of GetValueException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public GetValueException(string message) : base(message) { } /// <summary> /// Initializes a new instance of GetValueException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public GetValueException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal GetValueException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of GetValueException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected GetValueException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for errors getting the value of properties. /// </summary> [Serializable] public class PropertyNotFoundException : ExtendedTypeSystemException { #region ctor /// <summary> /// Initializes a new instance of GetValueException with the message set /// to typeof(GetValueException).FullName. /// </summary> public PropertyNotFoundException() : base(typeof(PropertyNotFoundException).FullName) { } /// <summary> /// Initializes a new instance of GetValueException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public PropertyNotFoundException(string message) : base(message) { } /// <summary> /// Initializes a new instance of GetValueException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public PropertyNotFoundException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. 
/// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal PropertyNotFoundException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of GetValueException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected PropertyNotFoundException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for exceptions thrown by property getters. /// </summary> [Serializable] public class GetValueInvocationException : GetValueException { internal const string ExceptionWhenGettingMsg = "ExceptionWhenGetting"; #region ctor /// <summary> /// Initializes a new instance of GetValueInvocationException with the message set /// to typeof(GetValueInvocationException).FullName. /// </summary> public GetValueInvocationException() : base(typeof(GetValueInvocationException).FullName) { } /// <summary> /// Initializes a new instance of GetValueInvocationException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public GetValueInvocationException(string message) : base(message) { } /// <summary> /// Initializes a new instance of GetValueInvocationException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public GetValueInvocationException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal GetValueInvocationException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of GetValueInvocationException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected GetValueInvocationException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for errors setting the value of properties. /// </summary> [Serializable] public class SetValueException : ExtendedTypeSystemException { #region ctor /// <summary> /// Initializes a new instance of SetValueException with the message set /// to typeof(SetValueException).FullName. /// </summary> public SetValueException() : base(typeof(SetValueException).FullName) { } /// <summary> /// Initializes a new instance of SetValueException setting the message. 
/// </summary> /// <param name="message">The exception's message.</param> public SetValueException(string message) : base(message) { } /// <summary> /// Initializes a new instance of SetValueException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public SetValueException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal SetValueException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of SetValueException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected SetValueException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for exceptions thrown by property setters. /// </summary> [Serializable] public class SetValueInvocationException : SetValueException { #region ctor /// <summary> /// Initializes a new instance of SetValueInvocationException with the message set /// to typeof(SetValueInvocationException).FullName. /// </summary> public SetValueInvocationException() : base(typeof(SetValueInvocationException).FullName) { } /// <summary> /// Initializes a new instance of SetValueInvocationException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public SetValueInvocationException(string message) : base(message) { } /// <summary> /// Initializes a new instance of SetValueInvocationException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public SetValueInvocationException(string message, Exception innerException) : base(message, innerException) { } /// <summary> /// Recommended constructor for the class. /// </summary> /// <param name="errorId">String that uniquely identifies each thrown Exception.</param> /// <param name="innerException">The inner exception.</param> /// <param name="resourceString">Resource string.</param> /// <param name="arguments">Arguments to the resource string.</param> internal SetValueInvocationException( string errorId, Exception innerException, string resourceString, params object[] arguments) : base(errorId, innerException, resourceString, arguments) { } #region Serialization /// <summary> /// Initializes a new instance of SetValueInvocationException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected SetValueInvocationException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endregion Serialization #endregion ctor } /// <summary> /// Defines the exception thrown for type conversion errors. 
/// </summary> [Serializable] public class PSInvalidCastException : InvalidCastException, IContainsErrorRecord { #region Serialization /// <summary> /// Populates a <see cref="System.Runtime.Serialization.SerializationInfo"/> with the /// data needed to serialize the PSInvalidCastException object. /// </summary> /// <param name="info">The <see cref="System.Runtime.Serialization.SerializationInfo"/> to populate with data.</param> /// <param name="context">The destination for this serialization.</param> public override void GetObjectData(SerializationInfo info, StreamingContext context) { if (info == null) { throw new PSArgumentNullException(nameof(info)); } base.GetObjectData(info, context); info.AddValue("ErrorId", _errorId); } /// <summary> /// Initializes a new instance of PSInvalidCastException with serialization parameters. /// </summary> /// <param name="info">Serialization information.</param> /// <param name="context">Streaming context.</param> protected PSInvalidCastException(SerializationInfo info, StreamingContext context) : base(info, context) { _errorId = info.GetString("ErrorId"); } #endregion Serialization /// <summary> /// Initializes a new instance of PSInvalidCastException with the message set /// to typeof(PSInvalidCastException).FullName. /// </summary> public PSInvalidCastException() : base(typeof(PSInvalidCastException).FullName) { } /// <summary> /// Initializes a new instance of PSInvalidCastException setting the message. /// </summary> /// <param name="message">The exception's message.</param> public PSInvalidCastException(string message) : base(message) { } /// <summary> /// Initializes a new instance of PSInvalidCastException setting the message and innerException. /// </summary> /// <param name="message">The exception's message.</param> /// <param name="innerException">The exceptions's inner exception.</param> public PSInvalidCastException(string message, Exception innerException) : base(message, innerException) { } internal PSInvalidCastException(string errorId, string message, Exception innerException) : base(message, innerException) { _errorId = errorId; } internal PSInvalidCastException( string errorId, Exception innerException, string resourceString, params object[] arguments) : this( errorId, StringUtil.Format(resourceString, arguments), innerException) { } /// <summary> /// Gets the ErrorRecord associated with this exception. /// </summary> public ErrorRecord ErrorRecord { get { if (_errorRecord == null) { _errorRecord = new ErrorRecord( new ParentContainsErrorRecordException(this), _errorId, ErrorCategory.InvalidArgument, null); } return _errorRecord; } } private ErrorRecord _errorRecord; private readonly string _errorId = "PSInvalidCastException"; } }
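// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the PowerShell sources above): each exception type in this
// file carries a stable error id and, via IContainsErrorRecord, a lazily created ErrorRecord so
// callers can report a FullyQualifiedErrorId rather than a bare exception message. The snippet
// below only demonstrates reading that record from a PSInvalidCastException; it is a usage
// sketch, not engine code.
using System;
using System.Management.Automation;

internal static class ErrorRecordUsageSketch
{
    internal static void Demo()
    {
        try
        {
            throw new PSInvalidCastException("Cannot convert the value to the target type.");
        }
        catch (PSInvalidCastException e)
        {
            // The record is created on first access, using the stored error id
            // ("PSInvalidCastException" unless an internal constructor supplied another one).
            ErrorRecord record = e.ErrorRecord;
            Console.WriteLine(record.FullyQualifiedErrorId);
            Console.WriteLine(record.CategoryInfo.Category); // InvalidArgument, per the code above
        }
    }
}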
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using Microsoft.CSharp.RuntimeBinder; namespace Its.Recipes { /// <summary> /// Supports chaining of expressions when intermediate values may be null, to support a fluent API style using common .NET types. /// </summary> #if !RecipesProject [System.Diagnostics.DebuggerStepThrough] [System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage] #endif internal static class MaybeExtensions { /// <summary> /// Specifies a function that will be evaluated if the source <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> /// <param name="maybe">The source maybe.</param> /// <param name="otherValue">The value to be returned if the <see cref="Recipes.Maybe{T}" /> has no value.</param> /// <returns> /// The value of the Maybe if it has a value; otherwise, the value returned by <paramref name="otherValue" />. /// </returns> public static T Else<T>(this Maybe<T> maybe, Func<T> otherValue) { if (maybe.HasValue) { return maybe.Value; } return otherValue(); } /// <summary> /// Specifies a function that will be evaluated if the source <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> /// <param name="maybe">The source maybe.</param> /// <param name="other">The value to be returned if the <see cref="Recipes.Maybe{T}" /> has no value.</param> /// <returns> /// The value of the Maybe if it has a value; otherwise, the value returned by <paramref name="other" />. /// </returns> public static Maybe<T> Else<T>(this Maybe<T> maybe, Func<Maybe<T>> other) { return maybe.HasValue ? maybe : other(); } /// <summary> /// Specifies a function that will be evaluated if the source <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> /// <param name="maybe">The source maybe.</param> /// <param name="otherValue">The value to be returned if the <see cref="Recipes.Maybe{T}" /> has no value.</param> /// <returns> /// The value of the Maybe if it has a value; otherwise, the value returned by <paramref name="otherValue" />. /// </returns> public static T Else<T>(this Maybe<Maybe<T>> maybe, Func<T> otherValue) { if (maybe.HasValue) { return maybe.Value.Else(otherValue); } return otherValue(); } /// <summary> /// Specifies a function that will be evaluated if the source <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> /// <param name="maybe">The source maybe.</param> /// <param name="otherValue">The value to be returned if the <see cref="Recipes.Maybe{T}" /> has no value.</param> /// <returns> /// The value of the Maybe if it has a value; otherwise, the value returned by <paramref name="otherValue" />. /// </returns> public static T Else<T>(this Maybe<Maybe<Maybe<T>>> maybe, Func<T> otherValue) { if (maybe.HasValue) { return maybe.Value.Else(otherValue); } return otherValue(); } /// <summary> /// Specifies a function that will be evaluated if the source <see cref="Recipes.Maybe{T}" /> has no value. 
/// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> /// <param name="maybe">The source maybe.</param> /// <param name="otherValue">The value to be returned if the <see cref="Recipes.Maybe{T}" /> has no value.</param> /// <returns> /// The value of the Maybe if it has a value; otherwise, the value returned by <paramref name="otherValue" />. /// </returns> public static T Else<T>(this Maybe<Maybe<Maybe<Maybe<T>>>> maybe, Func<T> otherValue) { if (maybe.HasValue) { return maybe.Value.Else(otherValue); } return otherValue(); } /// <summary> /// Returns the default value for <typeparamref name="T" /> if the <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static T ElseDefault<T>(this Maybe<T> maybe) { return maybe.Else(() => default(T)); } /// <summary> /// Returns the default value for <typeparamref name="T" /> if the <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static T ElseDefault<T>(this Maybe<Maybe<T>> maybe) { return maybe.Else(() => default(T)); } /// <summary> /// Returns the default value for <typeparamref name="T" /> if the <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static T ElseDefault<T>(this Maybe<Maybe<Maybe<T>>> maybe) { return maybe.Else(() => default(T)); } /// <summary> /// Returns the default value for <typeparamref name="T" /> if the <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static T ElseDefault<T>(this Maybe<Maybe<Maybe<Maybe<T>>>> maybe) { return maybe.Else(() => default(T)); } /// <summary> /// Returns null if the source has no value. /// </summary> /// <typeparam name="T">The type held by the <see cref="Recipes.Maybe{T}" />.</typeparam> public static T? ElseNull<T>(this Maybe<T> maybe) where T : struct { if (maybe.HasValue) { return maybe.Value; } return null; } /// <summary> /// Performs an action if the <see cref="Recipes.Maybe{T}" /> has no value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static void ElseDo<T>(this Maybe<T> maybe, Action action) { if (action == null) { throw new ArgumentNullException("action"); } if (!maybe.HasValue) { action(); } } /// <summary> /// If the dictionary contains a value for a specified key, executes an action passing the corresponding value. /// </summary> /// <typeparam name="TKey"> The type of the key. </typeparam> /// <typeparam name="TValue"> The type of the value. </typeparam> /// <param name="dictionary"> The dictionary. </param> /// <param name="key"> The key. </param> /// <exception cref="ArgumentNullException">dictionary</exception> public static Maybe<TValue> IfContains<TKey, TValue>( this IDictionary<TKey, TValue> dictionary, TKey key) { TValue value; if (dictionary != null && dictionary.TryGetValue(key, out value)) { return Maybe<TValue>.Yes(value); } return Maybe<TValue>.No(); } /// <summary> /// Allows two maybes to be combined so that the resulting maybe has its value transformed by the second if and only if the first has a value. 
/// </summary> /// <typeparam name="T1">The type of the <see cref="Maybe{T}" />.</typeparam> /// <param name="first">The first maybe.</param> /// <returns></returns> public static T1 And<T1>( this Maybe<T1> first) { if (first.HasValue) { return first.Value; } return default(T1); } /// <summary> /// Attempts to retrieve a value dynamically. /// </summary> /// <typeparam name="T">The type of the value expected to be returned.</typeparam> /// <param name="source">The source object.</param> /// <param name="getValue">A delegate that attempts to return a value via a dynamic invocation on the source object.</param> /// <remarks>This method will not cast the result value to <typeparamref name="T" />. If the returned value is not of this type, then a negative <see cref="Recipes.Maybe{T}" /> will be returned.</remarks> public static Maybe<T> IfHas<T>( this object source, Func<dynamic, T> getValue) { try { var value = getValue(source); return value.IfTypeIs<T>(); } catch (RuntimeBinderException) { return Maybe<T>.No(); } } /// <summary> /// Creates a <see cref="Recipes.Maybe{T}" />, allowing <see cref="Then" /> and <see cref="Else" /> operations to be chained and evaluated conditionally based on whether source is null. /// </summary> /// <typeparam name="T">The type of the instance wrapped by the <see cref="Recipes.Maybe{T}" />.</typeparam> /// <param name="source">The source instance, which may be null.</param> public static Maybe<T> IfNotNull<T>(this T source) where T : class { if (source != null) { return Maybe<T>.Yes(source); } return Maybe<T>.No(); } public static Maybe<T> IfNotNull<T>(this Maybe<T> source) where T : class { if (source.HasValue && source.Value != null) { return source; } return Maybe<T>.No(); } /// <summary> /// Creates a <see cref="Recipes.Maybe{T}" />, allowing <see cref="Then" /> and <see cref="Else" /> operations to be chained and evaluated conditionally based on whether source is null. /// </summary> /// <typeparam name="T">The type of the instance wrapped by the <see cref="Recipes.Maybe{T}" />.</typeparam> /// <param name="source">The source instance, which may be null.</param> public static Maybe<T> IfNotNull<T>(this T? source) where T : struct { if (source.HasValue) { return Maybe<T>.Yes(source.Value); } return Maybe<T>.No(); } /// <summary> /// Determines whether a string is null, empty, or consists entirely of whitespace. /// </summary> /// <param name="value">The string.</param> public static Maybe<string> IfNotNullOrEmptyOrWhitespace(this string value) { if (!string.IsNullOrWhiteSpace(value)) { return Maybe<string>.Yes(value); } return Maybe<string>.No(); } /// <summary> /// Returns a Maybe. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="source">The source.</param> /// <returns></returns> public static Maybe<T> IfTypeIs<T>( this object source) { if (source is T) { return Maybe<T>.Yes((T) source); } return Maybe<T>.No(); } /// <summary> /// Returns either the source or, if it is null, an empty <see cref="IEnumerable{T}" /> sequence. /// </summary> /// <typeparam name="T"> The type of the objects in the sequence. </typeparam> /// <param name="source"> The source sequence. </param> /// <returns> The source sequence or, if it is null, an empty sequence. </returns> public static IEnumerable<T> OrEmpty<T>(this IEnumerable<T> source) { return source ?? 
Enumerable.Empty<T>(); } /// <summary> /// Attempts to get the value of a Try* method with an out parameter, for example <see cref="Dictionary{TKey,TValue}.TryGetValue" /> or <see cref="ConcurrentQueue{T}.TryDequeue" />. /// </summary> /// <typeparam name="T">The type of the source object.</typeparam> /// <typeparam name="TOut">The type the out parameter.</typeparam> /// <param name="source">The source object exposing the Try* method.</param> /// <param name="tryTryGetValue">A delegate to call the Try* method.</param> /// <returns></returns> public static Maybe<TOut> Out<T, TOut>(this T source, TryGetOutParameter<T, TOut> tryTryGetValue) { TOut result; if (tryTryGetValue(source, out result)) { return Maybe<TOut>.Yes(result); } return Maybe<TOut>.No(); } /// <summary> /// Specifies the result of a <see cref="Recipes.Maybe{T}" /> if the <see cref="Recipes.Maybe{T}" /> has a value. /// </summary> /// <typeparam name="TIn">The type of source object.</typeparam> /// <typeparam name="TOut">The type of result.</typeparam> /// <param name="maybe">The maybe.</param> /// <param name="getValue">A delegate to get the value from the source object.</param> /// <returns></returns> public static Maybe<TOut> Then<TIn, TOut>( this Maybe<TIn> maybe, Func<TIn, TOut> getValue) { TOut value; return maybe.HasValue && (value = getValue(maybe.Value)) != null ? Maybe<TOut>.Yes(value) : Maybe<TOut>.No(); } /// <summary> /// Performs an action if the <see cref="Recipes.Maybe{T}" /> has a value. /// </summary> /// <typeparam name="T"> /// The type held by the <see cref="Recipes.Maybe{T}" />. /// </typeparam> public static Maybe<Unit> ThenDo<T>(this Maybe<T> maybe, Action<T> action) { if (action == null) { throw new ArgumentNullException("action"); } if (maybe.HasValue) { action(maybe.Value); return Maybe<Unit>.Yes(Unit.Default); } return Maybe<Unit>.No(); } /// <summary> /// Tries to call the specified method and catches exceptions if they occur. /// </summary> /// <typeparam name="TIn">The type of source object.</typeparam> /// <typeparam name="TOut">The type of result.</typeparam> /// <param name="source">The source object.</param> /// <param name="getValue">A delegate to get the value from the source object.</param> /// <param name="ignore">A predicate to determine whether the exception should be ignored. If this is not specified, all exceptions are ignored. If it is specified and an exception is thrown that matches the predicate, the exception is ignored and a <see cref="Recipes.Maybe{TOut}" /> having no value is returned. If it is specified and an exception is thrown that does not match the predicate, the exception is allowed to propagate.</param> /// <returns></returns> public static Maybe<TOut> Try<TIn, TOut>( this TIn source, Func<TIn, TOut> getValue, Func<Exception, bool> ignore) { if (getValue == null) { throw new ArgumentNullException("getValue"); } if (ignore == null) { throw new ArgumentNullException("ignore"); } try { return Maybe<TOut>.Yes(getValue(source)); } catch (Exception ex) { if (!ignore(ex)) { throw; } } return Maybe<TOut>.No(); } } /// <summary> /// Represents an object that may or may not contain a value, allowing optional chained results to be specified for both possibilities. 
/// </summary> /// <typeparam name="T">The type of the possible value.</typeparam> #if !RecipesProject [System.Diagnostics.DebuggerStepThrough] [System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage] #endif internal struct Maybe<T> { private static readonly Maybe<T> no = new Maybe<T> { HasValue = false }; private T value; /// <summary> /// Returns a <see cref="Recipes.Maybe{T}" /> that contains a value. /// </summary> /// <param name="value">The value.</param> public static Maybe<T> Yes(T value) { return new Maybe<T> { HasValue = true, value = value }; } /// <summary> /// Returns a <see cref="Recipes.Maybe{T}" /> that does not contain a value. /// </summary> public static Maybe<T> No() { return no; } /// <summary> /// Gets the value contained by the <see cref="Recipes.Maybe{T}" />. /// </summary> /// <value> /// The value. /// </value> public T Value { get { if (!HasValue) { throw new InvalidOperationException("The Maybe does not contain a value."); } return value; } } /// <summary> /// Gets a value indicating whether this instance has a value. /// </summary> /// <value> /// <c>true</c> if this instance has value; otherwise, <c>false</c>. /// </value> public bool HasValue { get; private set; } } /// <summary> /// A delegate used to return an out parameter from a Try* method that indicates success via a boolean return value. /// </summary> /// <typeparam name="T">The type of the source object.</typeparam> /// <typeparam name="TOut">The type of the out parameter.</typeparam> /// <param name="source">The source.</param> /// <param name="outValue">The out parameter's value.</param> /// <returns>true if the out parameter was set; otherwise, false.</returns> internal delegate bool TryGetOutParameter<in T, TOut>(T source, out TOut outValue); /// <summary> /// A type representing a void return type. /// </summary> #if !RecipesProject [System.Diagnostics.DebuggerStepThrough] [System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage] #endif internal struct Unit { /// <summary> /// The default instance. /// </summary> public static readonly Unit Default = new Unit(); } }
using System; using System.Linq; using DemoGame.Server.DbObjs; using DemoGame.Server.Queries; using NetGore.Db; using NetGore.Features.Guilds; using NetGore.Features.Quests; using NetGore.Features.Shops; using NetGore.Features.WorldStats; using NetGore.World; namespace DemoGame.Server { /// <summary> /// Provides support for tracking the world statistics and events. /// </summary> public class WorldStatsTracker : WorldStatsTracker<User, NPC, ItemEntity> { /// <summary> /// How frequently to log the network stats. /// </summary> const int _logNetStatsRate = 1000 * 60 * 1; // 1 minute /// <summary> /// The <see cref="WorldStatsTracker"/> instance. /// </summary> static readonly IWorldStatsTracker<User, NPC, ItemEntity> _instance; readonly InsertWorldStatsCountConsumeItemQuery _countConsumeItemQuery; readonly InsertWorldStatsCountItemBuyQuery _countItemBuyQuery; readonly InsertWorldStatsCountItemCreateQuery _countItemCreateQuery; readonly InsertWorldStatsCountItemSellQuery _countItemSellQuery; readonly InsertWorldStatsCountNPCKillUserQuery _countNPCKillUserQuery; readonly InsertWorldStatsCountShopBuyQuery _countShopBuyQuery; readonly InsertWorldStatsCountShopSellQuery _countShopSellQuery; readonly InsertWorldStatsCountUserConsumeItemQuery _countUserConsumeItemQuery; readonly InsertWorldStatsCountUserKillNPCQuery _countUserKillNPCQuery; readonly InsertWorldStatsGuildUserChangeQuery _guildUserChangeQuery; readonly InsertWorldStatsNetworkQuery _networkQuery; readonly InsertWorldStatsNPCKillUserQuery _npcKillUserQuery; readonly InsertWorldStatsQuestAcceptQuery _questAcceptQuery; readonly InsertWorldStatsQuestCancelQuery _questCancelQuery; readonly InsertWorldStatsQuestCompleteQuery _questCompleteQuery; readonly InsertWorldStatsUserConsumeItemQuery _userConsumeItemQuery; readonly InsertWorldStatsUserKillNpcQuery _userKillNPCQuery; readonly InsertWorldStatsUserLevelQuery _userLevelQuery; readonly InsertWorldStatsUserShoppingQuery _userShoppingQuery; /// <summary> /// Initializes the <see cref="WorldStatsTracker"/> class. /// </summary> static WorldStatsTracker() { _instance = new WorldStatsTracker(DbControllerBase.GetInstance()); } /// <summary> /// Initializes a new instance of the <see cref="WorldStatsTracker"/> class. 
/// </summary> /// <param name="dbController">The db controller.</param> /// <exception cref="ArgumentNullException"><paramref name="dbController"/> is null.</exception> WorldStatsTracker(IDbController dbController) : base(_logNetStatsRate) { // Locally cache an instance of all the queries we will be using _npcKillUserQuery = dbController.GetQuery<InsertWorldStatsNPCKillUserQuery>(); _userConsumeItemQuery = dbController.GetQuery<InsertWorldStatsUserConsumeItemQuery>(); _userKillNPCQuery = dbController.GetQuery<InsertWorldStatsUserKillNpcQuery>(); _userLevelQuery = dbController.GetQuery<InsertWorldStatsUserLevelQuery>(); _userShoppingQuery = dbController.GetQuery<InsertWorldStatsUserShoppingQuery>(); _networkQuery = dbController.GetQuery<InsertWorldStatsNetworkQuery>(); _guildUserChangeQuery = dbController.GetQuery<InsertWorldStatsGuildUserChangeQuery>(); _questCancelQuery = dbController.GetQuery<InsertWorldStatsQuestCancelQuery>(); _questCompleteQuery = dbController.GetQuery<InsertWorldStatsQuestCompleteQuery>(); _questAcceptQuery = dbController.GetQuery<InsertWorldStatsQuestAcceptQuery>(); _countConsumeItemQuery = dbController.GetQuery<InsertWorldStatsCountConsumeItemQuery>(); _countItemBuyQuery = dbController.GetQuery<InsertWorldStatsCountItemBuyQuery>(); _countItemCreateQuery = dbController.GetQuery<InsertWorldStatsCountItemCreateQuery>(); _countItemSellQuery = dbController.GetQuery<InsertWorldStatsCountItemSellQuery>(); _countNPCKillUserQuery = dbController.GetQuery<InsertWorldStatsCountNPCKillUserQuery>(); _countShopBuyQuery = dbController.GetQuery<InsertWorldStatsCountShopBuyQuery>(); _countShopSellQuery = dbController.GetQuery<InsertWorldStatsCountShopSellQuery>(); _countUserConsumeItemQuery = dbController.GetQuery<InsertWorldStatsCountUserConsumeItemQuery>(); _countUserKillNPCQuery = dbController.GetQuery<InsertWorldStatsCountUserKillNPCQuery>(); } /// <summary> /// Gets the <see cref="IWorldStatsTracker{T,U,V}"/> instance. /// </summary> public static IWorldStatsTracker<User, NPC, ItemEntity> Instance { get { return _instance; } } /// <summary> /// When overridden in the derived class, adds to the item purchase counter. /// </summary> /// <param name="itemTID">The template ID of the item that was purchased from a shop.</param> /// <param name="amount">The number of items purchased.</param> protected override void InternalAddCountBuyItem(int itemTID, int amount) { _countItemBuyQuery.Execute(itemTID, amount); } /// <summary> /// When overridden in the derived class, adds to the item consumption counter. /// </summary> /// <param name="itemTID">The template ID of the item that was consumed.</param> protected override void InternalAddCountConsumeItem(int itemTID) { _countConsumeItemQuery.Execute(itemTID); } /// <summary> /// When overridden in the derived class, adds to the item creation counter. /// </summary> /// <param name="itemTID">The template ID of the item that was sold to a shop.</param> /// <param name="amount">The number of items created.</param> protected override void InternalAddCountCreateItem(int itemTID, int amount) { _countItemCreateQuery.Execute(itemTID, amount); } /// <summary> /// When overridden in the derived class, adds to the NPC kill user counter. 
/// </summary> /// <param name="npcTID">The template ID of the NPC that killed the user.</param> /// <param name="userID">The template ID of the user that was killed.</param> protected override void InternalAddCountNPCKillUser(int npcTID, int userID) { _countNPCKillUserQuery.Execute(userID, npcTID); } /// <summary> /// When overridden in the derived class, adds to the item sell counter. /// </summary> /// <param name="itemTID">The template ID of the item that was sold to a shop.</param> /// <param name="amount">The number of items sold.</param> protected override void InternalAddCountSellItem(int itemTID, int amount) { _countItemSellQuery.Execute(itemTID, amount); } /// <summary> /// When overridden in the derived class, adds to the item being purchased from a shop counter. /// </summary> /// <param name="shopID">The ID of the shop that sold the item.</param> /// <param name="amount">The number of items the shop sold.</param> protected override void InternalAddCountShopBuy(int shopID, int amount) { _countShopBuyQuery.Execute(shopID, amount); } /// <summary> /// When overridden in the derived class, adds to the item being sold to a shop counter. /// </summary> /// <param name="shopID">The ID of the shop the item was sold to.</param> /// <param name="amount">The number of items sold to the shop.</param> protected override void InternalAddCountShopSell(int shopID, int amount) { _countShopSellQuery.Execute(shopID, amount); } /// <summary> /// When overridden in the derived class, adds to the item consumption count. /// </summary> /// <param name="userID">The ID of the user who consumed the item.</param> /// <param name="itemTID">The item template ID of the item consumed.</param> protected override void InternalAddCountUserConsumeItem(int userID, int itemTID) { _countUserConsumeItemQuery.Execute(userID, itemTID); } /// <summary> /// When overridden in the derived class, adds to the user kill a NPC counter. /// </summary> /// <param name="userID">The template ID of the user that killed the NPC.</param> /// <param name="npcTID">The template ID of the NPC that was killed.</param> protected override void InternalAddCountUserKillNPC(int userID, int npcTID) { _countUserKillNPCQuery.Execute(userID, npcTID); } /// <summary> /// When overridden in the derived class, adds when a NPC kills a user. /// </summary> /// <param name="npc">The NPC that killed the <paramref name="user"/>.</param> /// <param name="user">The User that was killed by the <paramref name="npc"/>.</param> protected override void InternalAddNPCKillUser(NPC npc, User user) { var mapID = (npc.Map == null ? (MapID?)null : npc.Map.ID); var args = new WorldStatsNpcKillUserTable(when: Now(), mapID: mapID, nPCTemplateID: npc.CharacterTemplateID, npcX: (ushort)npc.Position.X, npcY: (ushort)npc.Position.Y, userID: user.ID, userLevel: user.Level, userX: (ushort)user.Position.X, userY: (ushort)user.Position.Y, iD: 0); _npcKillUserQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user accepts a quest. /// </summary> /// <param name="user">The user that accepted a quest.</param> /// <param name="questID">The ID of the quest that the user accepted.</param> protected override void InternalAddQuestAccept(User user, QuestID questID) { var mapID = (user.Map == null ? 
(MapID?)null : user.Map.ID); var args = new WorldStatsQuestAcceptTable(when: Now(), mapID: mapID, questID: questID, userID: user.ID, x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _questAcceptQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user cancels a quest. /// </summary> /// <param name="user">The user that canceled a quest.</param> /// <param name="questID">The ID of the quest that the user canceled.</param> protected override void InternalAddQuestCancel(User user, QuestID questID) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsQuestCancelTable(when: Now(), mapID: mapID, questID: questID, userID: user.ID, x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _questCancelQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user completes a quest. /// </summary> /// <param name="user">The user that completed a quest.</param> /// <param name="questID">The ID of the quest that the user completed.</param> protected override void InternalAddQuestComplete(User user, QuestID questID) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsQuestCompleteTable(when: Now(), mapID: mapID, questID: questID, userID: user.ID, x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _questCompleteQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user consumes a consumable item. /// </summary> /// <param name="user">The user that consumed the item.</param> /// <param name="item">The item that was consumed.</param> protected override void InternalAddUserConsumeItem(User user, ItemEntity item) { if (item.Type != ItemType.UseOnce) return; var itemTemplate = item.ItemTemplateID; if (!itemTemplate.HasValue) return; var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsUserConsumeItemTable(when: Now(), itemTemplateID: itemTemplate.Value, mapID: mapID, userID: user.ID, x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _userConsumeItemQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user changes their guild. /// </summary> /// <param name="user">The user that changed their guild.</param> /// <param name="guildID">The ID of the guild the user changed to. If this event is for when the user left a guild, /// this value will be null.</param> protected override void InternalAddUserGuildChange(User user, GuildID? guildID) { var args = new WorldStatsGuildUserChangeTable(when: Now(), guildID: guildID, userID: user.ID, iD: 0); _guildUserChangeQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user kills a NPC. /// </summary> /// <param name="user">The user that killed the <paramref name="npc"/>.</param> /// <param name="npc">The NPC that was killed by the <paramref name="user"/>.</param> protected override void InternalAddUserKillNPC(User user, NPC npc) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsUserKillNpcTable(when: Now(), mapID: mapID, nPCTemplateID: npc.CharacterTemplateID, npcX: (ushort)npc.Position.X, npcY: (ushort)npc.Position.Y, userID: user.ID, userLevel: user.Level, userX: (ushort)user.Position.X, userY: (ushort)user.Position.Y, iD: 0); _userKillNPCQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user gains a level. 
/// </summary> /// <param name="user">The user that leveled up.</param> protected override void InternalAddUserLevel(User user) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsUserLevelTable(when: Now(), characterID: user.ID, level: user.Level, mapID: mapID, x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _userLevelQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user buys an item from a shop. /// </summary> /// <param name="user">The user that bought from a shop.</param> /// <param name="itemTemplateID">The template ID of the item that was purchased.</param> /// <param name="amount">How many units of the item was purchased.</param> /// <param name="cost">How much the user bought the items for. When the amount is greater than one, this includes /// the cost of all the items together, not a single item. That is, the cost of the transaction as a whole.</param> /// <param name="shopID">The ID of the shop the transaction took place at.</param> protected override void InternalAddUserShopBuyItem(User user, int? itemTemplateID, byte amount, int cost, ShopID shopID) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsUserShoppingTable(saleType: 0, amount: amount, characterID: user.ID, cost: cost, itemTemplateID: (ItemTemplateID?)itemTemplateID, mapID: mapID, shopID: shopID, when: Now(), x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _userShoppingQuery.Execute(args); } /// <summary> /// When overridden in the derived class, adds when a user sells an item to a shop. /// </summary> /// <param name="user">The user that sold to a shop.</param> /// <param name="itemTemplateID">The template ID of the item that was sold.</param> /// <param name="amount">How many units of the item was sold.</param> /// <param name="cost">How much the user sold the items for. When the amount is greater than one, this includes /// the cost of all the items together, not a single item. That is, the cost of the transaction as a whole.</param> /// <param name="shopID">The ID of the shop the transaction took place at.</param> protected override void InternalAddUserShopSellItem(User user, int? itemTemplateID, byte amount, int cost, ShopID shopID) { var mapID = (user.Map == null ? (MapID?)null : user.Map.ID); var args = new WorldStatsUserShoppingTable(saleType: 1, amount: amount, characterID: user.ID, cost: cost, itemTemplateID: (ItemTemplateID?)itemTemplateID, mapID: mapID, shopID: shopID, when: Now(), x: (ushort)user.Position.X, y: (ushort)user.Position.Y, iD: 0); _userShoppingQuery.Execute(args); } /// <summary> /// When overridden in the derived class, logs the network statistics to the database. 
/// </summary> /// <param name="connections">The current number of connections.</param> /// <param name="recvBytes">The average bytes received per second.</param> /// <param name="recvPackets">The average packets received per second.</param> /// <param name="recvMsgs">The average messages received per second.</param> /// <param name="sentBytes">The average bytes sent per second.</param> /// <param name="sentPackets">The average packets sent per second.</param> /// <param name="sentMsgs">The average messages sent per second.</param> protected override void LogNetStats(ushort connections, uint recvBytes, uint recvPackets, uint recvMsgs, uint sentBytes, uint sentPackets, uint sentMsgs) { var args = new WorldStatsNetworkTable(iD: 0, when: Now(), connections: connections, recvBytes: recvBytes, recvPackets: recvPackets, recvMessages: recvMsgs, sentBytes: sentBytes, sentPackets: sentPackets, sentMessages: sentMsgs); _networkQuery.Execute(args); } } }
using Lucene.Net.Documents; using Lucene.Net.Support; using System; using System.Collections.Generic; using System.Reflection; using System.Runtime.CompilerServices; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using Directory = Lucene.Net.Store.Directory; using Document = Documents.Document; using DocumentStoredFieldVisitor = DocumentStoredFieldVisitor; using IOUtils = Lucene.Net.Util.IOUtils; /// <summary> /// <see cref="IndexReader"/> is an abstract class, providing an interface for accessing an /// index. Search of an index is done entirely through this abstract interface, /// so that any subclass which implements it is searchable. /// /// <para/>There are two different types of <see cref="IndexReader"/>s: /// <list type="bullet"> /// <item><description><see cref="AtomicReader"/>: These indexes do not consist of several sub-readers, /// they are atomic. They support retrieval of stored fields, doc values, terms, /// and postings.</description></item> /// <item><description><see cref="CompositeReader"/>: Instances (like <see cref="DirectoryReader"/>) /// of this reader can only /// be used to get stored fields from the underlying <see cref="AtomicReader"/>s, /// but it is not possible to directly retrieve postings. To do that, get /// the sub-readers via <see cref="CompositeReader.GetSequentialSubReaders()"/>. /// Alternatively, you can mimic an <see cref="AtomicReader"/> (with a serious slowdown), /// by wrapping composite readers with <see cref="SlowCompositeReaderWrapper"/>.</description></item> /// </list> /// /// <para/><see cref="IndexReader"/> instances for indexes on disk are usually constructed /// with a call to one of the static <c>DirectoryReader.Open()</c> methods, /// e.g. <seealso cref="DirectoryReader.Open(Lucene.Net.Store.Directory)"/>. <see cref="DirectoryReader"/> inherits /// the <see cref="CompositeReader"/> abstract class, it is not possible to directly get postings. /// /// <para/> For efficiency, in this API documents are often referred to via /// <i>document numbers</i>, non-negative integers which each name a unique /// document in the index. These document numbers are ephemeral -- they may change /// as documents are added to and deleted from an index. Clients should thus not /// rely on a given document having the same number between sessions. /// /// <para/> /// <b>NOTE</b>: <see cref="IndexReader"/> instances are completely thread /// safe, meaning multiple threads can call any of its methods, /// concurrently. If your application requires external /// synchronization, you should <b>not</b> synchronize on the /// <see cref="IndexReader"/> instance; use your own /// (non-Lucene) objects instead. 
/// </summary> public abstract class IndexReader : IDisposable { private bool closed = false; private bool closedByChild = false; private readonly AtomicInt32 refCount = new AtomicInt32(1); internal IndexReader() { if (!(this is CompositeReader || this is AtomicReader)) { throw new Exception("IndexReader should never be directly extended, subclass AtomicReader or CompositeReader instead."); } } /// <summary> /// A custom listener that's invoked when the <see cref="IndexReader"/> /// is closed. /// <para/> /// @lucene.experimental /// </summary> public interface IReaderClosedListener { /// <summary> /// Invoked when the <see cref="IndexReader"/> is closed. </summary> void OnClose(IndexReader reader); } private readonly ISet<IReaderClosedListener> readerClosedListeners = new ConcurrentHashSet<IReaderClosedListener>(); private readonly ISet<IdentityWeakReference<IndexReader>> parentReaders = new ConcurrentHashSet<IdentityWeakReference<IndexReader>>(); /// <summary> /// Expert: adds a <see cref="IReaderClosedListener"/>. The /// provided listener will be invoked when this reader is closed. /// <para/> /// @lucene.experimental /// </summary> public void AddReaderClosedListener(IReaderClosedListener listener) { EnsureOpen(); readerClosedListeners.Add(listener); } /// <summary> /// Expert: remove a previously added <see cref="IReaderClosedListener"/>. /// <para/> /// @lucene.experimental /// </summary> public void RemoveReaderClosedListener(IReaderClosedListener listener) { EnsureOpen(); readerClosedListeners.Remove(listener); } /// <summary> /// Expert: this method is called by <see cref="IndexReader"/>s which wrap other readers /// (e.g. <see cref="CompositeReader"/> or <see cref="FilterAtomicReader"/>) to register the parent /// at the child (this reader) on construction of the parent. When this reader is disposed, /// it will mark all registered parents as disposed, too. The references to parent readers /// are weak only, so they can be GCed once they are no longer in use. /// @lucene.experimental /// </summary> public void RegisterParentReader(IndexReader reader) { EnsureOpen(); parentReaders.Add(new IdentityWeakReference<IndexReader>(reader)); } private void NotifyReaderClosedListeners(Exception th) { lock (readerClosedListeners) { foreach (IReaderClosedListener listener in readerClosedListeners) { try { listener.OnClose(this); } catch (Exception t) { if (th == null) { th = t; } else { th.AddSuppressed(t); } } } IOUtils.ReThrowUnchecked(th); } } private void ReportCloseToParentReaders() { lock (parentReaders) { foreach (IdentityWeakReference<IndexReader> parent in parentReaders) { //Using weak references IndexReader target = parent.Target; if (target != null) { target.closedByChild = true; // cross memory barrier by a fake write: target.refCount.AddAndGet(0); // recurse: target.ReportCloseToParentReaders(); } } } } /// <summary> /// Expert: returns the current refCount for this reader </summary> public int RefCount { get { // NOTE: don't ensureOpen, so that callers can see // refCount is 0 (reader is closed) return refCount.Get(); } } /// <summary> /// Expert: increments the <see cref="RefCount"/> of this <see cref="IndexReader"/> /// instance. <see cref="RefCount"/>s are used to determine when a /// reader can be disposed safely, i.e. as soon as there are /// no more references. Be sure to always call a /// corresponding <see cref="DecRef"/>, in a finally clause; /// otherwise the reader may never be disposed. 
Note that /// <see cref="Dispose(bool)"/> simply calls <see cref="DecRef()"/>, which means that /// the <see cref="IndexReader"/> will not really be disposed until /// <see cref="DecRef()"/> has been called for all outstanding /// references. /// </summary> /// <seealso cref="DecRef"/> /// <seealso cref="TryIncRef"/> public void IncRef() { if (!TryIncRef()) { EnsureOpen(); } } /// <summary> /// Expert: increments the <see cref="RefCount"/> of this <see cref="IndexReader"/> /// instance only if the <see cref="IndexReader"/> has not been disposed yet /// and returns <c>true</c> iff the <see cref="RefCount"/> was /// successfully incremented, otherwise <c>false</c>. /// If this method returns <c>false</c> the reader is either /// already disposed or is currently being disposed. Either way this /// reader instance shouldn't be used by an application unless /// <c>true</c> is returned. /// <para/> /// <see cref="RefCount"/>s are used to determine when a /// reader can be disposed safely, i.e. as soon as there are /// no more references. Be sure to always call a /// corresponding <see cref="DecRef"/>, in a finally clause; /// otherwise the reader may never be disposed. Note that /// <see cref="Dispose(bool)"/> simply calls <see cref="DecRef()"/>, which means that /// the <see cref="IndexReader"/> will not really be disposed until /// <see cref="DecRef()"/> has been called for all outstanding /// references. /// </summary> /// <seealso cref="DecRef"/> /// <seealso cref="IncRef"/> public bool TryIncRef() { int count; while ((count = refCount.Get()) > 0) { if (refCount.CompareAndSet(count, count + 1)) { return true; } } return false; } /// <summary> /// Expert: decreases the <see cref="RefCount"/> of this <see cref="IndexReader"/> /// instance. If the <see cref="RefCount"/> drops to 0, then this /// reader is disposed. If an exception is hit, the <see cref="RefCount"/> /// is unchanged. /// </summary> /// <exception cref="System.IO.IOException"> in case an <see cref="System.IO.IOException"/> occurs in <see cref="DoClose()"/> /// </exception> /// <seealso cref="IncRef"/> public void DecRef() { // only check refcount here (don't call ensureOpen()), so we can // still close the reader if it was made invalid by a child: if (refCount.Get() <= 0) { throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "this IndexReader is closed"); } int rc = refCount.DecrementAndGet(); if (rc == 0) { closed = true; Exception throwable = null; try { DoClose(); } catch (Exception th) { throwable = th; } finally { try { ReportCloseToParentReaders(); } finally { NotifyReaderClosedListeners(throwable); } } } else if (rc < 0) { throw new InvalidOperationException("too many decRef calls: refCount is " + rc + " after decrement"); } } /// <summary> /// Throws <see cref="ObjectDisposedException"/> if this <see cref="IndexReader"/> or any /// of its child readers is disposed, otherwise returns. /// </summary> protected internal void EnsureOpen() { if (refCount.Get() <= 0) { throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "this IndexReader is closed"); } // the happens before rule on reading the refCount, which must be after the fake write, // ensures that we see the value: if (closedByChild) { throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "this IndexReader cannot be used anymore as one of its child readers was closed"); } } /// <summary> /// Determines whether two object instances are equal. 
/// <para/>For caching purposes, <see cref="IndexReader"/> subclasses are not allowed /// to implement Equals/GetHashCode, so methods are declared sealed. /// To lookup instances from caches use <see cref="CoreCacheKey"/> and /// <see cref="CombinedCoreAndDeletesKey"/>. /// </summary> public override sealed bool Equals(object obj) { return (this == obj); } /// <summary> /// Serves as the default hash function. /// <para/>For caching purposes, <see cref="IndexReader"/> subclasses are not allowed /// to implement Equals/GetHashCode, so methods are declared sealed. /// To lookup instances from caches use <see cref="CoreCacheKey"/> and /// <see cref="CombinedCoreAndDeletesKey"/>. /// </summary> public override sealed int GetHashCode() { return RuntimeHelpers.GetHashCode(this); } /// <summary> /// Returns a <see cref="IndexReader"/> reading the index in the given /// <see cref="Directory"/> </summary> /// <param name="directory"> the index directory </param> /// <exception cref="System.IO.IOException"> if there is a low-level IO error </exception> [Obsolete("Use DirectoryReader.Open(Directory)")] public static DirectoryReader Open(Directory directory) { return DirectoryReader.Open(directory); } /// <summary> /// Expert: Returns a <see cref="IndexReader"/> reading the index in the given /// <see cref="Directory"/> with the given <paramref name="termInfosIndexDivisor"/>. </summary> /// <param name="directory"> the index directory </param> /// <param name="termInfosIndexDivisor"> Subsamples which indexed /// terms are loaded into RAM. this has the same effect as /// <see cref="LiveIndexWriterConfig.TermIndexInterval"/> /// (which can be set on <see cref="IndexWriterConfig"/>) except that setting /// must be done at indexing time while this setting can be /// set per reader. When set to <c>N</c>, then one in every /// <c>N*termIndexInterval</c> terms in the index is loaded into /// memory. By setting this to a value <c>&gt; 1</c> you can reduce /// memory usage, at the expense of higher latency when /// loading a TermInfo. The default value is 1. Set this /// to -1 to skip loading the terms index entirely. </param> /// <exception cref="System.IO.IOException"> if there is a low-level IO error </exception> [Obsolete("Use DirectoryReader.Open(Directory, int)")] public static DirectoryReader Open(Directory directory, int termInfosIndexDivisor) { return DirectoryReader.Open(directory, termInfosIndexDivisor); } /// <summary> /// Open a near real time <see cref="IndexReader"/> from the <see cref="IndexWriter"/>. /// </summary> /// <param name="writer"> The <see cref="IndexWriter"/> to open from </param> /// <param name="applyAllDeletes"> If true, all buffered deletes will /// be applied (made visible) in the returned reader. If /// false, the deletes are not applied but remain buffered /// (in <see cref="IndexWriter"/>) so that they will be applied in the /// future. Applying deletes can be costly, so if your app /// can tolerate deleted documents being returned you might /// gain some performance by passing false. 
</param> /// <returns> The new <see cref="IndexReader"/> </returns> /// <exception cref="System.IO.IOException"> if there is a low-level IO error /// </exception> /// <seealso cref="DirectoryReader.OpenIfChanged(DirectoryReader, IndexWriter, bool)"/> /// /// @lucene.experimental [Obsolete("Use DirectoryReader.Open(IndexWriter, bool)")] public static DirectoryReader Open(IndexWriter writer, bool applyAllDeletes) { return DirectoryReader.Open(writer, applyAllDeletes); } /// <summary> /// Expert: returns an <see cref="IndexReader"/> reading the index in the given /// <see cref="IndexCommit"/>. /// </summary> /// <param name="commit"> the commit point to open </param> /// <exception cref="System.IO.IOException"> if there is a low-level IO error </exception> [Obsolete("Use DirectoryReader.Open(IndexCommit)")] public static DirectoryReader Open(IndexCommit commit) { return DirectoryReader.Open(commit); } /// <summary> /// Expert: returns an <see cref="IndexReader"/> reading the index in the given /// <see cref="IndexCommit"/> and <paramref name="termInfosIndexDivisor"/>. </summary> /// <param name="commit"> the commit point to open </param> /// <param name="termInfosIndexDivisor"> Subsamples which indexed /// terms are loaded into RAM. this has the same effect as /// <see cref="LiveIndexWriterConfig.TermIndexInterval"/> /// (which can be set in <see cref="IndexWriterConfig"/>) except that setting /// must be done at indexing time while this setting can be /// set per reader. When set to <c>N</c>, then one in every /// <c>N*termIndexInterval</c> terms in the index is loaded into /// memory. By setting this to a value <c>&gt; 1</c> you can reduce /// memory usage, at the expense of higher latency when /// loading a TermInfo. The default value is 1. Set this /// to -1 to skip loading the terms index entirely. </param> /// <exception cref="System.IO.IOException"> if there is a low-level IO error </exception> [Obsolete("Use DirectoryReader.Open(IndexCommit, int)/>")] public static DirectoryReader Open(IndexCommit commit, int termInfosIndexDivisor) { return DirectoryReader.Open(commit, termInfosIndexDivisor); } /// <summary> /// Retrieve term vectors for this document, or <c>null</c> if /// term vectors were not indexed. The returned <see cref="Fields"/> /// instance acts like a single-document inverted index /// (the docID will be 0). /// </summary> public abstract Fields GetTermVectors(int docID); /// <summary> /// Retrieve term vector for this document and field, or /// <c>null</c> if term vectors were not indexed. The returned /// <see cref="Fields"/> instance acts like a single-document inverted /// index (the docID will be 0). /// </summary> public Terms GetTermVector(int docID, string field) { Fields vectors = GetTermVectors(docID); if (vectors == null) { return null; } return vectors.GetTerms(field); } /// <summary> /// Returns the number of documents in this index. </summary> public abstract int NumDocs { get; } /// <summary> /// Returns one greater than the largest possible document number. /// this may be used to, e.g., determine how big to allocate an array which /// will have an element for every document number in an index. /// </summary> public abstract int MaxDoc { get; } /// <summary> /// Returns the number of deleted documents. </summary> public int NumDeletedDocs { get { return MaxDoc - NumDocs; } } /// <summary> /// Expert: visits the fields of a stored document, for /// custom processing/loading of each field. 
If you /// simply want to load all fields, use /// <see cref="Document(int)"/>. If you want to load a subset, use /// <see cref="DocumentStoredFieldVisitor"/>. /// </summary> public abstract void Document(int docID, StoredFieldVisitor visitor); /// <summary> /// Returns the stored fields of the <c>n</c><sup>th</sup> /// <see cref="Documents.Document"/> in this index. This is just /// sugar for using <see cref="DocumentStoredFieldVisitor"/>. /// <para/> /// <b>NOTE:</b> for performance reasons, this method does not check if the /// requested document is deleted, and therefore asking for a deleted document /// may yield unspecified results. Usually this is not required, however you /// can test if the doc is deleted by checking the /// <see cref="Util.IBits"/> returned from <see cref="MultiFields.GetLiveDocs"/>. /// <para/> /// <b>NOTE:</b> only the content of a field is returned, /// if that field was stored during indexing. Metadata /// like boost, omitNorm, IndexOptions, tokenized, etc., /// are not preserved. /// </summary> /// <exception cref="System.IO.IOException"> if there is a low-level IO error </exception> // TODO: we need a separate StoredField, so that the // Document returned here contains that class not // IndexableField public Document Document(int docID) { var visitor = new DocumentStoredFieldVisitor(); Document(docID, visitor); return visitor.Document; } /// <summary> /// Like <see cref="Document(int)"/> but only loads the specified /// fields. Note that this is simply sugar for /// <see cref="DocumentStoredFieldVisitor.DocumentStoredFieldVisitor(ISet{string})"/>. /// </summary> public Document Document(int docID, ISet<string> fieldsToLoad) { var visitor = new DocumentStoredFieldVisitor(fieldsToLoad); Document(docID, visitor); return visitor.Document; } /// <summary> /// Returns <c>true</c> if any documents have been deleted. Implementers should /// consider overriding this property if <see cref="MaxDoc"/> or <see cref="NumDocs"/> /// are not constant-time operations. /// </summary> public virtual bool HasDeletions { get { return NumDeletedDocs > 0; } } /// <summary> Closes files associated with this index. /// Also saves any new deletions to disk. /// No other methods should be called after this has been called. /// </summary> /// <exception cref="System.IO.IOException">If there is a low-level IO error</exception> public void Dispose() { Dispose(true); } /// <summary> /// Closes files associated with this index. /// This method implements the disposable pattern. /// It may be overridden to dispose any managed or unmanaged resources, /// but be sure to call <c>base.Dispose(disposing)</c> to close files associated with the /// underlying <see cref="IndexReader"/>. /// </summary> /// <param name="disposing"><c>true</c> indicates to dispose all managed /// and unmanaged resources, <c>false</c> indicates dispose unmanaged /// resources only</param> protected virtual void Dispose(bool disposing) { if (disposing) { lock (this) { if (!closed) { DecRef(); closed = true; } } } } /// <summary> /// Implements close. </summary> protected internal abstract void DoClose(); /// <summary> /// Expert: Returns the root <see cref="IndexReaderContext"/> for this /// <see cref="IndexReader"/>'s sub-reader tree. /// <para/> /// Iff this reader is composed of sub /// readers, i.e. this reader being a composite reader, this method returns a /// <see cref="CompositeReaderContext"/> holding the reader's direct children as well as a /// view of the reader tree's atomic leaf contexts. 
All sub- /// <see cref="IndexReaderContext"/> instances referenced from this readers top-level /// context are private to this reader and are not shared with another context /// tree. For example, <see cref="Search.IndexSearcher"/> uses this API to drive searching by one /// atomic leaf reader at a time. If this reader is not composed of child /// readers, this method returns an <see cref="AtomicReaderContext"/>. /// <para/> /// Note: Any of the sub-<see cref="CompositeReaderContext"/> instances referenced /// from this top-level context do not support <see cref="CompositeReaderContext.Leaves"/>. /// Only the top-level context maintains the convenience leaf-view /// for performance reasons. /// </summary> public abstract IndexReaderContext Context { get; } /// <summary> /// Returns the reader's leaves, or itself if this reader is atomic. /// This is a convenience method calling <c>this.Context.Leaves</c>. /// </summary> /// <seealso cref="IndexReaderContext.Leaves"/> public IList<AtomicReaderContext> Leaves { get { return Context.Leaves; } } /// <summary> /// Expert: Returns a key for this <see cref="IndexReader"/>, so /// <see cref="Search.IFieldCache"/>/<see cref="Search.CachingWrapperFilter"/> can find /// it again. /// This key must not have Equals()/GetHashCode() methods, /// so &quot;equals&quot; means &quot;identical&quot;. /// </summary> public virtual object CoreCacheKey { get { // Don't call ensureOpen since FC calls this (to evict) // on close return this; } } /// <summary> /// Expert: Returns a key for this <see cref="IndexReader"/> that also includes deletions, /// so <see cref="Search.IFieldCache"/>/<see cref="Search.CachingWrapperFilter"/> can find it again. /// This key must not have Equals()/GetHashCode() methods, /// so &quot;equals&quot; means &quot;identical&quot;. /// </summary> public virtual object CombinedCoreAndDeletesKey { get { // Don't call ensureOpen since FC calls this (to evict) // on close return this; } } /// <summary> /// Returns the number of documents containing the /// <paramref name="term"/>. This method returns 0 if the term or /// field does not exist. This method does not take into /// account deleted documents that have not yet been merged /// away. </summary> /// <seealso cref="TermsEnum.DocFreq"/> public abstract int DocFreq(Term term); /// <summary> /// Returns the total number of occurrences of <paramref name="term"/> across all /// documents (the sum of the Freq for each doc that has this term). This /// will be -1 if the codec doesn't support this measure. Note that, like other /// term measures, this measure does not take deleted documents into account. /// </summary> public abstract long TotalTermFreq(Term term); /// <summary> /// Returns the sum of <see cref="TermsEnum.DocFreq"/> for all terms in this field, /// or -1 if this measure isn't stored by the codec. Note that, just like other /// term measures, this measure does not take deleted documents into account. /// </summary> /// <seealso cref="Terms.SumDocFreq"/> public abstract long GetSumDocFreq(string field); /// <summary> /// Returns the number of documents that have at least one term for this field, /// or -1 if this measure isn't stored by the codec. Note that, just like other /// term measures, this measure does not take deleted documents into account. 
/// </summary> /// <seealso cref="Terms.DocCount"/> public abstract int GetDocCount(string field); /// <summary> /// Returns the sum of <see cref="TermsEnum.TotalTermFreq"/> for all terms in this /// field, or -1 if this measure isn't stored by the codec (or if this fields /// omits term freq and positions). Note that, just like other term measures, /// this measure does not take deleted documents into account. /// </summary> /// <seealso cref="Terms.SumTotalTermFreq"/> public abstract long GetSumTotalTermFreq(string field); } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Diagnostics; using System.Collections.Generic; using System.Runtime.CompilerServices; namespace Internal.TypeSystem { /// <summary> /// Represents the fundamental base type of all types within the type system. /// </summary> public abstract partial class TypeDesc { public static readonly TypeDesc[] EmptyTypes = new TypeDesc[0]; /// Inherited types are required to override, and should use the algorithms /// in TypeHashingAlgorithms in their implementation. public abstract override int GetHashCode(); public override bool Equals(Object o) { // Its only valid to compare two TypeDescs in the same context Debug.Assert(o == null || !(o is TypeDesc) || Object.ReferenceEquals(((TypeDesc)o).Context, this.Context)); return Object.ReferenceEquals(this, o); } #if DEBUG public static bool operator ==(TypeDesc left, TypeDesc right) { // Its only valid to compare two TypeDescs in the same context Debug.Assert(Object.ReferenceEquals(left, null) || Object.ReferenceEquals(right, null) || Object.ReferenceEquals(left.Context, right.Context)); return Object.ReferenceEquals(left, right); } public static bool operator !=(TypeDesc left, TypeDesc right) { // Its only valid to compare two TypeDescs in the same context Debug.Assert(Object.ReferenceEquals(left, null) || Object.ReferenceEquals(right, null) || Object.ReferenceEquals(left.Context, right.Context)); return !Object.ReferenceEquals(left, right); } #endif // The most frequently used type properties are cached here to avoid excesive virtual calls private TypeFlags _typeFlags; /// <summary> /// Gets the type system context this type belongs to. /// </summary> public abstract TypeSystemContext Context { get; } /// <summary> /// Gets the generic instantiation information of this type. /// For generic definitions, retrieves the generic parameters of the type. /// For generic instantiation, retrieves the generic arguments of the type. /// </summary> public virtual Instantiation Instantiation { get { return Instantiation.Empty; } } /// <summary> /// Gets a value indicating whether this type has a generic instantiation. /// This will be true for generic type instantiations and generic definitions. 
/// </summary> public bool HasInstantiation { get { return this.Instantiation.Length != 0; } } internal void SetWellKnownType(WellKnownType wellKnownType) { TypeFlags flags; switch (wellKnownType) { case WellKnownType.Void: case WellKnownType.Boolean: case WellKnownType.Char: case WellKnownType.SByte: case WellKnownType.Byte: case WellKnownType.Int16: case WellKnownType.UInt16: case WellKnownType.Int32: case WellKnownType.UInt32: case WellKnownType.Int64: case WellKnownType.UInt64: case WellKnownType.IntPtr: case WellKnownType.UIntPtr: case WellKnownType.Single: case WellKnownType.Double: flags = (TypeFlags)wellKnownType; break; case WellKnownType.ValueType: case WellKnownType.Enum: flags = TypeFlags.Class; break; case WellKnownType.Nullable: flags = TypeFlags.Nullable; break; case WellKnownType.Object: case WellKnownType.String: case WellKnownType.Array: case WellKnownType.MulticastDelegate: case WellKnownType.Exception: flags = TypeFlags.Class; break; case WellKnownType.RuntimeTypeHandle: case WellKnownType.RuntimeMethodHandle: case WellKnownType.RuntimeFieldHandle: flags = TypeFlags.ValueType; break; default: throw new ArgumentException(); } _typeFlags = flags; } protected abstract TypeFlags ComputeTypeFlags(TypeFlags mask); [MethodImpl(MethodImplOptions.NoInlining)] private TypeFlags InitializeTypeFlags(TypeFlags mask) { TypeFlags flags = ComputeTypeFlags(mask); Debug.Assert((flags & mask) != 0); _typeFlags |= flags; return flags & mask; } [MethodImpl(MethodImplOptions.AggressiveInlining)] protected internal TypeFlags GetTypeFlags(TypeFlags mask) { TypeFlags flags = _typeFlags & mask; if (flags != 0) return flags; return InitializeTypeFlags(mask); } /// <summary> /// Retrieves the category of the type. This is one of the possible values of /// <see cref="TypeFlags"/> less than <see cref="TypeFlags.CategoryMask"/>. /// </summary> public TypeFlags Category { get { return GetTypeFlags(TypeFlags.CategoryMask); } } /// <summary> /// Gets a value indicating whether this type is an interface type. /// </summary> public bool IsInterface { get { return GetTypeFlags(TypeFlags.CategoryMask) == TypeFlags.Interface; } } /// <summary> /// Gets a value indicating whether this type is a value type (not a reference type). /// </summary> public bool IsValueType { get { return GetTypeFlags(TypeFlags.CategoryMask) < TypeFlags.Class; } } /// <summary> /// Gets a value indicating whether this is one of the primitive types (boolean, char, void, /// a floating point, or an integer type). /// </summary> public bool IsPrimitive { get { return GetTypeFlags(TypeFlags.CategoryMask) < TypeFlags.ValueType; } } /// <summary> /// Gets a value indicating whether this is an enum type. /// Access <see cref="UnderlyingType"/> to retrieve the underlying integral type. /// </summary> public bool IsEnum { get { return GetTypeFlags(TypeFlags.CategoryMask) == TypeFlags.Enum; } } /// <summary> /// Gets a value indicating whether this is a delegate type. /// </summary> public bool IsDelegate { get { var baseType = this.BaseType; return (baseType != null) ? baseType.IsWellKnownType(WellKnownType.MulticastDelegate) : false; } } /// <summary> /// Gets a value indicating whether this is System.Void type. /// </summary> public bool IsVoid { get { return GetTypeFlags(TypeFlags.CategoryMask) == TypeFlags.Void; } } /// <summary> /// Gets a value indicating whether this is System.String type. 
/// </summary> public bool IsString { get { return this.IsWellKnownType(WellKnownType.String); } } /// <summary> /// Gets a value indicating whether this is System.Object type. /// </summary> public bool IsObject { get { return this.IsWellKnownType(WellKnownType.Object); } } /// <summary> /// Gets a value indicating whether this is a generic definition, or /// an instance of System.Nullable`1. /// </summary> public bool IsNullable { get { return this.GetTypeDefinition().IsWellKnownType(WellKnownType.Nullable); } } /// <summary> /// Gets a value indicating whether this is an array type (<see cref="ArrayType"/>). /// Note this will return true for both multidimensional array types and vector types. /// Use <see cref="IsSzArray"/> to check for vector types. /// </summary> public bool IsArray { get { return this.GetType() == typeof(ArrayType); } } /// <summary> /// Gets a value indicating whether this is a vector type. A vector is a single-dimensional /// array with a zero lower bound. To check for arrays in general, use <see cref="IsArray"/>. /// </summary> public bool IsSzArray { get { return this.IsArray && ((ArrayType)this).IsSzArray; } } /// <summary> /// Gets a value indicating whether this is a managed pointer type (<see cref="ByRefType"/>). /// </summary> public bool IsByRef { get { return this.GetType() == typeof(ByRefType); } } /// <summary> /// Gets a value indicating whether this is an unmanaged pointer type (<see cref="PointerType"/>). /// </summary> public bool IsPointer { get { return this.GetType() == typeof(PointerType); } } /// <summary> /// Gets a value indicating whether this is a <see cref="SignatureTypeVariable"/> or <see cref="SignatureMethodVariable"/>. /// </summary> public bool IsSignatureVariable { get { return this.GetType() == typeof(SignatureTypeVariable) || this.GetType() == typeof(SignatureMethodVariable); } } /// <summary> /// Gets a value indicating whether this is a generic parameter (<see cref="GenericParameterDesc"/>). /// </summary> public bool IsGenericParameter { get { return GetTypeFlags(TypeFlags.CategoryMask) == TypeFlags.GenericParameter; } } /// <summary> /// Gets a value indicating whether this is a pointer, byref, array, or szarray type, /// and can be used as a ParameterizedType. /// </summary> public bool IsParameterizedType { get { TypeFlags flags = GetTypeFlags(TypeFlags.CategoryMask); Debug.Assert((flags >= TypeFlags.Array && flags <= TypeFlags.Pointer) == (this is ParameterizedType)); return (flags >= TypeFlags.Array && flags <= TypeFlags.Pointer); } } /// <summary> /// Gets a value indicating whether this is a class, an interface, a value type, or a /// generic instance of one of them. /// </summary> public bool IsDefType { get { Debug.Assert(GetTypeFlags(TypeFlags.CategoryMask) <= TypeFlags.Interface == this is DefType); return GetTypeFlags(TypeFlags.CategoryMask) <= TypeFlags.Interface; } } /// <summary> /// Gets a value indicating whether locations of this type refer to an object on the GC heap. /// </summary> public bool IsGCPointer { get { TypeFlags category = GetTypeFlags(TypeFlags.CategoryMask); return category == TypeFlags.Class || category == TypeFlags.Array || category == TypeFlags.SzArray || category == TypeFlags.Interface; } } public bool ContainsGenericVariables { get { return (GetTypeFlags(TypeFlags.ContainsGenericVariables | TypeFlags.ContainsGenericVariablesComputed) & TypeFlags.ContainsGenericVariables) != 0; } } /// <summary> /// Gets the type from which this type derives from, or null if there's no such type. 
/// </summary> public virtual DefType BaseType { get { return null; } } /// <summary> /// Gets a value indicating whether this type has a base type. /// </summary> public bool HasBaseType { get { return BaseType != null; } } /// <summary> /// If this is an enum type, gets the underlying integral type of the enum type. /// For all other types, returns 'this'. /// </summary> public virtual TypeDesc UnderlyingType { get { if (!this.IsEnum) return this; // TODO: Cache the result? foreach (var field in this.GetFields()) { if (!field.IsStatic) return field.FieldType; } throw new BadImageFormatException(); } } /// <summary> /// Gets a value indicating whether this type has a class constructor method. /// Use <see cref="GetStaticConstructor"/> to retrieve it. /// </summary> public virtual bool HasStaticConstructor { get { return false; } } /// <summary> /// Gets all methods on this type defined within the type's metadata. /// This will not include methods injected by the type system context. /// </summary> public virtual IEnumerable<MethodDesc> GetMethods() { return MethodDesc.EmptyMethods; } /// <summary> /// Gets a named method on the type. This method only looks at methods defined /// in type's metadata. The <paramref name="signature"/> parameter can be null. /// If signature is not specified and there are multiple matches, the first one /// is returned. Returns null if method not found. /// </summary> // TODO: Substitutions, generics, modopts, ... public virtual MethodDesc GetMethod(string name, MethodSignature signature) { foreach (var method in GetMethods()) { if (method.Name == name) { if (signature == null || signature.Equals(method.Signature)) return method; } } return null; } /// <summary> /// Retrieves the class constructor method of this type. /// </summary> /// <returns></returns> public virtual MethodDesc GetStaticConstructor() { return null; } /// <summary> /// Gets all fields on the type as defined in the metadata. /// </summary> public virtual IEnumerable<FieldDesc> GetFields() { return FieldDesc.EmptyFields; } /// <summary> /// Gets a named field on the type. Returns null if the field wasn't found. /// </summary> // TODO: Substitutions, generics, modopts, ... // TODO: field signature public virtual FieldDesc GetField(string name) { foreach (var field in GetFields()) { if (field.Name == name) return field; } return null; } public virtual TypeDesc InstantiateSignature(Instantiation typeInstantiation, Instantiation methodInstantiation) { return this; } /// <summary> /// Gets the definition of the type. If this is a generic type instance, /// this method strips the instantiation (E.g C&lt;int&gt; -> C&lt;T&gt;) /// </summary> public virtual TypeDesc GetTypeDefinition() { return this; } /// <summary> /// Gets a value indicating whether this is a type definition. Returns false /// if this is an instantiated generic type. /// </summary> public bool IsTypeDefinition { get { return GetTypeDefinition() == this; } } /// <summary> /// Determine if two types share the same type definition /// </summary> public bool HasSameTypeDefinition(TypeDesc otherType) { return GetTypeDefinition() == otherType.GetTypeDefinition(); } /// <summary> /// Gets a value indicating whether this type has a finalizer method. /// Use <see cref="GetFinalizer"/> to retrieve the method. /// </summary> public virtual bool HasFinalizer { get { return false; } } /// <summary> /// Gets the finalizer method (an override of the System.Object::Finalize method) /// if this type has one. 
Returns null if the type doesn't define one. /// </summary> public virtual MethodDesc GetFinalizer() { return null; } /// <summary> /// Gets a value indicating whether this type has generic variance (the definition of the type /// has a generic parameter that is co- or contravariant). /// </summary> public bool HasVariance { get { return (GetTypeFlags(TypeFlags.HasGenericVariance | TypeFlags.HasGenericVarianceComputed) & TypeFlags.HasGenericVariance) != 0; } } /// <summary> /// Gets a value indicating whether this type is an uninstantiated definition of a generic type. /// </summary> public bool IsGenericDefinition { get { return HasInstantiation && IsTypeDefinition; } } } }
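The GetTypeFlags/ComputeTypeFlags pair above is a lazy bit-field cache: the first query for a given mask pays for the virtual ComputeTypeFlags call, and the result is OR-ed into _typeFlags so later queries for the same bits take the fast path. The following is a minimal, self-contained sketch of that caching pattern; the DemoFlags, DemoTypeDesc and DemoClassType names are hypothetical stand-ins for illustration, not the real TypeFlags/TypeDesc types.

using System;

[Flags]
enum DemoFlags
{
    None = 0,
    CategoryMask = 0x0F,          // low bits hold mutually exclusive category values
    ValueType = 0x01,
    Class = 0x02,
    Interface = 0x03,
    HasFinalizerComputed = 0x10,  // marker bit: the finalizer question has been answered
    HasFinalizer = 0x20,
}

abstract class DemoTypeDesc
{
    private DemoFlags _flags;     // shared cache for every mask ever requested

    // Derived types compute at least the bits covered by 'mask' (plus any marker bits).
    protected abstract DemoFlags ComputeFlags(DemoFlags mask);

    public DemoFlags GetFlags(DemoFlags mask)
    {
        DemoFlags cached = _flags & mask;
        if (cached != 0)
            return cached;                    // fast path: bits already cached

        DemoFlags computed = ComputeFlags(mask);
        _flags |= computed;                   // cache for subsequent queries
        return computed & mask;
    }
}

sealed class DemoClassType : DemoTypeDesc
{
    protected override DemoFlags ComputeFlags(DemoFlags mask)
    {
        // A real implementation would inspect metadata; here the answers are fixed.
        return DemoFlags.Class | DemoFlags.HasFinalizerComputed;
    }
}

static class DemoFlagsUsage
{
    static void Main()
    {
        DemoTypeDesc t = new DemoClassType();
        Console.WriteLine(t.GetFlags(DemoFlags.CategoryMask) == DemoFlags.Class);  // True (computed)
        Console.WriteLine(t.GetFlags(DemoFlags.CategoryMask) == DemoFlags.Class);  // True (cached)
    }
}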
#region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections.Generic; using System.Collections.Specialized; using System.ComponentModel; using Newtonsoft.Json.Tests.TestObjects; #if !NETFX_CORE using NUnit.Framework; #else using Microsoft.VisualStudio.TestTools.UnitTesting; using TestFixture = Microsoft.VisualStudio.TestTools.UnitTesting.TestClassAttribute; using Test = Microsoft.VisualStudio.TestTools.UnitTesting.TestMethodAttribute; #endif using Newtonsoft.Json.Linq; using System.IO; using System.Collections; #if !PocketPC && !SILVERLIGHT && !NETFX_CORE using System.Web.UI; #endif #if NET20 using Newtonsoft.Json.Utilities.LinqBridge; #else using System.Linq; #endif namespace Newtonsoft.Json.Tests.Linq { [TestFixture] public class JObjectTests : TestFixtureBase { [Test] public void Keys() { var o = new JObject(); var d = (IDictionary<string, JToken>) o; Assert.AreEqual(0, d.Keys.Count); o["value"] = true; Assert.AreEqual(1, d.Keys.Count); } [Test] public void TryGetValue() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); Assert.AreEqual(1, o.Children().Count()); JToken t; Assert.AreEqual(false, o.TryGetValue("sdf", out t)); Assert.AreEqual(null, t); Assert.AreEqual(false, o.TryGetValue(null, out t)); Assert.AreEqual(null, t); Assert.AreEqual(true, o.TryGetValue("PropertyNameValue", out t)); Assert.AreEqual(true, JToken.DeepEquals(new JValue(1), t)); } [Test] public void DictionaryItemShouldSet() { JObject o = new JObject(); o["PropertyNameValue"] = new JValue(1); Assert.AreEqual(1, o.Children().Count()); JToken t; Assert.AreEqual(true, o.TryGetValue("PropertyNameValue", out t)); Assert.AreEqual(true, JToken.DeepEquals(new JValue(1), t)); o["PropertyNameValue"] = new JValue(2); Assert.AreEqual(1, o.Children().Count()); Assert.AreEqual(true, o.TryGetValue("PropertyNameValue", out t)); Assert.AreEqual(true, JToken.DeepEquals(new JValue(2), t)); o["PropertyNameValue"] = null; Assert.AreEqual(1, o.Children().Count()); Assert.AreEqual(true, o.TryGetValue("PropertyNameValue", out t)); Assert.AreEqual(true, JToken.DeepEquals(new JValue((object)null), t)); } [Test] public void Remove() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); Assert.AreEqual(1, o.Children().Count()); Assert.AreEqual(false, o.Remove("sdf")); Assert.AreEqual(false, o.Remove(null)); Assert.AreEqual(true, o.Remove("PropertyNameValue")); Assert.AreEqual(0, 
o.Children().Count()); } [Test] public void GenericCollectionRemove() { JValue v = new JValue(1); JObject o = new JObject(); o.Add("PropertyNameValue", v); Assert.AreEqual(1, o.Children().Count()); Assert.AreEqual(false, ((ICollection<KeyValuePair<string, JToken>>)o).Remove(new KeyValuePair<string, JToken>("PropertyNameValue1", new JValue(1)))); Assert.AreEqual(false, ((ICollection<KeyValuePair<string, JToken>>)o).Remove(new KeyValuePair<string, JToken>("PropertyNameValue", new JValue(2)))); Assert.AreEqual(false, ((ICollection<KeyValuePair<string, JToken>>)o).Remove(new KeyValuePair<string, JToken>("PropertyNameValue", new JValue(1)))); Assert.AreEqual(true, ((ICollection<KeyValuePair<string, JToken>>)o).Remove(new KeyValuePair<string, JToken>("PropertyNameValue", v))); Assert.AreEqual(0, o.Children().Count()); } [Test] public void DuplicatePropertyNameShouldThrow() { ExceptionAssert.Throws<ArgumentException>( "Can not add property PropertyNameValue to Newtonsoft.Json.Linq.JObject. Property with the same name already exists on object.", () => { JObject o = new JObject(); o.Add("PropertyNameValue", null); o.Add("PropertyNameValue", null); }); } [Test] public void GenericDictionaryAdd() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); Assert.AreEqual(1, (int)o["PropertyNameValue"]); o.Add("PropertyNameValue1", null); Assert.AreEqual(null, ((JValue)o["PropertyNameValue1"]).Value); Assert.AreEqual(2, o.Children().Count()); } [Test] public void GenericCollectionAdd() { JObject o = new JObject(); ((ICollection<KeyValuePair<string,JToken>>)o).Add(new KeyValuePair<string,JToken>("PropertyNameValue", new JValue(1))); Assert.AreEqual(1, (int)o["PropertyNameValue"]); Assert.AreEqual(1, o.Children().Count()); } [Test] public void GenericCollectionClear() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); Assert.AreEqual(1, o.Children().Count()); JProperty p = (JProperty)o.Children().ElementAt(0); ((ICollection<KeyValuePair<string, JToken>>)o).Clear(); Assert.AreEqual(0, o.Children().Count()); Assert.AreEqual(null, p.Parent); } [Test] public void GenericCollectionContains() { JValue v = new JValue(1); JObject o = new JObject(); o.Add("PropertyNameValue", v); Assert.AreEqual(1, o.Children().Count()); bool contains = ((ICollection<KeyValuePair<string, JToken>>)o).Contains(new KeyValuePair<string, JToken>("PropertyNameValue", new JValue(1))); Assert.AreEqual(false, contains); contains = ((ICollection<KeyValuePair<string, JToken>>)o).Contains(new KeyValuePair<string, JToken>("PropertyNameValue", v)); Assert.AreEqual(true, contains); contains = ((ICollection<KeyValuePair<string, JToken>>)o).Contains(new KeyValuePair<string, JToken>("PropertyNameValue", new JValue(2))); Assert.AreEqual(false, contains); contains = ((ICollection<KeyValuePair<string, JToken>>)o).Contains(new KeyValuePair<string, JToken>("PropertyNameValue1", new JValue(1))); Assert.AreEqual(false, contains); contains = ((ICollection<KeyValuePair<string, JToken>>)o).Contains(default(KeyValuePair<string, JToken>)); Assert.AreEqual(false, contains); } [Test] public void GenericDictionaryContains() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); Assert.AreEqual(1, o.Children().Count()); bool contains = ((IDictionary<string, JToken>)o).ContainsKey("PropertyNameValue"); Assert.AreEqual(true, contains); } [Test] public void GenericCollectionCopyTo() { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); o.Add("PropertyNameValue2", new JValue(2)); 
o.Add("PropertyNameValue3", new JValue(3)); Assert.AreEqual(3, o.Children().Count()); KeyValuePair<string, JToken>[] a = new KeyValuePair<string,JToken>[5]; ((ICollection<KeyValuePair<string, JToken>>)o).CopyTo(a, 1); Assert.AreEqual(default(KeyValuePair<string,JToken>), a[0]); Assert.AreEqual("PropertyNameValue", a[1].Key); Assert.AreEqual(1, (int)a[1].Value); Assert.AreEqual("PropertyNameValue2", a[2].Key); Assert.AreEqual(2, (int)a[2].Value); Assert.AreEqual("PropertyNameValue3", a[3].Key); Assert.AreEqual(3, (int)a[3].Value); Assert.AreEqual(default(KeyValuePair<string, JToken>), a[4]); } [Test] public void GenericCollectionCopyToNullArrayShouldThrow() { ExceptionAssert.Throws<ArgumentException>( @"Value cannot be null. Parameter name: array", () => { JObject o = new JObject(); ((ICollection<KeyValuePair<string, JToken>>)o).CopyTo(null, 0); }); } [Test] public void GenericCollectionCopyToNegativeArrayIndexShouldThrow() { ExceptionAssert.Throws<ArgumentOutOfRangeException>( @"arrayIndex is less than 0. Parameter name: arrayIndex", () => { JObject o = new JObject(); ((ICollection<KeyValuePair<string, JToken>>)o).CopyTo(new KeyValuePair<string, JToken>[1], -1); }); } [Test] public void GenericCollectionCopyToArrayIndexEqualGreaterToArrayLengthShouldThrow() { ExceptionAssert.Throws<ArgumentException>( @"arrayIndex is equal to or greater than the length of array.", () => { JObject o = new JObject(); ((ICollection<KeyValuePair<string, JToken>>)o).CopyTo(new KeyValuePair<string, JToken>[1], 1); }); } [Test] public void GenericCollectionCopyToInsufficientArrayCapacity() { ExceptionAssert.Throws<ArgumentException>( @"The number of elements in the source JObject is greater than the available space from arrayIndex to the end of the destination array.", () => { JObject o = new JObject(); o.Add("PropertyNameValue", new JValue(1)); o.Add("PropertyNameValue2", new JValue(2)); o.Add("PropertyNameValue3", new JValue(3)); ((ICollection<KeyValuePair<string, JToken>>)o).CopyTo(new KeyValuePair<string, JToken>[3], 1); }); } [Test] public void FromObjectRaw() { PersonRaw raw = new PersonRaw { FirstName = "FirstNameValue", RawContent = new JRaw("[1,2,3,4,5]"), LastName = "LastNameValue" }; JObject o = JObject.FromObject(raw); Assert.AreEqual("FirstNameValue", (string)o["first_name"]); Assert.AreEqual(JTokenType.Raw, ((JValue)o["RawContent"]).Type); Assert.AreEqual("[1,2,3,4,5]", (string)o["RawContent"]); Assert.AreEqual("LastNameValue", (string)o["last_name"]); } [Test] public void JTokenReader() { PersonRaw raw = new PersonRaw { FirstName = "FirstNameValue", RawContent = new JRaw("[1,2,3,4,5]"), LastName = "LastNameValue" }; JObject o = JObject.FromObject(raw); JsonReader reader = new JTokenReader(o); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.StartObject, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.PropertyName, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.String, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.PropertyName, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.Raw, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.PropertyName, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.String, reader.TokenType); Assert.IsTrue(reader.Read()); Assert.AreEqual(JsonToken.EndObject, reader.TokenType); Assert.IsFalse(reader.Read()); } [Test] public void DeserializeFromRaw() { PersonRaw raw = new PersonRaw { FirstName = 
"FirstNameValue", RawContent = new JRaw("[1,2,3,4,5]"), LastName = "LastNameValue" }; JObject o = JObject.FromObject(raw); JsonReader reader = new JTokenReader(o); JsonSerializer serializer = new JsonSerializer(); raw = (PersonRaw)serializer.Deserialize(reader, typeof(PersonRaw)); Assert.AreEqual("FirstNameValue", raw.FirstName); Assert.AreEqual("LastNameValue", raw.LastName); Assert.AreEqual("[1,2,3,4,5]", raw.RawContent.Value); } [Test] public void Parse_ShouldThrowOnUnexpectedToken() { ExceptionAssert.Throws<JsonReaderException>("Error reading JObject from JsonReader. Current JsonReader item is not an object: StartArray. Path '', line 1, position 1.", () => { string json = @"[""prop""]"; JObject.Parse(json); }); } [Test] public void ParseJavaScriptDate() { string json = @"[new Date(1207285200000)]"; JArray a = (JArray)JsonConvert.DeserializeObject(json); JValue v = (JValue)a[0]; Assert.AreEqual(JsonConvert.ConvertJavaScriptTicksToDateTime(1207285200000), (DateTime)v); } [Test] public void GenericValueCast() { string json = @"{""foo"":true}"; JObject o = (JObject)JsonConvert.DeserializeObject(json); bool? value = o.Value<bool?>("foo"); Assert.AreEqual(true, value); json = @"{""foo"":null}"; o = (JObject)JsonConvert.DeserializeObject(json); value = o.Value<bool?>("foo"); Assert.AreEqual(null, value); } [Test] public void Blog() { ExceptionAssert.Throws<JsonReaderException>( "Invalid property identifier character: ]. Path 'name', line 3, position 5.", () => { JObject.Parse(@"{ ""name"": ""James"", ]!#$THIS IS: BAD JSON![{}}}}] }"); }); } [Test] public void RawChildValues() { JObject o = new JObject(); o["val1"] = new JRaw("1"); o["val2"] = new JRaw("1"); string json = o.ToString(); Assert.AreEqual(@"{ ""val1"": 1, ""val2"": 1 }", json); } [Test] public void Iterate() { JObject o = new JObject(); o.Add("PropertyNameValue1", new JValue(1)); o.Add("PropertyNameValue2", new JValue(2)); JToken t = o; int i = 1; foreach (JProperty property in t) { Assert.AreEqual("PropertyNameValue" + i, property.Name); Assert.AreEqual(i, (int)property.Value); i++; } } [Test] public void KeyValuePairIterate() { JObject o = new JObject(); o.Add("PropertyNameValue1", new JValue(1)); o.Add("PropertyNameValue2", new JValue(2)); int i = 1; foreach (KeyValuePair<string, JToken> pair in o) { Assert.AreEqual("PropertyNameValue" + i, pair.Key); Assert.AreEqual(i, (int)pair.Value); i++; } } [Test] public void WriteObjectNullStringValue() { string s = null; JValue v = new JValue(s); Assert.AreEqual(null, v.Value); Assert.AreEqual(JTokenType.String, v.Type); JObject o = new JObject(); o["title"] = v; string output = o.ToString(); Assert.AreEqual(@"{ ""title"": null }", output); } [Test] public void Example() { string json = @"{ ""Name"": ""Apple"", ""Expiry"": new Date(1230422400000), ""Price"": 3.99, ""Sizes"": [ ""Small"", ""Medium"", ""Large"" ] }"; JObject o = JObject.Parse(json); string name = (string)o["Name"]; // Apple JArray sizes = (JArray)o["Sizes"]; string smallest = (string)sizes[0]; // Small Console.WriteLine(name); Console.WriteLine(smallest); } [Test] public void DeserializeClassManually() { string jsonText = @"{ ""short"": { ""original"":""http://www.foo.com/"", ""short"":""krehqk"", ""error"": { ""code"":0, ""msg"":""No action taken"" } } }"; JObject json = JObject.Parse(jsonText); Shortie shortie = new Shortie { Original = (string)json["short"]["original"], Short = (string)json["short"]["short"], Error = new ShortieException { Code = (int)json["short"]["error"]["code"], ErrorMessage = 
(string)json["short"]["error"]["msg"] } }; Console.WriteLine(shortie.Original); // http://www.foo.com/ Console.WriteLine(shortie.Error.ErrorMessage); // No action taken Assert.AreEqual("http://www.foo.com/", shortie.Original); Assert.AreEqual("krehqk", shortie.Short); Assert.AreEqual(null, shortie.Shortened); Assert.AreEqual(0, shortie.Error.Code); Assert.AreEqual("No action taken", shortie.Error.ErrorMessage); } [Test] public void JObjectContainingHtml() { JObject o = new JObject(); o["rc"] = new JValue(200); o["m"] = new JValue(""); o["o"] = new JValue(@"<div class='s1'> <div class='avatar'> <a href='asdf'>asdf</a><br /> <strong>0</strong> </div> <div class='sl'> <p> 444444444 </p> </div> <div class='clear'> </div> </div>"); Assert.AreEqual(@"{ ""rc"": 200, ""m"": """", ""o"": ""<div class='s1'>\r\n <div class='avatar'> \r\n <a href='asdf'>asdf</a><br />\r\n <strong>0</strong>\r\n </div>\r\n <div class='sl'>\r\n <p>\r\n 444444444\r\n </p>\r\n </div>\r\n <div class='clear'>\r\n </div> \r\n</div>"" }", o.ToString()); } [Test] public void ImplicitValueConversions() { JObject moss = new JObject(); moss["FirstName"] = new JValue("Maurice"); moss["LastName"] = new JValue("Moss"); moss["BirthDate"] = new JValue(new DateTime(1977, 12, 30)); moss["Department"] = new JValue("IT"); moss["JobTitle"] = new JValue("Support"); Console.WriteLine(moss.ToString()); //{ // "FirstName": "Maurice", // "LastName": "Moss", // "BirthDate": "\/Date(252241200000+1300)\/", // "Department": "IT", // "JobTitle": "Support" //} JObject jen = new JObject(); jen["FirstName"] = "Jen"; jen["LastName"] = "Barber"; jen["BirthDate"] = new DateTime(1978, 3, 15); jen["Department"] = "IT"; jen["JobTitle"] = "Manager"; Console.WriteLine(jen.ToString()); //{ // "FirstName": "Jen", // "LastName": "Barber", // "BirthDate": "\/Date(258721200000+1300)\/", // "Department": "IT", // "JobTitle": "Manager" //} } [Test] public void ReplaceJPropertyWithJPropertyWithSameName() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); JObject o = new JObject(p1, p2); IList l = o; Assert.AreEqual(p1, l[0]); Assert.AreEqual(p2, l[1]); JProperty p3 = new JProperty("Test1", "III"); p1.Replace(p3); Assert.AreEqual(null, p1.Parent); Assert.AreEqual(l, p3.Parent); Assert.AreEqual(p3, l[0]); Assert.AreEqual(p2, l[1]); Assert.AreEqual(2, l.Count); Assert.AreEqual(2, o.Properties().Count()); JProperty p4 = new JProperty("Test4", "IV"); p2.Replace(p4); Assert.AreEqual(null, p2.Parent); Assert.AreEqual(l, p4.Parent); Assert.AreEqual(p3, l[0]); Assert.AreEqual(p4, l[1]); } #if !(SILVERLIGHT || NET20 || NETFX_CORE || PORTABLE) [Test] public void PropertyChanging() { object changing = null; object changed = null; int changingCount = 0; int changedCount = 0; JObject o = new JObject(); o.PropertyChanging += (sender, args) => { JObject s = (JObject) sender; changing = (s[args.PropertyName] != null) ? ((JValue)s[args.PropertyName]).Value : null; changingCount++; }; o.PropertyChanged += (sender, args) => { JObject s = (JObject)sender; changed = (s[args.PropertyName] != null) ? 
((JValue)s[args.PropertyName]).Value : null; changedCount++; }; o["StringValue"] = "value1"; Assert.AreEqual(null, changing); Assert.AreEqual("value1", changed); Assert.AreEqual("value1", (string)o["StringValue"]); Assert.AreEqual(1, changingCount); Assert.AreEqual(1, changedCount); o["StringValue"] = "value1"; Assert.AreEqual(1, changingCount); Assert.AreEqual(1, changedCount); o["StringValue"] = "value2"; Assert.AreEqual("value1", changing); Assert.AreEqual("value2", changed); Assert.AreEqual("value2", (string)o["StringValue"]); Assert.AreEqual(2, changingCount); Assert.AreEqual(2, changedCount); o["StringValue"] = null; Assert.AreEqual("value2", changing); Assert.AreEqual(null, changed); Assert.AreEqual(null, (string)o["StringValue"]); Assert.AreEqual(3, changingCount); Assert.AreEqual(3, changedCount); o["NullValue"] = null; Assert.AreEqual(null, changing); Assert.AreEqual(null, changed); Assert.AreEqual(new JValue((object)null), o["NullValue"]); Assert.AreEqual(4, changingCount); Assert.AreEqual(4, changedCount); o["NullValue"] = null; Assert.AreEqual(4, changingCount); Assert.AreEqual(4, changedCount); } #endif [Test] public void PropertyChanged() { object changed = null; int changedCount = 0; JObject o = new JObject(); o.PropertyChanged += (sender, args) => { JObject s = (JObject)sender; changed = (s[args.PropertyName] != null) ? ((JValue)s[args.PropertyName]).Value : null; changedCount++; }; o["StringValue"] = "value1"; Assert.AreEqual("value1", changed); Assert.AreEqual("value1", (string)o["StringValue"]); Assert.AreEqual(1, changedCount); o["StringValue"] = "value1"; Assert.AreEqual(1, changedCount); o["StringValue"] = "value2"; Assert.AreEqual("value2", changed); Assert.AreEqual("value2", (string)o["StringValue"]); Assert.AreEqual(2, changedCount); o["StringValue"] = null; Assert.AreEqual(null, changed); Assert.AreEqual(null, (string)o["StringValue"]); Assert.AreEqual(3, changedCount); o["NullValue"] = null; Assert.AreEqual(null, changed); Assert.AreEqual(new JValue((object)null), o["NullValue"]); Assert.AreEqual(4, changedCount); o["NullValue"] = null; Assert.AreEqual(4, changedCount); } [Test] public void IListContains() { JProperty p = new JProperty("Test", 1); IList l = new JObject(p); Assert.IsTrue(l.Contains(p)); Assert.IsFalse(l.Contains(new JProperty("Test", 1))); } [Test] public void IListIndexOf() { JProperty p = new JProperty("Test", 1); IList l = new JObject(p); Assert.AreEqual(0, l.IndexOf(p)); Assert.AreEqual(-1, l.IndexOf(new JProperty("Test", 1))); } [Test] public void IListClear() { JProperty p = new JProperty("Test", 1); IList l = new JObject(p); Assert.AreEqual(1, l.Count); l.Clear(); Assert.AreEqual(0, l.Count); } [Test] public void IListCopyTo() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); object[] a = new object[l.Count]; l.CopyTo(a, 0); Assert.AreEqual(p1, a[0]); Assert.AreEqual(p2, a[1]); } [Test] public void IListAdd() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l.Add(p3); Assert.AreEqual(3, l.Count); Assert.AreEqual(p3, l[2]); } [Test] public void IListAddBadToken() { ExceptionAssert.Throws<ArgumentException>( "Can not add Newtonsoft.Json.Linq.JValue to Newtonsoft.Json.Linq.JObject.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); l.Add(new JValue("Bad!")); }); } [Test] public 
void IListAddBadValue() { ExceptionAssert.Throws<ArgumentException>( "Argument is not a JToken.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); l.Add("Bad!"); }); } [Test] public void IListAddPropertyWithExistingName() { ExceptionAssert.Throws<ArgumentException>( "Can not add property Test2 to Newtonsoft.Json.Linq.JObject. Property with the same name already exists on object.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test2", "II"); l.Add(p3); }); } [Test] public void IListRemove() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); // won't do anything l.Remove(p3); Assert.AreEqual(2, l.Count); l.Remove(p1); Assert.AreEqual(1, l.Count); Assert.IsFalse(l.Contains(p1)); Assert.IsTrue(l.Contains(p2)); l.Remove(p2); Assert.AreEqual(0, l.Count); Assert.IsFalse(l.Contains(p2)); Assert.AreEqual(null, p2.Parent); } [Test] public void IListRemoveAt() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); // won't do anything l.RemoveAt(0); l.Remove(p1); Assert.AreEqual(1, l.Count); l.Remove(p2); Assert.AreEqual(0, l.Count); } [Test] public void IListInsert() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l.Insert(1, p3); Assert.AreEqual(l, p3.Parent); Assert.AreEqual(p1, l[0]); Assert.AreEqual(p3, l[1]); Assert.AreEqual(p2, l[2]); } [Test] public void IListIsReadOnly() { IList l = new JObject(); Assert.IsFalse(l.IsReadOnly); } [Test] public void IListIsFixedSize() { IList l = new JObject(); Assert.IsFalse(l.IsFixedSize); } [Test] public void IListSetItem() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l[0] = p3; Assert.AreEqual(p3, l[0]); Assert.AreEqual(p2, l[1]); } [Test] public void IListSetItemAlreadyExists() { ExceptionAssert.Throws<ArgumentException>( "Can not add property Test3 to Newtonsoft.Json.Linq.JObject. 
Property with the same name already exists on object.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l[0] = p3; l[1] = p3; }); } [Test] public void IListSetItemInvalid() { ExceptionAssert.Throws<ArgumentException>( @"Can not add Newtonsoft.Json.Linq.JValue to Newtonsoft.Json.Linq.JObject.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); l[0] = new JValue(true); }); } [Test] public void IListSyncRoot() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); Assert.IsNotNull(l.SyncRoot); } [Test] public void IListIsSynchronized() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList l = new JObject(p1, p2); Assert.IsFalse(l.IsSynchronized); } [Test] public void GenericListJTokenContains() { JProperty p = new JProperty("Test", 1); IList<JToken> l = new JObject(p); Assert.IsTrue(l.Contains(p)); Assert.IsFalse(l.Contains(new JProperty("Test", 1))); } [Test] public void GenericListJTokenIndexOf() { JProperty p = new JProperty("Test", 1); IList<JToken> l = new JObject(p); Assert.AreEqual(0, l.IndexOf(p)); Assert.AreEqual(-1, l.IndexOf(new JProperty("Test", 1))); } [Test] public void GenericListJTokenClear() { JProperty p = new JProperty("Test", 1); IList<JToken> l = new JObject(p); Assert.AreEqual(1, l.Count); l.Clear(); Assert.AreEqual(0, l.Count); } [Test] public void GenericListJTokenCopyTo() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JToken[] a = new JToken[l.Count]; l.CopyTo(a, 0); Assert.AreEqual(p1, a[0]); Assert.AreEqual(p2, a[1]); } [Test] public void GenericListJTokenAdd() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l.Add(p3); Assert.AreEqual(3, l.Count); Assert.AreEqual(p3, l[2]); } [Test] public void GenericListJTokenAddBadToken() { ExceptionAssert.Throws<ArgumentException>("Can not add Newtonsoft.Json.Linq.JValue to Newtonsoft.Json.Linq.JObject.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); l.Add(new JValue("Bad!")); }); } [Test] public void GenericListJTokenAddBadValue() { ExceptionAssert.Throws<ArgumentException>("Can not add Newtonsoft.Json.Linq.JValue to Newtonsoft.Json.Linq.JObject.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); // string is implicitly converted to JValue l.Add("Bad!"); }); } [Test] public void GenericListJTokenAddPropertyWithExistingName() { ExceptionAssert.Throws<ArgumentException>("Can not add property Test2 to Newtonsoft.Json.Linq.JObject. 
Property with the same name already exists on object.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test2", "II"); l.Add(p3); }); } [Test] public void GenericListJTokenRemove() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); // won't do anything Assert.IsFalse(l.Remove(p3)); Assert.AreEqual(2, l.Count); Assert.IsTrue(l.Remove(p1)); Assert.AreEqual(1, l.Count); Assert.IsFalse(l.Contains(p1)); Assert.IsTrue(l.Contains(p2)); Assert.IsTrue(l.Remove(p2)); Assert.AreEqual(0, l.Count); Assert.IsFalse(l.Contains(p2)); Assert.AreEqual(null, p2.Parent); } [Test] public void GenericListJTokenRemoveAt() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); // won't do anything l.RemoveAt(0); l.Remove(p1); Assert.AreEqual(1, l.Count); l.Remove(p2); Assert.AreEqual(0, l.Count); } [Test] public void GenericListJTokenInsert() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l.Insert(1, p3); Assert.AreEqual(l, p3.Parent); Assert.AreEqual(p1, l[0]); Assert.AreEqual(p3, l[1]); Assert.AreEqual(p2, l[2]); } [Test] public void GenericListJTokenIsReadOnly() { IList<JToken> l = new JObject(); Assert.IsFalse(l.IsReadOnly); } [Test] public void GenericListJTokenSetItem() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l[0] = p3; Assert.AreEqual(p3, l[0]); Assert.AreEqual(p2, l[1]); } [Test] public void GenericListJTokenSetItemAlreadyExists() { ExceptionAssert.Throws<ArgumentException>("Can not add property Test3 to Newtonsoft.Json.Linq.JObject. 
Property with the same name already exists on object.", () => { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); IList<JToken> l = new JObject(p1, p2); JProperty p3 = new JProperty("Test3", "III"); l[0] = p3; l[1] = p3; }); } #if !(SILVERLIGHT || NETFX_CORE || PORTABLE) [Test] public void IBindingListSortDirection() { IBindingList l = new JObject(); Assert.AreEqual(ListSortDirection.Ascending, l.SortDirection); } [Test] public void IBindingListSortProperty() { IBindingList l = new JObject(); Assert.AreEqual(null, l.SortProperty); } [Test] public void IBindingListSupportsChangeNotification() { IBindingList l = new JObject(); Assert.AreEqual(true, l.SupportsChangeNotification); } [Test] public void IBindingListSupportsSearching() { IBindingList l = new JObject(); Assert.AreEqual(false, l.SupportsSearching); } [Test] public void IBindingListSupportsSorting() { IBindingList l = new JObject(); Assert.AreEqual(false, l.SupportsSorting); } [Test] public void IBindingListAllowEdit() { IBindingList l = new JObject(); Assert.AreEqual(true, l.AllowEdit); } [Test] public void IBindingListAllowNew() { IBindingList l = new JObject(); Assert.AreEqual(true, l.AllowNew); } [Test] public void IBindingListAllowRemove() { IBindingList l = new JObject(); Assert.AreEqual(true, l.AllowRemove); } [Test] public void IBindingListAddIndex() { IBindingList l = new JObject(); // do nothing l.AddIndex(null); } [Test] public void IBindingListApplySort() { ExceptionAssert.Throws<NotSupportedException>( "Specified method is not supported.", () => { IBindingList l = new JObject(); l.ApplySort(null, ListSortDirection.Ascending); }); } [Test] public void IBindingListRemoveSort() { ExceptionAssert.Throws<NotSupportedException>( "Specified method is not supported.", () => { IBindingList l = new JObject(); l.RemoveSort(); }); } [Test] public void IBindingListRemoveIndex() { IBindingList l = new JObject(); // do nothing l.RemoveIndex(null); } [Test] public void IBindingListFind() { ExceptionAssert.Throws<NotSupportedException>( "Specified method is not supported.", () => { IBindingList l = new JObject(); l.Find(null, null); }); } [Test] public void IBindingListIsSorted() { IBindingList l = new JObject(); Assert.AreEqual(false, l.IsSorted); } [Test] public void IBindingListAddNew() { ExceptionAssert.Throws<JsonException>( "Could not determine new value to add to 'Newtonsoft.Json.Linq.JObject'.", () => { IBindingList l = new JObject(); l.AddNew(); }); } [Test] public void IBindingListAddNewWithEvent() { JObject o = new JObject(); o.AddingNew += (s, e) => e.NewObject = new JProperty("Property!"); IBindingList l = o; object newObject = l.AddNew(); Assert.IsNotNull(newObject); JProperty p = (JProperty) newObject; Assert.AreEqual("Property!", p.Name); Assert.AreEqual(o, p.Parent); } [Test] public void ITypedListGetListName() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); ITypedList l = new JObject(p1, p2); Assert.AreEqual(string.Empty, l.GetListName(null)); } [Test] public void ITypedListGetItemProperties() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); ITypedList l = new JObject(p1, p2); PropertyDescriptorCollection propertyDescriptors = l.GetItemProperties(null); Assert.IsNull(propertyDescriptors); } [Test] public void ListChanged() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); JObject o = new JObject(p1, p2); ListChangedType? changedType = null; int? 
index = null; o.ListChanged += (s, a) => { changedType = a.ListChangedType; index = a.NewIndex; }; JProperty p3 = new JProperty("Test3", "III"); o.Add(p3); Assert.AreEqual(changedType, ListChangedType.ItemAdded); Assert.AreEqual(index, 2); Assert.AreEqual(p3, ((IList<JToken>)o)[index.Value]); JProperty p4 = new JProperty("Test4", "IV"); ((IList<JToken>) o)[index.Value] = p4; Assert.AreEqual(changedType, ListChangedType.ItemChanged); Assert.AreEqual(index, 2); Assert.AreEqual(p4, ((IList<JToken>)o)[index.Value]); Assert.IsFalse(((IList<JToken>)o).Contains(p3)); Assert.IsTrue(((IList<JToken>)o).Contains(p4)); o["Test1"] = 2; Assert.AreEqual(changedType, ListChangedType.ItemChanged); Assert.AreEqual(index, 0); Assert.AreEqual(2, (int)o["Test1"]); } #endif #if SILVERLIGHT || !(NET20 || NET35 || PORTABLE) [Test] public void CollectionChanged() { JProperty p1 = new JProperty("Test1", 1); JProperty p2 = new JProperty("Test2", "Two"); JObject o = new JObject(p1, p2); NotifyCollectionChangedAction? changedType = null; int? index = null; o.CollectionChanged += (s, a) => { changedType = a.Action; index = a.NewStartingIndex; }; JProperty p3 = new JProperty("Test3", "III"); o.Add(p3); Assert.AreEqual(changedType, NotifyCollectionChangedAction.Add); Assert.AreEqual(index, 2); Assert.AreEqual(p3, ((IList<JToken>)o)[index.Value]); JProperty p4 = new JProperty("Test4", "IV"); ((IList<JToken>)o)[index.Value] = p4; Assert.AreEqual(changedType, NotifyCollectionChangedAction.Replace); Assert.AreEqual(index, 2); Assert.AreEqual(p4, ((IList<JToken>)o)[index.Value]); Assert.IsFalse(((IList<JToken>)o).Contains(p3)); Assert.IsTrue(((IList<JToken>)o).Contains(p4)); o["Test1"] = 2; Assert.AreEqual(changedType, NotifyCollectionChangedAction.Replace); Assert.AreEqual(index, 0); Assert.AreEqual(2, (int)o["Test1"]); } #endif [Test] public void GetGeocodeAddress() { string json = @"{ ""name"": ""Address: 435 North Mulford Road Rockford, IL 61107"", ""Status"": { ""code"": 200, ""request"": ""geocode"" }, ""Placemark"": [ { ""id"": ""p1"", ""address"": ""435 N Mulford Rd, Rockford, IL 61107, USA"", ""AddressDetails"": { ""Accuracy"" : 8, ""Country"" : { ""AdministrativeArea"" : { ""AdministrativeAreaName"" : ""IL"", ""SubAdministrativeArea"" : { ""Locality"" : { ""LocalityName"" : ""Rockford"", ""PostalCode"" : { ""PostalCodeNumber"" : ""61107"" }, ""Thoroughfare"" : { ""ThoroughfareName"" : ""435 N Mulford Rd"" } }, ""SubAdministrativeAreaName"" : ""Winnebago"" } }, ""CountryName"" : ""USA"", ""CountryNameCode"" : ""US"" } }, ""ExtendedData"": { ""LatLonBox"": { ""north"": 42.2753076, ""south"": 42.2690124, ""east"": -88.9964645, ""west"": -89.0027597 } }, ""Point"": { ""coordinates"": [ -88.9995886, 42.2721596, 0 ] } } ] }"; JObject o = JObject.Parse(json); string searchAddress = (string)o["Placemark"][0]["AddressDetails"]["Country"]["AdministrativeArea"]["SubAdministrativeArea"]["Locality"]["Thoroughfare"]["ThoroughfareName"]; Assert.AreEqual("435 N Mulford Rd", searchAddress); } [Test] public void SetValueWithInvalidPropertyName() { ExceptionAssert.Throws<ArgumentException>("Set JObject values with invalid key value: 0. 
Object property name expected.", () => { JObject o = new JObject(); o[0] = new JValue(3); }); } [Test] public void SetValue() { object key = "TestKey"; JObject o = new JObject(); o[key] = new JValue(3); Assert.AreEqual(3, (int)o[key]); } [Test] public void ParseMultipleProperties() { string json = @"{ ""Name"": ""Name1"", ""Name"": ""Name2"" }"; JObject o = JObject.Parse(json); string value = (string)o["Name"]; Assert.AreEqual("Name2", value); } #if !(NETFX_CORE || PORTABLE) [Test] public void WriteObjectNullDBNullValue() { DBNull dbNull = DBNull.Value; JValue v = new JValue(dbNull); Assert.AreEqual(DBNull.Value, v.Value); Assert.AreEqual(JTokenType.Null, v.Type); JObject o = new JObject(); o["title"] = v; string output = o.ToString(); Assert.AreEqual(@"{ ""title"": null }", output); } #endif [Test] public void InvalidValueCastExceptionMessage() { ExceptionAssert.Throws<ArgumentException>("Can not convert Object to String.", () => { string json = @"{ ""responseData"": {}, ""responseDetails"": null, ""responseStatus"": 200 }"; JObject o = JObject.Parse(json); string name = (string)o["responseData"]; }); } [Test] public void InvalidPropertyValueCastExceptionMessage() { ExceptionAssert.Throws<ArgumentException>("Can not convert Object to String.", () => { string json = @"{ ""responseData"": {}, ""responseDetails"": null, ""responseStatus"": 200 }"; JObject o = JObject.Parse(json); string name = (string)o.Property("responseData"); }); } [Test] public void NumberTooBigForInt64() { ExceptionAssert.Throws<JsonReaderException>("JSON integer 307953220000517141511 is too large or small for an Int64. Path 'code', line 1, position 30.", () => { string json = @"{""code"": 307953220000517141511}"; JObject.Parse(json); }); } [Test] public void ParseIncomplete() { ExceptionAssert.Throws<Exception>("Unexpected end of content while loading JObject. Path 'foo', line 1, position 6.", () => { JObject.Parse("{ foo:"); }); } [Test] public void LoadFromNestedObject() { string jsonText = @"{ ""short"": { ""error"": { ""code"":0, ""msg"":""No action taken"" } } }"; JsonReader reader = new JsonTextReader(new StringReader(jsonText)); reader.Read(); reader.Read(); reader.Read(); reader.Read(); reader.Read(); JObject o = (JObject)JToken.ReadFrom(reader); Assert.IsNotNull(o); Assert.AreEqual(@"{ ""code"": 0, ""msg"": ""No action taken"" }", o.ToString(Formatting.Indented)); } [Test] public void LoadFromNestedObjectIncomplete() { ExceptionAssert.Throws<JsonReaderException>("Unexpected end of content while loading JObject. 
Path 'short.error.code', line 6, position 15.", () => { string jsonText = @"{ ""short"": { ""error"": { ""code"":0"; JsonReader reader = new JsonTextReader(new StringReader(jsonText)); reader.Read(); reader.Read(); reader.Read(); reader.Read(); reader.Read(); JToken.ReadFrom(reader); }); } #if !(SILVERLIGHT || NETFX_CORE || PORTABLE) [Test] public void GetProperties() { JObject o = JObject.Parse("{'prop1':12,'prop2':'hi!','prop3':null,'prop4':[1,2,3]}"); ICustomTypeDescriptor descriptor = o; PropertyDescriptorCollection properties = descriptor.GetProperties(); Assert.AreEqual(4, properties.Count); PropertyDescriptor prop1 = properties[0]; Assert.AreEqual("prop1", prop1.Name); Assert.AreEqual(typeof(long), prop1.PropertyType); Assert.AreEqual(typeof(JObject), prop1.ComponentType); Assert.AreEqual(false, prop1.CanResetValue(o)); Assert.AreEqual(false, prop1.ShouldSerializeValue(o)); PropertyDescriptor prop2 = properties[1]; Assert.AreEqual("prop2", prop2.Name); Assert.AreEqual(typeof(string), prop2.PropertyType); Assert.AreEqual(typeof(JObject), prop2.ComponentType); Assert.AreEqual(false, prop2.CanResetValue(o)); Assert.AreEqual(false, prop2.ShouldSerializeValue(o)); PropertyDescriptor prop3 = properties[2]; Assert.AreEqual("prop3", prop3.Name); Assert.AreEqual(typeof(object), prop3.PropertyType); Assert.AreEqual(typeof(JObject), prop3.ComponentType); Assert.AreEqual(false, prop3.CanResetValue(o)); Assert.AreEqual(false, prop3.ShouldSerializeValue(o)); PropertyDescriptor prop4 = properties[3]; Assert.AreEqual("prop4", prop4.Name); Assert.AreEqual(typeof(JArray), prop4.PropertyType); Assert.AreEqual(typeof(JObject), prop4.ComponentType); Assert.AreEqual(false, prop4.CanResetValue(o)); Assert.AreEqual(false, prop4.ShouldSerializeValue(o)); } #endif [Test] public void ParseEmptyObjectWithComment() { JObject o = JObject.Parse("{ /* A Comment */ }"); Assert.AreEqual(0, o.Count); } [Test] public void FromObjectTimeSpan() { JValue v = (JValue)JToken.FromObject(TimeSpan.FromDays(1)); Assert.AreEqual(v.Value, TimeSpan.FromDays(1)); Assert.AreEqual("1.00:00:00", v.ToString()); } [Test] public void FromObjectUri() { JValue v = (JValue)JToken.FromObject(new Uri("http://www.stuff.co.nz")); Assert.AreEqual(v.Value, new Uri("http://www.stuff.co.nz")); Assert.AreEqual("http://www.stuff.co.nz/", v.ToString()); } [Test] public void FromObjectGuid() { JValue v = (JValue)JToken.FromObject(new Guid("9065ACF3-C820-467D-BE50-8D4664BEAF35")); Assert.AreEqual(v.Value, new Guid("9065ACF3-C820-467D-BE50-8D4664BEAF35")); Assert.AreEqual("9065acf3-c820-467d-be50-8d4664beaf35", v.ToString()); } [Test] public void ParseAdditionalContent() { ExceptionAssert.Throws<JsonReaderException>("Additional text encountered after finished reading JSON content: ,. 
Path '', line 10, position 2.", () => { string json = @"{ ""Name"": ""Apple"", ""Expiry"": new Date(1230422400000), ""Price"": 3.99, ""Sizes"": [ ""Small"", ""Medium"", ""Large"" ] }, 987987"; JObject o = JObject.Parse(json); }); } [Test] public void DeepEqualsIgnoreOrder() { JObject o1 = new JObject( new JProperty("null", null), new JProperty("integer", 1), new JProperty("string", "string!"), new JProperty("decimal", 0.5m), new JProperty("array", new JArray(1, 2))); Assert.IsTrue(o1.DeepEquals(o1)); JObject o2 = new JObject( new JProperty("null", null), new JProperty("string", "string!"), new JProperty("decimal", 0.5m), new JProperty("integer", 1), new JProperty("array", new JArray(1, 2))); Assert.IsTrue(o1.DeepEquals(o2)); JObject o3 = new JObject( new JProperty("null", null), new JProperty("string", "string!"), new JProperty("decimal", 0.5m), new JProperty("integer", 2), new JProperty("array", new JArray(1, 2))); Assert.IsFalse(o1.DeepEquals(o3)); JObject o4 = new JObject( new JProperty("null", null), new JProperty("string", "string!"), new JProperty("decimal", 0.5m), new JProperty("integer", 1), new JProperty("array", new JArray(2, 1))); Assert.IsFalse(o1.DeepEquals(o4)); JObject o5 = new JObject( new JProperty("null", null), new JProperty("string", "string!"), new JProperty("decimal", 0.5m), new JProperty("integer", 1)); Assert.IsFalse(o1.DeepEquals(o5)); Assert.IsFalse(o1.DeepEquals(null)); } } }
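Taken together, the tests above exercise JObject as an ordered IDictionary&lt;string, JToken&gt; with change notification. Below is a short console sketch of that dictionary-style surface (indexer get/set, TryGetValue, Remove, Value&lt;T&gt;), using only APIs the tests themselves call; the JSON text and property names are invented for illustration.

using System;
using Newtonsoft.Json.Linq;

static class JObjectUsageSketch
{
    static void Main()
    {
        JObject o = JObject.Parse(@"{ ""Name"": ""Apple"", ""Price"": 3.99 }");

        // The indexer reads and writes properties like dictionary entries.
        o["Price"] = new JValue(4.25);
        Console.WriteLine((string)o["Name"]);         // Apple
        Console.WriteLine(o.Value<double>("Price"));  // 4.25

        // TryGetValue reports missing keys without throwing.
        JToken expiry;
        Console.WriteLine(o.TryGetValue("Expiry", out expiry));  // False

        // Remove returns whether the property existed.
        Console.WriteLine(o.Remove("Name"));  // True
        Console.WriteLine(o.Remove("Name"));  // False

        // ToString() serializes whatever properties remain (indented by default).
        Console.WriteLine(o.ToString());
    }
}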
/* --------------------------------------------------------------------------- * * Copyright (c) Routrek Networks, Inc. All Rights Reserved.. * * This file is a part of the Granados SSH Client Library that is subject to * the license included in the distributed package. * You may not use this file except in compliance with the license. * * --------------------------------------------------------------------------- * * I implemented this algorithm with reference to following products and books though the algorithm is known publicly. * * MindTerm ( AppGate Network Security ) * * Applied Cryptography ( Bruce Schneier ) */ using System; namespace Routrek.Crypto { public class Blowfish { byte[] IV; byte[] enc; byte[] dec; private const int BLOCK_SIZE = 8; // bytes in a data-block protected uint[] S0; protected uint[] S1; protected uint[] S2; protected uint[] S3; protected uint[] P; public Blowfish() { S0 = new uint[256]; S1 = new uint[256]; S2 = new uint[256]; S3 = new uint[256]; P = new uint[18]; IV = new byte[8]; enc = new byte[8]; dec = new byte[8]; } public void SetIV(byte[] newiv) { Array.Copy(newiv, 0, IV, 0, IV.Length); } public void initializeKey(byte[] key) { int i, j, len = key.Length; uint temp; Array.Copy(blowfish_pbox, 0, P, 0, 18); Array.Copy(blowfish_sbox, 0, S0, 0, 256); Array.Copy(blowfish_sbox, 256, S1, 0, 256); Array.Copy(blowfish_sbox, 512, S2, 0, 256); Array.Copy(blowfish_sbox, 768, S3, 0, 256); for(j = 0, i = 0; i < 16 + 2; i++) { temp = (((uint)(key[j ]) << 24) | ((uint)(key[(j + 1) % len]) << 16) | ((uint)(key[(j + 2) % len]) << 8) | ((uint)(key[(j + 3) % len]))); P[i] = P[i] ^ temp; j = (j + 4) % len; } byte[] LR = new byte[8]; for(i = 0; i < 16 + 2; i += 2) { blockEncrypt(LR, 0, LR, 0); P[i] = CipherUtil.GetIntBE(LR, 0); P[i + 1] = CipherUtil.GetIntBE(LR, 4); } for(j = 0; j < 256; j += 2) { blockEncrypt(LR, 0, LR, 0); S0[j] = CipherUtil.GetIntBE(LR, 0); S0[j + 1] = CipherUtil.GetIntBE(LR, 4); } for(j = 0; j < 256; j += 2) { blockEncrypt(LR, 0, LR, 0); S1[j] = CipherUtil.GetIntBE(LR, 0); S1[j + 1] = CipherUtil.GetIntBE(LR, 4); } for(j = 0; j < 256; j += 2) { blockEncrypt(LR, 0, LR, 0); S2[j] = CipherUtil.GetIntBE(LR, 0); S2[j + 1] = CipherUtil.GetIntBE(LR, 4); } for(j = 0; j < 256; j += 2) { blockEncrypt(LR, 0, LR, 0); S3[j] = CipherUtil.GetIntBE(LR, 0); S3[j + 1] = CipherUtil.GetIntBE(LR, 4); } } public void blockEncrypt(byte[] input, int inOffset, byte[] output, int outOffset) { uint L, R; L = CipherUtil.GetIntBE(input, inOffset); R = CipherUtil.GetIntBE(input, inOffset + 4); L ^= P[0]; R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[1]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[2]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[3]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[4]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[5]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[6]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[7]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 
0xff)]) + S3[(int)(R & 0xff)]) ^ P[8]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[9]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[10]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[11]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[12]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[13]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[14]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[15]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[16]); R ^= P[17]; CipherUtil.PutIntBE(R, output, outOffset); CipherUtil.PutIntBE(L, output, outOffset + 4); } public void blockDecrypt(byte[] input, int inOffset, byte[] output, int outOffset) { uint L, R; L = CipherUtil.GetIntBE(input, inOffset); R = CipherUtil.GetIntBE(input, inOffset + 4); L ^= P[17]; R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[16]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[15]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[14]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[13]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[12]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[11]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[10]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[9]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[8]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[7]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[6]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[5]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[4]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[3]); R ^= ((((S0[(int)((L >> 24) & 0xff)] + S1[(int)((L >> 16) & 0xff)]) ^ S2[(int)((L >> 8) & 0xff)]) + S3[(int)(L & 0xff)]) ^ P[2]); L ^= ((((S0[(int)((R >> 24) & 0xff)] + S1[(int)((R >> 16) & 0xff)]) ^ S2[(int)((R >> 8) & 0xff)]) + S3[(int)(R & 0xff)]) ^ P[1]); R ^= P[0]; CipherUtil.PutIntBE(R, output, outOffset); CipherUtil.PutIntBE(L, 
output, outOffset + 4); } public void encryptSSH1Style(byte[] src, int srcOff, int len, byte[] dest, int destOff) { int end = srcOff + len; int i, j; for(int si = srcOff, di = destOff; si < end; si += 8, di += 8) { for(i = 0; i < 4; i++) { j = 3 - i; IV[i] ^= src[si + j]; IV[i + 4] ^= src[si + 4 + j]; } blockEncrypt(IV, 0, IV, 0); for(i = 0; i < 4; i++) { j = 3 - i; dest[di + i] = IV[j]; dest[di + i + 4] = IV[4 + j]; } } } public void decryptSSH1Style(byte[] src, int srcOff, int len, byte[] dest, int destOff) { int end = srcOff + len; int i, j; for(int si = srcOff, di = destOff; si < end; si += 8, di += 8) { for(i = 0; i < 4; i++) { j = (3 - i); enc[i] = src[si + j]; enc[i + 4] = src[si + 4 + j]; } blockDecrypt(enc, 0, dec, 0); for(i = 0; i < 4; i++) { j = 3 - i; dest[di + i] = (byte)((IV[j] ^ dec[j]) & 0xff); IV[j] = enc[j]; dest[di + i + 4] = (byte)((IV[4 + j] ^ dec[4 + j]) & 0xff); IV[4 + j] = enc[4 + j]; } } } public void encryptCBC(byte[] input, int inputOffset, int inputLen, byte[] output, int outputOffset) { int nBlocks = inputLen / BLOCK_SIZE; for(int bc = 0; bc < nBlocks; bc++) { CipherUtil.BlockXor(input, inputOffset, BLOCK_SIZE, IV, 0); blockEncrypt(IV, 0, output, outputOffset); Array.Copy(output, outputOffset, IV, 0, BLOCK_SIZE); inputOffset += BLOCK_SIZE; outputOffset += BLOCK_SIZE; } } public void decryptCBC(byte[] input, int inputOffset, int inputLen, byte[] output, int outputOffset) { byte[] tmpBlk = new byte[BLOCK_SIZE]; int nBlocks = inputLen / BLOCK_SIZE; for(int bc = 0; bc < nBlocks; bc++) { blockDecrypt(input, inputOffset, tmpBlk, 0); for(int i = 0; i < BLOCK_SIZE; i++) { tmpBlk[i] ^= IV[i]; IV[i] = input[inputOffset + i]; output[outputOffset + i] = tmpBlk[i]; } inputOffset += BLOCK_SIZE; outputOffset += BLOCK_SIZE; } } private static readonly uint[] blowfish_pbox = new uint[] { 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b }; private static readonly uint[] blowfish_sbox = new uint[] { 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 
0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 
0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 
0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6 }; } }
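// ---------------------------------------------------------------------------
// Illustrative note (not part of the original cipher source): the encryptCBC /
// decryptCBC methods above implement standard CBC chaining -- each plaintext
// block is XORed into the running IV, the block cipher is applied, and the
// resulting ciphertext block becomes the IV for the next block. The sketch
// below shows that chaining in isolation, assuming a hypothetical 8-byte block
// function; it is not the class's API, just a minimal stand-alone illustration.
// ---------------------------------------------------------------------------
using System;

static class CbcChainingSketch
{
    public delegate void BlockFunc(byte[] input, int inOff, byte[] output, int outOff);
    const int BlockSize = 8; // Blowfish operates on 8-byte blocks

    // Encrypts input (a whole number of blocks) into output in CBC mode,
    // mirroring the structure of encryptCBC above.
    public static void EncryptCbc(BlockFunc encryptBlock, byte[] iv, byte[] input, byte[] output)
    {
        for (int off = 0; off + BlockSize <= input.Length; off += BlockSize)
        {
            for (int i = 0; i < BlockSize; i++)
                iv[i] ^= input[off + i];                 // chain: IV ^= plaintext block
            encryptBlock(iv, 0, output, off);            // C = E_k(IV ^ P)
            Array.Copy(output, off, iv, 0, BlockSize);   // ciphertext feeds the next IV
        }
    }

    static void Main()
    {
        // Stand-in "cipher" (a plain block copy) so the sketch runs without a key schedule.
        BlockFunc copyBlock = (src, s, dst, d) => Array.Copy(src, s, dst, d, BlockSize);
        byte[] iv = new byte[BlockSize];
        byte[] plain = { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 };
        byte[] cipher = new byte[plain.Length];
        EncryptCbc(copyBlock, iv, plain, cipher);
        Console.WriteLine(BitConverter.ToString(cipher)); // second block differs from the first: chaining
    }
}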
//----------------------------------------------------------------------- // <copyright file="ReadOnlyBindingListBase.cs" company="Marimer LLC"> // Copyright (c) Marimer LLC. All rights reserved. // Website: http://www.lhotka.net/cslanet/ // </copyright> // <summary>This is the base class from which readonly collections</summary> //----------------------------------------------------------------------- #if NETFX_CORE using System; namespace Csla { /// <summary> /// This is the base class from which readonly collections /// of readonly objects should be derived. /// </summary> /// <typeparam name="T">Type of the list class.</typeparam> /// <typeparam name="C">Type of child objects contained in the list.</typeparam> [Serializable] public abstract class ReadOnlyBindingListBase<T, C> : ReadOnlyListBase<T, C> where T : ReadOnlyBindingListBase<T, C> { } } #else using System; using System.Collections.Generic; using System.ComponentModel; using System.Linq.Expressions; using Csla.Properties; namespace Csla { /// <summary> /// This is the base class from which readonly collections /// of readonly objects should be derived. /// </summary> /// <typeparam name="T">Type of the list class.</typeparam> /// <typeparam name="C">Type of child objects contained in the list.</typeparam> [System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")] [Serializable()] public abstract class ReadOnlyBindingListBase<T, C> : Core.ReadOnlyBindingList<C>, Csla.Core.IReadOnlyCollection, ICloneable, Server.IDataPortalTarget where T : ReadOnlyBindingListBase<T, C> { #region Constructors /// <summary> /// Creates an instance of the object. /// </summary> protected ReadOnlyBindingListBase() { Initialize(); } #endregion #region Initialize /// <summary> /// Override this method to set up event handlers so user /// code in a partial class can respond to events raised by /// generated code. /// </summary> protected virtual void Initialize() { /* allows subclass to initialize events before any other activity occurs */ } #endregion #region ICloneable object ICloneable.Clone() { return GetClone(); } /// <summary> /// Creates a clone of the object. /// </summary> /// <returns>A new object containing the exact data of the original object.</returns> [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual object GetClone() { return Core.ObjectCloner.Clone(this); } /// <summary> /// Creates a clone of the object. /// </summary> /// <returns> /// A new object containing the exact data of the original object. /// </returns> public T Clone() { return (T)GetClone(); } #endregion #region Data Access [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "criteria")] private void DataPortal_Create(object criteria) { throw new NotSupportedException(Resources.CreateNotSupportedException); } /// <summary> /// Override this method to allow retrieval of an existing business /// object based on data in the database. 
/// </summary> /// <param name="criteria">An object containing criteria values to identify the object.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] protected virtual void DataPortal_Fetch(object criteria) { throw new NotSupportedException(Resources.FetchNotSupportedException); } private void DataPortal_Update() { throw new NotSupportedException(Resources.UpdateNotSupportedException); } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "criteria")] private void DataPortal_Delete(object criteria) { throw new NotSupportedException(Resources.DeleteNotSupportedException); } /// <summary> /// Called by the server-side DataPortal prior to calling the /// requested DataPortal_xyz method. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void DataPortal_OnDataPortalInvoke(DataPortalEventArgs e) { } /// <summary> /// Called by the server-side DataPortal after calling the /// requested DataPortal_xyz method. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void DataPortal_OnDataPortalInvokeComplete(DataPortalEventArgs e) { } /// <summary> /// Called by the server-side DataPortal if an exception /// occurs during data access. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> /// <param name="ex">The Exception thrown during data access.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void DataPortal_OnDataPortalException(DataPortalEventArgs e, Exception ex) { } /// <summary> /// Called by the server-side DataPortal prior to calling the /// requested DataPortal_XYZ method. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void Child_OnDataPortalInvoke(DataPortalEventArgs e) { } /// <summary> /// Called by the server-side DataPortal after calling the /// requested DataPortal_XYZ method. /// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void Child_OnDataPortalInvokeComplete(DataPortalEventArgs e) { } /// <summary> /// Called by the server-side DataPortal if an exception /// occurs during data access. 
/// </summary> /// <param name="e">The DataPortalContext object passed to the DataPortal.</param> /// <param name="ex">The Exception thrown during data access.</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1707:IdentifiersShouldNotContainUnderscores", MessageId = "Member")] [EditorBrowsable(EditorBrowsableState.Advanced)] protected virtual void Child_OnDataPortalException(DataPortalEventArgs e, Exception ex) { } #endregion #region ToArray /// <summary> /// Get an array containing all items in the list. /// </summary> public C[] ToArray() { List<C> result = new List<C>(); foreach (C item in this) result.Add(item); return result.ToArray(); } #endregion #region IDataPortalTarget Members void Csla.Server.IDataPortalTarget.CheckRules() { } void Csla.Server.IDataPortalTarget.MarkAsChild() { } void Csla.Server.IDataPortalTarget.MarkNew() { } void Csla.Server.IDataPortalTarget.MarkOld() { } void Csla.Server.IDataPortalTarget.DataPortal_OnDataPortalInvoke(DataPortalEventArgs e) { this.DataPortal_OnDataPortalInvoke(e); } void Csla.Server.IDataPortalTarget.DataPortal_OnDataPortalInvokeComplete(DataPortalEventArgs e) { this.DataPortal_OnDataPortalInvokeComplete(e); } void Csla.Server.IDataPortalTarget.DataPortal_OnDataPortalException(DataPortalEventArgs e, Exception ex) { this.DataPortal_OnDataPortalException(e, ex); } void Csla.Server.IDataPortalTarget.Child_OnDataPortalInvoke(DataPortalEventArgs e) { this.Child_OnDataPortalInvoke(e); } void Csla.Server.IDataPortalTarget.Child_OnDataPortalInvokeComplete(DataPortalEventArgs e) { this.Child_OnDataPortalInvokeComplete(e); } void Csla.Server.IDataPortalTarget.Child_OnDataPortalException(DataPortalEventArgs e, Exception ex) { this.Child_OnDataPortalException(e, ex); } #endregion } } #endif
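// ---------------------------------------------------------------------------
// Illustrative note (not part of the original CSLA source): a concrete read-only
// collection derives from ReadOnlyBindingListBase<T, C> and loads its items in
// DataPortal_Fetch. The sketch below is hypothetical -- CustomerList, CustomerInfo
// and the inline data are invented -- and it assumes the usual CSLA pattern where
// the base list exposes a protected-settable IsReadOnly flag and BindingList's
// RaiseListChangedEvents property; in a real application the list would be
// retrieved through DataPortal.Fetch rather than constructed directly.
// ---------------------------------------------------------------------------
using System;
using Csla;

[Serializable]
public class CustomerInfo
{
  public string Name { get; internal set; }
}

[Serializable]
public class CustomerList : ReadOnlyBindingListBase<CustomerList, CustomerInfo>
{
  public static CustomerList GetList()
  {
    // Normally: return DataPortal.Fetch<CustomerList>(); instantiated directly
    // here only to keep the sketch self-contained.
    var list = new CustomerList();
    list.DataPortal_Fetch(null);
    return list;
  }

  protected override void DataPortal_Fetch(object criteria)
  {
    RaiseListChangedEvents = false;  // avoid per-item change notifications while loading
    IsReadOnly = false;              // temporarily unlock the list (assumed CSLA idiom)
    Add(new CustomerInfo { Name = "Example customer" });
    IsReadOnly = true;               // lock it again before handing it to callers
    RaiseListChangedEvents = true;
  }
}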
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput; using Directory = Lucene.Net.Store.Directory; using IndexInput = Lucene.Net.Store.IndexInput; namespace Lucene.Net.Index { /// <version> $Id: TermVectorsReader.java 687046 2008-08-19 13:01:11Z mikemccand $ /// </version> public class TermVectorsReader : System.ICloneable { // NOTE: if you make a new format, it must be larger than // the current format internal const int FORMAT_VERSION = 2; // Changes to speed up bulk merging of term vectors: internal const int FORMAT_VERSION2 = 3; // Changed strings to UTF8 with length-in-bytes not length-in-chars internal const int FORMAT_UTF8_LENGTH_IN_BYTES = 4; // NOTE: always change this if you switch to a new format! internal static readonly int FORMAT_CURRENT = FORMAT_UTF8_LENGTH_IN_BYTES; //The size in bytes that the FORMAT_VERSION will take up at the beginning of each file internal const int FORMAT_SIZE = 4; internal const byte STORE_POSITIONS_WITH_TERMVECTOR = (byte) (0x1); internal const byte STORE_OFFSET_WITH_TERMVECTOR = (byte) (0x2); private FieldInfos fieldInfos; private IndexInput tvx; private IndexInput tvd; private IndexInput tvf; private int size; private int numTotalDocs; // The docID offset where our docs begin in the index // file. This will be 0 if we have our own private file. private int docStoreOffset; private int format; public /*internal*/ TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos):this(d, segment, fieldInfos, BufferedIndexInput.BUFFER_SIZE) { } internal TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos, int readBufferSize):this(d, segment, fieldInfos, readBufferSize, - 1, 0) { } internal TermVectorsReader(Directory d, System.String segment, FieldInfos fieldInfos, int readBufferSize, int docStoreOffset, int size) { bool success = false; try { if (d.FileExists(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION)) { tvx = d.OpenInput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION, readBufferSize); format = CheckValidFormat(tvx); tvd = d.OpenInput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION, readBufferSize); int tvdFormat = CheckValidFormat(tvd); tvf = d.OpenInput(segment + "." 
+ IndexFileNames.VECTORS_FIELDS_EXTENSION, readBufferSize); int tvfFormat = CheckValidFormat(tvf); System.Diagnostics.Debug.Assert(format == tvdFormat); System.Diagnostics.Debug.Assert(format == tvfFormat); if (format >= FORMAT_VERSION2) { System.Diagnostics.Debug.Assert((tvx.Length() - FORMAT_SIZE) % 16 == 0); numTotalDocs = (int)(tvx.Length() >> 4); } else { System.Diagnostics.Debug.Assert((tvx.Length() - FORMAT_SIZE) % 8 == 0); numTotalDocs = (int)(tvx.Length() >> 3); } if (-1 == docStoreOffset) { this.docStoreOffset = 0; this.size = numTotalDocs; System.Diagnostics.Debug.Assert(size == 0 || numTotalDocs == size); } else { this.docStoreOffset = docStoreOffset; this.size = size; // Verify the file is long enough to hold all of our // docs System.Diagnostics.Debug.Assert(numTotalDocs >= size + docStoreOffset, "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset); } } else { // If all documents flushed in a segment had hit // non-aborting exceptions, it's possible that // FieldInfos.hasVectors returns true yet the term // vector files don't exist. format = 0; } this.fieldInfos = fieldInfos; success = true; } finally { // With lock-less commits, it's entirely possible (and // fine) to hit a FileNotFound exception above. In // this case, we want to explicitly close any subset // of things that were opened so that we don't have to // wait for a GC to do so. if (!success) { Close(); } } } // Used for bulk copy when merging internal virtual IndexInput GetTvdStream() { return tvd; } // Used for bulk copy when merging internal virtual IndexInput GetTvfStream() { return tvf; } private void SeekTvx(int docNum) { if (format < FORMAT_VERSION2) tvx.Seek((docNum + docStoreOffset) * 8L + FORMAT_SIZE); else tvx.Seek((docNum + docStoreOffset) * 16L + FORMAT_SIZE); } internal virtual bool CanReadRawDocs() { return format >= FORMAT_UTF8_LENGTH_IN_BYTES; } /// <summary>Retrieve the length (in bytes) of the tvd and tvf /// entries for the next numDocs starting with /// startDocID. This is used for bulk copying when /// merging segments, if the field numbers are /// congruent. Once this returns, the tvf &amp; tvd streams /// are seeked to the startDocID. /// </summary> internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int numDocs) { if (tvx == null) { for (int i = 0; i < tvdLengths.Length; i++) { tvdLengths[i] = 0; } for (int i = 0; i < tvfLengths.Length; i++) { tvfLengths[i] = 0; } return ; } // SegmentMerger calls canReadRawDocs() first and should // not call us if that returns false. 
if (format < FORMAT_VERSION2) throw new System.SystemException("cannot read raw docs with older term vector formats"); SeekTvx(startDocID); long tvdPosition = tvx.ReadLong(); tvd.Seek(tvdPosition); long tvfPosition = tvx.ReadLong(); tvf.Seek(tvfPosition); long lastTvdPosition = tvdPosition; long lastTvfPosition = tvfPosition; int count = 0; while (count < numDocs) { int docID = docStoreOffset + startDocID + count + 1; System.Diagnostics.Debug.Assert(docID <= numTotalDocs); if (docID < numTotalDocs) { tvdPosition = tvx.ReadLong(); tvfPosition = tvx.ReadLong(); } else { tvdPosition = tvd.Length(); tvfPosition = tvf.Length(); System.Diagnostics.Debug.Assert(count == numDocs - 1); } tvdLengths[count] = (int) (tvdPosition - lastTvdPosition); tvfLengths[count] = (int) (tvfPosition - lastTvfPosition); count++; lastTvdPosition = tvdPosition; lastTvfPosition = tvfPosition; } } private int CheckValidFormat(IndexInput in_Renamed) { int format = in_Renamed.ReadInt(); if (format > FORMAT_CURRENT) { throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FORMAT_CURRENT + " or less"); } return format; } internal virtual void Close() { // make all effort to close up. Keep the first exception // and throw it as a new one. System.IO.IOException keep = null; if (tvx != null) try { tvx.Close(); } catch (System.IO.IOException e) { if (keep == null) keep = e; } if (tvd != null) try { tvd.Close(); } catch (System.IO.IOException e) { if (keep == null) keep = e; } if (tvf != null) try { tvf.Close(); } catch (System.IO.IOException e) { if (keep == null) keep = e; } if (keep != null) { throw new System.IO.IOException(keep.StackTrace); } } /// <summary> </summary> /// <returns> The number of documents in the reader /// </returns> internal virtual int Size() { return size; } public virtual void Get(int docNum, System.String field, TermVectorMapper mapper) { if (tvx != null) { int fieldNumber = fieldInfos.FieldNumber(field); //We need to account for the FORMAT_SIZE at when seeking in the tvx //We don't need to do this in other seeks because we already have the // file pointer //that was written in another file SeekTvx(docNum); //System.out.println("TVX Pointer: " + tvx.getFilePointer()); long tvdPosition = tvx.ReadLong(); tvd.Seek(tvdPosition); int fieldCount = tvd.ReadVInt(); //System.out.println("Num Fields: " + fieldCount); // There are only a few fields per document. We opt for a full scan // rather then requiring that they be ordered. We need to read through // all of the fields anyway to get to the tvf pointers. 
int number = 0; int found = - 1; for (int i = 0; i < fieldCount; i++) { if (format >= FORMAT_VERSION) number = tvd.ReadVInt(); else number += tvd.ReadVInt(); if (number == fieldNumber) found = i; } // This field, although valid in the segment, was not found in this // document if (found != - 1) { // Compute position in the tvf file long position; if (format >= FORMAT_VERSION2) position = tvx.ReadLong(); else position = tvd.ReadVLong(); for (int i = 1; i <= found; i++) position += tvd.ReadVLong(); mapper.SetDocumentNumber(docNum); ReadTermVector(field, position, mapper); } else { //System.out.println("Fieldable not found"); } } else { //System.out.println("No tvx file"); } } /// <summary> Retrieve the term vector for the given document and field</summary> /// <param name="docNum">The document number to retrieve the vector for /// </param> /// <param name="field">The field within the document to retrieve /// </param> /// <returns> The TermFreqVector for the document and field or null if there is no termVector for this field. /// </returns> /// <throws> IOException if there is an error reading the term vector files </throws> public /*internal*/ virtual TermFreqVector Get(int docNum, System.String field) { // Check if no term vectors are available for this segment at all ParallelArrayTermVectorMapper mapper = new ParallelArrayTermVectorMapper(); Get(docNum, field, mapper); return mapper.MaterializeVector(); } // Reads the String[] fields; you have to pre-seek tvd to // the right point private System.String[] ReadFields(int fieldCount) { int number = 0; System.String[] fields = new System.String[fieldCount]; for (int i = 0; i < fieldCount; i++) { if (format >= FORMAT_VERSION) number = tvd.ReadVInt(); else number += tvd.ReadVInt(); fields[i] = fieldInfos.FieldName(number); } return fields; } // Reads the long[] offsets into TVF; you have to pre-seek // tvx/tvd to the right point private long[] ReadTvfPointers(int fieldCount) { // Compute position in the tvf file long position; if (format >= FORMAT_VERSION2) position = tvx.ReadLong(); else position = tvd.ReadVLong(); long[] tvfPointers = new long[fieldCount]; tvfPointers[0] = position; for (int i = 1; i < fieldCount; i++) { position += tvd.ReadVLong(); tvfPointers[i] = position; } return tvfPointers; } /// <summary> Return all term vectors stored for this document or null if the could not be read in. 
/// /// </summary> /// <param name="docNum">The document number to retrieve the vector for /// </param> /// <returns> All term frequency vectors /// </returns> /// <throws> IOException if there is an error reading the term vector files </throws> public /*internal*/ virtual TermFreqVector[] Get(int docNum) { TermFreqVector[] result = null; if (tvx != null) { //We need to offset by SeekTvx(docNum); long tvdPosition = tvx.ReadLong(); tvd.Seek(tvdPosition); int fieldCount = tvd.ReadVInt(); // No fields are vectorized for this document if (fieldCount != 0) { System.String[] fields = ReadFields(fieldCount); long[] tvfPointers = ReadTvfPointers(fieldCount); result = ReadTermVectors(docNum, fields, tvfPointers); } } else { //System.out.println("No tvx file"); } return result; } public virtual void Get(int docNumber, TermVectorMapper mapper) { // Check if no term vectors are available for this segment at all if (tvx != null) { //We need to offset by SeekTvx(docNumber); long tvdPosition = tvx.ReadLong(); tvd.Seek(tvdPosition); int fieldCount = tvd.ReadVInt(); // No fields are vectorized for this document if (fieldCount != 0) { System.String[] fields = ReadFields(fieldCount); long[] tvfPointers = ReadTvfPointers(fieldCount); mapper.SetDocumentNumber(docNumber); ReadTermVectors(fields, tvfPointers, mapper); } } else { //System.out.println("No tvx file"); } } private SegmentTermVector[] ReadTermVectors(int docNum, System.String[] fields, long[] tvfPointers) { SegmentTermVector[] res = new SegmentTermVector[fields.Length]; for (int i = 0; i < fields.Length; i++) { ParallelArrayTermVectorMapper mapper = new ParallelArrayTermVectorMapper(); mapper.SetDocumentNumber(docNum); ReadTermVector(fields[i], tvfPointers[i], mapper); res[i] = (SegmentTermVector) mapper.MaterializeVector(); } return res; } private void ReadTermVectors(System.String[] fields, long[] tvfPointers, TermVectorMapper mapper) { for (int i = 0; i < fields.Length; i++) { ReadTermVector(fields[i], tvfPointers[i], mapper); } } /// <summary> </summary> /// <param name="field">The field to read in /// </param> /// <param name="tvfPointer">The pointer within the tvf file where we should start reading /// </param> /// <param name="mapper">The mapper used to map the TermVector /// </param> /// <throws> IOException </throws> private void ReadTermVector(System.String field, long tvfPointer, TermVectorMapper mapper) { // Now read the data from specified position //We don't need to offset by the FORMAT here since the pointer already includes the offset tvf.Seek(tvfPointer); int numTerms = tvf.ReadVInt(); //System.out.println("Num Terms: " + numTerms); // If no terms - return a constant empty termvector. However, this should never occur! 
if (numTerms == 0) return ; bool storePositions; bool storeOffsets; if (format >= FORMAT_VERSION) { byte bits = tvf.ReadByte(); storePositions = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0; storeOffsets = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0; } else { tvf.ReadVInt(); storePositions = false; storeOffsets = false; } mapper.SetExpectations(field, numTerms, storeOffsets, storePositions); int start = 0; int deltaLength = 0; int totalLength = 0; byte[] byteBuffer; char[] charBuffer; bool preUTF8 = format < FORMAT_UTF8_LENGTH_IN_BYTES; // init the buffers if (preUTF8) { charBuffer = new char[10]; byteBuffer = null; } else { charBuffer = null; byteBuffer = new byte[20]; } for (int i = 0; i < numTerms; i++) { start = tvf.ReadVInt(); deltaLength = tvf.ReadVInt(); totalLength = start + deltaLength; System.String term; if (preUTF8) { // Term stored as java chars if (charBuffer.Length < totalLength) { char[] newCharBuffer = new char[(int) (1.5 * totalLength)]; Array.Copy(charBuffer, 0, newCharBuffer, 0, start); charBuffer = newCharBuffer; } tvf.ReadChars(charBuffer, start, deltaLength); term = new System.String(charBuffer, 0, totalLength); } else { // Term stored as utf8 bytes if (byteBuffer.Length < totalLength) { byte[] newByteBuffer = new byte[(int) (1.5 * totalLength)]; Array.Copy(byteBuffer, 0, newByteBuffer, 0, start); byteBuffer = newByteBuffer; } tvf.ReadBytes(byteBuffer, start, deltaLength); term = System.Text.Encoding.UTF8.GetString(byteBuffer, 0, totalLength); } int freq = tvf.ReadVInt(); int[] positions = null; if (storePositions) { //read in the positions //does the mapper even care about positions? if (mapper.IsIgnoringPositions() == false) { positions = new int[freq]; int prevPosition = 0; for (int j = 0; j < freq; j++) { positions[j] = prevPosition + tvf.ReadVInt(); prevPosition = positions[j]; } } else { //we need to skip over the positions. Since these are VInts, I don't believe there is anyway to know for sure how far to skip // for (int j = 0; j < freq; j++) { tvf.ReadVInt(); } } } TermVectorOffsetInfo[] offsets = null; if (storeOffsets) { //does the mapper even care about offsets? 
if (mapper.IsIgnoringOffsets() == false) { offsets = new TermVectorOffsetInfo[freq]; int prevOffset = 0; for (int j = 0; j < freq; j++) { int startOffset = prevOffset + tvf.ReadVInt(); int endOffset = startOffset + tvf.ReadVInt(); offsets[j] = new TermVectorOffsetInfo(startOffset, endOffset); prevOffset = endOffset; } } else { for (int j = 0; j < freq; j++) { tvf.ReadVInt(); tvf.ReadVInt(); } } } mapper.Map(term, freq, offsets, positions); } } public virtual System.Object Clone() { TermVectorsReader clone = (TermVectorsReader) base.MemberwiseClone(); // These are null when a TermVectorsReader was created // on a segment that did not have term vectors saved if (tvx != null && tvd != null && tvf != null) { clone.tvx = (IndexInput) tvx.Clone(); clone.tvd = (IndexInput) tvd.Clone(); clone.tvf = (IndexInput) tvf.Clone(); } return clone; } } /// <summary> Models the existing parallel array structure</summary> class ParallelArrayTermVectorMapper:TermVectorMapper { private System.String[] terms; private int[] termFreqs; private int[][] positions; private TermVectorOffsetInfo[][] offsets; private int currentPosition; private bool storingOffsets; private bool storingPositions; private System.String field; public override void SetExpectations(System.String field, int numTerms, bool storeOffsets, bool storePositions) { this.field = field; terms = new System.String[numTerms]; termFreqs = new int[numTerms]; this.storingOffsets = storeOffsets; this.storingPositions = storePositions; if (storePositions) this.positions = new int[numTerms][]; if (storeOffsets) this.offsets = new TermVectorOffsetInfo[numTerms][]; } public override void Map(System.String term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions) { terms[currentPosition] = term; termFreqs[currentPosition] = frequency; if (storingOffsets) { this.offsets[currentPosition] = offsets; } if (storingPositions) { this.positions[currentPosition] = positions; } currentPosition++; } /// <summary> Construct the vector</summary> /// <returns> The {@link TermFreqVector} based on the mappings. /// </returns> public virtual TermFreqVector MaterializeVector() { SegmentTermVector tv = null; if (field != null && terms != null) { if (storingPositions || storingOffsets) { tv = new SegmentTermPositionVector(field, terms, termFreqs, positions, offsets); } else { tv = new SegmentTermVector(field, terms, termFreqs); } } return tv; } } }
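// ---------------------------------------------------------------------------
// Illustrative note (not part of the original Lucene.Net source): a minimal
// sketch of reading back a stored term vector with the reader defined above.
// The segment name ("_0") and field name ("contents") are hypothetical, and
// the FieldInfos instance is assumed to have been loaded elsewhere (normally
// from the segment's field-info file). Close() is internal to the index code,
// so stream lifetime is left to the owning SegmentReader in real use; this
// sketch only shows the lookup path.
// ---------------------------------------------------------------------------
using System;
using Lucene.Net.Index;
using Lucene.Net.Store;

class TermVectorLookupSketch
{
    static void DumpVector(Directory dir, FieldInfos fieldInfos)
    {
        TermVectorsReader reader = new TermVectorsReader(dir, "_0", fieldInfos);

        // Fetch the stored vector for field "contents" of document 0; null means
        // no term vector was stored for that document/field.
        TermFreqVector vector = reader.Get(0, "contents");
        if (vector == null)
        {
            Console.WriteLine("no term vector stored");
            return;
        }

        string[] terms = vector.GetTerms();
        int[] freqs = vector.GetTermFrequencies();
        for (int i = 0; i < terms.Length; i++)
            Console.WriteLine("{0}\t{1}", terms[i], freqs[i]);
    }
}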
using System; using System.Collections; using System.IO; using System.Text; using NUnit.Framework; using Org.BouncyCastle.Crypto; using Org.BouncyCastle.Crypto.Generators; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Security; using Org.BouncyCastle.Utilities.Encoders; using Org.BouncyCastle.Utilities.Test; namespace Org.BouncyCastle.Bcpg.OpenPgp.Tests { [TestFixture] public class PgpDsaTest : SimpleTest { private static readonly byte[] testPubKey = Base64.Decode( "mQGiBD9HBzURBACzkxRCVGJg5+Ld9DU4Xpnd4LCKgMq7YOY7Gi0EgK92gbaa6+zQ" + "oQFqz1tt3QUmpz3YVkm/zLESBBtC1ACIXGggUdFMUr5I87+1Cb6vzefAtGt8N5VV" + "1F/MXv1gJz4Bu6HyxL/ncfe71jsNhav0i4yAjf2etWFj53zK6R+Ojg5H6wCgpL9/" + "tXVfGP8SqFvyrN/437MlFSUEAIN3V6j/MUllyrZglrtr2+RWIwRrG/ACmrF6hTug" + "Ol4cQxaDYNcntXbhlTlJs9MxjTH3xxzylyirCyq7HzGJxZzSt6FTeh1DFYzhJ7Qu" + "YR1xrSdA6Y0mUv0ixD5A4nPHjupQ5QCqHGeRfFD/oHzD4zqBnJp/BJ3LvQ66bERJ" + "mKl5A/4uj3HoVxpb0vvyENfRqKMmGBISycY4MoH5uWfb23FffsT9r9KL6nJ4syLz" + "aRR0gvcbcjkc9Z3epI7gr3jTrb4d8WPxsDbT/W1tv9bG/EHawomLcihtuUU68Uej" + "6/wZot1XJqu2nQlku57+M/V2X1y26VKsipolPfja4uyBOOyvbLQzRXJpYyBFY2hp" + "ZG5hIChEU0EgVGVzdCBLZXkpIDxlcmljQGJvdW5jeWNhc3RsZS5vcmc+iFkEExEC" + "ABkFAj9HBzUECwcDAgMVAgMDFgIBAh4BAheAAAoJEM0j9enEyjRDAlwAn2rrom0s" + "MhufWK5vIRwg7gj5qsLEAJ4vnT5dPBVblofsG+pDoCVeJXGGng=="); private static readonly byte[] testPrivKey = Base64.Decode( "lQHhBD9HBzURBACzkxRCVGJg5+Ld9DU4Xpnd4LCKgMq7YOY7Gi0EgK92gbaa6+zQ" + "oQFqz1tt3QUmpz3YVkm/zLESBBtC1ACIXGggUdFMUr5I87+1Cb6vzefAtGt8N5VV" + "1F/MXv1gJz4Bu6HyxL/ncfe71jsNhav0i4yAjf2etWFj53zK6R+Ojg5H6wCgpL9/" + "tXVfGP8SqFvyrN/437MlFSUEAIN3V6j/MUllyrZglrtr2+RWIwRrG/ACmrF6hTug" + "Ol4cQxaDYNcntXbhlTlJs9MxjTH3xxzylyirCyq7HzGJxZzSt6FTeh1DFYzhJ7Qu" + "YR1xrSdA6Y0mUv0ixD5A4nPHjupQ5QCqHGeRfFD/oHzD4zqBnJp/BJ3LvQ66bERJ" + "mKl5A/4uj3HoVxpb0vvyENfRqKMmGBISycY4MoH5uWfb23FffsT9r9KL6nJ4syLz" + "aRR0gvcbcjkc9Z3epI7gr3jTrb4d8WPxsDbT/W1tv9bG/EHawomLcihtuUU68Uej" + "6/wZot1XJqu2nQlku57+M/V2X1y26VKsipolPfja4uyBOOyvbP4DAwIDIBTxWjkC" + "GGAWQO2jy9CTvLHJEoTO7moHrp1FxOVpQ8iJHyRqZzLllO26OzgohbiPYz8u9qCu" + "lZ9Xn7QzRXJpYyBFY2hpZG5hIChEU0EgVGVzdCBLZXkpIDxlcmljQGJvdW5jeWNh" + "c3RsZS5vcmc+iFkEExECABkFAj9HBzUECwcDAgMVAgMDFgIBAh4BAheAAAoJEM0j" + "9enEyjRDAlwAnjTjjt57NKIgyym7OTCwzIU3xgFpAJ0VO5m5PfQKmGJRhaewLSZD" + "4nXkHg=="); private static readonly byte[] testPrivKey2 = Base64.Decode( "lQHhBEAnoewRBADRvKgDhbV6pMzqYfUgBsLxSHzmycpuxGbjMrpyKHDOEemj" + "iQb6TyyBKUoR28/pfshFP9R5urtKIT7wjVrDuOkxYkgRhNm+xmPXW2Lw3D++" + "MQrC5VWe8ywBltz6T9msmChsaKo2hDhIiRI/mg9Q6rH9pJKtVGi4R7CgGxM2" + "STQ5fwCgub38qGS1W2O4hUsa+3gva5gaNZUEAItegda4/H4t88XdWxW3D8pv" + "RnFz26/ADdImVaQlBoumD15VmcgYoT1Djizey7X8vfV+pntudESzLbn3GHlI" + "6C09seH4e8eYP63t7KU/qbUCDomlSswd1OgQ/RxfN86q765K2t3K1i3wDSxe" + "EgSRyGKee0VNvOBFOFhuWt+patXaBADE1riNkUxg2P4lBNWwu8tEZRmsl/Ys" + "DBIzXBshoMzZCvS5PnNXMW4G3SAaC9OC9jvKSx9IEWhKjfjs3QcWzXR28mcm" + "5na0bTxeOMlaPPhBdkTCmFl0IITWlH/pFlR2ah9WYoWYhZEL2tqB82wByzxH" + "SkSeD9V5oeSCdCcqiqkEmv4DAwLeNsQ2XGJVRmA4lld+CR5vRxpT/+/2xklp" + "lxVf/nx0+thrHDpro3u/nINIIObk0gh59+zaEEe3APlHqbQVYWFhIGJiYiA8" + "Y2NjQGRkZC5lZWU+iFoEExECABoFAkAnoewFCwcDAgEDFQIDAxYCAQIeAQIX" + "gAAKCRA5nBpCS63az85BAKCbPfU8ATrFvkXhzGNGlc1BJo6DWQCgnK125xVK" + "lWLpt6ZJJ7TXcx3nkm6wAgAAnQFXBEAnoe0QBACsQxPvaeBcv2TkbgU/5Wc/" + "tO222dPE1mxFbXjGTKfb+6ge96iyD8kTRLrKCkEEeVBa8AZqMSoXUVN6tV8j" + "/zD8Bc76o5iJ6wgpg3Mmy2GxInVfsfZN6/G3Y2ukmouz+CDNvQdUw8cTguIb" + "QoV3XhQ03MLbfVmNcHsku9F4CuKNWwADBQP0DSSe8v5PXF9CSCXOIxBDcQ5x" + "RKjyYOveqoH/4lbOV0YNUbIDZq4RaUdotpADuPREFmWf0zTB6KV/WIiag8XU" + 
"WU9zdDvLKR483Bo6Do5pDBcN+NqfQ+ntGY9WJ7BSFnhQ3+07i1K+NsfFTRfv" + "hf9X3MP75rCf7MxAIWHTabEmUf4DAwLeNsQ2XGJVRmA8DssBUCghogG9n8T3" + "qfBeKsplGyCcF+JjPeQXkKQaoYGJ0aJz36qFP9d8DuWtT9soQcqIxVf6mTa8" + "kN1594hGBBgRAgAGBQJAJ6HtAAoJEDmcGkJLrdrPpMkAnRyjQSKugz0YJqOB" + "yGasMLQLxd2OAKCEIlhtCarlufVQNGZsuWxHVbU8crACAAA="); private static readonly byte[] sig1 = Base64.Decode( "owGbwMvMwCR4VvnryyOnTJwZ10gncZSkFpfolVSU2Ltz78hIzcnJVyjPL8pJUeTq" + "sGdmZQCJwpQLMq3ayTA/0Fj3xf4jbwPfK/H3zj55Z9L1n2k/GOapKJrvMZ4tLiCW" + "GtP/XeDqX4fORDUA"); private static readonly byte[] sig1crc = Base64.Decode("OZa/"); private static readonly byte[] testPubWithUserAttr = Base64.Decode( "mQGiBD2Rqv0RBADqKCkhVEtB/lEEr/9CubuHEy2oN/yU5j+2GXSdcNdVnRI/rwFy" + "fHEQIk3uU7zHSUKFrC59yDm0sODYyjEdE3BVb0xvEJ5LE/OdndcIMXT1DungZ1vB" + "zIK/3lr33W/PHixYxv9jduH3WrTehBpiKkgMZp8XloSFj2Cnw9LDyfqB7QCg/8K1" + "o2k75NkOd9ZjnA9ye7Ri3bEEAKyr61Mo7viPWBK1joWAEsxG0OBWM+iSlG7kwh31" + "8efgC/7Os6x4Y0jzs8mpcbBjeZtZjS9lRbfp7RinhF269xL0TZ3JxIdtaAV/6yDQ" + "9NXfZY9dskN++HIR/5GCEEgq/qTJZt6ti5k7aV19ZFfO6wiK3NUy08wOrVsdOkVE" + "w9IcBADaplhpcel3201uU3OCboogJtw81R5MJMZ4Y9cKL/ca2jGISn0nA7KrAw9v" + "ShheSixGO4BV9JECkLEbtg7i+W/j/De6S+x2GLNcphuTP3UmgtKbhs0ItRqzW561" + "s6gLkqi6aWmgaFLd8E1pMJcd9DSY95P13EYB9VJIUxFNUopzo7QcUmFsZiBIYXVz" + "ZXIgPGhhdXNlckBhY20ub3JnPokAWAQQEQIAGAUCPZGq/QgLAwkIBwIBCgIZAQUb" + "AwAAAAAKCRAqIBiOh4JvOKg4AJ9j14yygOqqzqiLKeaasIzqT8LCIgCggx14WuLO" + "wOUTUswTaVKMFnU7tseJAJwEEAECAAYFAj2Rqx8ACgkQ9aWTKMpUDFV+9QP/RiWT" + "5FAF5Rgb7beaApsgXsME+Pw7HEYFtqGa6VcXEpbcUXO6rjaXsgMgY90klWlWCF1T" + "HOyKITvj2FdhE+0j8NQn4vaGpiTwORW/zMf/BZ0abdSWQybp10Yjs8gXw30UheO+" + "F1E524MC+s2AeUi2hwHMiS+AVYd4WhxWHmWuBpTRypP/AAALTgEQAAEBAAAAAQAA" + "AAABAAAA/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAoHBwgHBgoICAgLCgoLDhgQ" + "Dg0NDh0VFhEYIx8lJCIfIiEmKzcvJik0KSEiMEExNDk7Pj4+JS5ESUM8SDc9Pjv/" + "2wBDAQoLCw4NDhwQEBw7KCIoOzs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7" + "Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozv/wAARCABqAF0DASIAAhEBAxEB/8QAHwAAAQUB" + "AQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQID" + "AAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0" + "NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKT" + "lJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl" + "5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL" + "/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHB" + "CSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpj" + "ZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3" + "uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIR" + "AxEAPwD2aiiq9xcxWsRllcKqjOT06E/0oAsVm6jrmm6VGXvLuOPGflz8x+grzXxV" + "8U51u5LXRgBGowZHXknnkc9OQcV51caneXdw9xPOXlckl2AJHY4J6cD1oA9J1z4p" + "TRkrYQhRyQ0hIY5/2QRx7k9ulczN8SvEEshdZkX0UorDrznI759a5Mksckkknqec" + "mkoA7WD4oavEoEttbTepYEZ+mCMVv6H8SLTULhbe/gFozAYkD5Unp3Ax/kV5XRQB" + "9EAhgCDkHkEcgilryTwd4zn0m4WzvpTJZSMBuY5MfbueletKyugZWDKwyCOc/j3o" + "AduyWLDeWB5Ynj8jSUUUAdFXn/xU15dO0RbGGYC5uWwUB6L1Jx+n413F1cJa2stz" + "J92JC5+gGa+bdfvp9S1q4urmRneQg5Yk4HGAPYZoAzySxySSSep5yaSvQvAPhOHU" + "rB7u5iLGUlIwQRx7HPr/AJ9LGsfC+dJGngc+X12gc8nvx1/rQB5rRXS3Xg28t9ye" + "VLvA7Ddj8MDt6Vnx6JKJCsocnBwqqQSOxPH+fWgDKorTl0SaLGXxkZ+ZcZ4z1yfb" + "P1qg0MqLueN1A6kqRigCOvVPh74mF9YjS7tgLi3GIm6b17c+oOfrXlda3haeW38R" + "WjxfeMgBOCcD/PHpzQB7nRRRQBqarZjUNLubPJXz4yhI64PFfO3iDRrnRtdm0+cq" + "0ocEbehzyOv1xX0vXnHxU8Kf2hYf23aRk3VsMTAZO6MZ5x7UAbfga1W00WzjRSF8" + "kbsg5z744HT/ADmuoysikdQSVP8AI1yPgq6il0axk27V8sDcTg5x7V1qSxOcJIrH" + "/ZOaAKV5p8JgJSPJGMr97PNcxqOiRXLiRI8nONoIGO55z/8AqyeldhPcQxwyOzoQ" + 
"owRkflXH6t4q0nTLjy57mNXfJCA5x+Qx0NAGXd6LD5iiaPYwTAAx07+vXvXOXmiR" + "Qu6u5VTk/MQQV7cdvxPT866KbxTpt7HGR8p7SMw5HuOP8/Ws/ULlb2No0bKMOGBJ" + "BHrjHHXn6D8QDzWZQk8iAYVWIA9K6LwDZNeeJ4sEqsaF2YHBHpz2/wA/WsG+V0vZ" + "kkGGVsEZz9OcntXffC62iiS7vJTsklKxRFuAw6nBP+eKAPRKKKKAOiqOSNJYzHIo" + "ZGGCD0NSUUAeRajIunwzQG4e3tYZTHGsPzOxJ6ADuQcH8Pw5v+19Q0rVJVgl1JG3" + "cxykEj13cnHT1r1C38OQ3l063cIkkhmkZDKSeCfx9R/kVLeeGIRKs7hVVDn5OCx9" + "yeTjqMf0oAo3k1xP4biuJFeKV4w7gDaQcen1/wAjt5gbK81HW41kIiJBZppULe47" + "eoxx+YzivW9Vh/0FAE+XPIJGCOR0rnbPT7eG+LyxlkAG1wQSPXrjvg9MfjQBycNj" + "4hMRZgJkUjETQqAy/UAY6DoO/wCNbVlYTNbSNJbmBlBwoUfM30B7j2/lz20VhbKA" + "wHmZOQWbOfyrO1G3jil8tBhWToOcdu+c/wAvagDzbUdGlu9aRxFiB/vsuBggZOfq" + "cfWujSIR2dnNZTEeXKgMcb4BUHjofbjNKmI5juiabaGGxVJLcdh/nFWtI0oxagsD" + "DIkkWXYp4VQDnOemSfyHbigDtgSQMjBI6HqKKKKAOiopoPXjGKdQBnXLiDUI5SMK" + "VwxHGf8APFUtW1A+YkMKmbnc23njuf6D/ObWquoaNSQCM/rwP1rMYxxTGWR1UsoU" + "biAcdep+o/KgDG1LxdpracIirCVRjaykHr6cHGQe1cv/AGjNcXBW3sntyT/rHcjj" + "Hp6Z+nQdAK6PXIdIvcE3Fv5rEfNgP9eRn8c8d/rgzX2i2sqo1y8745CD5WPseOnH" + "f8aANiz1O9gjiR5FMUhAV1wcH0Ix6jHHSrMsskz7pGy2MZNc8PEEM7xxWsM/lr8r" + "b4jtI9CcHt7nr7Vqi4JuEjB2qse9y2Ace47dRn/OQDMuRMl8RHw7SgDBPGT6jpwf" + "yzXa2NmbYF3IMrDB2kkAe3HP5Vwk99u1hdg3ANuOOOB0z6ZwPz6c8eiAhgCDkHkE" + "cgigBaKKKAOiqJiMEb9mBknjim3LFIGcOU285ArNa8mKIN3QclScn6+/FADL9xOc" + "K2Tj7xAxnAwQPqOmawdSNpeSJBfQyGNXwQpIAPvjqOPyPT12nYsxYnJIGSeMnHP+" + "e9UL7TUumEqOYp1GNw6N/vDv/wDXoA5+70vSbFGlhtopUxkBl3EZ45z7/kKwTdpN" + "cIsOmeSCduUiCnB9cdeg/M/j0v8AbFtY5hu0gjmGSRICT19cdMDt3+lULzxPZGZv" + "LXcBnCrwB6Y4PX+ZoAptMRbiMDAGSSMksf8A9Q6DuKzJtVYs+BvcPgMTkEdOTnrx" + "/KoLzVmvZZQjjaT82DyPbqcdx+GKitLf7TNsLYAGWPfH+TQBcsYJDE0rOyu4wjHk" + "gfQ+p/zzWjpnja5sdSOm6yyK0Z2pMCQjZ+6SM9CCMdhnp3E1hYy393FaW0eXfjAx" + "gAdT26D+X4Vg/EuFLbxOsCYBitkQkEdsgcADsB+lAHplvqUbsu5vlYA5PIB7468e" + "nPf8lfUlDkRRrIvqZNn6EV41o3iO/wBFcCJ/MhBP7pjwD6g9ua7G08b6TcRl7h5L" + "eTPKvGz5+hUH9cUAeo3uFDrt+Y4O7HOOB69Pr/8AXqhUlx/r2/z2qOgBCQoJJwBy" + "SeABXHeIfHVvbXcemaW4luHlVJJlIKxjODgg8nqKq/Em6uItOhWOeVAx5CuRnrXn" + "+jf8hyw/6+Y//QhQB6xrmlxzXc0NyuHVyQcdjnBz379D1BGeK5u88LMJGlt2RlX7" + "qkEsPXn6/pXo/ilVzbttG7DDOOeornqAONbRpI4v3pKOQcAqQD+Y/P6j052NK0p5" + "HWHy3IBPyqrfN6gZz+P4/hpXoGzOOiP/ACNdH4XRftsp2jIBxx70AX9E0pdMtvMm" + "VRNt5xyEGOgPf3NeDeLdVOs+J768zlGkKx+yjgfy/WvoPXeNEvMcfujXzJQAUUUU" + "Af/ZiQBGBBARAgAGBQI9katEAAoJECogGI6Hgm84xz8AoNGz1fJrVPxqkBrUDmWA" + "GsP6qVGYAJ0ZOftw/GfQHzdGR8pOK85DLUPEErQkUmFsZiBIYXVzZXIgPGhhdXNl" + "ckBwcml2YXNwaGVyZS5jb20+iQBGBBARAgAGBQI9katmAAoJECogGI6Hgm84m0oA" + "oJS3CTrgpqRZfhgPtHGtUVjRCJbbAJ9stJgPcbqA2xXEg9yl2TQToWdWxbQkUmFs" + "ZiBIYXVzZXIgPGhhdXNlckBwcml2YXNwaGVyZS5vcmc+iQBGBBARAgAGBQI9kauJ" + "AAoJECogGI6Hgm84GfAAnRswktLMzDfIjv6ni76Qp5B850byAJ90I0LEHOLhda7r" + "kqTwZ8rguNssUrQkUmFsZiBIYXVzZXIgPGhhdXNlckBwcml2YXNwaGVyZS5uZXQ+" + "iQBGBBARAgAGBQI9kaubAAoJECogGI6Hgm84zi0An16C4s/B9Z0/AtfoN4ealMh3" + "i3/7AJ9Jg4GOUqGCGRRKUA9Gs5pk8yM8GbQmUmFsZiBDLiBIYXVzZXIgPHJhbGZo" + "YXVzZXJAYmx1ZXdpbi5jaD6JAEYEEBECAAYFAj2Rq8oACgkQKiAYjoeCbzhPOACg" + "iiTohKuIa66FNiI24mQ+XR9nTisAoLmh3lJf16/06qLPsRd9shTkLfmHtB9SYWxm" + "IEhhdXNlciA8cmFsZmhhdXNlckBnbXguY2g+iQBGBBARAgAGBQI9kavvAAoJECog" + "GI6Hgm84ZE8An0RlgL8mPBa/P08S5e/lD35MlDdgAJ99pjCeY46S9+nVyx7ACyKO" + "SZ4OcLQmUmFsZiBIYXVzZXIgPGhhdXNlci5yYWxmQG15c3VucmlzZS5jaD6JAEYE" + "EBECAAYFAj2RrEEACgkQKiAYjoeCbzjz0wCg+q801XrXk+Rf+koSI50MW5OaaKYA" + "oKOVA8SLxE29qSR/bJeuW0ryzRLqtCVSYWxmIEhhdXNlciA8aGF1c2VyLnJhbGZA" + "ZnJlZXN1cmYuY2g+iQBGBBARAgAGBQI9kaxXAAoJECogGI6Hgm848zoAnRBtWH6e" + "fTb3is63s8J2zTfpsyS0AKDxTjl+ZZV0COHLrSCaNLZVcpImFrkEDQQ9kar+EBAA" + 
"+RigfloGYXpDkJXcBWyHhuxh7M1FHw7Y4KN5xsncegus5D/jRpS2MEpT13wCFkiA" + "tRXlKZmpnwd00//jocWWIE6YZbjYDe4QXau2FxxR2FDKIldDKb6V6FYrOHhcC9v4" + "TE3V46pGzPvOF+gqnRRh44SpT9GDhKh5tu+Pp0NGCMbMHXdXJDhK4sTw6I4TZ5dO" + "khNh9tvrJQ4X/faY98h8ebByHTh1+/bBc8SDESYrQ2DD4+jWCv2hKCYLrqmus2UP" + "ogBTAaB81qujEh76DyrOH3SET8rzF/OkQOnX0ne2Qi0CNsEmy2henXyYCQqNfi3t" + "5F159dSST5sYjvwqp0t8MvZCV7cIfwgXcqK61qlC8wXo+VMROU+28W65Szgg2gGn" + "VqMU6Y9AVfPQB8bLQ6mUrfdMZIZJ+AyDvWXpF9Sh01D49Vlf3HZSTz09jdvOmeFX" + "klnN/biudE/F/Ha8g8VHMGHOfMlm/xX5u/2RXscBqtNbno2gpXI61Brwv0YAWCvl" + "9Ij9WE5J280gtJ3kkQc2azNsOA1FHQ98iLMcfFstjvbzySPAQ/ClWxiNjrtVjLhd" + "ONM0/XwXV0OjHRhs3jMhLLUq/zzhsSlAGBGNfISnCnLWhsQDGcgHKXrKlQzZlp+r" + "0ApQmwJG0wg9ZqRdQZ+cfL2JSyIZJrqrol7DVes91hcAAgIQAKD9MGkS8SUD2irI" + "AiwVHU0WXLBnk2CvvueSmT9YtC34UKkIkDPZ7VoeuXDfqTOlbiE6T16zPvArZfbl" + "JGdrU7HhsTdu+ADxRt1dPur0G0ICJ3pBD3ydGWpdLI/94x1BvTY4rsR5mS4YWmpf" + "e2kWc7ZqezhP7Xt9q7m4EK456ddeUZWtkwGU+PKyRAZ+CK82Uhouw+4aW0NjiqmX" + "hfH9/BUhI1P/8R9VkTfAFGPmZzqoHr4AuO5tLRLD2RFSmQCP8nZTiP9nP+wBBvn7" + "vuqKRQsj9PwwPD4V5SM+kpW+rUIWr9TZYl3UqSnlXlpEZFd2Bfl6NloeH0cfU69E" + "gtjcWGvGxYKPS0cg5yhVb4okka6RqIPQiYl6eJgv4tRTKoPRX29o0aUVdqVvDr5u" + "tnFzcINq7jTo8GiO8Ia3cIFWfo0LyQBd1cf1U+eEOz+DleEFqyljaz9VCbDPE4GP" + "o+ALESBlOwn5daUSaah9iU8aVPaSjn45hoQqxOKPwJxnCKKQ01iy0Gir+CDU8JJB" + "7bmbvQN4bke30EGAeED3oi+3VaBHrhjYLv7SHIxP5jtCJKWMJuLRV709HsWJi3kn" + "fGHwH+yCDF8+PDeROAzpXBaD2EFhKgeUTjP5Rgn6ltRf8TQnfbW4qlwyiXMhPOfC" + "x6qNmwaFPKQJpIkVq5VGfRXAERfkiQBMBBgRAgAMBQI9kar+BRsMAAAAAAoJECog" + "GI6Hgm84CDMAoNrNeP4c8XqFJnsLLPcjk5YGLaVIAKCrL5KFuLQVIp7d0Fkscx3/" + "7DGrzw=="); private static readonly byte[] aesSecretKey = Base64.Decode( "lQHpBEBSdIYRBADpd7MeIxRk4RsvyMnJNIYe4FiVv6i7I7+LPRvnIjDct0bN" + "1gCV48QFej7g/PsvXRjYSowV3VIvchWX8OERd/5i10cLbcs7X52EP1vwYaLj" + "uRfNUBg8Q51RQsKR+/rBmnVsi68rjU4yTH6wpo6FOO4pz4wFV+tWwGOwOitA" + "K31L4wCgqh59eFFBrOlRFAbDvaL7emoCIR8EAOLxDKiLQJYQrKZfXdZnifeo" + "dhEP0uuV4O5TG6nrqkhWffzC9cSoFD0BhMl979d8IB2Uft4FNvQc2u8hbJL5" + "7OCGDCUAidlB9jSdu0/J+kfRaTGhYDjBgw7AA42576BBSMNouJg/aOOQENEN" + "Nn4n7NxR3viBzIsL/OIeU8HSkBgaA/41PsvcgZ3kwpdltJ/FVRWhmMmv/q/X" + "qp1YOnF8xPU9bv2ofELrxJfRsbS4GW1etzD+nXs/woW4Vfixs01x+cutR4iF" + "3hw+eU+yLToMPmmo8D2LUvX1SRODJpx5yBBeRIYv6nz9H3sQRDx3kaLASxDV" + "jTxKmrLYnZz5w5qyVpvRyv4JAwKyWlhdblPudWBFXNkW5ydKn0AV2f51wEtj" + "Zy0aLIeutVMSJf1ytLqjFqrnFe6pdJrHO3G00TE8OuFhftWosLGLbEGytDtF" + "cmljIEguIEVjaGlkbmEgKHRlc3Qga2V5IC0gQUVTMjU2KSA8ZXJpY0Bib3Vu" + "Y3ljYXN0bGUub3JnPohZBBMRAgAZBQJAUnSGBAsHAwIDFQIDAxYCAQIeAQIX" + "gAAKCRBYt1NnUiCgeFKaAKCiqtOO+NQES1gJW6XuOGmSkXt8bQCfcuW7SXZH" + "zxK1FfdcG2HEDs3YEVawAgAA"); private static readonly byte[] aesPublicKey = Base64.Decode( "mQGiBEBSdIYRBADpd7MeIxRk4RsvyMnJNIYe4FiVv6i7I7+LPRvnIjDct0bN" + "1gCV48QFej7g/PsvXRjYSowV3VIvchWX8OERd/5i10cLbcs7X52EP1vwYaLj" + "uRfNUBg8Q51RQsKR+/rBmnVsi68rjU4yTH6wpo6FOO4pz4wFV+tWwGOwOitA" + "K31L4wCgqh59eFFBrOlRFAbDvaL7emoCIR8EAOLxDKiLQJYQrKZfXdZnifeo" + "dhEP0uuV4O5TG6nrqkhWffzC9cSoFD0BhMl979d8IB2Uft4FNvQc2u8hbJL5" + "7OCGDCUAidlB9jSdu0/J+kfRaTGhYDjBgw7AA42576BBSMNouJg/aOOQENEN" + "Nn4n7NxR3viBzIsL/OIeU8HSkBgaA/41PsvcgZ3kwpdltJ/FVRWhmMmv/q/X" + "qp1YOnF8xPU9bv2ofELrxJfRsbS4GW1etzD+nXs/woW4Vfixs01x+cutR4iF" + "3hw+eU+yLToMPmmo8D2LUvX1SRODJpx5yBBeRIYv6nz9H3sQRDx3kaLASxDV" + "jTxKmrLYnZz5w5qyVpvRyrQ7RXJpYyBILiBFY2hpZG5hICh0ZXN0IGtleSAt" + "IEFFUzI1NikgPGVyaWNAYm91bmN5Y2FzdGxlLm9yZz6IWQQTEQIAGQUCQFJ0" + "hgQLBwMCAxUCAwMWAgECHgECF4AACgkQWLdTZ1IgoHhSmgCfU83BLBF2nCua" + "zk2dXB9zO1l6XS8AnA07U4cq5W0GrKM6/kP9HWtPhgOFsAIAAA=="); private static readonly byte[] twofishSecretKey = 
Base64.Decode( "lQHpBEBSdtIRBACf7WfrqTl8F051+EbaljPf/8/ajFpAfMq/7p3Hri8OCsuc" + "fJJIufEEOV1/Lt/wkN67MmSyrU0fUCsRbEckRiB4EJ0zGHVFfAnku2lzdgc8" + "AVounqcHOmqA/gliFDEnhYOx3bOIAOav+yiOqfKVBhWRCpFdOTE+w/XoDM+p" + "p8bH5wCgmP2FuWpzfSut7GVKp51xNEBRNuED/3t2Q+Mq834FVynmLKEmeXB/" + "qtIz5reHEQR8eMogsOoJS3bXs6v3Oblj4in1gLyTVfcID5tku6kLP20xMRM2" + "zx2oRbz7TyOCrs15IpRXyqqJxUWD8ipgJPkPXE7hK8dh4YSTUi4i5a1ug8xG" + "314twlPzrchpWZiutDvZ+ks1rzOtBACHrEFG2frUu+qVkL43tySE0cV2bnuK" + "LVhXbpzF3Qdkfxou2nuzsCbl6m87OWocJX8uYcQGlHLKv8Q2cfxZyieLFg6v" + "06LSFdE9drGBWz7mbrT4OJjxPyvnkffPfLOOqae3PMYIIuscvswuhm4X5aoj" + "KJs01YT3L6f0iIj03hCeV/4KAwLcGrxT3X0qR2CZyZYSVBdjXeNYKXuGBtOf" + "ood26WOtwLw4+l9sHVoiXNv0LomkO58ndJRPGCeZWZEDMVrfkS7rcOlktDxF" + "cmljIEguIEVjaGlkbmEgKHRlc3Qga2V5IC0gdHdvZmlzaCkgPGVyaWNAYm91" + "bmN5Y2FzdGxlLm9yZz6IWQQTEQIAGQUCQFJ20gQLBwMCAxUCAwMWAgECHgEC" + "F4AACgkQaCCMaHh9zR2+RQCghcQwlt4B4YmNxp2b3v6rP3E8M0kAn2Gspi4u" + "A/ynoqnC1O8HNlbjPdlVsAIAAA=="); private static readonly byte[] twofishPublicKey = Base64.Decode( "mQGiBEBSdtIRBACf7WfrqTl8F051+EbaljPf/8/ajFpAfMq/7p3Hri8OCsuc" + "fJJIufEEOV1/Lt/wkN67MmSyrU0fUCsRbEckRiB4EJ0zGHVFfAnku2lzdgc8" + "AVounqcHOmqA/gliFDEnhYOx3bOIAOav+yiOqfKVBhWRCpFdOTE+w/XoDM+p" + "p8bH5wCgmP2FuWpzfSut7GVKp51xNEBRNuED/3t2Q+Mq834FVynmLKEmeXB/" + "qtIz5reHEQR8eMogsOoJS3bXs6v3Oblj4in1gLyTVfcID5tku6kLP20xMRM2" + "zx2oRbz7TyOCrs15IpRXyqqJxUWD8ipgJPkPXE7hK8dh4YSTUi4i5a1ug8xG" + "314twlPzrchpWZiutDvZ+ks1rzOtBACHrEFG2frUu+qVkL43tySE0cV2bnuK" + "LVhXbpzF3Qdkfxou2nuzsCbl6m87OWocJX8uYcQGlHLKv8Q2cfxZyieLFg6v" + "06LSFdE9drGBWz7mbrT4OJjxPyvnkffPfLOOqae3PMYIIuscvswuhm4X5aoj" + "KJs01YT3L6f0iIj03hCeV7Q8RXJpYyBILiBFY2hpZG5hICh0ZXN0IGtleSAt" + "IHR3b2Zpc2gpIDxlcmljQGJvdW5jeWNhc3RsZS5vcmc+iFkEExECABkFAkBS" + "dtIECwcDAgMVAgMDFgIBAh4BAheAAAoJEGggjGh4fc0dvkUAn2QGdNk8Wrrd" + "+DvKECrO5+yoPRx3AJ91DhCMme6uMrQorKSDYxHlgc7iT7ACAAA="); private static readonly char[] pass = "hello world".ToCharArray(); /** * Generated signature test * * @param sKey * @param pgpPrivKey * @return test result */ public void GenerateTest( PgpSecretKeyRing sKey, IPgpPublicKey pgpPubKey, IPgpPrivateKey pgpPrivKey) { string data = "hello world!"; MemoryStream bOut = new MemoryStream(); byte[] dataBytes = Encoding.ASCII.GetBytes(data); MemoryStream testIn = new MemoryStream(dataBytes, false); PgpSignatureGenerator sGen = new PgpSignatureGenerator(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1); sGen.InitSign(PgpSignature.BinaryDocument, pgpPrivKey); PgpSignatureSubpacketGenerator spGen = new PgpSignatureSubpacketGenerator(); IEnumerator enumerator = sKey.GetSecretKey().PublicKey.GetUserIds().GetEnumerator(); enumerator.MoveNext(); string primaryUserId = (string) enumerator.Current; spGen.SetSignerUserId(true, primaryUserId); sGen.SetHashedSubpackets(spGen.Generate()); PgpCompressedDataGenerator cGen = new PgpCompressedDataGenerator( CompressionAlgorithmTag.Zip); BcpgOutputStream bcOut = new BcpgOutputStream(cGen.Open(new UncloseableStream(bOut))); sGen.GenerateOnePassVersion(false).Encode(bcOut); PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator(); DateTime testDateTime = new DateTime(1973, 7, 27); Stream lOut = lGen.Open( new UncloseableStream(bcOut), PgpLiteralData.Binary, "_CONSOLE", dataBytes.Length, testDateTime); int ch; while ((ch = testIn.ReadByte()) >= 0) { lOut.WriteByte((byte) ch); sGen.Update((byte)ch); } lGen.Close(); sGen.Generate().Encode(bcOut); cGen.Close(); PgpObjectFactory pgpFact = new PgpObjectFactory(bOut.ToArray()); PgpCompressedData c1 = (PgpCompressedData)pgpFact.NextPgpObject(); pgpFact 
= new PgpObjectFactory(c1.GetDataStream()); PgpOnePassSignatureList p1 = (PgpOnePassSignatureList)pgpFact.NextPgpObject(); PgpOnePassSignature ops = p1[0]; PgpLiteralData p2 = (PgpLiteralData) pgpFact.NextPgpObject(); if (!p2.ModificationTime.Equals(testDateTime)) { Fail("Modification time not preserved"); } Stream dIn = p2.GetInputStream(); ops.InitVerify(pgpPubKey); while ((ch = dIn.ReadByte()) >= 0) { ops.Update((byte) ch); } PgpSignatureList p3 = (PgpSignatureList) pgpFact.NextPgpObject(); if (!ops.Verify(p3[0])) { Fail("Failed generated signature check"); } } public override void PerformTest() { // // Read the public key // PgpPublicKeyRing pgpPub = new PgpPublicKeyRing(testPubKey); var pubKey = pgpPub.GetPublicKey(); // // Read the private key // PgpSecretKeyRing sKey = new PgpSecretKeyRing(testPrivKey); IPgpSecretKey secretKey = sKey.GetSecretKey(); IPgpPrivateKey pgpPrivKey = secretKey.ExtractPrivateKey(pass); // // test signature message // PgpObjectFactory pgpFact = new PgpObjectFactory(sig1); PgpCompressedData c1 = (PgpCompressedData)pgpFact.NextPgpObject(); pgpFact = new PgpObjectFactory(c1.GetDataStream()); PgpOnePassSignatureList p1 = (PgpOnePassSignatureList)pgpFact.NextPgpObject(); PgpOnePassSignature ops = p1[0]; PgpLiteralData p2 = (PgpLiteralData)pgpFact.NextPgpObject(); Stream dIn = p2.GetInputStream(); ops.InitVerify(pubKey); int ch; while ((ch = dIn.ReadByte()) >= 0) { ops.Update((byte) ch); } PgpSignatureList p3 = (PgpSignatureList) pgpFact.NextPgpObject(); if (!ops.Verify(p3[0])) { Fail("Failed signature check"); } // // signature generation // GenerateTest(sKey, pubKey, pgpPrivKey); // // signature generation - canonical text // const string data = "hello world!"; byte[] dataBytes = Encoding.ASCII.GetBytes(data); MemoryStream bOut = new MemoryStream(); MemoryStream testIn = new MemoryStream(dataBytes, false); PgpSignatureGenerator sGen = new PgpSignatureGenerator( PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1); sGen.InitSign(PgpSignature.CanonicalTextDocument, pgpPrivKey); PgpCompressedDataGenerator cGen = new PgpCompressedDataGenerator( CompressionAlgorithmTag.Zip); BcpgOutputStream bcOut = new BcpgOutputStream(cGen.Open(new UncloseableStream(bOut))); sGen.GenerateOnePassVersion(false).Encode(bcOut); PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator(); DateTime testDateTime = new DateTime(1973, 7, 27); Stream lOut = lGen.Open( new UncloseableStream(bcOut), PgpLiteralData.Text, "_CONSOLE", dataBytes.Length, testDateTime); while ((ch = testIn.ReadByte()) >= 0) { lOut.WriteByte((byte) ch); sGen.Update((byte)ch); } lGen.Close(); sGen.Generate().Encode(bcOut); cGen.Close(); // // verify Generated signature - canconical text // pgpFact = new PgpObjectFactory(bOut.ToArray()); c1 = (PgpCompressedData) pgpFact.NextPgpObject(); pgpFact = new PgpObjectFactory(c1.GetDataStream()); p1 = (PgpOnePassSignatureList) pgpFact.NextPgpObject(); ops = p1[0]; p2 = (PgpLiteralData) pgpFact.NextPgpObject(); if (!p2.ModificationTime.Equals(testDateTime)) { Fail("Modification time not preserved"); } dIn = p2.GetInputStream(); ops.InitVerify(pubKey); while ((ch = dIn.ReadByte()) >= 0) { ops.Update((byte)ch); } p3 = (PgpSignatureList) pgpFact.NextPgpObject(); if (!ops.Verify(p3[0])) { Fail("Failed generated signature check"); } // // Read the public key with user attributes // pgpPub = new PgpPublicKeyRing(testPubWithUserAttr); pubKey = pgpPub.GetPublicKey(); int count = 0; foreach (PgpUserAttributeSubpacketVector attributes in pubKey.GetUserAttributes()) { int sigCount = 0; 
foreach (object sigs in pubKey.GetSignaturesForUserAttribute(attributes)) { if (sigs == null) Fail("null signature found"); sigCount++; } if (sigCount != 1) { Fail("Failed user attributes signature check"); } count++; } if (count != 1) { Fail("Failed user attributes check"); } byte[] pgpPubBytes = pgpPub.GetEncoded(); pgpPub = new PgpPublicKeyRing(pgpPubBytes); pubKey = pgpPub.GetPublicKey(); count = 0; foreach (object ua in pubKey.GetUserAttributes()) { if (ua == null) Fail("null user attribute found"); count++; } if (count != 1) { Fail("Failed user attributes reread"); } // // reading test extra data - key with edge condition for DSA key password. // char[] passPhrase = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' }; sKey = new PgpSecretKeyRing(testPrivKey2); pgpPrivKey = sKey.GetSecretKey().ExtractPrivateKey(passPhrase); // // reading test - aes256 encrypted passphrase. // sKey = new PgpSecretKeyRing(aesSecretKey); pgpPrivKey = sKey.GetSecretKey().ExtractPrivateKey(pass); // // reading test - twofish encrypted passphrase. // sKey = new PgpSecretKeyRing(twofishSecretKey); pgpPrivKey = sKey.GetSecretKey().ExtractPrivateKey(pass); // // use of PgpKeyPair // DsaParametersGenerator pGen = new DsaParametersGenerator(); pGen.Init(512, 80, new SecureRandom()); // TODO Is the certainty okay? DsaParameters dsaParams = pGen.GenerateParameters(); DsaKeyGenerationParameters kgp = new DsaKeyGenerationParameters(new SecureRandom(), dsaParams); IAsymmetricCipherKeyPairGenerator kpg = GeneratorUtilities.GetKeyPairGenerator("DSA"); kpg.Init(kgp); IAsymmetricCipherKeyPair kp = kpg.GenerateKeyPair(); PgpKeyPair pgpKp = new PgpKeyPair(PublicKeyAlgorithmTag.Dsa, kp.Public, kp.Private, DateTime.UtcNow); PgpPublicKey k1 = pgpKp.PublicKey; PgpPrivateKey k2 = pgpKp.PrivateKey; } public override string Name { get { return "PGPDSATest"; } } public static void Main( string[] args) { RunTest(new PgpDsaTest()); } [Test] public void TestFunction() { string resultText = Perform().ToString(); Assert.AreEqual(Name + ": Okay", resultText); } } }
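// --- Illustrative sketch, not part of the original test file above. ---
// It condenses the one-pass signature verification flow that PerformTest() and
// GenerateTest() repeat: unwrap the compressed packet, stream the literal data
// through the one-pass signature, then check it against the trailing signature
// list. Only API calls already used in the test above are assumed; the class
// and method names here are made up, and the sketch relies on the same using
// directives as the test file.
internal static class PgpOnePassVerifySketch
{
    public static bool VerifyOnePassSigned(byte[] signedMessage, IPgpPublicKey pubKey)
    {
        // Outer layer: a compressed data packet wrapping the signed content.
        PgpObjectFactory pgpFact = new PgpObjectFactory(signedMessage);
        PgpCompressedData c1 = (PgpCompressedData)pgpFact.NextPgpObject();
        pgpFact = new PgpObjectFactory(c1.GetDataStream());

        // One-pass signature header precedes the literal data.
        PgpOnePassSignatureList p1 = (PgpOnePassSignatureList)pgpFact.NextPgpObject();
        PgpOnePassSignature ops = p1[0];

        // Feed the literal data bytes into the signature as they are read.
        PgpLiteralData p2 = (PgpLiteralData)pgpFact.NextPgpObject();
        Stream dIn = p2.GetInputStream();
        ops.InitVerify(pubKey);
        int ch;
        while ((ch = dIn.ReadByte()) >= 0)
        {
            ops.Update((byte)ch);
        }

        // The actual signature packet trails the literal data.
        PgpSignatureList p3 = (PgpSignatureList)pgpFact.NextPgpObject();
        return ops.Verify(p3[0]);
    }
}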
/* $Id$ * * Project: Swicli.Library - Two Way Interface for .NET and MONO to SWI-Prolog * Author: Douglas R. Miles * E-mail: [email protected] * WWW: http://www.logicmoo.com * Copyright (C): 2010-2012 LogicMOO Developement * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * *********************************************************/ //using System.Linq; using System.Linq; #if USE_IKVM using Hashtable = java.util.Hashtable; using ClassLoader = java.lang.ClassLoader; using JClass = Dead.JClass; using JavaClass = java.lang.Class; using System; #else using SbsSW.SwiPlCs.Callback; using Type = System.Type; using System; #endif using System.Collections.Generic; using System.Reflection; using SbsSW.SwiPlCs; using SbsSW.SwiPlCs.Exceptions; using PlTerm = SbsSW.SwiPlCs.PlTerm; namespace Dead { public class JClass { } } namespace Swicli.Library { public partial class PrologCLR { [TypeConversion] public static System.Type getInstanceTypeFromClass(JavaClass classObject) { try { classObject.ToString(); return ikvm.runtime.Util.getInstanceTypeFromClass(classObject); } catch (Exception odd) { Embedded.Error("getInstanceTypeFromClass", odd); return null; } } [TypeConversion] public static JavaClass getFriendlyClassFromType(Type classObject) { classObject.ToString(); //getInstanceTypeFromClass(classObject); return ikvm.runtime.Util.getFriendlyClassFromType(classObject); } [PrologVisible] static public bool cliTypeToTypespec(PlTerm clazzSpec, PlTerm valueOut) { return valueOut.Unify(typeToSpec(GetType(clazzSpec))); } public static Type GetTypeThrowIfMissing(PlTerm clazzSpec) { Type fi = GetType(clazzSpec,false); if (fi == null) { throw new PlException("cant find class" + clazzSpec); } return fi; } [PrologInvisible] private static Type GetType(PlTerm clazzSpec) { return GetType(clazzSpec, false); } [PrologInvisible] private static Type GetType(PlTerm clazzSpec, bool canBeObjects) { if (clazzSpec.IsVar) { Embedded.Error("GetType IsVar {0}", clazzSpec); return null; } if (IsTaggedObject(clazzSpec)) { object tagObj = tag_to_object(clazzSpec[1].Name); if (tagObj is Type) { return (Type)tagObj; } if (tagObj is JavaClass) { return getInstanceTypeFromClass((JavaClass) tagObj); } if (!canBeObjects) { Embedded.Warn("cant find tagged object as class: {0}=>{1}", clazzSpec, tagObj); } if (tagObj != null) { return tagObj.GetType(); } return null; } Type type = null; Boolean wasString = clazzSpec.IsString; if (clazzSpec.IsAtomOrString) { if (canBeObjects && wasString) return typeof(string); string name = (string)clazzSpec; type = ResolveType(name); if (type != null) return type; if (!canBeObjects) { Embedded.Warn("cant find atom/string as class: {0}", clazzSpec); type = ResolveType(name); } return null; } if (clazzSpec.IsCompound) { string clazzName = clazzSpec.Name; int arity = clazzSpec.Arity; if (clazzName == "arrayOf") { if (arity != 1) { return 
GetType(clazzSpec[1]).MakeArrayType(clazzSpec[2].intValue()); } return GetType(clazzSpec[1]).MakeArrayType(); } if (clazzName == "type") { return (GetInstance(clazzSpec[1]) ?? NEW_OBJECTFORTYPE).GetType(); } if (clazzName == "static" || clazzName == "typeof") { return GetType(clazzSpec[1]); } if (clazzName == "{}") { return typeof (PlTerm); } if (clazzName == "pointer") { return GetType(clazzSpec[1]).MakePointerType(); } if (clazzName == "byref") { return GetType(clazzSpec[1]).MakeByRefType(); } if (clazzName == "nullable") { return typeof(Nullable<>).MakeGenericType(new[] { GetType(clazzSpec[1]) }); } type = ResolveType(clazzName + "`" + arity); if (type != null) { // 'Dictionary'('Int32','string'). if (type.IsGenericType) { Type[] genr = type.GetGenericArguments(); Type[] genrc = null; Type genrb = null; try { if (type.IsGenericParameter) { genrc = type.GetGenericParameterConstraints(); } } catch (Exception e) { Embedded.Warn("GetGenericParameterConstraints: {0}", e); } try { genrb = type.GetGenericTypeDefinition(); } catch (Exception e) { Embedded.Warn("GetGenericTypeDefinition: {0}", e); } if (arity == genr.Length) { var vt = GetParamSpec(ToTermArray(clazzSpec), false); return type.MakeGenericType(vt); } } // return type; } string key = clazzName + "/" + arity; lock (FunctorToLayout) { PrologTermLayout pltl; if (FunctorToLayout.TryGetValue(key, out pltl)) { return pltl.ObjectType; } } lock (FunctorToRecomposer) { PrologTermRecomposer layout; if (FunctorToRecomposer.TryGetValue(key, out layout)) { return layout.ToType; } } Embedded.WarnMissing("cant find compound as class: " + clazzSpec); } object toObject = GetInstance(clazzSpec); if (toObject is Type) return (Type)toObject; if (toObject != null) { return toObject.GetType(); } Embedded.Warn("@TODO cant figure type from {0}", clazzSpec); return typeof(object); //return null; } [PrologVisible] public static bool cliFindType(PlTerm clazzSpec, PlTerm classRef) { // if (term1.IsAtom) { string className = (string)clazzSpec;//.Name; Type s1 = GetType(clazzSpec); if (s1 != null) { var c = s1;// ikvm.runtime.Util.getFriendlyClassFromType(s1); if (c != null) { // ConsoleTrace("name:" + className + " type:" + s1.FullName + " class:" + c); return UnifyTagged(c, classRef); } ConsoleTrace("cant getFriendlyClassFromType " + s1.FullName); return false; } ConsoleTrace("cant ResolveType " + className); return false; } ConsoleTrace("cant IsAtom " + clazzSpec); return false; } public static void ConsoleTrace(object s) { try { System.Console.WriteLine(s); } catch (System.TypeInitializationException e) { // @TODO } catch (Exception e) { // @TODO } } [IKVMBased] [PrologVisible] public static bool cliFindClass(PlTerm clazzName, PlTerm clazzObjectOut) { if (clazzName.IsAtomOrString) { string className = clazzName.Name; Type c = ResolveType(className); if (c != null) { ConsoleTrace("cliFindClass:" + className + " class:" + c); string tag = object_to_tag(c); return AddTagged(clazzObjectOut.TermRef, tag) != 0; } ConsoleTrace("cant ResolveClass " + className); return false; } Type t = GetType(clazzName); if (t != null) { Type c = null; #if USE_IKVM c = t; // JavaClass t = getFriendlyClassFromType(t); #else c = t; #endif string tag = object_to_tag(c); return AddTagged(clazzObjectOut.TermRef, tag) != 0; } return false; } private static IDictionary<string, Type> ShortNameType; private static IDictionary<string, Type> LongNameType = new Dictionary<string,Type>(); private static readonly Dictionary<Type, string> TypeShortName = new Dictionary<Type, string>(); private 
static object NEW_OBJECTFORTYPE = new object(); private static PlTerm typeToSpec(Type type) { if (type == null) return PLNULL; if (type.IsArray && type.HasElementType) { if (type.GetArrayRank() != 1) { return PlC("arrayOf", typeToSpec(type.GetElementType()), ToProlog(type.GetArrayRank())); } return PlC("arrayOf", typeToSpec(type.GetElementType())); } if (type.IsGenericParameter) { Type[] gt = type.GetGenericParameterConstraints(); return PlC("<" + type.FullName ?? type.Name + ">", ToPlTermVSpecs(gt)); } if (type.IsPointer) { Type gt = type.GetElementType(); return PlC("pointer", typeToSpec(gt)); } if (type.IsByRef) { Type gt = type.GetElementType(); return PlC("byref", typeToSpec(gt)); } // @todo if false , use IsGenericType if (false) if (typeof(Nullable<>).IsAssignableFrom(type)) { Embedded.Error("@todo Not Implemented NULLABLE"); Type gt = type.GetElementType(); return PlC("nullable", typeToSpec(gt)); } if (type.IsGenericType ) { Type gt = type.GetGenericTypeDefinition(); Type[] gtp = type.GetGenericArguments(); PlTermV vt = ToPlTermVSpecs(gtp); string typeName = type.FullName ?? type.Name; int gtpLength = gtp.Length; int indexOf = typeName.IndexOf("`" + gtpLength); if (indexOf > 0) { typeName = typeName.Substring(0, indexOf); } else { Embedded.Debug("cant chop arity {0} off string '{1}' ", gtpLength, typeName); } return PlC(typeName, vt); } if (type.HasElementType) { string named = typeToName(type); Embedded.Error("@todo Not Implemented " + named); Type gt = type.GetElementType(); if (gt == type) gt = typeof(object); return PlC("elementType", PlTerm.PlAtom(named), typeToSpec(gt)); } if (type.IsSpecialName || String.IsNullOrEmpty(type.Name) || String.IsNullOrEmpty(type.FullName) || String.IsNullOrEmpty(type.Namespace)) { string named = typeToName(type); Embedded.Error("@todo Not Implemented " + named); Type gt = type.UnderlyingSystemType; if (gt == type) gt = typeof (object); return PlC("static", PlTerm.PlAtom(named), typeToSpec(gt)); } return PlTerm.PlAtom(typeToName(type)); } [PrologVisible] static public bool cliGetType(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliGetType(valueIn, plvar) && SpecialUnify(valueOut, plvar); } /* if (valueIn.IsAtom) { Type t = GetType(valueIn); return valueOut.FromObject(t); }*/ object val = GetInstance(valueIn); if (val == null) { Embedded.Error("Cannot get object for {0}", valueIn); return false; } return valueOut.FromObject((val.GetType())); } [IKVMBased] [PrologVisible] static public bool cliGetClass(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliGetClass(valueIn, plvar) && SpecialUnify(valueOut, plvar); } object val = GetInstance(valueIn); // extension method #if USE_IKVM return valueOut.FromObject((ikvm.extensions.ExtensionMethods.instancehelper_getClass(val))); #else return valueOut.FromObject((val.GetType())); #endif } [IKVMBased] [PrologVisible] static public bool cliClassFromType(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliClassFromType(valueIn, plvar) && SpecialUnify(valueOut, plvar); } Type val = GetType(valueIn); if (val == null) return false; #if USE_IKVM JavaClass c = getFriendlyClassFromType(val); return valueOut.FromObject((c)); #else return valueOut.FromObject((val)); #endif } [IKVMBased] [PrologVisible] static public bool cliTypeFromClass(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliTypeFromClass(valueIn, plvar) && 
SpecialUnify(valueOut, plvar); } Type val = GetType(valueIn); if (val == null) return false; #if USE_IKVM Type c = getInstanceTypeFromClass(val); return valueOut.FromObject((c)); #else return valueOut.FromObject(val); #endif } [PrologVisible] static public bool cliAddShorttype(PlTerm valueName, PlTerm valueIn) { if (!valueName.IsAtomOrString) return Embedded.Warn("valueName must be string or atom {0}", valueName); string name = valueName.Name; Type otherType; lock (ShortNameType) { if (ShortNameType.TryGetValue(name, out otherType)) { if (valueIn.IsNumber) { ShortNameType.Remove(name); TypeShortName.Remove(otherType); return true; } if (valueIn.IsVar) { return valueIn.UnifyAtom(otherType.FullName); } Type val = GetType(valueIn); if (val == otherType) return true; return false; } else { if (valueIn.IsNumber) { return true; } if (valueIn.IsVar) { return true; } Type val = GetType(valueIn); if (val == null) return false; ShortNameType[name] = val; TypeShortName[val] = name; return true; } } } [PrologVisible] [IKVMBased] static public bool cliTypeToClassname(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliTypeToClassname(valueIn, plvar) && SpecialUnify(valueOut, plvar); } //JClass val = CastTerm(valueIn, typeof(JClass)) as JClass; Type val0 = GetTypeThrowIfMissing(valueIn); if (val0 == null) return false; JavaClass val = getFriendlyClassFromType(val0); #if USE_IKVM return valueOut.Unify(val.getName()); #else return valueOut.Unify(val.GetType().Name); #endif } [PrologVisible] static public bool cliTypeToFullname(PlTerm valueIn, PlTerm valueOut) { if (!valueOut.IsVar) { var plvar = PlTerm.PlVar(); return cliTypeToFullname(valueIn, plvar) && SpecialUnify(valueOut, plvar); } Type val = GetTypeThrowIfMissing(valueIn); //Type val = CastTerm(valueIn, typeof(Type)) as Type; if (val == null) return false; return valueOut.Unify(val.FullName); } private static string typeToName(Type type) { if (type.IsArray && type.HasElementType) { return typeToSpec(type.GetElementType()) + "[]"; } lock (ShortNameType) { string shortName; if (TypeShortName.TryGetValue(type, out shortName)) { return shortName; } string typeName = type.Name; Type otherType; if (ShortNameType.TryGetValue(type.Name, out otherType)) { if (type == otherType) { return typeName; } return type.FullName; } ShortNameType[typeName] = type; TypeShortName[type] = typeName; return typeName; } } /* private static Type ResolveClass(string name) { if (name == "@" || name == "$cli_object" || name == "array" || name == null) return null; Type t = ResolveClassAsType(name); #if USE_IKVM JClass c = getFriendlyClassFromType((Type)t); return c; #else return t; #endif }*/ private static Type ResolveClassAsType(string name) { Type s1 = ResolveType(name); if (s1 != null) return s1; if (name.EndsWith("[]")) { Type t1 = ResolveClassAsType(name.Substring(0, name.Length - 2)); return t1.MakeArrayType(); } var name2 = name.Replace("/", "."); if (name2 != name) { s1 = ResolveType(name2); if (s1 != null) return s1; } name2 = name.Replace("cli.", ""); if (name2 != name) { s1 = ResolveType(name2); if (s1 != null) return s1; } return null; } static readonly private Dictionary<string, Type> typeCache = new Dictionary<string, Type>(); static public Boolean MayUseTypeCache = true; public static Type ResolveType(string name) { if (!MayUseTypeCache || typeCache == null) return ResolveType0(name); lock (typeCache) { Type type; if (!typeCache.TryGetValue(name, out type)) { return typeCache[name] = ResolveType0(name); } return type; } 
} private static Type ResolveType0(string name) { if (name == "@" || name == "[]" || name == "$cli_object" || name == "array" || name == null) return null; if (name.EndsWith("[]")) { Type t = ResolveType(name.Substring(0, name.Length - 2)); return t.MakeArrayType(); } if (name.EndsWith("?")) { return typeof(Nullable<>).MakeGenericType(new[] { ResolveType(name.Substring(0, name.Length - 1)) }); } if (name.EndsWith("&")) { Type t = ResolveType(name.Substring(0, name.Length - 1)); return t.MakeByRefType(); } if (name.EndsWith("*")) { Type t = ResolveType(name.Substring(0, name.Length - 1)); return t.MakePointerType(); } var s1 = ResolveType1(name); if (s1 != null) return s1; var name2 = name.Replace("/", "."); if (name2 != name) { s1 = ResolveType1(name2); if (s1 != null) return s1; } name2 = name.Replace("cli.", ""); if (name2 != name) { s1 = ResolveType1(name2); if (s1 != null) return s1; } return null; } public static Type ResolveType1(string typeName) { try { Type s1 = ResolveType2(typeName); if (s1 != null) return s1; return null; } catch (NotSupportedException e) { return null; throw new NotSupportedException("ResolveType12", e); } catch (NotImplementedException e) { return null; throw new NotSupportedException("ResolveType12", e); } catch (NullReferenceException e) { return null; throw new NotSupportedException("ResolveType12", e); } catch (Exception e) { throw new NotSupportedException("ResolveType123", e); } } public static Type ResolveType2(string typeName) { Type type; if (!typeName.Contains(".")) { lock (ShortNameType) { if (ShortNameType.TryGetValue(typeName, out type)) { return type; } } type = ResolveTypeInNameSpaces(typeName, false) ?? ResolveTypeInNameSpaces(typeName, true); if (type == null) { type = GetPrimitiveType(typeName); } if (type != null) { lock (ShortNameType) { ShortNameType[typeName] = type; } return type; } } type = Type.GetType(typeName); if (type != null) return type; type = Type.GetTypeFromProgID(typeName); if (type != null) return type; try { type = Type.GetTypeFromCLSID(new Guid(typeName)); if (type != null) return type; } catch (FormatException) { } type = ResolveTypeWithoutNameSpaces(typeName, false) ?? ResolveTypeWithoutNameSpaces(typeName, true); return type; } public static Type ResolveTypeWithoutNameSpaces(string typeName0, bool ignoreCase) { Type t = Type.GetType(typeName0, false, ignoreCase) ?? 
AssembliesLoaded.Select(loaded => loaded.GetType(typeName0, false, ignoreCase)) .FirstOrDefault(tt => tt != null); if (t != null) { return t; } if (ignoreCase) return null; #if USE_IKVM try { JavaClass javaClass = java.lang.Class.forName(typeName0, true, scriptingClassLoader); if (javaClass != null) { Type type = getInstanceTypeFromClass(javaClass); LongNameType[typeName0] = type; String lower = typeName0.ToLower(); if (!lower.Equals(typeName0)) { LongNameType[lower] = type; } return type; } } catch (java.lang.ClassNotFoundException e) { // java.lang.Class.forName(typeName0, true, scriptingClassLoader); } #endif return null; } public static Type ResolveTypeInNameSpaces(string typeName, bool ignoreCase) { string name0 = typeName; Type t = t = ResolveTypeWithoutNameSpaces(typeName, ignoreCase); if (t != null) return t; System.Diagnostics.Debug.Assert(PrefixStrings != null, "PrefixStrings != null"); if (PrefixStrings != null && PrefixStrings.Count != 0) foreach (string prefix0 in PrefixStrings) { t = ResolveTypeWithoutNameSpaces(prefix0 + typeName, ignoreCase); if (t != null) return t; } return null; } [ThreadStatic] public static List<string> _PrefixStrings; public static List<string> PrefixStrings { get { return _PrefixStrings ?? (_PrefixStrings = new List<string>() {"System."}); } } public static Type GetPrimitiveType(String name) { if (name.StartsWith("[")) { Type t = ResolveType(name.Substring(1)); return t.MakeArrayType(); } switch (name) { case "byte": case ":byte": case "B": case "uint8": case "ubyte": case ":unsigned-byte": return typeof(byte); case "int16": return typeof(Int16); case "int": case ":int": case "int32": case "I": return typeof(int); case "long": case "int64": case "J": return typeof(long); case "short": case "S": return typeof(short); case "sbyte": case "int8": return typeof(sbyte); case "uint": case ":unsigned-int": case "uint32": return typeof(uint); case "uint16": return typeof(UInt16); case "uint64": case "ulong": return typeof(ulong); case "ushort": return typeof(ushort); case "decimal": return typeof(decimal); case "double": case "D": return typeof(double); case "float": case "F": return typeof(float); case "object": return typeof(object); case "string": return typeof(string); case "void": case "V": return typeof(void); case "char": case "C": return typeof(char); case "bool": case "boolean": case "bit": case "Z": return typeof(bool); default: return null; } } } }
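// --- Illustrative sketch, not part of Swicli.Library. ---
// Shows the alias handling implemented by PrologCLR.GetPrimitiveType above:
// CLR-style names, JVM-style single-letter descriptors and a few keyword-style
// aliases all resolve to the same primitive System.Type, and unknown names fall
// through to null. The class and method names below are made up for
// illustration; only the public GetPrimitiveType member defined above is used.
namespace Swicli.Library
{
    internal static class TypeAliasSketch
    {
        internal static void Demo()
        {
            // CLR-style names from the switch above
            System.Diagnostics.Debug.Assert(PrologCLR.GetPrimitiveType("int") == typeof(int));
            System.Diagnostics.Debug.Assert(PrologCLR.GetPrimitiveType("string") == typeof(string));

            // JVM descriptor letters handled by the same switch
            System.Diagnostics.Debug.Assert(PrologCLR.GetPrimitiveType("J") == typeof(long));
            System.Diagnostics.Debug.Assert(PrologCLR.GetPrimitiveType("Z") == typeof(bool));

            // Anything not listed returns null (the default case)
            System.Diagnostics.Debug.Assert(PrologCLR.GetPrimitiveType("no_such_type") == null);
        }
    }
}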
// <copyright company="Simply Code Ltd."> // Copyright (c) Simply Code Ltd. All rights reserved. // Licensed under the MIT License. // See LICENSE file in the project root for full license information. // </copyright> namespace PackIt.DTO.DtoPlan { using PackIt.Helpers.Enums; /// <summary> A dto limit. </summary> public class DtoLimit { /// <summary> Gets or sets the identifier of the Plan that owns this item. </summary> /// /// <value> The identifier of the Plan. </value> public string PlanId { get; set; } /// <summary> Gets or sets the stage level. </summary> /// /// <value> The stage level. </value> public StageLevel StageLevel { get; set; } /// <summary> Gets or sets the zero-based index of this Limit. </summary> /// /// <value> The Limit index. </value> public long LimitIndex { get; set; } /// <summary> Gets or sets a value indicating whether the design. </summary> /// /// <value> True if design, false if not. </value> public bool Design { get; set; } /// <summary> Gets or sets the type of the material. </summary> /// /// <value> The type of the material. </value> public MaterialType MaterialType { get; set; } /// <summary> Gets or sets the material code. </summary> /// /// <value> The material code. </value> public string MaterialCode { get; set; } /// <summary> Gets or sets the material code minimum. </summary> /// /// <value> The material code minimum. </value> public string MaterialCodeMin { get; set; } /// <summary> Gets or sets the type of the design. </summary> /// /// <value> The type of the design. </value> public DesignType DesignType { get; set; } /// <summary> Gets or sets the design code. </summary> /// /// <value> The design code. </value> public string DesignCode { get; set; } /// <summary> Gets or sets the design code minimum. </summary> /// /// <value> The design code minimum. </value> public string DesignCodeMin { get; set; } /// <summary> Gets or sets the type of the quality. </summary> /// /// <value> The type of the quality. </value> public QualityType QualityType { get; set; } /// <summary> Gets or sets the quality code. </summary> /// /// <value> The quality code. </value> public string QualityCode { get; set; } /// <summary> Gets or sets the quality code minimum. </summary> /// /// <value> The quality code minimum. </value> public string QualityCodeMin { get; set; } /// <summary> Gets or sets the usage. </summary> /// /// <value> The usage. </value> public UsageType Usage { get; set; } /// <summary> Gets or sets a value indicating whether the inverted. </summary> /// /// <value> True if inverted, false if not. </value> public bool Inverted { get; set; } /// <summary> Gets or sets the number of layers. </summary> /// /// <value> The number of layers. </value> public long LayerCount { get; set; } /// <summary> Gets or sets the layer start. </summary> /// /// <value> The layer start. </value> public long LayerStart { get; set; } /// <summary> Gets or sets the layer step. </summary> /// /// <value> The layer step. </value> public long LayerStep { get; set; } /// <summary> Gets or sets the quality caliper. </summary> /// /// <value> The quality caliper. </value> public double QualityCaliper { get; set; } /// <summary> Gets or sets the quality density. </summary> /// /// <value> The quality density. </value> public double QualityDensity { get; set; } /// <summary> Gets or sets the length minimum. </summary> /// /// <value> The length minimum. </value> public double LengthMin { get; set; } /// <summary> Gets or sets the length maximum. 
</summary> /// /// <value> The length maximum. </value> public double LengthMax { get; set; } /// <summary> Gets or sets the breadth minimum. </summary> /// /// <value> The breadth minimum. </value> public double BreadthMin { get; set; } /// <summary> Gets or sets the breadth maximum. </summary> /// /// <value> The breadth maximum. </value> public double BreadthMax { get; set; } /// <summary> Gets or sets the height minimum. </summary> /// /// <value> The height minimum. </value> public double HeightMin { get; set; } /// <summary> Gets or sets the height maximum. </summary> /// /// <value> The height maximum. </value> public double HeightMax { get; set; } /// <summary> Gets or sets the caliper minimum. </summary> /// /// <value> The caliper minimum. </value> public double CaliperMin { get; set; } /// <summary> Gets or sets the caliper maximum. </summary> /// /// <value> The caliper maximum. </value> public double CaliperMax { get; set; } /// <summary> Gets or sets the packing gap x coordinate. </summary> /// /// <value> The packing gap x coordinate. </value> public double PackingGapX { get; set; } /// <summary> Gets or sets the packing gap y coordinate. </summary> /// /// <value> The packing gap y coordinate. </value> public double PackingGapY { get; set; } /// <summary> Gets or sets the packing gap z coordinate. </summary> /// /// <value> The packing gap z coordinate. </value> public double PackingGapZ { get; set; } /// <summary> Gets or sets the safety factor minimum. </summary> /// /// <value> The safety factor minimum. </value> public double SafetyFactorMin { get; set; } /// <summary> Gets or sets the safety factor maximum. </summary> /// /// <value> The safety factor maximum. </value> public double SafetyFactorMax { get; set; } /// <summary> Gets or sets the front placement. </summary> /// /// <value> The front placement. </value> public long FrontPlacement { get; set; } /// <summary> Gets or sets the back placement. </summary> /// /// <value> The back placement. </value> public long BackPlacement { get; set; } /// <summary> Gets or sets the left placement. </summary> /// /// <value> The left placement. </value> public long LeftPlacement { get; set; } /// <summary> Gets or sets the right placement. </summary> /// /// <value> The right placement. </value> public long RightPlacement { get; set; } /// <summary> Gets or sets the top placement. </summary> /// /// <value> The top placement. </value> public long TopPlacement { get; set; } /// <summary> Gets or sets the bottom placement. </summary> /// /// <value> The bottom placement. </value> public long BottomPlacement { get; set; } /// <summary> Gets or sets the length thicknesses. </summary> /// /// <value> The length thicknesses. </value> public long LengthThicknesses { get; set; } /// <summary> Gets or sets the length sink change. </summary> /// /// <value> The length sink change. </value> public long LengthSinkChange { get; set; } /// <summary> Gets or sets the breadth thicknesses. </summary> /// /// <value> The breadth thicknesses. </value> public long BreadthThicknesses { get; set; } /// <summary> Gets or sets the breadth sink change. </summary> /// /// <value> The breadth sink change. </value> public long BreadthSinkChange { get; set; } /// <summary> Gets or sets the height thicknesses. </summary> /// /// <value> The height thicknesses. </value> public long HeightThicknesses { get; set; } /// <summary> Gets or sets the height sink change. </summary> /// /// <value> The height sink change. 
</value> public long HeightSinkChange { get; set; } /// <summary> Gets or sets the type of the costing. </summary> /// /// <value> The type of the costing. </value> public CostType CostingType { get; set; } } }
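// --- Illustrative sketch, not part of PackIt.DTO. ---
// Shows how the paired *Min/*Max properties on DtoLimit above are typically
// read together: a candidate dimension is acceptable only if it falls inside
// each closed range. The helper name and the inclusive-bounds interpretation
// are assumptions for illustration, not part of the DTO contract.
namespace PackIt.DTO.DtoPlan
{
    internal static class DtoLimitRangeSketch
    {
        internal static bool WithinDimensionRanges(DtoLimit limit, double length, double breadth, double height)
        {
            return length >= limit.LengthMin && length <= limit.LengthMax
                && breadth >= limit.BreadthMin && breadth <= limit.BreadthMax
                && height >= limit.HeightMin && height <= limit.HeightMax;
        }
    }
}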
using System; using System.Collections.Generic; using System.Diagnostics; namespace Foundatio.Collections { /// <summary> /// A double-ended queue (deque), which provides O(1) indexed access, O(1) removals from the front and back, amortized O(1) insertions to the front and back, and O(N) insertions and removals anywhere else (with the operations getting slower as the index approaches the middle). /// </summary> /// <typeparam name="T">The type of elements contained in the deque.</typeparam> [DebuggerDisplay("Count = {Count}, Capacity = {Capacity}")] [DebuggerTypeProxy(typeof(Deque<>.DebugView))] public sealed class Deque<T> : IList<T>, IReadOnlyList<T>, System.Collections.IList { /// <summary> /// The default capacity. /// </summary> private const int DefaultCapacity = 8; /// <summary> /// The circular _buffer that holds the view. /// </summary> private T[] _buffer; /// <summary> /// The offset into <see cref="_buffer"/> where the view begins. /// </summary> private int _offset; /// <summary> /// Initializes a new instance of the <see cref="Deque&lt;T&gt;"/> class with the specified capacity. /// </summary> /// <param name="capacity">The initial capacity. Must be greater than <c>0</c>.</param> public Deque(int capacity) { if (capacity < 0) throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity may not be negative"); _buffer = new T[capacity]; } /// <summary> /// Initializes a new instance of the <see cref="Deque&lt;T&gt;"/> class with the elements from the specified collection. /// </summary> /// <param name="collection">The collection. May not be <c>null</c>.</param> public Deque(IEnumerable<T> collection) { if (collection == null) throw new ArgumentNullException(nameof(collection)); var source = CollectionHelpers.ReifyCollection(collection); var count = source.Count; if (count > 0) { _buffer = new T[count]; DoInsertRange(0, source); } else { _buffer = new T[DefaultCapacity]; } } /// <summary> /// Initializes a new instance of the <see cref="Deque&lt;T&gt;"/> class. /// </summary> public Deque() : this(DefaultCapacity) { } #region GenericListImplementations /// <summary> /// Gets a value indicating whether this list is read-only. This implementation always returns <c>false</c>. /// </summary> /// <returns>true if this list is read-only; otherwise, false.</returns> bool ICollection<T>.IsReadOnly { get { return false; } } /// <summary> /// Gets or sets the item at the specified index. /// </summary> /// <param name="index">The index of the item to get or set.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="index"/> is not a valid index in this list.</exception> /// <exception cref="T:System.NotSupportedException">This property is set and the list is read-only.</exception> public T this[int index] { get { CheckExistingIndexArgument(Count, index); return DoGetItem(index); } set { CheckExistingIndexArgument(Count, index); DoSetItem(index, value); } } /// <summary> /// Inserts an item to this list at the specified index. /// </summary> /// <param name="index">The zero-based index at which <paramref name="item"/> should be inserted.</param> /// <param name="item">The object to insert into this list.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"> /// <paramref name="index"/> is not a valid index in this list. /// </exception> /// <exception cref="T:System.NotSupportedException"> /// This list is read-only. 
/// </exception> public void Insert(int index, T item) { CheckNewIndexArgument(Count, index); DoInsert(index, item); } /// <summary> /// Removes the item at the specified index. /// </summary> /// <param name="index">The zero-based index of the item to remove.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"> /// <paramref name="index"/> is not a valid index in this list. /// </exception> /// <exception cref="T:System.NotSupportedException"> /// This list is read-only. /// </exception> public void RemoveAt(int index) { CheckExistingIndexArgument(Count, index); DoRemoveAt(index); } /// <summary> /// Determines the index of a specific item in this list. /// </summary> /// <param name="item">The object to locate in this list.</param> /// <returns>The index of <paramref name="item"/> if found in this list; otherwise, -1.</returns> public int IndexOf(T item) { var comparer = EqualityComparer<T>.Default; int ret = 0; foreach (var sourceItem in this) { if (comparer.Equals(item, sourceItem)) return ret; ++ret; } return -1; } /// <summary> /// Adds an item to the end of this list. /// </summary> /// <param name="item">The object to add to this list.</param> /// <exception cref="T:System.NotSupportedException"> /// This list is read-only. /// </exception> void ICollection<T>.Add(T item) { DoInsert(Count, item); } /// <summary> /// Determines whether this list contains a specific value. /// </summary> /// <param name="item">The object to locate in this list.</param> /// <returns> /// true if <paramref name="item"/> is found in this list; otherwise, false. /// </returns> bool ICollection<T>.Contains(T item) { var comparer = EqualityComparer<T>.Default; foreach (var entry in this) { if (comparer.Equals(item, entry)) return true; } return false; } /// <summary> /// Copies the elements of this list to an <see cref="T:System.Array"/>, starting at a particular <see cref="T:System.Array"/> index. /// </summary> /// <param name="array">The one-dimensional <see cref="T:System.Array"/> that is the destination of the elements copied from this slice. The <see cref="T:System.Array"/> must have zero-based indexing.</param> /// <param name="arrayIndex">The zero-based index in <paramref name="array"/> at which copying begins.</param> /// <exception cref="T:System.ArgumentNullException"> /// <paramref name="array"/> is null. /// </exception> /// <exception cref="T:System.ArgumentOutOfRangeException"> /// <paramref name="arrayIndex"/> is less than 0. /// </exception> /// <exception cref="T:System.ArgumentException"> /// <paramref name="arrayIndex"/> is equal to or greater than the length of <paramref name="array"/>. /// -or- /// The number of elements in the source <see cref="T:System.Collections.Generic.ICollection`1"/> is greater than the available space from <paramref name="arrayIndex"/> to the end of the destination <paramref name="array"/>. /// </exception> void ICollection<T>.CopyTo(T[] array, int arrayIndex) { if (array == null) throw new ArgumentNullException(nameof(array)); int count = Count; CheckRangeArguments(array.Length, arrayIndex, count); CopyToArray(array, arrayIndex); } /// <summary> /// Copies the deque elemens into an array. The resulting array always has all the deque elements contiguously. 
/// </summary> /// <param name="array">The destination array.</param> /// <param name="arrayIndex">The optional index in the destination array at which to begin writing.</param> private void CopyToArray(Array array, int arrayIndex = 0) { if (array == null) throw new ArgumentNullException(nameof(array)); if (IsSplit) { // The existing buffer is split, so we have to copy it in parts int length = Capacity - _offset; Array.Copy(_buffer, _offset, array, arrayIndex, length); Array.Copy(_buffer, 0, array, arrayIndex + length, Count - length); } else { // The existing buffer is whole Array.Copy(_buffer, _offset, array, arrayIndex, Count); } } /// <summary> /// Removes the first occurrence of a specific object from this list. /// </summary> /// <param name="item">The object to remove from this list.</param> /// <returns> /// true if <paramref name="item"/> was successfully removed from this list; otherwise, false. This method also returns false if <paramref name="item"/> is not found in this list. /// </returns> /// <exception cref="T:System.NotSupportedException"> /// This list is read-only. /// </exception> public bool Remove(T item) { int index = IndexOf(item); if (index == -1) return false; DoRemoveAt(index); return true; } /// <summary> /// Returns an enumerator that iterates through the collection. /// </summary> /// <returns> /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection. /// </returns> public IEnumerator<T> GetEnumerator() { int count = Count; for (int i = 0; i != count; ++i) { yield return DoGetItem(i); } } /// <summary> /// Returns an enumerator that iterates through a collection. /// </summary> /// <returns> /// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection. /// </returns> System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return GetEnumerator(); } #endregion #region ObjectListImplementations private static bool IsT(object value) { if (value is T) return true; if (value != null) return false; return default(T) == null; } int System.Collections.IList.Add(object value) { if (value == null && default(T) != null) throw new ArgumentNullException(nameof(value), "Value cannot be null"); if (!IsT(value)) throw new ArgumentException("Value is of incorrect type.", nameof(value)); AddToBack((T)value); return Count - 1; } bool System.Collections.IList.Contains(object value) { return IsT(value) ? ((ICollection<T>)this).Contains((T)value) : false; } int System.Collections.IList.IndexOf(object value) { return IsT(value) ? 
IndexOf((T)value) : -1; } void System.Collections.IList.Insert(int index, object value) { if (value == null && default(T) != null) throw new ArgumentNullException("value", "Value cannot be null"); if (!IsT(value)) throw new ArgumentException("Value is of incorrect type.", "value"); Insert(index, (T)value); } bool System.Collections.IList.IsFixedSize { get { return false; } } bool System.Collections.IList.IsReadOnly { get { return false; } } void System.Collections.IList.Remove(object value) { if (IsT(value)) Remove((T)value); } object System.Collections.IList.this[int index] { get { return this[index]; } set { if (value == null && default(T) != null) throw new ArgumentNullException(nameof(value), "Value cannot be null"); if (!IsT(value)) throw new ArgumentException("Value is of incorrect type.", nameof(value)); this[index] = (T)value; } } void System.Collections.ICollection.CopyTo(Array array, int index) { if (array == null) throw new ArgumentNullException(nameof(array), "Destination array cannot be null"); CheckRangeArguments(array.Length, index, Count); try { CopyToArray(array, index); } catch (ArrayTypeMismatchException ex) { throw new ArgumentException("Destination array is of incorrect type.", nameof(array), ex); } catch (RankException ex) { throw new ArgumentException("Destination array must be single dimensional.", nameof(array), ex); } } bool System.Collections.ICollection.IsSynchronized { get { return false; } } object System.Collections.ICollection.SyncRoot { get { return this; } } #endregion #region GenericListHelpers /// <summary> /// Checks the <paramref name="index"/> argument to see if it refers to a valid insertion point in a source of a given length. /// </summary> /// <param name="sourceLength">The length of the source. This parameter is not checked for validity.</param> /// <param name="index">The index into the source.</param> /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is not a valid index to an insertion point for the source.</exception> private static void CheckNewIndexArgument(int sourceLength, int index) { if (index < 0 || index > sourceLength) { throw new ArgumentOutOfRangeException(nameof(index), "Invalid new index " + index + " for source length " + sourceLength); } } /// <summary> /// Checks the <paramref name="index"/> argument to see if it refers to an existing element in a source of a given length. /// </summary> /// <param name="sourceLength">The length of the source. This parameter is not checked for validity.</param> /// <param name="index">The index into the source.</param> /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is not a valid index to an existing element for the source.</exception> private static void CheckExistingIndexArgument(int sourceLength, int index) { if (index < 0 || index >= sourceLength) { throw new ArgumentOutOfRangeException(nameof(index), "Invalid existing index " + index + " for source length " + sourceLength); } } /// <summary> /// Checks the <paramref name="offset"/> and <paramref name="count"/> arguments for validity when applied to a source of a given length. Allows 0-element ranges, including a 0-element range at the end of the source. /// </summary> /// <param name="sourceLength">The length of the source. 
This parameter is not checked for validity.</param> /// <param name="offset">The index into source at which the range begins.</param> /// <param name="count">The number of elements in the range.</param> /// <exception cref="ArgumentOutOfRangeException">Either <paramref name="offset"/> or <paramref name="count"/> is less than 0.</exception> /// <exception cref="ArgumentException">The range [offset, offset + count) is not within the range [0, sourceLength).</exception> private static void CheckRangeArguments(int sourceLength, int offset, int count) { if (offset < 0) { throw new ArgumentOutOfRangeException(nameof(offset), "Invalid offset " + offset); } if (count < 0) { throw new ArgumentOutOfRangeException(nameof(count), "Invalid count " + count); } if (sourceLength - offset < count) { throw new ArgumentException("Invalid offset (" + offset + ") or count + (" + count + ") for source length " + sourceLength); } } #endregion /// <summary> /// Gets a value indicating whether this instance is empty. /// </summary> private bool IsEmpty { get { return Count == 0; } } /// <summary> /// Gets a value indicating whether this instance is at full capacity. /// </summary> private bool IsFull { get { return Count == Capacity; } } /// <summary> /// Gets a value indicating whether the buffer is "split" (meaning the beginning of the view is at a later index in <see cref="_buffer"/> than the end). /// </summary> private bool IsSplit { get { // Overflow-safe version of "(offset + Count) > Capacity" return _offset > (Capacity - Count); } } /// <summary> /// Gets or sets the capacity for this deque. This value must always be greater than zero, and this property cannot be set to a value less than <see cref="Count"/>. /// </summary> /// <exception cref="InvalidOperationException"><c>Capacity</c> cannot be set to a value less than <see cref="Count"/>.</exception> public int Capacity { get { return _buffer.Length; } set { if (value < Count) throw new ArgumentOutOfRangeException(nameof(value), "Capacity cannot be set to a value less than Count"); if (value == _buffer.Length) return; // Create the new _buffer and copy our existing range. T[] newBuffer = new T[value]; CopyToArray(newBuffer); // Set up to use the new _buffer. _buffer = newBuffer; _offset = 0; } } /// <summary> /// Gets the number of elements contained in this deque. /// </summary> /// <returns>The number of elements contained in this deque.</returns> public int Count { get; private set; } /// <summary> /// Applies the offset to <paramref name="index"/>, resulting in a buffer index. /// </summary> /// <param name="index">The deque index.</param> /// <returns>The buffer index.</returns> private int DequeIndexToBufferIndex(int index) { return (index + _offset) % Capacity; } /// <summary> /// Gets an element at the specified view index. /// </summary> /// <param name="index">The zero-based view index of the element to get. This index is guaranteed to be valid.</param> /// <returns>The element at the specified index.</returns> private T DoGetItem(int index) { return _buffer[DequeIndexToBufferIndex(index)]; } /// <summary> /// Sets an element at the specified view index. /// </summary> /// <param name="index">The zero-based view index of the element to get. This index is guaranteed to be valid.</param> /// <param name="item">The element to store in the list.</param> private void DoSetItem(int index, T item) { _buffer[DequeIndexToBufferIndex(index)] = item; } /// <summary> /// Inserts an element at the specified view index. 
/// </summary> /// <param name="index">The zero-based view index at which the element should be inserted. This index is guaranteed to be valid.</param> /// <param name="item">The element to store in the list.</param> private void DoInsert(int index, T item) { EnsureCapacityForOneElement(); if (index == 0) { DoAddToFront(item); return; } else if (index == Count) { DoAddToBack(item); return; } DoInsertRange(index, new[] { item }); } /// <summary> /// Removes an element at the specified view index. /// </summary> /// <param name="index">The zero-based view index of the element to remove. This index is guaranteed to be valid.</param> private void DoRemoveAt(int index) { if (index == 0) { DoRemoveFromFront(); return; } else if (index == Count - 1) { DoRemoveFromBack(); return; } DoRemoveRange(index, 1); } /// <summary> /// Increments <see cref="_offset"/> by <paramref name="value"/> using modulo-<see cref="Capacity"/> arithmetic. /// </summary> /// <param name="value">The value by which to increase <see cref="_offset"/>. May not be negative.</param> /// <returns>The value of <see cref="_offset"/> after it was incremented.</returns> private int PostIncrement(int value) { int ret = _offset; _offset += value; _offset %= Capacity; return ret; } /// <summary> /// Decrements <see cref="_offset"/> by <paramref name="value"/> using modulo-<see cref="Capacity"/> arithmetic. /// </summary> /// <param name="value">The value by which to reduce <see cref="_offset"/>. May not be negative or greater than <see cref="Capacity"/>.</param> /// <returns>The value of <see cref="_offset"/> before it was decremented.</returns> private int PreDecrement(int value) { _offset -= value; if (_offset < 0) _offset += Capacity; return _offset; } /// <summary> /// Inserts a single element to the back of the view. <see cref="IsFull"/> must be false when this method is called. /// </summary> /// <param name="value">The element to insert.</param> private void DoAddToBack(T value) { _buffer[DequeIndexToBufferIndex(Count)] = value; ++Count; } /// <summary> /// Inserts a single element to the front of the view. <see cref="IsFull"/> must be false when this method is called. /// </summary> /// <param name="value">The element to insert.</param> private void DoAddToFront(T value) { _buffer[PreDecrement(1)] = value; ++Count; } /// <summary> /// Removes and returns the last element in the view. <see cref="IsEmpty"/> must be false when this method is called. /// </summary> /// <returns>The former last element.</returns> private T DoRemoveFromBack() { T ret = _buffer[DequeIndexToBufferIndex(Count - 1)]; --Count; return ret; } /// <summary> /// Removes and returns the first element in the view. <see cref="IsEmpty"/> must be false when this method is called. /// </summary> /// <returns>The former first element.</returns> private T DoRemoveFromFront() { --Count; return _buffer[PostIncrement(1)]; } /// <summary> /// Inserts a range of elements into the view. /// </summary> /// <param name="index">The index into the view at which the elements are to be inserted.</param> /// <param name="collection">The elements to insert. 
The sum of <c>collection.Count</c> and <see cref="Count"/> must be less than or equal to <see cref="Capacity"/>.</param> private void DoInsertRange(int index, IReadOnlyCollection<T> collection) { var collectionCount = collection.Count; // Make room in the existing list if (index < Count / 2) { // Inserting into the first half of the list // Move lower items down: [0, index) -> [Capacity - collectionCount, Capacity - collectionCount + index) // This clears out the low "index" number of items, moving them "collectionCount" places down; // after rotation, there will be a "collectionCount"-sized hole at "index". int copyCount = index; int writeIndex = Capacity - collectionCount; for (int j = 0; j != copyCount; ++j) _buffer[DequeIndexToBufferIndex(writeIndex + j)] = _buffer[DequeIndexToBufferIndex(j)]; // Rotate to the new view PreDecrement(collectionCount); } else { // Inserting into the second half of the list // Move higher items up: [index, count) -> [index + collectionCount, collectionCount + count) int copyCount = Count - index; int writeIndex = index + collectionCount; for (int j = copyCount - 1; j != -1; --j) _buffer[DequeIndexToBufferIndex(writeIndex + j)] = _buffer[DequeIndexToBufferIndex(index + j)]; } // Copy new items into place int i = index; foreach (T item in collection) { _buffer[DequeIndexToBufferIndex(i)] = item; ++i; } // Adjust valid count Count += collectionCount; } /// <summary> /// Removes a range of elements from the view. /// </summary> /// <param name="index">The index into the view at which the range begins.</param> /// <param name="collectionCount">The number of elements in the range. This must be greater than 0 and less than or equal to <see cref="Count"/>.</param> private void DoRemoveRange(int index, int collectionCount) { if (index == 0) { // Removing from the beginning: rotate to the new view PostIncrement(collectionCount); Count -= collectionCount; return; } else if (index == Count - collectionCount) { // Removing from the ending: trim the existing view Count -= collectionCount; return; } if ((index + (collectionCount / 2)) < Count / 2) { // Removing from first half of list // Move lower items up: [0, index) -> [collectionCount, collectionCount + index) int copyCount = index; int writeIndex = collectionCount; for (int j = copyCount - 1; j != -1; --j) _buffer[DequeIndexToBufferIndex(writeIndex + j)] = _buffer[DequeIndexToBufferIndex(j)]; // Rotate to new view PostIncrement(collectionCount); } else { // Removing from second half of list // Move higher items down: [index + collectionCount, count) -> [index, count - collectionCount) int copyCount = Count - collectionCount - index; int readIndex = index + collectionCount; for (int j = 0; j != copyCount; ++j) _buffer[DequeIndexToBufferIndex(index + j)] = _buffer[DequeIndexToBufferIndex(readIndex + j)]; } // Adjust valid count Count -= collectionCount; } /// <summary> /// Doubles the capacity if necessary to make room for one more element. When this method returns, <see cref="IsFull"/> is false. /// </summary> private void EnsureCapacityForOneElement() { if (IsFull) { Capacity = (Capacity == 0) ? 1 : Capacity * 2; } } /// <summary> /// Inserts a single element at the back of this deque. /// </summary> /// <param name="value">The element to insert.</param> public void AddToBack(T value) { EnsureCapacityForOneElement(); DoAddToBack(value); } /// <summary> /// Inserts a single element at the front of this deque. 
/// </summary> /// <param name="value">The element to insert.</param> public void AddToFront(T value) { EnsureCapacityForOneElement(); DoAddToFront(value); } /// <summary> /// Inserts a collection of elements into this deque. /// </summary> /// <param name="index">The index at which the collection is inserted.</param> /// <param name="collection">The collection of elements to insert.</param> /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is not a valid index to an insertion point for the source.</exception> public void InsertRange(int index, IEnumerable<T> collection) { CheckNewIndexArgument(Count, index); var source = CollectionHelpers.ReifyCollection(collection); int collectionCount = source.Count; // Overflow-safe check for "Count + collectionCount > Capacity" if (collectionCount > Capacity - Count) { Capacity = checked(Count + collectionCount); } if (collectionCount == 0) { return; } DoInsertRange(index, source); } /// <summary> /// Removes a range of elements from this deque. /// </summary> /// <param name="offset">The index into the deque at which the range begins.</param> /// <param name="count">The number of elements to remove.</param> /// <exception cref="ArgumentOutOfRangeException">Either <paramref name="offset"/> or <paramref name="count"/> is less than 0.</exception> /// <exception cref="ArgumentException">The range [<paramref name="offset"/>, <paramref name="offset"/> + <paramref name="count"/>) is not within the range [0, <see cref="Count"/>).</exception> public void RemoveRange(int offset, int count) { CheckRangeArguments(Count, offset, count); if (count == 0) { return; } DoRemoveRange(offset, count); } /// <summary> /// Removes and returns the last element of this deque. /// </summary> /// <returns>The former last element.</returns> /// <exception cref="InvalidOperationException">The deque is empty.</exception> public T RemoveFromBack() { if (IsEmpty) throw new InvalidOperationException("The deque is empty"); return DoRemoveFromBack(); } /// <summary> /// Removes and returns the first element of this deque. /// </summary> /// <returns>The former first element.</returns> /// <exception cref="InvalidOperationException">The deque is empty.</exception> public T RemoveFromFront() { if (IsEmpty) throw new InvalidOperationException("The deque is empty"); return DoRemoveFromFront(); } /// <summary> /// Removes all items from this deque. /// </summary> public void Clear() { _offset = 0; Count = 0; } /// <summary> /// Creates and returns a new array containing the elements in this deque. /// </summary> public T[] ToArray() { var result = new T[Count]; ((ICollection<T>)this).CopyTo(result, 0); return result; } [DebuggerNonUserCode] private sealed class DebugView { private readonly Deque<T> deque; public DebugView(Deque<T> deque) { this.deque = deque; } [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] public T[] Items { get { return deque.ToArray(); } } } } }
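// --- Illustrative usage sketch, not part of Foundatio.Collections. ---
// Exercises the public surface of Deque<T> defined above: O(1) pushes/pops at
// both ends, indexed access through the circular buffer, and automatic capacity
// doubling when the buffer fills. Only members declared on Deque<T> above are
// used; the class and method names here are made up.
using System;
using Foundatio.Collections;

internal static class DequeUsageSketch
{
    internal static void Demo()
    {
        var deque = new Deque<int>(capacity: 2);

        deque.AddToBack(2);   // [2]
        deque.AddToBack(3);   // [2, 3]  -- buffer is now full
        deque.AddToFront(1);  // [1, 2, 3] -- capacity doubles, front wraps around the buffer

        Console.WriteLine(deque[1]);                // 2 (view index, independent of the internal offset)
        Console.WriteLine(deque.RemoveFromFront()); // 1
        Console.WriteLine(deque.RemoveFromBack());  // 3
        Console.WriteLine(deque.Count);             // 1

        deque.InsertRange(1, new[] { 4, 5 });       // [2, 4, 5]
        Console.WriteLine(string.Join(",", deque.ToArray())); // 2,4,5
    }
}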
using VRage.Game.Components; using Sandbox.Common.ObjectBuilders; using VRage.ObjectBuilders; using Sandbox.ModAPI; using Sandbox.Game.EntityComponents; using System.Collections.Generic; using VRage.ModAPI; using VRage.Game.ModAPI; using VRageMath; using System; using System.Text; using VRage.Game; using VRage.Game.ObjectBuilders.Definitions; using Sandbox.ModAPI.Interfaces.Terminal; using VRage.Utils; namespace Cython.PowerTransmission { [MyEntityComponentDescriptor(typeof(MyObjectBuilder_Refinery), "LargeBlockSmallOpticalPowerTransmitter", "SmallBlockSmallOpticalPowerTransmitter")] public class OpticalPowerTransmitter: MyGameLogicComponent { static IMyTerminalControlOnOffSwitch m_controlSender = null; static IMyTerminalControlTextbox m_controlId = null; static IMyTerminalControlTextbox m_controlPower = null; static bool m_controlsInit = false; MyObjectBuilder_EntityBase m_objectBuilder; IMyFunctionalBlock m_functionalBlock; IMyCubeBlock m_cubeBlock; IMyTerminalBlock m_terminalBlock; int m_ticks = 0; long m_entityId; string m_subtypeName; int counter; MyResourceSinkComponent m_resourceSink; public MyDefinitionId m_electricityDefinition; public OpticalPowerTransmitterInfo m_info = new OpticalPowerTransmitterInfo(); public PTInfo m_saveInfo; float m_oldTransmittedPower = 0f; public float m_transmittedPower = 0f; uint m_senders = 0; float m_currentOutput = 0f; float m_maxRange = 20.0f; float m_maxRangeSquared = 400.0f; float m_maxPower = 1.0f; float m_currentMaxPower = 1.0f; float m_receivingPower = 0f; public float m_powerToTransfer = 0f; float m_powerMultiplicator = 0.95f; float m_oldPowerToTransfer = 0f; float m_currentRequiredInput = 0f; float m_rayOffset = 4.0f; public uint m_id = 0; public uint m_targetId = 0; uint m_targetIdOld = 0; bool m_targetVisible = false; public bool m_sender = false; OpticalPowerTransmitterInfo m_target = null; Dictionary<IMyFunctionalBlock, float> m_receiver = new Dictionary<IMyFunctionalBlock, float> (); public override void Init (MyObjectBuilder_EntityBase objectBuilder) { base.Init (objectBuilder); m_objectBuilder = objectBuilder; Entity.NeedsUpdate |= MyEntityUpdateEnum.BEFORE_NEXT_FRAME | MyEntityUpdateEnum.EACH_FRAME; m_entityId = Entity.EntityId; m_functionalBlock = Entity as IMyFunctionalBlock; m_electricityDefinition = new MyDefinitionId (typeof(MyObjectBuilder_GasProperties), "Electricity"); m_terminalBlock = Entity as IMyTerminalBlock; m_subtypeName = m_functionalBlock.BlockDefinition.SubtypeName; m_maxPower = getMaxPower (m_subtypeName); m_powerMultiplicator = getPowerMultiplicator (m_subtypeName); m_currentMaxPower = m_maxPower; m_maxRange = getMaxRange (m_subtypeName); m_maxRangeSquared = m_maxRange * m_maxRange; m_rayOffset = this.getRayOffset (m_subtypeName); m_info.rayOffset = m_rayOffset; m_info.functionalBlock = m_functionalBlock; m_info.subtypeName = m_subtypeName; m_cubeBlock = Entity as IMyCubeBlock; m_cubeBlock.AddUpgradeValue ("OpticalPowerStrength", 1.0f); m_functionalBlock.CustomNameChanged += parseName; m_cubeBlock.OnUpgradeValuesChanged += onUpgradeValuesChanged; m_terminalBlock.AppendingCustomInfo += appendCustomInfo; } public override MyObjectBuilder_EntityBase GetObjectBuilder (bool copy = false) { return m_objectBuilder; } public override void UpdateOnceBeforeFrame () { base.UpdateOnceBeforeFrame (); m_resourceSink = Entity.Components.Get<MyResourceSinkComponent> (); parseName ((IMyTerminalBlock)m_functionalBlock); m_saveInfo = new PTInfo(Entity.EntityId, m_sender, m_id, m_transmittedPower, "O"); 
if(!MyAPIGateway.Multiplayer.IsServer) requestSettingsFromServer(); m_info.strength = m_currentMaxPower; createUI(); } void requestSettingsFromServer() { byte[] message = new byte[20]; byte[] messageId = BitConverter.GetBytes(11); byte[] messageSender = BitConverter.GetBytes(MyAPIGateway.Session.Player.SteamUserId); byte[] messageEntityId = BitConverter.GetBytes(Entity.EntityId); for(int i = 0; i < 4; i++) message[i] = messageId[i]; for(int i = 0; i < 8; i++) message[i+4] = messageSender[i]; for(int i = 0; i < 8; i++) message[i+12] = messageEntityId[i]; MyAPIGateway.Multiplayer.SendMessageToServer(5910, message, true); } public override void OnRemovedFromScene () { m_info.functionalBlock = null; if (TransmissionManager.opticalTransmitters.ContainsKey (m_entityId)) { TransmissionManager.opticalTransmitters.Remove (m_entityId); } base.OnRemovedFromScene (); } public override void OnAddedToScene () { base.OnAddedToScene (); m_entityId = Entity.EntityId; m_info.functionalBlock = Entity as IMyFunctionalBlock; if (Entity.InScene) { if (!TransmissionManager.opticalTransmitters.ContainsKey (m_entityId)) { TransmissionManager.opticalTransmitters.Add (m_entityId, m_info); } if(!TransmissionManager.totalPowerPerGrid.ContainsKey(m_functionalBlock.CubeGrid.EntityId)) { TransmissionManager.totalPowerPerGrid.Add (m_functionalBlock.CubeGrid.EntityId, 0); } } } public override void UpdateBeforeSimulation () { base.UpdateBeforeSimulation(); //MyAPIGateway.Utilities.ShowNotification ("YEP: " + m_subtypeName, 17); if (m_functionalBlock.IsFunctional) { if (m_functionalBlock.Enabled) { m_info.enabled = true; } else { m_info.enabled = false; } } else { m_info.enabled = false; } if (!m_sender) { if (m_currentRequiredInput != 0) { m_resourceSink.SetRequiredInputByType (m_electricityDefinition, 0); m_oldTransmittedPower = 0; m_currentRequiredInput = 0; } m_target = null; } else if (m_functionalBlock.Enabled && m_functionalBlock.IsFunctional) { if (m_target != null) { if (m_target.functionalBlock == null) { m_target = null; } else { if (!m_target.functionalBlock.Enabled) { m_target = null; } } } if(m_ticks % 100 == 0) { if (m_sender) { maintainConnection (); } m_targetIdOld = m_targetId; } if (m_target != null) { if (m_targetVisible) { m_currentOutput = m_resourceSink.CurrentInputByType (m_electricityDefinition); m_powerToTransfer = m_currentOutput * m_powerMultiplicator; var transmitterComponent = m_target.functionalBlock.GameLogic.GetAs < OpticalPowerTransmitter>(); if ((transmitterComponent.m_receivingPower + m_powerToTransfer) > m_target.strength) { m_powerToTransfer = m_target.strength - transmitterComponent.m_receivingPower; } transmitterComponent.m_receivingPower += m_powerToTransfer; transmitterComponent.m_senders++; //MyAPIGateway.Utilities.ShowNotification ("ADD: " + m_target.functionalBlock.CubeGrid.EntityId + ":" + powerToTransfer, 17, MyFontEnum.DarkBlue); TransmissionManager.totalPowerPerGrid [m_target.functionalBlock.CubeGrid.EntityId] = TransmissionManager.totalPowerPerGrid [m_target.functionalBlock.CubeGrid.EntityId] + m_powerToTransfer; } } } else { m_target = null; if (m_currentRequiredInput != 0) { m_resourceSink.SetRequiredInputByType (m_electricityDefinition, 0); m_oldTransmittedPower = 0; m_currentRequiredInput = 0; } } if(m_ticks % 100 == 0) { m_terminalBlock.RefreshCustomInfo (); } if(m_sender) { m_saveInfo.ChannelTarget = m_targetId; } else { m_saveInfo.ChannelTarget = m_id; } m_saveInfo.Sender = m_sender; m_saveInfo.Power = m_transmittedPower; m_receivingPower = 0f; m_senders = 0; 
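// The per-tick receive statistics were just reset; senders targeting this block add their contribution back during their own UpdateBeforeSimulation pass.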
m_ticks++; } public override void UpdateAfterSimulation () { if(m_ticks == 1) { bool contains = false; foreach(var ptInfo in TransmitterLogic.transmittersSaveFile.Transmitters) { if(ptInfo.Id == Entity.EntityId) { contains = true; m_saveInfo = ptInfo; } } if(!contains) { m_saveInfo = new PTInfo(Entity.EntityId, m_sender, m_id, m_transmittedPower, "O"); TransmitterLogic.transmittersSaveFile.Transmitters.Add(m_saveInfo); } } } public void maintainConnection() { //MyAPIGateway.Utilities.ShowNotification ("MAINTAIN", 1000, MyFontEnum.White); if (m_target != null) { if (m_target.functionalBlock == null) { //MyAPIGateway.Utilities.ShowNotification ("TARGET REMOVED", 1000, MyFontEnum.White); m_target = null; } } if (m_target != null) { //MyAPIGateway.Utilities.ShowNotification ("GOT TARGET", 1000, MyFontEnum.White); if (m_targetIdOld != m_target.id) { m_target = null; } else { m_targetIdOld = m_target.id; } } if (m_target == null) { //MyAPIGateway.Utilities.ShowNotification ("LF TARGET", 1000, MyFontEnum.White); foreach (var transmitter in TransmissionManager.opticalTransmitters) { //MyAPIGateway.Utilities.ShowNotification ("CHECK " + transmitter.Value.id + " " + m_targetId, 1000, MyFontEnum.White); if (transmitter.Value.id == m_targetId) { if (!transmitter.Value.sender) { if (transmitter.Value.functionalBlock.Enabled) { //MyAPIGateway.Utilities.ShowNotification ("GOT TARGET", 1000, MyFontEnum.White); m_target = transmitter.Value; m_targetIdOld = m_target.id; } } } //MyAPIGateway.Utilities.ShowNotification ("COUNT: " + TransmissionManager.opticalTransmitters.Count, 1667); } } if (m_target != null) { //MyAPIGateway.Utilities.ShowNotification ("RECEIVER: " + m_target.functionalBlock.EntityId, 1000, MyFontEnum.White); m_targetVisible = false; Vector3D thisPosition = m_functionalBlock.GetPosition (); Vector3D targetPosition = m_target.functionalBlock.GetPosition (); if (m_transmittedPower != m_oldTransmittedPower) { m_currentRequiredInput = m_transmittedPower; m_resourceSink.SetRequiredInputByType (m_electricityDefinition, m_transmittedPower); //MyAPIGateway.Utilities.ShowNotification ("COUNT: " + TransmissionManager.opticalTransmitters.Count, 3000, MyFontEnum.Red); m_oldTransmittedPower = m_transmittedPower; } if (m_target.enabled) { double distance = Vector3D.DistanceSquared (thisPosition, targetPosition); //MyAPIGateway.Utilities.ShowNotification ("RANGE: " + distance + ":" + m_maxRangeSquared, 1000, MyFontEnum.White); if (distance < m_maxRangeSquared) { Vector3D direction = targetPosition - thisPosition; direction.Normalize (); //MyAPIGateway.Utilities.ShowNotification ("VEC: " + thisPosition + ":" + targetPosition + ":" + direction * 20, 1000, MyFontEnum.White); if (!MyAPIGateway.Entities.IsRaycastBlocked (thisPosition + direction * m_rayOffset, targetPosition - direction * m_target.rayOffset)) { m_targetVisible = true; //MyAPIGateway.Utilities.ShowNotification ("" + m_target.functionalBlock.CubeGrid.EntityId + " VISIBLE", 1000); return; } else { m_targetVisible = false; //MyAPIGateway.Utilities.ShowNotification ("BLOCKED", 1000, MyFontEnum.Red); } } } } m_targetVisible = false; if (m_currentRequiredInput != 0) { m_resourceSink.SetRequiredInputByType (m_electricityDefinition, 0); m_oldTransmittedPower = 0; m_currentRequiredInput = 0; } } float getMaxPower(string subtypeName) { if (TransmissionManager.configuration.UseMaximumPower) { if (subtypeName == "LargeBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.LargeBlockSmallOpticalPowerTransmitter.MaximumPower; } else 
if (subtypeName == "SmallBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.SmallBlockSmallOpticalPowerTransmitter.MaximumPower; } } else { return float.PositiveInfinity; } return 1.0f; } float getPowerMultiplicator(string subtypeName) { if (TransmissionManager.configuration.UseMaximumPower) { if (subtypeName == "LargeBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.LargeBlockSmallOpticalPowerTransmitter.PowerMultiplicator; } else if (subtypeName == "SmallBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.SmallBlockSmallOpticalPowerTransmitter.PowerMultiplicator; } } else { return float.PositiveInfinity; } return 1.0f; } float getMaxRange(string subtypeName) { if (TransmissionManager.configuration.UseMaximumPower) { if (subtypeName == "LargeBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.LargeBlockSmallOpticalPowerTransmitter.MaximumRange; } else if (subtypeName == "SmallBlockSmallOpticalPowerTransmitter") { return TransmissionManager.configuration.SmallBlockSmallOpticalPowerTransmitter.MaximumRange; } } else { return float.PositiveInfinity; } return 20.0f; } float getRayOffset(string subtypeName) { if (subtypeName == "LargeBlockSmallOpticalPowerTransmitter") { return 4f; } else if (subtypeName == "SmallBlockSmallOpticalPowerTransmitter") { return 0.5f; } return 0.5f; } void parseName(IMyTerminalBlock terminalBlock) { int settingsStart = terminalBlock.CustomName.IndexOf ("("); if (settingsStart != -1) { if(settingsStart < (terminalBlock.CustomName.Length-1)) { int start = terminalBlock.CustomName.IndexOf ("P:", settingsStart + 1); if (start != -1) { if ((start + 2) < (terminalBlock.CustomName.Length - 1)) { int end = terminalBlock.CustomName.IndexOf (',', start + 2); if (end != -1) { try { m_transmittedPower = Convert.ToSingle (terminalBlock.CustomName.Substring (start + 2, end - (start + 2))); if(m_transmittedPower > m_currentMaxPower) { m_transmittedPower = m_currentMaxPower; } //MyAPIGateway.Utilities.ShowNotification ("" + m_transmittedPower, 1000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + (start + 2) + " " + (end - (start + 2)) + e.Message, 1000, MyFontEnum.Red); } } else { end = terminalBlock.CustomName.IndexOf (')', start + 2); if (end != -1) { try { m_transmittedPower = Convert.ToSingle (terminalBlock.CustomName.Substring (start + 2, end - (start + 2))); if(m_transmittedPower > m_currentMaxPower) { m_transmittedPower = m_currentMaxPower; } //MyAPIGateway.Utilities.ShowNotification ("" + m_transmittedPower, 1000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + (start + 2) + " " + (end - (start + 2)) + e.Message, 1000, MyFontEnum.Red); } } } } } else { m_transmittedPower = 0f; } start = terminalBlock.CustomName.IndexOf ("T:", settingsStart + 1); if (start != -1) { if ((start + 2) < (terminalBlock.CustomName.Length-1)) { int end = terminalBlock.CustomName.IndexOf (',', start + 2); if (end != -1) { try { m_targetId = Convert.ToUInt32 (terminalBlock.CustomName.Substring (start + 2, end - (start + 2))); //MyAPIGateway.Utilities.ShowNotification ("" + m_targetId, 4000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 4000, MyFontEnum.Red); } } else { end = terminalBlock.CustomName.IndexOf (')', start + 2); if (end != -1) { try { m_targetId = Convert.ToUInt32 (terminalBlock.CustomName.Substring (start + 2, end - (start + 
2))); //MyAPIGateway.Utilities.ShowNotification ("" + m_targetId, 4000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 4000, MyFontEnum.Red); } } } } } start = terminalBlock.CustomName.IndexOf ("I:", settingsStart + 1); if (start != -1) { if ((start + 2) < (terminalBlock.CustomName.Length-1)) { int end = terminalBlock.CustomName.IndexOf (',', start + 2); if (end != -1) { try { m_id = Convert.ToUInt32 (terminalBlock.CustomName.Substring (start + 2, end - (start + 2))); m_info.id = m_id; //MyAPIGateway.Utilities.ShowNotification ("" + m_id, 4000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 4000, MyFontEnum.Red); } } else { end = terminalBlock.CustomName.IndexOf (')', start + 2); if (end != -1) { try { m_id = Convert.ToUInt32 (terminalBlock.CustomName.Substring (start + 2, end - (start + 2))); m_info.id = m_id; //MyAPIGateway.Utilities.ShowNotification ("" + m_id, 4000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 4000, MyFontEnum.Red); } } } } } start = terminalBlock.CustomName.IndexOf ("M:", settingsStart + 1); if (start != -1) { if ((start + 2) < (terminalBlock.CustomName.Length - 1)) { int end = terminalBlock.CustomName.IndexOf (',', start + 2); if (end != -1) { try { m_sender = terminalBlock.CustomName.Substring (start + 2, end - (start + 2)) == "S"; m_info.sender = m_sender; //MyAPIGateway.Utilities.ShowNotification ("" + m_sender, 1000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 1000, MyFontEnum.Red); } } else { end = terminalBlock.CustomName.IndexOf (')', start + 2); if (end != -1) { try { m_sender = terminalBlock.CustomName.Substring (start + 2, end - (start + 2)) == "S"; m_info.sender = m_sender; //MyAPIGateway.Utilities.ShowNotification ("" + m_sender, 1000, MyFontEnum.DarkBlue); } catch (Exception e) { //MyAPIGateway.Utilities.ShowNotification ("" + e.Message, 1000, MyFontEnum.Red); } } } } } else { m_sender = false; m_info.sender = m_sender; } } } } public void appendCustomInfo(IMyTerminalBlock block, StringBuilder info) { info.Clear (); info.AppendLine (" "); info.AppendLine ("-----Optical Transmitter Info-----"); info.AppendLine (" "); if (m_sender) { info.AppendLine ("(M)ode: Sender"); info.AppendLine ("(T)arget ID: " + m_targetId); if (m_functionalBlock.Enabled) { if (m_target == null) { info.AppendLine ("Status: Searching for Target"); } else { if (m_targetVisible) { info.AppendLine ("Status: Connected and Visible"); info.AppendLine ("(P)ower sent: " + m_currentOutput.ToString ("N") + "MW / " + m_transmittedPower.ToString ("N") + "MW"); } else { info.AppendLine ("Status: Connected but Blocked"); } } } else { info.AppendLine ("Status: Disabled"); } info.AppendLine (" "); info.AppendLine ("Range: " + (m_maxRange / 1000d).ToString("N") + "KM"); } else { info.AppendLine ("(M)ode: Receiver"); info.AppendLine ("(I)D: " + m_id); info.AppendLine (" "); info.AppendLine ("Power Receiving: " + m_receivingPower.ToString("N") + "MW / " + m_currentMaxPower.ToString("N") + "MW"); info.AppendLine ("Number of Sources: " + m_senders); } info.AppendLine (" "); info.AppendLine ("-----Usage-----"); info.AppendLine (" "); info.AppendLine ("To configure this Optical Transmitter as a sender you write its configuration tags that are explained below within a pair of brackets into its name."); info.AppendLine (""); info.AppendLine ("Example: Optical Power 
Transmitter (T:1, P:10, M:S)"); info.AppendLine (""); info.AppendLine ("T: Defines the target ID of the Optical Transmitter you want to send power to. It has to be a positive number."); info.AppendLine (""); info.AppendLine ("P: Defines the amount of power in MW to send to the specified target."); info.AppendLine (""); info.AppendLine ("M: Defines the mode of the Transmitter, in this case it is set so (S)ender. If it is not a sender, it is a receiver by default."); info.AppendLine (""); info.AppendLine ("To configure this Optical Transmitter as a receiver you write its configuration tags that are explained below within a pair of brackets into its name."); info.AppendLine (""); info.AppendLine ("Example: Optical Power Transmitter (I:1)"); info.AppendLine (""); info.AppendLine ("I: Defines the ID of this Optical Transmitter. It has to match with the target IDs of the senders to receive their power. It has to be a positive number."); info.AppendLine (""); info.AppendLine ("(Optional) M: Defines the mode of the Transmitter. By default an Optical Transmitter is in Receiver mode, so you do not have to define it."); } public override void MarkForClose () { m_info.functionalBlock = null; if (TransmissionManager.opticalTransmitters.ContainsKey (m_entityId)) { TransmissionManager.opticalTransmitters.Remove (m_entityId); } base.MarkForClose (); } public override void Close () { m_info.functionalBlock = null; if (TransmissionManager.opticalTransmitters.ContainsKey (m_entityId)) { TransmissionManager.opticalTransmitters.Remove (m_entityId); } if(TransmitterLogic.transmittersSaveFile.Transmitters.Contains(m_saveInfo)) { TransmitterLogic.transmittersSaveFile.Transmitters.Remove(m_saveInfo); } base.Close (); } void onUpgradeValuesChanged () { m_currentMaxPower = m_cubeBlock.UpgradeValues["OpticalPowerStrength"] * m_maxPower; parseName ((IMyTerminalBlock)m_functionalBlock); m_info.strength = m_currentMaxPower; } static void createUI() { if (m_controlsInit) return; m_controlsInit = true; MyAPIGateway.TerminalControls.CustomControlGetter -= customControlGetter; MyAPIGateway.TerminalControls.CustomControlGetter += customControlGetter; // sender/receiver switch m_controlSender = MyAPIGateway.TerminalControls.CreateControl<IMyTerminalControlOnOffSwitch, IMyRefinery>("Cython.OPT.SenderReceiver"); m_controlSender.Enabled = (b) => true; m_controlSender.Visible = (b) => b.BlockDefinition.SubtypeId.Equals("LargeBlockSmallOpticalPowerTransmitter") || b.BlockDefinition.SubtypeId.Equals("SmallBlockSmallOpticalPowerTransmitter"); m_controlSender.Title = MyStringId.GetOrCompute("Mode"); m_controlSender.Tooltip = MyStringId.GetOrCompute("Switches this transmitters mode to Sender or Receiver"); m_controlSender.OnText = MyStringId.GetOrCompute("Send"); m_controlSender.OffText = MyStringId.GetOrCompute("Rec."); m_controlSender.Getter = (b) => b.GameLogic.GetAs<OpticalPowerTransmitter>().m_sender; m_controlSender.Setter = (b, v) => { b.GameLogic.GetAs<OpticalPowerTransmitter>().m_sender = v; b.GameLogic.GetAs<OpticalPowerTransmitter>().m_info.sender = v; m_controlSender.UpdateVisual(); m_controlPower.UpdateVisual(); byte[] message = new byte[13]; byte[] messageId = BitConverter.GetBytes(3); byte[] entityId = BitConverter.GetBytes(b.EntityId); for(int i = 0; i < 4; i++) { message[i] = messageId[i]; } for(int i = 0; i < 8; i++) { message[i+4] = entityId[i]; } message[12] = BitConverter.GetBytes(v)[0]; MyAPIGateway.Multiplayer.SendMessageToOthers(5910, message, true); }; 
MyAPIGateway.TerminalControls.AddControl<IMyRefinery>(m_controlSender); // channel field m_controlId = MyAPIGateway.TerminalControls.CreateControl<IMyTerminalControlTextbox, IMyRefinery>("Cython.OPT.ID"); m_controlId.Enabled = (b) => true; m_controlId.Visible = (b) => b.BlockDefinition.SubtypeId.Equals("LargeBlockSmallOpticalPowerTransmitter") || b.BlockDefinition.SubtypeId.Equals("SmallBlockSmallOpticalPowerTransmitter"); m_controlId.Title = MyStringId.GetOrCompute("ID"); m_controlId.Tooltip = MyStringId.GetOrCompute("ID this transmitter is being identified as when being receiver or it is supposed to send to."); m_controlId.Getter = (b) => { if(b.GameLogic.GetAs<OpticalPowerTransmitter>().m_sender) { return (new StringBuilder()).Append(b.GameLogic.GetAs<OpticalPowerTransmitter>().m_targetId); } else { return (new StringBuilder()).Append(b.GameLogic.GetAs<OpticalPowerTransmitter>().m_id); } }; m_controlId.Setter = (b, s) => { uint id; if(uint.TryParse(s.ToString(), out id)) { var OPT = b.GameLogic.GetAs<OpticalPowerTransmitter>(); if(OPT.m_sender) { OPT.m_targetId = id; } else { OPT.m_id = id; OPT.m_info.id = id; } byte[] message = new byte[16]; byte[] messageId = BitConverter.GetBytes(4); byte[] entityId = BitConverter.GetBytes(b.EntityId); byte[] value = BitConverter.GetBytes(id); for(int i = 0; i < 4; i++) { message[i] = messageId[i]; } for(int i = 0; i < 8; i++) { message[i+4] = entityId[i]; } for(int i = 0; i < 4; i++) { message[i+12] = value[i]; } MyAPIGateway.Multiplayer.SendMessageToOthers(5910, message, true); } }; MyAPIGateway.TerminalControls.AddControl<Sandbox.ModAPI.Ingame.IMyRefinery>(m_controlId); // power field m_controlPower = MyAPIGateway.TerminalControls.CreateControl<IMyTerminalControlTextbox, Sandbox.ModAPI.Ingame.IMyRefinery>("Cython.OPT.Power"); m_controlPower.Enabled = (b) => b.GameLogic.GetAs<OpticalPowerTransmitter>().m_sender; m_controlPower.Visible = (b) => b.BlockDefinition.SubtypeId.Equals("LargeBlockSmallOpticalPowerTransmitter") || b.BlockDefinition.SubtypeId.Equals("SmallBlockSmallOpticalPowerTransmitter"); m_controlPower.Title = MyStringId.GetOrCompute("Power"); m_controlPower.Tooltip = MyStringId.GetOrCompute("Maximum power this transmitter is supposed to send."); m_controlPower.Getter = (b) => (new StringBuilder()).Append(b.GameLogic.GetAs<OpticalPowerTransmitter>().m_transmittedPower); m_controlPower.Setter = (b, s) => { float power; if(float.TryParse(s.ToString(), out power)) { var OPT = b.GameLogic.GetAs<OpticalPowerTransmitter>(); OPT.m_transmittedPower = power; if(OPT.m_transmittedPower > OPT.m_currentMaxPower) { OPT.m_transmittedPower = OPT.m_currentMaxPower; } byte[] message = new byte[16]; byte[] messageId = BitConverter.GetBytes(5); byte[] entityId = BitConverter.GetBytes(b.EntityId); byte[] value = BitConverter.GetBytes(OPT.m_transmittedPower); for(int i = 0; i < 4; i++) { message[i] = messageId[i]; } for(int i = 0; i < 8; i++) { message[i+4] = entityId[i]; } for(int i = 0; i < 4; i++) { message[i+12] = value[i]; } MyAPIGateway.Multiplayer.SendMessageToOthers(5910, message, true); } }; MyAPIGateway.TerminalControls.AddControl<IMyRefinery>(m_controlPower); } static void customControlGetter(IMyTerminalBlock block, List<IMyTerminalControl> controls) { List<IMyTerminalControl> toRemove = new List<IMyTerminalControl>(); foreach(var control in controls) { if(block is IMyRefinery) { if(block.BlockDefinition.SubtypeName.Equals("LargeBlockSmallOpticalPowerTransmitter") || 
block.BlockDefinition.SubtypeName.Equals("SmallBlockSmallOpticalPowerTransmitter")) { if(control.Id.Equals("UseConveyor")) { toRemove.Add(control); } } } } foreach(var control in toRemove) { controls.Remove(control); } } } }
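The custom-info text above documents the name-tag convention that parseName reads from the block name, e.g. "Optical Power Transmitter (T:1, P:10, M:S)" for a sender or "(I:1)" for a receiver. The following standalone sketch restates that convention outside the game API; the TransmitterNameTag type and its Parse helper are illustrative and are not part of the mod.

using System;
using System.Collections.Generic;

// Sketch only: restates the "(T:..., P:..., M:...)" / "(I:...)" naming convention
// parsed by OpticalPowerTransmitter.parseName above; names here are hypothetical.
static class TransmitterNameTag
{
    public static Dictionary<string, string> Parse(string customName)
    {
        var tags = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        int open = customName.IndexOf('(');
        if (open < 0) return tags;                        // no settings block in the name
        int close = customName.IndexOf(')', open + 1);
        if (close < 0) return tags;                       // unterminated settings block
        string body = customName.Substring(open + 1, close - open - 1);
        foreach (string part in body.Split(','))
        {
            string[] pair = part.Split(new[] { ':' }, 2);
            if (pair.Length == 2)
                tags[pair[0].Trim()] = pair[1].Trim();    // e.g. "P" -> "10"
        }
        return tags;
    }
}

For example, Parse("Optical Power Transmitter (T:1, P:10, M:S)") yields T=1, P=10, M=S; the mod additionally clamps P to the transmitter's current maximum power, as parseName shows.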
using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; using System.Web.Mvc; using System.Web.Routing; using Orchard.ContentManagement; using Orchard.Core.Contents.Controllers; using Orchard.Core.Settings.Models; using Orchard.DisplayManagement; using Orchard.Localization; using Orchard.Security; using Orchard.UI.Notify; using Orchard.Users.Events; using Orchard.Users.Models; using Orchard.Users.Services; using Orchard.Users.ViewModels; using Orchard.Mvc.Extensions; using System; using Orchard.Settings; using Orchard.UI.Navigation; using Orchard.Utility.Extensions; namespace Orchard.Users.Controllers { [ValidateInput(false)] public class AdminController : Controller, IUpdateModel { private readonly IMembershipService _membershipService; private readonly IUserService _userService; private readonly IEnumerable<IUserEventHandler> _userEventHandlers; private readonly ISiteService _siteService; public AdminController( IOrchardServices services, IMembershipService membershipService, IUserService userService, IShapeFactory shapeFactory, IEnumerable<IUserEventHandler> userEventHandlers, ISiteService siteService) { Services = services; _membershipService = membershipService; _userService = userService; _userEventHandlers = userEventHandlers; _siteService = siteService; T = NullLocalizer.Instance; Shape = shapeFactory; } dynamic Shape { get; set; } public IOrchardServices Services { get; set; } public Localizer T { get; set; } public ActionResult Index(UserIndexOptions options, PagerParameters pagerParameters) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to list users"))) return new HttpUnauthorizedResult(); var pager = new Pager(_siteService.GetSiteSettings(), pagerParameters); // default options if (options == null) options = new UserIndexOptions(); var users = Services.ContentManager .Query<UserPart, UserPartRecord>(); switch (options.Filter) { case UsersFilter.Approved: users = users.Where(u => u.RegistrationStatus == UserStatus.Approved); break; case UsersFilter.Pending: users = users.Where(u => u.RegistrationStatus == UserStatus.Pending); break; case UsersFilter.EmailPending: users = users.Where(u => u.EmailStatus == UserStatus.Pending); break; } if(!String.IsNullOrWhiteSpace(options.Search)) { users = users.Where(u => u.UserName.Contains(options.Search) || u.Email.Contains(options.Search)); } var pagerShape = Shape.Pager(pager).TotalItemCount(users.Count()); switch (options.Order) { case UsersOrder.Name: users = users.OrderBy(u => u.UserName); break; case UsersOrder.Email: users = users.OrderBy(u => u.Email); break; } var results = users .Slice(pager.GetStartIndex(), pager.PageSize) .ToList(); var model = new UsersIndexViewModel { Users = results .Select(x => new UserEntry { User = x.Record }) .ToList(), Options = options, Pager = pagerShape }; // maintain previous route data when generating page links var routeData = new RouteData(); routeData.Values.Add("Options.Filter", options.Filter); routeData.Values.Add("Options.Search", options.Search); routeData.Values.Add("Options.Order", options.Order); pagerShape.RouteData(routeData); return View(model); } [HttpPost] [FormValueRequired("submit.BulkEdit")] public ActionResult Index(FormCollection input) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var viewModel = new UsersIndexViewModel {Users = new List<UserEntry>(), Options = new UserIndexOptions()}; UpdateModel(viewModel); 
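// UpdateModel has rebound the posted form onto viewModel.Users, so IsChecked below reflects the rows ticked in the admin grid.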
var checkedEntries = viewModel.Users.Where(c => c.IsChecked); switch (viewModel.Options.BulkAction) { case UsersBulkAction.None: break; case UsersBulkAction.Approve: foreach (var entry in checkedEntries) { Approve(entry.User.Id); } break; case UsersBulkAction.Disable: foreach (var entry in checkedEntries) { Moderate(entry.User.Id); } break; case UsersBulkAction.ChallengeEmail: foreach (var entry in checkedEntries) { SendChallengeEmail(entry.User.Id); } break; case UsersBulkAction.Delete: foreach (var entry in checkedEntries) { Delete(entry.User.Id); } break; } return RedirectToAction("Index", ControllerContext.RouteData.Values); } public ActionResult Create() { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.New<IUser>("User"); var editor = Shape.EditorTemplate(TemplateName: "Parts/User.Create", Model: new UserCreateViewModel(), Prefix: null); editor.Metadata.Position = "2"; dynamic model = Services.ContentManager.BuildEditor(user); model.Content.Add(editor); // Casting to avoid invalid (under medium trust) reflection over the protected View method and force a static invocation. return View((object)model); } [HttpPost, ActionName("Create")] public ActionResult CreatePOST(UserCreateViewModel createModel) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); if (!string.IsNullOrEmpty(createModel.UserName)) { if (!_userService.VerifyUserUnicity(createModel.UserName, createModel.Email)) { AddModelError("NotUniqueUserName", T("User with that username and/or email already exists.")); } } if (!Regex.IsMatch(createModel.Email ?? "", UserPart.EmailPattern, RegexOptions.IgnoreCase)) { // http://haacked.com/archive/2007/08/21/i-knew-how-to-validate-an-email-address-until-i.aspx ModelState.AddModelError("Email", T("You must specify a valid email address.")); } if (createModel.Password != createModel.ConfirmPassword) { AddModelError("ConfirmPassword", T("Password confirmation must match")); } var user = Services.ContentManager.New<IUser>("User"); if (ModelState.IsValid) { user = _membershipService.CreateUser(new CreateUserParams( createModel.UserName, createModel.Password, createModel.Email, null, null, true)); } dynamic model = Services.ContentManager.UpdateEditor(user, this); if (!ModelState.IsValid) { Services.TransactionManager.Cancel(); var editor = Shape.EditorTemplate(TemplateName: "Parts/User.Create", Model: createModel, Prefix: null); editor.Metadata.Position = "2"; model.Content.Add(editor); // Casting to avoid invalid (under medium trust) reflection over the protected View method and force a static invocation. return View((object)model); } Services.Notifier.Information(T("User created")); return RedirectToAction("Index"); } public ActionResult Edit(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<UserPart>(id); var editor = Shape.EditorTemplate(TemplateName: "Parts/User.Edit", Model: new UserEditViewModel {User = user}, Prefix: null); editor.Metadata.Position = "2"; dynamic model = Services.ContentManager.BuildEditor(user); model.Content.Add(editor); // Casting to avoid invalid (under medium trust) reflection over the protected View method and force a static invocation. 
return View((object)model); } [HttpPost, ActionName("Edit")] public ActionResult EditPOST(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<UserPart>(id, VersionOptions.DraftRequired); string previousName = user.UserName; dynamic model = Services.ContentManager.UpdateEditor(user, this); var editModel = new UserEditViewModel {User = user}; if (TryUpdateModel(editModel)) { if (!_userService.VerifyUserUnicity(id, editModel.UserName, editModel.Email)) { AddModelError("NotUniqueUserName", T("User with that username and/or email already exists.")); } else if (!Regex.IsMatch(editModel.Email ?? "", UserPart.EmailPattern, RegexOptions.IgnoreCase)) { // http://haacked.com/archive/2007/08/21/i-knew-how-to-validate-an-email-address-until-i.aspx ModelState.AddModelError("Email", T("You must specify a valid email address.")); } else { // also update the Super user if this is the renamed account if (String.Equals(Services.WorkContext.CurrentSite.SuperUser, previousName, StringComparison.Ordinal)) { _siteService.GetSiteSettings().As<SiteSettingsPart>().SuperUser = editModel.UserName; } user.NormalizedUserName = editModel.UserName.ToLowerInvariant(); } } if (!ModelState.IsValid) { Services.TransactionManager.Cancel(); var editor = Shape.EditorTemplate(TemplateName: "Parts/User.Edit", Model: editModel, Prefix: null); editor.Metadata.Position = "2"; model.Content.Add(editor); // Casting to avoid invalid (under medium trust) reflection over the protected View method and force a static invocation. return View((object)model); } Services.ContentManager.Publish(user.ContentItem); Services.Notifier.Information(T("User information updated")); return RedirectToAction("Index"); } [HttpPost] public ActionResult Delete(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<IUser>(id); if (user != null) { if (String.Equals(Services.WorkContext.CurrentSite.SuperUser, user.UserName, StringComparison.Ordinal)) { Services.Notifier.Error(T("The Super user can't be removed. Please disable this account or specify another Super user account")); } else if (String.Equals(Services.WorkContext.CurrentUser.UserName, user.UserName, StringComparison.Ordinal)) { Services.Notifier.Error(T("You can't remove your own account. 
Please log in with another account")); } else{ Services.ContentManager.Remove(user.ContentItem); Services.Notifier.Information(T("User {0} deleted", user.UserName)); } } return RedirectToAction("Index"); } public ActionResult SendChallengeEmail(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<IUser>(id); if ( user != null ) { var siteUrl = Services.WorkContext.CurrentSite.As<SiteSettings2Part>().BaseUrl; if (String.IsNullOrWhiteSpace(siteUrl)) { siteUrl = HttpContext.Request.ToRootUrlString(); } _userService.SendChallengeEmail(user.As<UserPart>(), nonce => Url.MakeAbsolute(Url.Action("ChallengeEmail", "Account", new { Area = "Orchard.Users", nonce = nonce }), siteUrl)); Services.Notifier.Information(T("Challenge email sent to {0}", user.UserName)); } return RedirectToAction("Index"); } public ActionResult Approve(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<IUser>(id); if ( user != null ) { user.As<UserPart>().RegistrationStatus = UserStatus.Approved; Services.Notifier.Information(T("User {0} approved", user.UserName)); foreach (var userEventHandler in _userEventHandlers) { userEventHandler.Approved(user); } } return RedirectToAction("Index"); } public ActionResult Moderate(int id) { if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users"))) return new HttpUnauthorizedResult(); var user = Services.ContentManager.Get<IUser>(id); if (user != null) { if (String.Equals(Services.WorkContext.CurrentUser.UserName, user.UserName, StringComparison.Ordinal)) { Services.Notifier.Error(T("You can't disable your own account. Please log in with another account")); } else { user.As<UserPart>().RegistrationStatus = UserStatus.Pending; Services.Notifier.Information(T("User {0} disabled", user.UserName)); } } return RedirectToAction("Index"); } bool IUpdateModel.TryUpdateModel<TModel>(TModel model, string prefix, string[] includeProperties, string[] excludeProperties) { return TryUpdateModel(model, prefix, includeProperties, excludeProperties); } public void AddModelError(string key, LocalizedString errorMessage) { ModelState.AddModelError(key, errorMessage.ToString()); } } }
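Every action above opens with the same guard: a SiteOwner authorization check that short-circuits to an HttpUnauthorizedResult, followed by a content fetch, a Notifier message, and a redirect back to Index. A minimal sketch of that recurring shape follows, using only members referenced in the controller above; "SomeAdminAction" is a hypothetical name and is not part of Orchard.

// Sketch only: the per-action pattern used throughout AdminController above;
// Services, T, StandardPermissions and IUser come from the file above.
public ActionResult SomeAdminAction(int id) {
    if (!Services.Authorizer.Authorize(StandardPermissions.SiteOwner, T("Not authorized to manage users")))
        return new HttpUnauthorizedResult();

    var user = Services.ContentManager.Get<IUser>(id);
    if (user != null) {
        // ... action-specific work on the user content item goes here ...
        Services.Notifier.Information(T("User {0} updated", user.UserName));
    }
    return RedirectToAction("Index");
}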
// GtkSharp.Generation.SymbolTable.cs - The Symbol Table Class. // // Author: Mike Kestner <[email protected]> // // Copyright (c) 2001-2003 Mike Kestner // Copyright (c) 2004-2005 Novell, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of version 2 of the GNU General Public // License as published by the Free Software Foundation. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // // You should have received a copy of the GNU General Public // License along with this program; if not, write to the // Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, // Boston, MA 02110-1301 namespace GtkSharp.Generation { using System; using System.Collections; public class SymbolTable { static SymbolTable table = null; Hashtable types = new Hashtable (); public static SymbolTable Table { get { if (table == null) table = new SymbolTable (); return table; } } public SymbolTable () { // Simple easily mapped types AddType (new SimpleGen ("void", "void", String.Empty)); AddType (new SimpleGen ("gpointer", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("gboolean", "bool", "false")); AddType (new SimpleGen ("gint", "int", "0")); AddType (new SimpleGen ("guint", "uint", "0")); AddType (new SimpleGen ("int", "int", "0")); AddType (new SimpleGen ("unsigned", "uint", "0")); AddType (new SimpleGen ("unsigned int", "uint", "0")); AddType (new SimpleGen ("unsigned-int", "uint", "0")); AddType (new SimpleGen ("gshort", "short", "0")); AddType (new SimpleGen ("gushort", "ushort", "0")); AddType (new SimpleGen ("short", "short", "0")); AddType (new SimpleGen ("guchar", "byte", "0")); AddType (new SimpleGen ("unsigned char", "byte", "0")); AddType (new SimpleGen ("unsigned-char", "byte", "0")); AddType (new SimpleGen ("guint1", "bool", "false")); AddType (new SimpleGen ("uint1", "bool", "false")); AddType (new SimpleGen ("gint8", "sbyte", "0")); AddType (new SimpleGen ("guint8", "byte", "0")); AddType (new SimpleGen ("gint16", "short", "0")); AddType (new SimpleGen ("guint16", "ushort", "0")); AddType (new SimpleGen ("gint32", "int", "0")); AddType (new SimpleGen ("guint32", "uint", "0")); AddType (new SimpleGen ("gint64", "long", "0")); AddType (new SimpleGen ("guint64", "ulong", "0")); AddType (new SimpleGen ("long long", "long", "0")); AddType (new SimpleGen ("gfloat", "float", "0.0")); AddType (new SimpleGen ("float", "float", "0.0")); AddType (new SimpleGen ("gdouble", "double", "0.0")); AddType (new SimpleGen ("double", "double", "0.0")); AddType (new SimpleGen ("goffset", "long", "0")); AddType (new SimpleGen ("GQuark", "int", "0")); // platform specific integer types. 
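// Under LLP64 (64-bit Windows) the C 'long' family stays 32 bits wide, so it maps directly to int/uint; on LP64 platforms it follows pointer size, which is what the LPGen/LPUGen wrappers below model.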
#if WIN64LONGS AddType (new SimpleGen ("long", "int", "0")); AddType (new SimpleGen ("glong", "int", "0")); AddType (new SimpleGen ("ulong", "uint", "0")); AddType (new SimpleGen ("gulong", "uint", "0")); AddType (new SimpleGen ("unsigned long", "uint", "0")); #else AddType (new LPGen ("long")); AddType (new LPGen ("glong")); AddType (new LPUGen ("ulong")); AddType (new LPUGen ("gulong")); AddType (new LPUGen ("unsigned long")); #endif AddType (new LPGen ("ssize_t")); AddType (new LPGen ("gssize")); AddType (new LPUGen ("size_t")); AddType (new LPUGen ("gsize")); #if OFF_T_8 AddType (new AliasGen ("off_t", "long")); #else AddType (new LPGen ("off_t")); #endif // string types AddType (new ConstStringGen ("const-gchar")); AddType (new ConstStringGen ("const-xmlChar")); AddType (new ConstStringGen ("const-char")); AddType (new ConstFilenameGen ("const-gfilename")); AddType (new MarshalGen ("gfilename", "string", "IntPtr", "GLib.Marshaller.StringToFilenamePtr({0})", "GLib.Marshaller.FilenamePtrToStringGFree({0})")); AddType (new MarshalGen ("gchar", "string", "IntPtr", "GLib.Marshaller.StringToPtrGStrdup({0})", "GLib.Marshaller.PtrToStringGFree({0})")); AddType (new MarshalGen ("char", "string", "IntPtr", "GLib.Marshaller.StringToPtrGStrdup({0})", "GLib.Marshaller.PtrToStringGFree({0})")); AddType (new SimpleGen ("GStrv", "string[]", "null")); // manually wrapped types requiring more complex marshaling AddType (new ManualGen ("GInitiallyUnowned", "GLib.InitiallyUnowned", "GLib.Object.GetObject ({0})")); AddType (new ManualGen ("GObject", "GLib.Object", "GLib.Object.GetObject ({0})")); AddType (new ManualGen ("GList", "GLib.List")); AddType (new ManualGen ("GPtrArray", "GLib.PtrArray")); AddType (new ManualGen ("GSList", "GLib.SList")); AddType (new MarshalGen ("gunichar", "char", "uint", "GLib.Marshaller.CharToGUnichar ({0})", "GLib.Marshaller.GUnicharToChar ({0})")); AddType (new MarshalGen ("time_t", "System.DateTime", "IntPtr", "GLib.Marshaller.DateTimeTotime_t ({0})", "GLib.Marshaller.time_tToDateTime ({0})")); AddType (new MarshalGen ("GString", "string", "IntPtr", "new GLib.GString ({0}).Handle", "GLib.GString.PtrToString ({0})")); AddType (new MarshalGen ("GType", "GLib.GType", "IntPtr", "{0}.Val", "new GLib.GType({0})")); AddType (new ByRefGen ("GValue", "GLib.Value")); AddType (new SimpleGen ("GDestroyNotify", "GLib.DestroyNotify", "null")); // FIXME: These ought to be handled properly. 
AddType (new SimpleGen ("GC", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GError", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GMemChunk", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GTimeVal", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GClosure", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GArray", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GByteArray", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GData", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GIOChannel", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GTypeModule", "GLib.Object", "null")); AddType (new SimpleGen ("GHashTable", "System.IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("va_list", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("GParamSpec", "IntPtr", "IntPtr.Zero")); AddType (new SimpleGen ("gconstpointer", "IntPtr", "IntPtr.Zero")); } public void AddType (IGeneratable gen) { types [gen.CName] = gen; } public void AddTypes (IGeneratable[] gens) { foreach (IGeneratable gen in gens) types [gen.CName] = gen; } public int Count { get { return types.Count; } } public IEnumerable Generatables { get { return types.Values; } } public IGeneratable this [string ctype] { get { return DeAlias (ctype) as IGeneratable; } } private bool IsConstString (string type) { switch (type) { case "const-gchar": case "const-char": case "const-xmlChar": case "const-gfilename": return true; default: return false; } } private string Trim(string type) { // HACK: If we don't detect this here, there is no // way of indicating it in the symbol table if (type == "void*" || type == "const-void*") return "gpointer"; string trim_type = type.TrimEnd('*'); if (IsConstString (trim_type)) return trim_type; if (trim_type.StartsWith("const-")) return trim_type.Substring(6); return trim_type; } private object DeAlias (string type) { type = Trim (type); while (types [type] is AliasGen) { IGeneratable igen = types [type] as AliasGen; types [type] = types [igen.Name]; type = igen.Name; } return types [type]; } public string FromNativeReturn(string c_type, string val) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.FromNativeReturn (val); } public string ToNativeReturn(string c_type, string val) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.ToNativeReturn (val); } public string FromNative(string c_type, string val) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.FromNative (val); } public string GetCSType(string c_type) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.QualifiedName; } public string GetName(string c_type) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.Name; } public string GetMarshalReturnType(string c_type) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.MarshalReturnType; } public string GetToNativeReturnType(string c_type) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.ToNativeReturnType; } public string GetMarshalType(string c_type) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.MarshalType; } public string CallByName(string c_type, string var_name) { IGeneratable gen = this[c_type]; if (gen == null) return ""; return gen.CallByName(var_name); } public bool IsOpaque(string c_type) { if (this[c_type] is OpaqueGen) return true; return false; } public bool IsBoxed(string c_type) { if (this[c_type] is BoxedGen) return true; return false; } public bool IsStruct(string 
c_type) { if (this[c_type] is StructGen) return true; return false; } public bool IsEnum(string c_type) { if (this[c_type] is EnumGen) return true; return false; } public bool IsEnumFlags(string c_type) { EnumGen gen = this [c_type] as EnumGen; return (gen != null && gen.Elem.GetAttribute ("type") == "flags"); } public bool IsInterface(string c_type) { if (this[c_type] is InterfaceGen) return true; return false; } public ClassBase GetClassGen(string c_type) { return this[c_type] as ClassBase; } public bool IsObject(string c_type) { if (this[c_type] is ObjectGen) return true; return false; } public bool IsCallback(string c_type) { if (this[c_type] is CallbackGen) return true; return false; } public bool IsManuallyWrapped(string c_type) { if (this[c_type] is ManualGen) return true; return false; } public string MangleName(string name) { switch (name) { case "string": return "str1ng"; case "event": return "evnt"; case "null": return "is_null"; case "object": return "objekt"; case "params": return "parms"; case "ref": return "reference"; case "in": return "in_param"; case "out": return "out_param"; case "fixed": return "mfixed"; case "byte": return "_byte"; case "new": return "_new"; case "base": return "_base"; case "lock": return "_lock"; case "callback": return "cb"; case "readonly": return "read_only"; case "interface": return "iface"; case "internal": return "_internal"; case "where": return "wh3r3"; case "foreach": return "for_each"; case "remove": return "_remove"; default: break; } return name; } } }
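The table above drives type resolution throughout the generator: the indexer trims '*' and 'const-' prefixes, follows aliases, and hands back an IGeneratable whose helpers produce the C# and marshal types. A brief lookup sketch follows, assuming the generator assembly that defines SymbolTable and its IGeneratable implementations is referenced; the exact strings returned depend on those implementations, so the comments below are indicative rather than guaranteed.

using System;
using GtkSharp.Generation;

// Sketch only: how generator code typically consults the symbol table above.
class SymbolTableLookupSketch
{
    static void Main()
    {
        SymbolTable table = SymbolTable.Table;              // lazily-created singleton
        string csType = table.GetCSType("const-gchar*");    // resolved via ConstStringGen
        string marshal = table.GetMarshalType("gboolean");  // SimpleGen mapping for gboolean
        bool opaque = table.IsOpaque("GObject");            // false: GObject is a ManualGen, not an OpaqueGen
        string safeName = table.MangleName("object");       // "objekt" - avoids the C# keyword
        Console.WriteLine("{0} {1} {2} {3}", csType, marshal, opaque, safeName);
    }
}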
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // // Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0 // Changes may cause incorrect behavior and will be lost if the code is regenerated. namespace Microsoft.Azure.Management.TrafficManager { using System; using System.Linq; using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using Microsoft.Rest.Azure; using Models; /// <summary> /// ProfilesOperations operations. /// </summary> internal partial class ProfilesOperations : IServiceOperations<TrafficManagerManagementClient>, IProfilesOperations { /// <summary> /// Initializes a new instance of the ProfilesOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal ProfilesOperations(TrafficManagerManagementClient client) { if (client == null) { throw new ArgumentNullException("client"); } this.Client = client; } /// <summary> /// Gets a reference to the TrafficManagerManagementClient /// </summary> public TrafficManagerManagementClient Client { get; private set; } /// <summary> /// Checks the availability of a Traffic Manager Relative DNS name. /// </summary> /// <param name='parameters'> /// The Traffic Manager name parameters supplied to the /// CheckTrafficManagerNameAvailability operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<TrafficManagerNameAvailability>> CheckTrafficManagerRelativeDnsNameAvailabilityWithHttpMessagesAsync(CheckTrafficManagerRelativeDnsNameAvailabilityParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "CheckTrafficManagerRelativeDnsNameAvailability", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "providers/Microsoft.Network/checkTrafficManagerNameAvailability").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(parameters != null) { _requestContent = SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<TrafficManagerNameAvailability>(); _result.Request = _httpRequest; _result.Response 
= _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<TrafficManagerNameAvailability>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Lists all Traffic Manager profiles within a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group containing the Traffic Manager profiles to /// be listed. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IEnumerable<Profile>>> ListAllInResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListAllInResourceGroup", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IEnumerable<Profile>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Page<Profile>>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// 
<summary> /// Lists all Traffic Manager profiles within a subscription. /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IEnumerable<Profile>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListAll", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Network/trafficmanagerprofiles").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IEnumerable<Profile>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Page<Profile>>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// 
<summary> /// Gets a Traffic Manager profile. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group containing the Traffic Manager profile. /// </param> /// <param name='profileName'> /// The name of the Traffic Manager profile. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<Profile>> GetWithHttpMessagesAsync(string resourceGroupName, string profileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (profileName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "profileName"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("profileName", profileName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{profileName}", Uri.EscapeDataString(profileName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<Profile>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Profile>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// 
Create or update a Traffic Manager profile. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group containing the Traffic Manager profile. /// </param> /// <param name='profileName'> /// The name of the Traffic Manager profile. /// </param> /// <param name='parameters'> /// The Traffic Manager profile parameters supplied to the CreateOrUpdate /// operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<Profile>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string profileName, Profile parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (profileName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "profileName"); } if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("profileName", profileName); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "CreateOrUpdate", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{profileName}", Uri.EscapeDataString(profileName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("PUT"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(parameters != null) { _requestContent = SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 201) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<Profile>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Profile>(_responseContent, 
this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } // Deserialize Response if ((int)_statusCode == 201) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Profile>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Deletes a Traffic Manager profile. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group containing the Traffic Manager profile to /// be deleted. /// </param> /// <param name='profileName'> /// The name of the Traffic Manager profile to be deleted. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string profileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (profileName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "profileName"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("profileName", profileName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Delete", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{profileName}", Uri.EscapeDataString(profileName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("DELETE"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 204) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Update a Traffic Manager profile. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group containing the Traffic Manager profile. /// </param> /// <param name='profileName'> /// The name of the Traffic Manager profile. /// </param> /// <param name='parameters'> /// The Traffic Manager profile parameters supplied to the Update operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<AzureOperationResponse<Profile>> UpdateWithHttpMessagesAsync(string resourceGroupName, string profileName, Profile parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (profileName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "profileName"); } if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("profileName", profileName); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Update", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficmanagerprofiles/{profileName}").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{profileName}", Uri.EscapeDataString(profileName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("PATCH"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(parameters != null) { _requestContent = SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<Profile>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Profile>(_responseContent, this.Client.DeserializationSettings); } catch 
(JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
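// ---------------------------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the generated client above).
// It shows how the *WithHttpMessagesAsync methods are typically consumed: the returned
// AzureOperationResponse<T> wraps the deserialized Body together with the raw request and
// response, and every non-success status code surfaces as a CloudException (see the error
// path in the generated methods above). The interface name IProfilesOperations and the
// TrafficManager namespaces are assumptions about the usual AutoRest output, not taken from
// this file; adjust them to the actual generated names.
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest.Azure;                               // AzureOperationResponse<T>, CloudException
using Microsoft.Azure.Management.TrafficManager;          // assumed namespace for IProfilesOperations
using Microsoft.Azure.Management.TrafficManager.Models;   // assumed namespace for Profile

internal static class ProfilesOperationsUsageSketch
{
    internal static async Task<Profile> TryGetProfileAsync(
        IProfilesOperations operations,  // assumption: generated interface implemented by the class above
        string resourceGroupName,
        string profileName,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        try
        {
            // On a 200 response the generated method deserializes the body into Profile and
            // returns it together with the raw HttpRequestMessage/HttpResponseMessage.
            AzureOperationResponse<Profile> response = await operations
                .GetWithHttpMessagesAsync(resourceGroupName, profileName, null, cancellationToken)
                .ConfigureAwait(false);
            return response.Body;
        }
        catch (CloudException)
        {
            // Any non-200 status is thrown as CloudException by the generated code;
            // this sketch simply treats it as "no profile".
            return null;
        }
    }
}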
// Copyright (c) 2001-2021 Aspose Pty Ltd. All Rights Reserved. // // This file is part of Aspose.Words. The source code in this file // is only intended as a supplement to the documentation, and is provided // "as is", without warranty of any kind, either expressed or implied. ////////////////////////////////////////////////////////////////////////// using System.IO; using System.Linq; using Aspose.Words; using Aspose.Words.Saving; using NUnit.Framework; namespace ApiExamples { [TestFixture] internal class ExSavingCallback : ApiExampleBase { [Test] public void CheckThatAllMethodsArePresent() { HtmlFixedSaveOptions htmlFixedSaveOptions = new HtmlFixedSaveOptions(); htmlFixedSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); ImageSaveOptions imageSaveOptions = new ImageSaveOptions(SaveFormat.Png); imageSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); PdfSaveOptions pdfSaveOptions = new PdfSaveOptions(); pdfSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); PsSaveOptions psSaveOptions = new PsSaveOptions(); psSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); SvgSaveOptions svgSaveOptions = new SvgSaveOptions(); svgSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); XamlFixedSaveOptions xamlFixedSaveOptions = new XamlFixedSaveOptions(); xamlFixedSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); XpsSaveOptions xpsSaveOptions = new XpsSaveOptions(); xpsSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); } //ExStart //ExFor:IPageSavingCallback //ExFor:IPageSavingCallback.PageSaving(PageSavingArgs) //ExFor:PageSavingArgs //ExFor:PageSavingArgs.PageFileName //ExFor:PageSavingArgs.KeepPageStreamOpen //ExFor:PageSavingArgs.PageIndex //ExFor:PageSavingArgs.PageStream //ExFor:FixedPageSaveOptions.PageSavingCallback //ExSummary:Shows how to use a callback to save a document to HTML page by page. [Test] //ExSkip public void PageFileNames() { Document doc = new Document(); DocumentBuilder builder = new DocumentBuilder(doc); builder.Writeln("Page 1."); builder.InsertBreak(BreakType.PageBreak); builder.Writeln("Page 2."); builder.InsertImage(ImageDir + "Logo.jpg"); builder.InsertBreak(BreakType.PageBreak); builder.Writeln("Page 3."); // Create an "HtmlFixedSaveOptions" object, which we can pass to the document's "Save" method // to modify how we convert the document to HTML. HtmlFixedSaveOptions htmlFixedSaveOptions = new HtmlFixedSaveOptions(); // We will save each page in this document to a separate HTML file in the local file system. // Set a callback that allows us to name each output HTML document. htmlFixedSaveOptions.PageSavingCallback = new CustomFileNamePageSavingCallback(); doc.Save(ArtifactsDir + "SavingCallback.PageFileNames.html", htmlFixedSaveOptions); string[] filePaths = Directory.GetFiles(ArtifactsDir).Where( s => s.StartsWith(ArtifactsDir + "SavingCallback.PageFileNames.Page_")).OrderBy(s => s).ToArray(); Assert.AreEqual(3, filePaths.Length); } /// <summary> /// Saves all pages to a file and directory specified within. /// </summary> private class CustomFileNamePageSavingCallback : IPageSavingCallback { public void PageSaving(PageSavingArgs args) { string outFileName = $"{ArtifactsDir}SavingCallback.PageFileNames.Page_{args.PageIndex}.html"; // Below are two ways of specifying where Aspose.Words will save each page of the document. 
// 1 - Set a filename for the output page file: args.PageFileName = outFileName; // 2 - Create a custom stream for the output page file: args.PageStream = new FileStream(outFileName, FileMode.Create); Assert.False(args.KeepPageStreamOpen); } } //ExEnd //ExStart //ExFor:DocumentPartSavingArgs //ExFor:DocumentPartSavingArgs.Document //ExFor:DocumentPartSavingArgs.DocumentPartFileName //ExFor:DocumentPartSavingArgs.DocumentPartStream //ExFor:DocumentPartSavingArgs.KeepDocumentPartStreamOpen //ExFor:IDocumentPartSavingCallback //ExFor:IDocumentPartSavingCallback.DocumentPartSaving(DocumentPartSavingArgs) //ExFor:IImageSavingCallback //ExFor:IImageSavingCallback.ImageSaving //ExFor:ImageSavingArgs //ExFor:ImageSavingArgs.ImageFileName //ExFor:HtmlSaveOptions //ExFor:HtmlSaveOptions.DocumentPartSavingCallback //ExFor:HtmlSaveOptions.ImageSavingCallback //ExSummary:Shows how to split a document into parts and save them. [Test] //ExSkip public void DocumentPartsFileNames() { Document doc = new Document(MyDir + "Rendering.docx"); string outFileName = "SavingCallback.DocumentPartsFileNames.html"; // Create an "HtmlFixedSaveOptions" object, which we can pass to the document's "Save" method // to modify how we convert the document to HTML. HtmlSaveOptions options = new HtmlSaveOptions(); // If we save the document normally, there will be one output HTML // document with all the source document's contents. // Set the "DocumentSplitCriteria" property to "DocumentSplitCriteria.SectionBreak" to // save our document to multiple HTML files: one for each section. options.DocumentSplitCriteria = DocumentSplitCriteria.SectionBreak; // Assign a custom callback to the "DocumentPartSavingCallback" property to alter the document part saving logic. options.DocumentPartSavingCallback = new SavedDocumentPartRename(outFileName, options.DocumentSplitCriteria); // If we convert a document that contains images into html, we will end up with one html file which links to several images. // Each image will be in the form of a file in the local file system. // There is also a callback that can customize the name and file system location of each image. options.ImageSavingCallback = new SavedImageRename(outFileName); doc.Save(ArtifactsDir + outFileName, options); } /// <summary> /// Sets custom filenames for output documents that the saving operation splits a document into. /// </summary> private class SavedDocumentPartRename : IDocumentPartSavingCallback { public SavedDocumentPartRename(string outFileName, DocumentSplitCriteria documentSplitCriteria) { mOutFileName = outFileName; mDocumentSplitCriteria = documentSplitCriteria; } void IDocumentPartSavingCallback.DocumentPartSaving(DocumentPartSavingArgs args) { // We can access the entire source document via the "Document" property. Assert.True(args.Document.OriginalFileName.EndsWith("Rendering.docx")); string partType = string.Empty; switch (mDocumentSplitCriteria) { case DocumentSplitCriteria.PageBreak: partType = "Page"; break; case DocumentSplitCriteria.ColumnBreak: partType = "Column"; break; case DocumentSplitCriteria.SectionBreak: partType = "Section"; break; case DocumentSplitCriteria.HeadingParagraph: partType = "Paragraph from heading"; break; } string partFileName = $"{mOutFileName} part {++mCount}, of type {partType}{Path.GetExtension(args.DocumentPartFileName)}"; // Below are two ways of specifying where Aspose.Words will save each part of the document. 
// 1 - Set a filename for the output part file: args.DocumentPartFileName = partFileName; // 2 - Create a custom stream for the output part file: args.DocumentPartStream = new FileStream(ArtifactsDir + partFileName, FileMode.Create); Assert.True(args.DocumentPartStream.CanWrite); Assert.False(args.KeepDocumentPartStreamOpen); } private int mCount; private readonly string mOutFileName; private readonly DocumentSplitCriteria mDocumentSplitCriteria; } /// <summary> /// Sets custom filenames for image files that an HTML conversion creates. /// </summary> public class SavedImageRename : IImageSavingCallback { public SavedImageRename(string outFileName) { mOutFileName = outFileName; } void IImageSavingCallback.ImageSaving(ImageSavingArgs args) { string imageFileName = $"{mOutFileName} shape {++mCount}, of type {args.CurrentShape.ShapeType}{Path.GetExtension(args.ImageFileName)}"; // Below are two ways of specifying where Aspose.Words will save each part of the document. // 1 - Set a filename for the output image file: args.ImageFileName = imageFileName; // 2 - Create a custom stream for the output image file: args.ImageStream = new FileStream(ArtifactsDir + imageFileName, FileMode.Create); Assert.True(args.ImageStream.CanWrite); Assert.True(args.IsImageAvailable); Assert.False(args.KeepImageStreamOpen); } private int mCount; private readonly string mOutFileName; } //ExEnd //ExStart //ExFor:CssSavingArgs //ExFor:CssSavingArgs.CssStream //ExFor:CssSavingArgs.Document //ExFor:CssSavingArgs.IsExportNeeded //ExFor:CssSavingArgs.KeepCssStreamOpen //ExFor:CssStyleSheetType //ExFor:HtmlSaveOptions.CssSavingCallback //ExFor:HtmlSaveOptions.CssStyleSheetFileName //ExFor:HtmlSaveOptions.CssStyleSheetType //ExFor:ICssSavingCallback //ExFor:ICssSavingCallback.CssSaving(CssSavingArgs) //ExSummary:Shows how to work with CSS stylesheets that an HTML conversion creates. [Test] //ExSkip public void ExternalCssFilenames() { Document doc = new Document(MyDir + "Rendering.docx"); // Create an "HtmlFixedSaveOptions" object, which we can pass to the document's "Save" method // to modify how we convert the document to HTML. HtmlSaveOptions options = new HtmlSaveOptions(); // Set the "CssStylesheetType" property to "CssStyleSheetType.External" to // accompany a saved HTML document with an external CSS stylesheet file. options.CssStyleSheetType = CssStyleSheetType.External; // Below are two ways of specifying directories and filenames for output CSS stylesheets. // 1 - Use the "CssStyleSheetFileName" property to assign a filename to our stylesheet: options.CssStyleSheetFileName = ArtifactsDir + "SavingCallback.ExternalCssFilenames.css"; // 2 - Use a custom callback to name our stylesheet: options.CssSavingCallback = new CustomCssSavingCallback(ArtifactsDir + "SavingCallback.ExternalCssFilenames.css", true, false); doc.Save(ArtifactsDir + "SavingCallback.ExternalCssFilenames.html", options); } /// <summary> /// Sets a custom filename, along with other parameters for an external CSS stylesheet. /// </summary> private class CustomCssSavingCallback : ICssSavingCallback { public CustomCssSavingCallback(string cssDocFilename, bool isExportNeeded, bool keepCssStreamOpen) { mCssTextFileName = cssDocFilename; mIsExportNeeded = isExportNeeded; mKeepCssStreamOpen = keepCssStreamOpen; } public void CssSaving(CssSavingArgs args) { // We can access the entire source document via the "Document" property. 
Assert.True(args.Document.OriginalFileName.EndsWith("Rendering.docx")); args.CssStream = new FileStream(mCssTextFileName, FileMode.Create); args.IsExportNeeded = mIsExportNeeded; args.KeepCssStreamOpen = mKeepCssStreamOpen; Assert.True(args.CssStream.CanWrite); } private readonly string mCssTextFileName; private readonly bool mIsExportNeeded; private readonly bool mKeepCssStreamOpen; } //ExEnd } }
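// ---------------------------------------------------------------------------------------------
// Hedged sketch (added for illustration; not one of the ApiExamples tests above).
// The callbacks above route each page or document part to a file. The same IPageSavingCallback
// hook can instead capture pages in memory: assign PageStream and set KeepPageStreamOpen to
// true so Aspose.Words leaves the stream open after writing the page to it. The class, method
// and variable names below are illustrative only.
using System.Collections.Generic;
using System.IO;
using Aspose.Words;
using Aspose.Words.Saving;

internal static class PageToMemorySketch
{
    public static IReadOnlyList<MemoryStream> RenderPagesToMemory(Document doc, string outputPath)
    {
        PageToMemoryCallback callback = new PageToMemoryCallback();

        HtmlFixedSaveOptions options = new HtmlFixedSaveOptions();
        options.PageSavingCallback = callback;

        // outputPath is only the nominal destination; each page is redirected
        // into a MemoryStream by the callback below.
        doc.Save(outputPath, options);

        return callback.Pages;
    }

    private class PageToMemoryCallback : IPageSavingCallback
    {
        public List<MemoryStream> Pages { get; } = new List<MemoryStream>();

        public void PageSaving(PageSavingArgs args)
        {
            MemoryStream pageStream = new MemoryStream();
            args.PageStream = pageStream;

            // Keep the stream open so the caller can read the rendered page back
            // after Document.Save() returns.
            args.KeepPageStreamOpen = true;
            Pages.Add(pageStream);
        }
    }
}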
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System.Diagnostics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Threading; namespace System.Net.Sockets { // This class implements a safe socket handle. // It uses an inner and outer SafeHandle to do so. The inner // SafeHandle holds the actual socket, but only ever has one // reference to it. The outer SafeHandle guards the inner // SafeHandle with real ref counting. When the outer SafeHandle // is cleaned up, it releases the inner SafeHandle - since // its ref is the only ref to the inner SafeHandle, it deterministically // gets closed at that point - no races with concurrent IO calls. // This allows Close() on the outer SafeHandle to deterministically // close the inner SafeHandle, in turn allowing the inner SafeHandle // to block the user thread in case a graceful close has been // requested. (It's not legal to block any other thread - such closes // are always abortive.) internal partial class SafeCloseSocket : #if DEBUG DebugSafeHandleMinusOneIsInvalid #else SafeHandleMinusOneIsInvalid #endif { protected SafeCloseSocket() : base(true) { } private InnerSafeCloseSocket _innerSocket; private volatile bool _released; #if DEBUG private InnerSafeCloseSocket _innerSocketCopy; #endif public override bool IsInvalid { get { return IsClosed || base.IsInvalid; } } #if DEBUG public void AddRef() { try { // The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle. InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket); if (innerSocket != null) { innerSocket.AddRef(); } } catch (Exception e) { Debug.Assert(false, "SafeCloseSocket.AddRef after inner socket disposed." + e); } } public void Release() { try { // The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle. InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket); if (innerSocket != null) { innerSocket.Release(); } } catch (Exception e) { Debug.Assert(false, "SafeCloseSocket.Release after inner socket disposed." + e); } } #endif private void SetInnerSocket(InnerSafeCloseSocket socket) { _innerSocket = socket; SetHandle(socket.DangerousGetHandle()); #if DEBUG _innerSocketCopy = socket; #endif } private static SafeCloseSocket CreateSocket(InnerSafeCloseSocket socket) { SafeCloseSocket ret = new SafeCloseSocket(); CreateSocket(socket, ret); if (GlobalLog.IsEnabled) { GlobalLog.Print("SafeCloseSocket#" + LoggingHash.HashString(ret) + "::CreateSocket()"); } return ret; } protected static void CreateSocket(InnerSafeCloseSocket socket, SafeCloseSocket target) { if (socket != null && socket.IsInvalid) { target.SetHandleAsInvalid(); return; } bool b = false; try { socket.DangerousAddRef(ref b); } catch { if (b) { socket.DangerousRelease(); b = false; } } finally { if (b) { target.SetInnerSocket(socket); socket.Dispose(); } else { target.SetHandleAsInvalid(); } } } protected override bool ReleaseHandle() { if (GlobalLog.IsEnabled) { GlobalLog.Print( "SafeCloseSocket#" + LoggingHash.HashString(this) + "::ReleaseHandle() m_InnerSocket=" + _innerSocket == null ? "null" : LoggingHash.HashString(_innerSocket)); } _released = true; InnerSafeCloseSocket innerSocket = _innerSocket == null ? 
null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null); if (innerSocket != null) { #if DEBUG // On AppDomain unload we may still have pending Overlapped operations. // ThreadPoolBoundHandle should handle this scenario by canceling them. innerSocket.LogRemainingOperations(); #endif innerSocket.DangerousRelease(); } InnerReleaseHandle(); return true; } internal void CloseAsIs() { if (GlobalLog.IsEnabled) { GlobalLog.Print( "SafeCloseSocket#" + LoggingHash.HashString(this) + "::CloseAsIs() m_InnerSocket=" + _innerSocket == null ? "null" : LoggingHash.HashString(_innerSocket)); } #if DEBUG // If this throws it could be very bad. try { #endif InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null); Dispose(); if (innerSocket != null) { // Wait until it's safe. SpinWait sw = new SpinWait(); while (!_released) { sw.SpinOnce(); } // Now free it with blocking. innerSocket.BlockingRelease(); } InnerReleaseHandle(); #if DEBUG } catch (Exception exception) { if (!ExceptionCheck.IsFatal(exception)) { if (GlobalLog.IsEnabled) { GlobalLog.Assert("SafeCloseSocket::CloseAsIs(handle:" + handle.ToString("x") + ")", exception.Message); } Debug.Fail("SafeCloseSocket::CloseAsIs(handle:" + handle.ToString("x") + ")", exception.Message); } throw; } #endif } internal sealed partial class InnerSafeCloseSocket : SafeHandleMinusOneIsInvalid { private InnerSafeCloseSocket() : base(true) { } private bool _blockable; public override bool IsInvalid { get { return IsClosed || base.IsInvalid; } } // This method is implicitly reliable and called from a CER. protected override bool ReleaseHandle() { bool ret = false; #if DEBUG try { #endif if (GlobalLog.IsEnabled) { GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")"); } SocketError errorCode = InnerReleaseHandle(); return ret = errorCode == SocketError.Success; #if DEBUG } catch (Exception exception) { if (!ExceptionCheck.IsFatal(exception)) { if (GlobalLog.IsEnabled) { GlobalLog.Assert("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")", exception.Message); } Debug.Fail("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")", exception.Message); } ret = true; // Avoid a second assert. throw; } finally { _closeSocketThread = Environment.CurrentManagedThreadId; _closeSocketTick = Environment.TickCount; if (!ret) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("SafeCloseSocket::ReleaseHandle(handle:{0:x})|ReleaseHandle failed.", handle); } Debug.Fail("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")|ReleaseHandle failed."); } } #endif } #if DEBUG private IntPtr _closeSocketHandle; private SocketError _closeSocketResult = unchecked((SocketError)0xdeadbeef); private SocketError _closeSocketLinger = unchecked((SocketError)0xdeadbeef); private int _closeSocketThread; private int _closeSocketTick; private int _refCount = 0; public void AddRef() { Interlocked.Increment(ref _refCount); } public void Release() { Interlocked.MemoryBarrier(); Debug.Assert(_refCount > 0, "InnerSafeCloseSocket: Release() called more times than AddRef"); Interlocked.Decrement(ref _refCount); } public void LogRemainingOperations() { Interlocked.MemoryBarrier(); if (GlobalLog.IsEnabled) { GlobalLog.Print("InnerSafeCloseSocket: Releasing with pending operations: " + _refCount); } } #endif // Use this method to close the socket handle using the linger options specified on the socket. 
// Guaranteed to only be called once, under a CER, and not if regular DangerousRelease is called. internal void BlockingRelease() { #if DEBUG // Expected to have outstanding operations such as Accept. LogRemainingOperations(); #endif _blockable = true; DangerousRelease(); } } } }
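// ---------------------------------------------------------------------------------------------
// Hedged sketch (added for illustration; not part of the System.Net.Sockets sources above).
// It restates the inner/outer SafeHandle pattern from the class comment in a minimal,
// platform-neutral form: the outer handle takes the one and only DangerousAddRef on the inner
// handle, so the inner handle's ReleaseHandle can only run once the outer handle is released.
// The type names and the no-op "release" are illustrative only.
using System;
using System.Threading;
using Microsoft.Win32.SafeHandles;

internal sealed class InnerHandleSketch : SafeHandleMinusOneIsInvalid
{
    public InnerHandleSketch(IntPtr nativeResource) : base(true)
    {
        SetHandle(nativeResource);
    }

    protected override bool ReleaseHandle()
    {
        // Free the underlying native resource here. Because the outer handle holds the
        // only reference, this runs deterministically when the outer handle is released.
        return true;
    }
}

internal sealed class OuterHandleSketch : SafeHandleMinusOneIsInvalid
{
    private InnerHandleSketch _inner;

    private OuterHandleSketch() : base(true) { }

    public static OuterHandleSketch Create(InnerHandleSketch inner)
    {
        OuterHandleSketch outer = new OuterHandleSketch();
        bool addedRef = false;
        try
        {
            // Mirrors SafeCloseSocket.CreateSocket: take the single ref that keeps
            // the inner handle alive for the outer handle's lifetime.
            inner.DangerousAddRef(ref addedRef);
        }
        catch (ObjectDisposedException)
        {
            // Inner handle was already closed; fall through and mark the outer invalid.
        }

        if (!addedRef)
        {
            outer.SetHandleAsInvalid();
            return outer;
        }

        outer._inner = inner;
        outer.SetHandle(inner.DangerousGetHandle());
        return outer;
    }

    protected override bool ReleaseHandle()
    {
        // Dropping our ref lets the inner handle's ReleaseHandle run, which is the
        // same shape as SafeCloseSocket.ReleaseHandle above.
        InnerHandleSketch inner = Interlocked.Exchange(ref _inner, null);
        inner?.DangerousRelease();
        return true;
    }
}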
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections; using System.Diagnostics; using System.Security; namespace System.Drawing { internal static class ClientUtils { // ExecutionEngineException is obsolete and shouldn't be used (to catch, throw or reference) anymore. // Pragma added to prevent converting the "type is obsolete" warning into build error. #pragma warning disable 618 public static bool IsCriticalException(Exception ex) { return ex is NullReferenceException || ex is StackOverflowException || ex is OutOfMemoryException || ex is System.Threading.ThreadAbortException || ex is ExecutionEngineException || ex is IndexOutOfRangeException || ex is AccessViolationException; } #pragma warning restore 618 public static bool IsSecurityOrCriticalException(Exception ex) { return (ex is SecurityException) || IsCriticalException(ex); } /// <summary> /// WeakRefCollection - a collection that holds onto weak references. /// /// Essentially you pass in the object as it is, and under the covers /// we only hold a weak reference to the object. /// /// ----------------------------------------------------------------- /// !!!IMPORTANT USAGE NOTE!!! /// Users of this class should set the RefCheckThreshold property /// explicitly or call ScavengeReferences every once in a while to /// remove dead references. /// Also avoid calling Remove(item). Instead call RemoveByHashCode(item) /// to make sure dead refs are removed. /// </summary> internal class WeakRefCollection : IList { internal WeakRefCollection() : this(4) { } internal WeakRefCollection(int size) => InnerList = new ArrayList(size); internal ArrayList InnerList { get; } /// <summary> /// Indicates the value where the collection should check its items to remove dead weakref left over. /// Note: When GC collects weak refs from this collection the WeakRefObject identity changes since its /// Target becomes null. This makes the item unrecognizable by the collection and cannot be /// removed - Remove(item) and Contains(item) will not find it anymore. /// A value of int.MaxValue means disabled by default. /// </summary> public int RefCheckThreshold { get; set; } = int.MaxValue; public object this[int index] { get { if (InnerList[index] is WeakRefObject weakRef && weakRef.IsAlive) { return weakRef.Target; } return null; } set => InnerList[index] = CreateWeakRefObject(value); } public void ScavengeReferences() { int currentIndex = 0; int currentCount = Count; for (int i = 0; i < currentCount; i++) { object item = this[currentIndex]; if (item == null) { InnerList.RemoveAt(currentIndex); } else { // Only incriment if we have not removed the item. 
currentIndex++; } } } public override bool Equals(object obj) { if (!(obj is WeakRefCollection other)) { return false; } if (other == this) { return true; } if (Count != other.Count) { return false; } for (int i = 0; i < Count; i++) { if (InnerList[i] != other.InnerList[i]) { if (InnerList[i] == null || !InnerList[i].Equals(other.InnerList[i])) { return false; } } } return true; } public override int GetHashCode() => base.GetHashCode(); private WeakRefObject CreateWeakRefObject(object value) { if (value == null) { return null; } return new WeakRefObject(value); } private static void Copy(WeakRefCollection sourceList, int sourceIndex, WeakRefCollection destinationList, int destinationIndex, int length) { if (sourceIndex < destinationIndex) { // We need to copy from the back forward to prevent overwrite if source and // destination lists are the same, so we need to flip the source/dest indices // to point at the end of the spans to be copied. sourceIndex = sourceIndex + length; destinationIndex = destinationIndex + length; for (; length > 0; length--) { destinationList.InnerList[--destinationIndex] = sourceList.InnerList[--sourceIndex]; } } else { for (; length > 0; length--) { destinationList.InnerList[destinationIndex++] = sourceList.InnerList[sourceIndex++]; } } } /// <summary> /// Removes the value using its hash code as its identity. /// This is needed because the underlying item in the collection may have already been collected changing /// the identity of the WeakRefObject making it impossible for the collection to identify it. /// See WeakRefObject for more info. /// </summary> public void RemoveByHashCode(object value) { if (value == null) { return; } int hash = value.GetHashCode(); for (int idx = 0; idx < InnerList.Count; idx++) { if (InnerList[idx] != null && InnerList[idx].GetHashCode() == hash) { RemoveAt(idx); return; } } } public void Clear() => InnerList.Clear(); public bool IsFixedSize => InnerList.IsFixedSize; public bool Contains(object value) => InnerList.Contains(CreateWeakRefObject(value)); public void RemoveAt(int index) => InnerList.RemoveAt(index); public void Remove(object value) => InnerList.Remove(CreateWeakRefObject(value)); public int IndexOf(object value) => InnerList.IndexOf(CreateWeakRefObject(value)); public void Insert(int index, object value) => InnerList.Insert(index, CreateWeakRefObject(value)); public int Add(object value) { if (Count > RefCheckThreshold) { ScavengeReferences(); } return InnerList.Add(CreateWeakRefObject(value)); } public int Count => InnerList.Count; object ICollection.SyncRoot => InnerList.SyncRoot; public bool IsReadOnly => InnerList.IsReadOnly; public void CopyTo(Array array, int index) => InnerList.CopyTo(array, index); bool ICollection.IsSynchronized => InnerList.IsSynchronized; public IEnumerator GetEnumerator() => InnerList.GetEnumerator(); /// <summary> /// Wraps a weak ref object. /// WARNING: Use this class carefully! /// When the weak ref is collected, this object loses its identity. This is bad when the object has been /// added to a collection since Contains(WeakRef(item)) and Remove(WeakRef(item)) would not be able to /// identify the item.
/// </summary> internal class WeakRefObject { private int _hash; private WeakReference _weakHolder; internal WeakRefObject(object obj) { Debug.Assert(obj != null, "Unexpected null object!"); _weakHolder = new WeakReference(obj); _hash = obj.GetHashCode(); } internal bool IsAlive => _weakHolder.IsAlive; internal object Target => _weakHolder.Target; public override int GetHashCode() => _hash; public override bool Equals(object obj) { WeakRefObject other = obj as WeakRefObject; if (other == this) { return true; } if (other == null) { return false; } if (other.Target != Target) { if (Target == null || !Target.Equals(other.Target)) { return false; } } return true; } } } } }
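// ---------------------------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the System.Drawing sources above).
// WeakRefCollection is internal, so this assumes code compiled into the same assembly. It puts
// the usage notes from the class summary into practice: set RefCheckThreshold (or call
// ScavengeReferences yourself) so dead weak references get purged, and remove entries with
// RemoveByHashCode, because a collected target changes the wrapper's identity and breaks
// Remove(item). The class and variable names below are illustrative only.
namespace System.Drawing
{
    internal static class WeakRefCollectionUsageSketch
    {
        internal static void Demo()
        {
            ClientUtils.WeakRefCollection cache = new ClientUtils.WeakRefCollection();

            // Once Count exceeds this threshold, each Add() first runs ScavengeReferences().
            cache.RefCheckThreshold = 64;

            object item = new object();
            cache.Add(item);

            // The indexer returns null once the underlying target has been collected.
            object maybeAlive = cache[0];

            // Remove by hash code rather than by reference so that entries whose targets
            // were already collected can still be located and dropped.
            cache.RemoveByHashCode(item);

            // Manual purge of any remaining dead references.
            cache.ScavengeReferences();

            GC.KeepAlive(maybeAlive);
        }
    }
}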