//////////////////////////////////////////////////////////////////////// // Copyright (c) 2011-2016 by Rob Jellinghaus. // // All Rights Reserved. // //////////////////////////////////////////////////////////////////////// using Holofunk.Core; using Microsoft.VisualStudio.TestTools.UnitTesting; using System; namespace VSUnitTests { [TestClass] public class UnitTest1 { const int FloatSliverSize = 2; const int FloatNumSlices = 128; [TestMethod] public void TestBufferAllocator() { BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(FloatNumSlices * 2048, 1, sizeof(float)); Buf<float> f = bufferAllocator.Allocate(); HoloDebug.Assert(f.Data.Length == FloatSliverSize * 1024 * FloatNumSlices); Buf<float> f2 = bufferAllocator.Allocate(); HoloDebug.Assert(f.Data.Length == f2.Data.Length); bufferAllocator.Free(f2); Buf<float> f3 = bufferAllocator.Allocate(); HoloDebug.Assert(f2.Data == f3.Data); // need to pull from free list first } [TestMethod] public void TestSlice() { BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(FloatNumSlices * 2048, 1, sizeof(float)); Buf<float> buffer = bufferAllocator.Allocate(); Slice<Sample, float> slice = new Slice<Sample, float>(buffer, 0, FloatNumSlices, FloatSliverSize); HoloDebug.Assert(slice.Duration == FloatNumSlices); HoloDebug.Assert(slice.IsEmpty() == false); HoloDebug.Assert(slice.SliverSize == FloatSliverSize); var halfDuration = (FloatNumSlices / 2); Slice<Sample, float> prefixSlice = slice.Subslice(0, halfDuration); Slice<Sample, float> prefixSlice2 = slice.SubsliceOfDuration(halfDuration); Slice<Sample, float> suffixSlice = slice.Subslice(halfDuration, halfDuration); Slice<Sample, float> suffixSlice2 = slice.SubsliceStartingAt(halfDuration); HoloDebug.Assert(prefixSlice.Precedes(suffixSlice)); HoloDebug.Assert(prefixSlice.Precedes(suffixSlice2)); HoloDebug.Assert(prefixSlice2.Precedes(suffixSlice)); HoloDebug.Assert(prefixSlice2.Precedes(suffixSlice2)); PopulateFloatSlice(slice); Buf<float> buffer2 = bufferAllocator.Allocate(); Slice<Sample, float> slice2 = new Slice<Sample, float>(buffer2, 0, FloatNumSlices, FloatSliverSize); slice.CopyTo(slice2); VerifySlice(slice2); } static void PopulateFloatSlice(Slice<Sample, float> slice) { for (int i = 0; i < (int)slice.Duration; i++) { slice[i, 0] = i; slice[i, 1] = i + 0.5f; } } static void VerifySlice(Slice<Sample, float> slice) { for (int i = 0; i < (int)slice.Duration; i++) { HoloDebug.Assert(slice[i, 0] == i); HoloDebug.Assert(slice[i, 1] == i + 0.5); } } /// <summary> /// Simple basic stream test: make one, append two slices to it, ensure they get merged. /// </summary> [TestMethod] public void TestStream() { BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(FloatNumSlices * 2048, 1, sizeof(float)); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, FloatSliverSize); HoloDebug.Assert(stream.DiscreteDuration == 0); var interval = new Interval<Sample>(0, 10); Slice<Sample, float> firstSlice = stream.GetNextSliceAt(interval); HoloDebug.Assert(firstSlice.IsEmpty()); // Now let's fill a float array... 
float[] buffer = new float[FloatNumSlices * FloatSliverSize]; Duration<Sample> floatNumSlicesDuration = FloatNumSlices; Slice<Sample, float> tempSlice = new Slice<Sample, float>(new Buf<float>(-1, buffer), FloatSliverSize); PopulateFloatSlice(tempSlice); // now append in chunks stream.Append(tempSlice.SubsliceOfDuration(tempSlice.Duration / 2)); stream.Append(tempSlice.SubsliceStartingAt(tempSlice.Duration / 2)); HoloDebug.Assert(stream.InitialTime == 0); HoloDebug.Assert(stream.DiscreteDuration == FloatNumSlices); Slice<Sample, float> theSlice = stream.GetNextSliceAt(stream.DiscreteInterval); VerifySlice(theSlice); HoloDebug.Assert(theSlice.Duration == floatNumSlicesDuration); } [TestMethod] public void TestStreamChunky() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 slices per buffer, to test various cases const int biggestChunk = 5; // max size of slice to copy in middle loop BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); HoloDebug.Assert(stream.DiscreteDuration == 0); float f = 0; float[] tinyBuffer = new float[biggestChunk * sliverSize]; for (int i = 0; i < 100; i++) { for (int c = 1; c <= 5; c++) { for (int j = 0; j < c; j++) { tinyBuffer[j * sliverSize] = f; tinyBuffer[j * sliverSize + 1] = f + 0.25f; tinyBuffer[j * sliverSize + 2] = f + 0.5f; tinyBuffer[j * sliverSize + 3] = f + 0.75f; f++; } Slice<Sample, float> tempSlice = new Slice<Sample, float>( new Buf<float>(-2, tinyBuffer), 0, c, sliverSize); stream.Append(tempSlice); } } // Now after this we will need a verification loop. BufferAllocator<float> bigBufferAllocator = new BufferAllocator<float>(sliverSize * 1024, 1, sizeof(float)); DenseSampleFloatStream bigStream = new DenseSampleFloatStream(0, bigBufferAllocator, sliverSize); stream.CopyTo(stream.DiscreteInterval, bigStream); HoloDebug.Assert(Verify4SliceFloatStream(stream, 0) == 1500); HoloDebug.Assert(Verify4SliceFloatStream(bigStream, 0) == 1500); DenseSampleFloatStream stream2 = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); bigStream.CopyTo(bigStream.DiscreteInterval, stream2); HoloDebug.Assert(Verify4SliceFloatStream(stream2, 0) == 1500); } static float Verify4SliceFloatStream(DenseSampleFloatStream stream, float f) { Interval<Sample> interval = stream.DiscreteInterval; while (!interval.IsEmpty) { Slice<Sample, float> nextSlice = stream.GetNextSliceAt(interval); for (int i = 0; i < (int)nextSlice.Duration; i++) { HoloDebug.Assert(nextSlice[i, 0] == f); HoloDebug.Assert(nextSlice[i, 1] == f + 0.25f); HoloDebug.Assert(nextSlice[i, 2] == f + 0.5f); HoloDebug.Assert(nextSlice[i, 3] == f + 0.75f); f++; } interval = interval.SubintervalStartingAt(nextSlice.Duration); } return f; } static float[] AllocateSmall4FloatArray(int numSlices, int sliverSize) { float[] tinyBuffer = new float[numSlices * sliverSize]; float f = 0; for (int i = 0; i < numSlices; i++) { tinyBuffer[i * sliverSize] = f; tinyBuffer[i * sliverSize + 1] = f + 0.25f; tinyBuffer[i * sliverSize + 2] = f + 0.5f; tinyBuffer[i * sliverSize + 3] = f + 0.75f; f++; } return tinyBuffer; } [TestMethod] public void TestStreamAppending() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 slices per buffer, to test various cases BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); float[] buffer = 
AllocateSmall4FloatArray(floatNumSlices, sliverSize); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); unsafe { fixed (float* f = buffer) { IntPtr pf = new IntPtr(f); stream.Append(floatNumSlices, pf); } } HoloDebug.Assert(stream.DiscreteDuration == floatNumSlices); HoloDebug.Assert(Verify4SliceFloatStream(stream, 0) == 11); // clear original buffer to test copying back into it for (int i = 0; i < buffer.Length; i++) { buffer[i] = 0; } unsafe { fixed (float* f = buffer) { IntPtr pf = new IntPtr(f); stream.CopyTo(stream.DiscreteInterval, pf); } } DenseSampleFloatStream stream2 = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); stream2.Append(new Slice<Sample, float>(new Buf<float>(-3, buffer), sliverSize)); HoloDebug.Assert(Verify4SliceFloatStream(stream2, 0) == 11); } [TestMethod] public void TestStreamSlicing() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 slices per buffer, to test various cases BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); float[] buffer = AllocateSmall4FloatArray(floatNumSlices * 2, sliverSize); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); stream.Append(new Slice<Sample, float>(new Buf<float>(-4, buffer), sliverSize)); // test getting slices from existing stream Slice<Sample, float> beforeFirst = stream.GetNextSliceAt(new Interval<Sample>((-2), 4)); // should return slice with duration 2 HoloDebug.Assert(beforeFirst.Duration == 2); Slice<Sample, float> afterLast = stream.GetNextSliceAt(new Interval<Sample>(19, 5)); HoloDebug.Assert(afterLast.Duration == 3); // now get slice across the buffer boundary, verify it is split as expected Interval<Sample> splitInterval = new Interval<Sample>(7, 8); Slice<Sample, float> beforeSplit = stream.GetNextSliceAt(splitInterval); HoloDebug.Assert(beforeSplit.Duration == 4); Slice<Sample, float> afterSplit = stream.GetNextSliceAt(splitInterval.SubintervalStartingAt(beforeSplit.Duration)); HoloDebug.Assert(afterSplit.Duration == beforeSplit.Duration); float lastBefore = beforeSplit[3, 0]; float firstAfter = afterSplit[0, 0]; HoloDebug.Assert(lastBefore + 1 == firstAfter); float[] testStrideCopy = new float[] { 0, 0, 1, 1, 0, 0, 0, 0, 2, 2, 0, 0, }; stream.AppendSliver(testStrideCopy, 2, 2, 6, 2); Slice<Sample, float> lastSliver = stream.GetNextSliceAt(new Interval<Sample>(22, 1)); HoloDebug.Assert(lastSliver.Duration == 1); HoloDebug.Assert(lastSliver[0, 0] == 1f); HoloDebug.Assert(lastSliver[0, 1] == 1f); HoloDebug.Assert(lastSliver[0, 2] == 2f); HoloDebug.Assert(lastSliver[0, 3] == 2f); Slice<Sample, float> firstSlice = stream.GetNextSliceAt(new Interval<Sample>(-2, 100)); HoloDebug.Assert(firstSlice.Duration == 11); } [TestMethod] public void TestStreamShutting() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 slices per buffer, to test various cases BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); float continuousDuration = 2.4f; int discreteDuration = (int)Math.Round(continuousDuration + 1); float[] buffer = AllocateSmall4FloatArray(discreteDuration, sliverSize); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize, useContinuousLoopingMapper: true); stream.Append(new Slice<Sample, float>(new Buf<float>(-5, buffer), sliverSize)); // OK, time to get this fractional business right. 
stream.Shut((ContinuousDuration)continuousDuration); HoloDebug.Assert(stream.IsShut); // now test looping Interval<Sample> interval = new Interval<Sample>(0, 10); // we expect this to be [0, 1, 2, 0, 1, 0, 1, 2, 0, 1] // or rather, [0>3], [0>2], [0>3], [0>2] Slice<Sample, float> slice = stream.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 3); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[2, 0] == 2f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 2); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[1, 0] == 1f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 3); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[2, 0] == 2f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 2); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[1, 0] == 1f); interval = interval.SubintervalStartingAt(slice.Duration); HoloDebug.Assert(interval.IsEmpty); DenseSampleFloatStream stream2 = new DenseSampleFloatStream(0, bufferAllocator, sliverSize, useContinuousLoopingMapper: false); stream2.Append(new Slice<Sample, float>(new Buf<float>(-5, buffer), sliverSize)); stream2.Shut((ContinuousDuration)continuousDuration); interval = new Interval<Sample>(0, 10); slice = stream2.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 3); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[2, 0] == 2f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream2.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 3); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[1, 0] == 1f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream2.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 3); HoloDebug.Assert(slice[0, 0] == 0f); HoloDebug.Assert(slice[2, 0] == 2f); interval = interval.SubintervalStartingAt(slice.Duration); slice = stream2.GetNextSliceAt(interval); HoloDebug.Assert(slice.Duration == 1); HoloDebug.Assert(slice[0, 0] == 0f); } [TestMethod] public void TestDispose() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 slices per buffer, to test various cases BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); float continuousDuration = 2.4f; int discreteDuration = (int)Math.Round(continuousDuration + 1); float[] tempBuffer = AllocateSmall4FloatArray(discreteDuration, sliverSize); // check that allocated, then freed, buffers are used first for next allocation Buf<float> buffer = bufferAllocator.Allocate(); bufferAllocator.Free(buffer); Buf<float> buffer2 = bufferAllocator.Allocate(); HoloDebug.Assert(buffer.Data == buffer2.Data); // free it again so stream can get it bufferAllocator.Free(buffer); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize); stream.Append(new Slice<Sample, float>(new Buf<float>(-6, tempBuffer), sliverSize)); Verify4SliceFloatStream(stream, 0); // have stream drop it; should free buffer stream.Dispose(); // make sure we get it back again buffer2 = bufferAllocator.Allocate(); HoloDebug.Assert(buffer.Data == buffer2.Data); } [TestMethod] public void TestLimitedBufferingStream() { const int sliverSize = 4; // 4 floats = 16 bytes const int floatNumSlices = 11; // 11 
slices per buffer, to test various cases BufferAllocator<float> bufferAllocator = new BufferAllocator<float>(sliverSize * floatNumSlices, 1, sizeof(float)); float[] tempBuffer = AllocateSmall4FloatArray(20, sliverSize); DenseSampleFloatStream stream = new DenseSampleFloatStream(0, bufferAllocator, sliverSize, 5); stream.Append(new Slice<Sample, float>(new Buf<float>(-7, tempBuffer), 0, 11, sliverSize)); HoloDebug.Assert(stream.DiscreteDuration == 5); Slice<Sample, float> slice = stream.GetNextSliceAt(stream.DiscreteInterval); HoloDebug.Assert(slice[0, 0] == 6f); stream.Append(new Slice<Sample, float>(new Buf<float>(-8, tempBuffer), 11, 5, sliverSize)); HoloDebug.Assert(stream.DiscreteDuration == 5); HoloDebug.Assert(stream.InitialTime == 11); slice = stream.GetNextSliceAt(stream.DiscreteInterval); HoloDebug.Assert(slice[0, 0] == 11f); } [TestMethod] public void TestSparseSampleByteStream() { const int sliverSize = 2 * 2 * 4; // uncompressed 2x2 RGBA image... worst case const int bufferSlivers = 10; BufferAllocator<byte> allocator = new BufferAllocator<byte>(sliverSize * bufferSlivers, 1, sizeof(float)); byte[] appendBuffer = new byte[sliverSize]; for (int i = 0; i < sliverSize; i++) { appendBuffer[i] = (byte)i; } SparseSampleByteStream stream = new SparseSampleByteStream(10, allocator, sliverSize); stream.Append(11, new Slice<Frame, byte>(new Buf<byte>(-9, appendBuffer), sliverSize)); // now let's get it back out Slice<Frame, byte> slice = stream.GetClosestSliver(11); HoloDebug.Assert(slice.Duration == 1); HoloDebug.Assert(slice.SliverSize == sliverSize); for (int i = 0; i < sliverSize; i++) { HoloDebug.Assert(slice[0, i] == (byte)i); } // now let's copy it to intptr byte[] target = new byte[sliverSize]; unsafe { fixed (byte* p = target) { IntPtr pp = new IntPtr(p); stream.CopyTo(11, pp); } } for (int i = 0; i < sliverSize; i++) { HoloDebug.Assert(target[i] == (byte)i); } SparseSampleByteStream stream2 = new SparseSampleByteStream(10, allocator, sliverSize); unsafe { fixed (byte* p = target) { IntPtr pp = new IntPtr(p); stream2.Append(11, pp); } } Slice<Frame, byte> slice2 = stream2.GetClosestSliver(12); HoloDebug.Assert(slice2.Duration == 1); HoloDebug.Assert(slice2.SliverSize == sliverSize); for (int i = 0; i < sliverSize; i++) { HoloDebug.Assert(slice2[0, i] == (byte)i); } // now verify looping and shutting work as expected for (int i = 0; i < appendBuffer.Length; i++) { appendBuffer[i] += (byte)appendBuffer.Length; } stream2.Append(21, new Slice<Frame, byte>(new Buf<byte>(-10, appendBuffer), sliverSize)); Slice<Frame, byte> slice3 = stream2.GetClosestSliver(12); HoloDebug.Assert(slice3.Duration == 1); HoloDebug.Assert(slice3.SliverSize == sliverSize); HoloDebug.Assert(slice3[0, 0] == (byte)0); Slice<Frame, byte> slice4 = stream2.GetClosestSliver(22); HoloDebug.Assert(slice4.Duration == 1); HoloDebug.Assert(slice4.SliverSize == sliverSize); HoloDebug.Assert(slice4[0, 0] == (byte)sliverSize); stream2.Shut((ContinuousDuration)20); // now the closest sliver to 32 should be the first sliver Slice<Frame, byte> slice5 = stream2.GetClosestSliver(32); HoloDebug.Assert(slice5.Duration == 1); HoloDebug.Assert(slice5.SliverSize == sliverSize); HoloDebug.Assert(slice5[0, 0] == (byte)0); // and 42, the second Slice<Frame, byte> slice6 = stream2.GetClosestSliver(42); HoloDebug.Assert(slice6.Duration == 1); HoloDebug.Assert(slice6.SliverSize == sliverSize); HoloDebug.Assert(slice6[0, 0] == (byte)sliverSize); } } }
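// --------------------------------------------------------------------------
// A minimal usage sketch (not part of the test suite above) showing the
// append/read-back round trip those tests rely on. It uses only types and
// members that appear in the tests (BufferAllocator<T>, Buf<T>, Slice<TTime, T>,
// DenseSampleFloatStream); the class name, method name, and constants below
// are hypothetical illustration, not part of the original Holofunk sources.
// --------------------------------------------------------------------------
using Holofunk.Core;

namespace VSUnitTests
{
    static class StreamRoundTripSketch
    {
        internal static float ReadFirstSample()
        {
            const int sliverSize = 2;   // floats per slice
            const int numSlices = 8;    // slices to append

            // One allocator backing the stream, mirroring the tests' constructor arguments.
            var allocator = new BufferAllocator<float>(sliverSize * 1024, 1, sizeof(float));
            var stream = new DenseSampleFloatStream(0, allocator, sliverSize);

            // Fill a managed array, wrap it in a Slice with a negative debug id
            // (the same pattern the tests use), and append it to the stream.
            float[] data = new float[numSlices * sliverSize];
            for (int i = 0; i < numSlices; i++)
            {
                data[i * sliverSize] = i;
                data[i * sliverSize + 1] = i + 0.5f;
            }
            stream.Append(new Slice<Sample, float>(new Buf<float>(-100, data), sliverSize));

            // Read the data back out through the stream's discrete interval.
            Slice<Sample, float> slice = stream.GetNextSliceAt(stream.DiscreteInterval);
            return slice[0, 0];
        }
    }
}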
using System; using NUnit.Framework; using Whois.Parsers; namespace Whois.Parsing.Whois.Nic.Pm.Pm { [TestFixture] public class PmParsingTests : ParsingTests { private WhoisParser parser; [SetUp] public void SetUp() { SerilogConfig.Init(); parser = new WhoisParser(); } [Test] public void Test_found() { var sample = SampleReader.Read("whois.nic.pm", "pm", "found.txt"); var response = parser.Parse("whois.nic.pm", sample); Assert.Greater(sample.Length, 0); Assert.AreEqual(WhoisStatus.Found, response.Status); Assert.AreEqual(0, response.ParsingErrors); Assert.AreEqual("generic/tld/Found05", response.TemplateName); Assert.AreEqual("nic.pm", response.DomainName.ToString()); // Registrar Details Assert.AreEqual("AFNIC registry", response.Registrar.Name); Assert.AreEqual(new DateTime(2004, 09, 17, 00, 00, 00, 000, DateTimeKind.Utc), response.Updated); Assert.AreEqual(new DateTime(1995, 01, 01, 00, 00, 00, 000, DateTimeKind.Utc), response.Registered); // Registrant Details Assert.AreEqual("APEM2-FRNIC", response.Registrant.RegistryId); Assert.AreEqual("Afnic (Saint-Pierre et Miquelon - CTOM)", response.Registrant.Name); // Registrant Address Assert.AreEqual(5, response.Registrant.Address.Count); Assert.AreEqual("immeuble international", response.Registrant.Address[0]); Assert.AreEqual("2, rue Stephenson", response.Registrant.Address[1]); Assert.AreEqual("Montigny-Le-Bretonneux", response.Registrant.Address[2]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.Registrant.Address[3]); Assert.AreEqual("FR", response.Registrant.Address[4]); // AdminContact Details Assert.AreEqual("NFC1-FRNIC", response.AdminContact.RegistryId); Assert.AreEqual("NIC France Contact", response.AdminContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.AdminContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.AdminContact.Email); // AdminContact Address Assert.AreEqual(6, response.AdminContact.Address.Count); Assert.AreEqual("AFNIC", response.AdminContact.Address[0]); Assert.AreEqual("immeuble international", response.AdminContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.AdminContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.AdminContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.AdminContact.Address[4]); Assert.AreEqual("FR", response.AdminContact.Address[5]); // TechnicalContact Details Assert.AreEqual("NFC1-FRNIC", response.TechnicalContact.RegistryId); Assert.AreEqual("NIC France Contact", response.TechnicalContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.TechnicalContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.TechnicalContact.Email); // TechnicalContact Address Assert.AreEqual(6, response.TechnicalContact.Address.Count); Assert.AreEqual("AFNIC", response.TechnicalContact.Address[0]); Assert.AreEqual("immeuble international", response.TechnicalContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.TechnicalContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.TechnicalContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.TechnicalContact.Address[4]); Assert.AreEqual("FR", response.TechnicalContact.Address[5]); // ZoneContact Details Assert.AreEqual("NFC1-FRNIC", response.ZoneContact.RegistryId); Assert.AreEqual("NIC France Contact", response.ZoneContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.ZoneContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.ZoneContact.Email); // 
ZoneContact Address Assert.AreEqual(6, response.ZoneContact.Address.Count); Assert.AreEqual("AFNIC", response.ZoneContact.Address[0]); Assert.AreEqual("immeuble international", response.ZoneContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.ZoneContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.ZoneContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.ZoneContact.Address[4]); Assert.AreEqual("FR", response.ZoneContact.Address[5]); // Nameservers Assert.AreEqual(3, response.NameServers.Count); Assert.AreEqual("ns1.nic.fr", response.NameServers[0]); Assert.AreEqual("ns2.nic.fr", response.NameServers[1]); Assert.AreEqual("ns3.nic.fr", response.NameServers[2]); // Domain Status Assert.AreEqual(1, response.DomainStatus.Count); Assert.AreEqual("ACTIVE", response.DomainStatus[0]); Assert.AreEqual(31, response.FieldsParsed); } [Test] public void Test_throttled() { var sample = SampleReader.Read("whois.nic.pm", "pm", "throttled.txt"); var response = parser.Parse("whois.nic.pm", sample); Assert.Greater(sample.Length, 0); Assert.AreEqual(WhoisStatus.Throttled, response.Status); Assert.AreEqual(0, response.ParsingErrors); Assert.AreEqual("generic/tld/Throttled02", response.TemplateName); Assert.AreEqual(1, response.FieldsParsed); } [Test] public void Test_not_found() { var sample = SampleReader.Read("whois.nic.pm", "pm", "not_found.txt"); var response = parser.Parse("whois.nic.pm", sample); Assert.Greater(sample.Length, 0); Assert.AreEqual(WhoisStatus.NotFound, response.Status); Assert.AreEqual(0, response.ParsingErrors); Assert.AreEqual("generic/tld/NotFound06", response.TemplateName); Assert.AreEqual(1, response.FieldsParsed); } [Test] public void Test_found_status_registered() { var sample = SampleReader.Read("whois.nic.pm", "pm", "found_status_registered.txt"); var response = parser.Parse("whois.nic.pm", sample); Assert.Greater(sample.Length, 0); Assert.AreEqual(WhoisStatus.Found, response.Status); Assert.AreEqual(0, response.ParsingErrors); Assert.AreEqual("generic/tld/Found05", response.TemplateName); Assert.AreEqual("nic.pm", response.DomainName.ToString()); // Registrar Details Assert.AreEqual("AFNIC registry", response.Registrar.Name); Assert.AreEqual(new DateTime(2016, 12, 31, 00, 00, 00, 000, DateTimeKind.Utc), response.Updated); Assert.AreEqual(new DateTime(1995, 01, 01, 00, 00, 00, 000, DateTimeKind.Utc), response.Registered); Assert.AreEqual(new DateTime(2017, 12, 31, 00, 00, 00, 000, DateTimeKind.Utc), response.Expiration); // Registrant Details Assert.AreEqual("APEM2-FRNIC", response.Registrant.RegistryId); Assert.AreEqual("Afnic (Saint-Pierre et Miquelon - CTOM)", response.Registrant.Name); // Registrant Address Assert.AreEqual(5, response.Registrant.Address.Count); Assert.AreEqual("immeuble international", response.Registrant.Address[0]); Assert.AreEqual("2, rue Stephenson", response.Registrant.Address[1]); Assert.AreEqual("Montigny-Le-Bretonneux", response.Registrant.Address[2]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.Registrant.Address[3]); Assert.AreEqual("FR", response.Registrant.Address[4]); // AdminContact Details Assert.AreEqual("NFC1-FRNIC", response.AdminContact.RegistryId); Assert.AreEqual("NIC France Contact", response.AdminContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.AdminContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.AdminContact.Email); // AdminContact Address Assert.AreEqual(6, response.AdminContact.Address.Count); 
Assert.AreEqual("AFNIC", response.AdminContact.Address[0]); Assert.AreEqual("immeuble international", response.AdminContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.AdminContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.AdminContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.AdminContact.Address[4]); Assert.AreEqual("FR", response.AdminContact.Address[5]); // TechnicalContact Details Assert.AreEqual("NFC1-FRNIC", response.TechnicalContact.RegistryId); Assert.AreEqual("NIC France Contact", response.TechnicalContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.TechnicalContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.TechnicalContact.Email); // TechnicalContact Address Assert.AreEqual(6, response.TechnicalContact.Address.Count); Assert.AreEqual("AFNIC", response.TechnicalContact.Address[0]); Assert.AreEqual("immeuble international", response.TechnicalContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.TechnicalContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.TechnicalContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.TechnicalContact.Address[4]); Assert.AreEqual("FR", response.TechnicalContact.Address[5]); // ZoneContact Details Assert.AreEqual("NFC1-FRNIC", response.ZoneContact.RegistryId); Assert.AreEqual("NIC France Contact", response.ZoneContact.Name); Assert.AreEqual("+33 1 39 30 83 00", response.ZoneContact.TelephoneNumber); Assert.AreEqual("[email protected]", response.ZoneContact.Email); // ZoneContact Address Assert.AreEqual(6, response.ZoneContact.Address.Count); Assert.AreEqual("AFNIC", response.ZoneContact.Address[0]); Assert.AreEqual("immeuble international", response.ZoneContact.Address[1]); Assert.AreEqual("2, rue Stephenson", response.ZoneContact.Address[2]); Assert.AreEqual("Montigny le Bretonneux", response.ZoneContact.Address[3]); Assert.AreEqual("78181 Saint Quentin en Yvelines Cedex", response.ZoneContact.Address[4]); Assert.AreEqual("FR", response.ZoneContact.Address[5]); // Nameservers Assert.AreEqual(3, response.NameServers.Count); Assert.AreEqual("ns1.nic.fr", response.NameServers[0]); Assert.AreEqual("ns2.nic.fr", response.NameServers[1]); Assert.AreEqual("ns3.nic.fr", response.NameServers[2]); // Domain Status Assert.AreEqual(1, response.DomainStatus.Count); Assert.AreEqual("ACTIVE", response.DomainStatus[0]); Assert.AreEqual(32, response.FieldsParsed); } } }
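// --------------------------------------------------------------------------
// A hedged usage sketch, separate from the fixture above, showing how the same
// WhoisParser API exercised by these tests might be called directly. Only
// members that already appear in the tests are used (Parse, Status, DomainName,
// Registrar, NameServers); the class name and the rawWhoisText parameter are
// hypothetical, and the raw WHOIS text would normally come from a server query.
// --------------------------------------------------------------------------
using System;
using Whois.Parsers;

namespace Whois.Parsing.Whois.Nic.Pm.Pm
{
    internal static class PmLookupSketch
    {
        internal static void PrintSummary(string rawWhoisText)
        {
            var parser = new WhoisParser();
            var response = parser.Parse("whois.nic.pm", rawWhoisText);

            if (response.Status == WhoisStatus.Found)
            {
                Console.WriteLine(response.DomainName);
                Console.WriteLine(response.Registrar.Name);

                for (int i = 0; i < response.NameServers.Count; i++)
                {
                    Console.WriteLine(response.NameServers[i]);
                }
            }
        }
    }
}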
// **************************************************************** // Copyright 2009, Charlie Poole // This is free software licensed under the NUnit license. You may // obtain a copy of the license at http://nunit.org // **************************************************************** // **************************************************************** // Generated by the NUnit Syntax Generator // // Command Line: GenSyntax.exe SyntaxElements.txt // // DO NOT MODIFY THIS FILE DIRECTLY // **************************************************************** using System; using System.Collections; namespace NUnit.Constraints.Constraints { /// <summary> /// Helper class with properties and methods that supply /// a number of constraints used in Asserts. /// </summary> public class ConstraintFactory { #region Not /// <summary> /// Returns a ConstraintExpression that negates any /// following constraint. /// </summary> public ConstraintExpression Not { get { return Is.Not; } } /// <summary> /// Returns a ConstraintExpression that negates any /// following constraint. /// </summary> public ConstraintExpression No { get { return Has.No; } } #endregion #region All /// <summary> /// Returns a ConstraintExpression, which will apply /// the following constraint to all members of a collection, /// succeeding if all of them succeed. /// </summary> public ConstraintExpression All { get { return Is.All; } } #endregion #region Some /// <summary> /// Returns a ConstraintExpression, which will apply /// the following constraint to all members of a collection, /// succeeding if at least one of them succeeds. /// </summary> public ConstraintExpression Some { get { return Has.Some; } } #endregion #region None /// <summary> /// Returns a ConstraintExpression, which will apply /// the following constraint to all members of a collection, /// succeeding if all of them fail. /// </summary> public ConstraintExpression None { get { return Has.None; } } #endregion #region Property /// <summary> /// Returns a new PropertyConstraintExpression, which will either /// test for the existence of the named property on the object /// being tested or apply any following constraint to that property. /// </summary> public ResolvableConstraintExpression Property(string name) { return Has.Property(name); } #endregion #region Length /// <summary> /// Returns a new ConstraintExpression, which will apply the following /// constraint to the Length property of the object being tested. /// </summary> public ResolvableConstraintExpression Length { get { return Has.Length; } } #endregion #region Count /// <summary> /// Returns a new ConstraintExpression, which will apply the following /// constraint to the Count property of the object being tested. /// </summary> public ResolvableConstraintExpression Count { get { return Has.Count; } } #endregion #region Message /// <summary> /// Returns a new ConstraintExpression, which will apply the following /// constraint to the Message property of the object being tested. /// </summary> public ResolvableConstraintExpression Message { get { return Has.Message; } } #endregion #region InnerException /// <summary> /// Returns a new ConstraintExpression, which will apply the following /// constraint to the InnerException property of the object being tested. 
/// </summary> public ResolvableConstraintExpression InnerException { get { return Has.InnerException; } } #endregion #region Attribute /// <summary> /// Returns a new AttributeConstraint checking for the /// presence of a particular attribute on an object. /// </summary> public ResolvableConstraintExpression Attribute(Type expectedType) { return Has.Attribute(expectedType); } #if NET_2_0 /// <summary> /// Returns a new AttributeConstraint checking for the /// presence of a particular attribute on an object. /// </summary> public ResolvableConstraintExpression Attribute<T>() { return Attribute(typeof(T)); } #endif #endregion #region Null /// <summary> /// Returns a constraint that tests for null /// </summary> public NullConstraint Null { get { return new NullConstraint(); } } #endregion #region True /// <summary> /// Returns a constraint that tests for True /// </summary> public TrueConstraint True { get { return new TrueConstraint(); } } #endregion #region False /// <summary> /// Returns a constraint that tests for False /// </summary> public FalseConstraint False { get { return new FalseConstraint(); } } #endregion #region NaN /// <summary> /// Returns a constraint that tests for NaN /// </summary> public NaNConstraint NaN { get { return new NaNConstraint(); } } #endregion #region Empty /// <summary> /// Returns a constraint that tests for empty /// </summary> public EmptyConstraint Empty { get { return new EmptyConstraint(); } } #endregion #region Unique /// <summary> /// Returns a constraint that tests whether a collection /// contains all unique items. /// </summary> public UniqueItemsConstraint Unique { get { return new UniqueItemsConstraint(); } } #endregion #region BinarySerializable /// <summary> /// Returns a constraint that tests whether an object graph is serializable in binary format. /// </summary> public BinarySerializableConstraint BinarySerializable { get { return new BinarySerializableConstraint(); } } #endregion #region XmlSerializable /// <summary> /// Returns a constraint that tests whether an object graph is serializable in xml format. 
/// </summary> public XmlSerializableConstraint XmlSerializable { get { return new XmlSerializableConstraint(); } } #endregion #region EqualTo /// <summary> /// Returns a constraint that tests two items for equality /// </summary> public EqualConstraint EqualTo(object expected) { return new EqualConstraint(expected); } #endregion #region SameAs /// <summary> /// Returns a constraint that tests that two references are the same object /// </summary> public SameAsConstraint SameAs(object expected) { return new SameAsConstraint(expected); } #endregion #region GreaterThan /// <summary> /// Returns a constraint that tests whether the /// actual value is greater than the suppled argument /// </summary> public GreaterThanConstraint GreaterThan(object expected) { return new GreaterThanConstraint(expected); } #endregion #region GreaterThanOrEqualTo /// <summary> /// Returns a constraint that tests whether the /// actual value is greater than or equal to the suppled argument /// </summary> public GreaterThanOrEqualConstraint GreaterThanOrEqualTo(object expected) { return new GreaterThanOrEqualConstraint(expected); } /// <summary> /// Returns a constraint that tests whether the /// actual value is greater than or equal to the suppled argument /// </summary> public GreaterThanOrEqualConstraint AtLeast(object expected) { return new GreaterThanOrEqualConstraint(expected); } #endregion #region LessThan /// <summary> /// Returns a constraint that tests whether the /// actual value is less than the suppled argument /// </summary> public LessThanConstraint LessThan(object expected) { return new LessThanConstraint(expected); } #endregion #region LessThanOrEqualTo /// <summary> /// Returns a constraint that tests whether the /// actual value is less than or equal to the suppled argument /// </summary> public LessThanOrEqualConstraint LessThanOrEqualTo(object expected) { return new LessThanOrEqualConstraint(expected); } /// <summary> /// Returns a constraint that tests whether the /// actual value is less than or equal to the suppled argument /// </summary> public LessThanOrEqualConstraint AtMost(object expected) { return new LessThanOrEqualConstraint(expected); } #endregion #region TypeOf /// <summary> /// Returns a constraint that tests whether the actual /// value is of the exact type supplied as an argument. /// </summary> public ExactTypeConstraint TypeOf(Type expectedType) { return new ExactTypeConstraint(expectedType); } #if NET_2_0 /// <summary> /// Returns a constraint that tests whether the actual /// value is of the exact type supplied as an argument. /// </summary> public ExactTypeConstraint TypeOf<T>() { return new ExactTypeConstraint(typeof(T)); } #endif #endregion #region InstanceOf /// <summary> /// Returns a constraint that tests whether the actual value /// is of the type supplied as an argument or a derived type. /// </summary> public InstanceOfTypeConstraint InstanceOf(Type expectedType) { return new InstanceOfTypeConstraint(expectedType); } #if NET_2_0 /// <summary> /// Returns a constraint that tests whether the actual value /// is of the type supplied as an argument or a derived type. /// </summary> public InstanceOfTypeConstraint InstanceOf<T>() { return new InstanceOfTypeConstraint(typeof(T)); } #endif /// <summary> /// Returns a constraint that tests whether the actual value /// is of the type supplied as an argument or a derived type. 
/// </summary> [Obsolete("Use InstanceOf(expectedType)")] public InstanceOfTypeConstraint InstanceOfType(Type expectedType) { return new InstanceOfTypeConstraint(expectedType); } #if NET_2_0 /// <summary> /// Returns a constraint that tests whether the actual value /// is of the type supplied as an argument or a derived type. /// </summary> [Obsolete("Use InstanceOf<T>()")] public InstanceOfTypeConstraint InstanceOfType<T>() { return new InstanceOfTypeConstraint(typeof(T)); } #endif #endregion #region AssignableFrom /// <summary> /// Returns a constraint that tests whether the actual value /// is assignable from the type supplied as an argument. /// </summary> public AssignableFromConstraint AssignableFrom(Type expectedType) { return new AssignableFromConstraint(expectedType); } #if NET_2_0 /// <summary> /// Returns a constraint that tests whether the actual value /// is assignable from the type supplied as an argument. /// </summary> public AssignableFromConstraint AssignableFrom<T>() { return new AssignableFromConstraint(typeof(T)); } #endif #endregion #region AssignableTo /// <summary> /// Returns a constraint that tests whether the actual value /// is assignable from the type supplied as an argument. /// </summary> public AssignableToConstraint AssignableTo(Type expectedType) { return new AssignableToConstraint(expectedType); } #if NET_2_0 /// <summary> /// Returns a constraint that tests whether the actual value /// is assignable from the type supplied as an argument. /// </summary> public AssignableToConstraint AssignableTo<T>() { return new AssignableToConstraint(typeof(T)); } #endif #endregion #region EquivalentTo /// <summary> /// Returns a constraint that tests whether the actual value /// is a collection containing the same elements as the /// collection supplied as an argument. /// </summary> public CollectionEquivalentConstraint EquivalentTo(IEnumerable expected) { return new CollectionEquivalentConstraint(expected); } #endregion #region SubsetOf /// <summary> /// Returns a constraint that tests whether the actual value /// is a subset of the collection supplied as an argument. /// </summary> public CollectionSubsetConstraint SubsetOf(IEnumerable expected) { return new CollectionSubsetConstraint(expected); } #endregion #region Ordered /// <summary> /// Returns a constraint that tests whether a collection is ordered /// </summary> public CollectionOrderedConstraint Ordered { get { return new CollectionOrderedConstraint(); } } #endregion #region Member /// <summary> /// Returns a new CollectionContainsConstraint checking for the /// presence of a particular object in the collection. /// </summary> public CollectionContainsConstraint Member(object expected) { return new CollectionContainsConstraint(expected); } /// <summary> /// Returns a new CollectionContainsConstraint checking for the /// presence of a particular object in the collection. /// </summary> public CollectionContainsConstraint Contains(object expected) { return new CollectionContainsConstraint(expected); } #endregion #region Contains /// <summary> /// Returns a new ContainsConstraint. This constraint /// will, in turn, make use of the appropriate second-level /// constraint, depending on the type of the actual argument. /// This overload is only used if the item sought is a string, /// since any other type implies that we are looking for a /// collection member. 
/// </summary> public ContainsConstraint Contains(string expected) { return new ContainsConstraint(expected); } #endregion #region StringContaining /// <summary> /// Returns a constraint that succeeds if the actual /// value contains the substring supplied as an argument. /// </summary> public SubstringConstraint StringContaining(string expected) { return new SubstringConstraint(expected); } /// <summary> /// Returns a constraint that succeeds if the actual /// value contains the substring supplied as an argument. /// </summary> public SubstringConstraint ContainsSubstring(string expected) { return new SubstringConstraint(expected); } #endregion #region DoesNotContain /// <summary> /// Returns a constraint that fails if the actual /// value contains the substring supplied as an argument. /// </summary> public SubstringConstraint DoesNotContain(string expected) { return new ConstraintExpression().Not.ContainsSubstring(expected); } #endregion #region StartsWith /// <summary> /// Returns a constraint that succeeds if the actual /// value starts with the substring supplied as an argument. /// </summary> public StartsWithConstraint StartsWith(string expected) { return new StartsWithConstraint(expected); } /// <summary> /// Returns a constraint that succeeds if the actual /// value starts with the substring supplied as an argument. /// </summary> public StartsWithConstraint StringStarting(string expected) { return new StartsWithConstraint(expected); } #endregion #region DoesNotStartWith /// <summary> /// Returns a constraint that fails if the actual /// value starts with the substring supplied as an argument. /// </summary> public StartsWithConstraint DoesNotStartWith(string expected) { return new ConstraintExpression().Not.StartsWith(expected); } #endregion #region EndsWith /// <summary> /// Returns a constraint that succeeds if the actual /// value ends with the substring supplied as an argument. /// </summary> public EndsWithConstraint EndsWith(string expected) { return new EndsWithConstraint(expected); } /// <summary> /// Returns a constraint that succeeds if the actual /// value ends with the substring supplied as an argument. /// </summary> public EndsWithConstraint StringEnding(string expected) { return new EndsWithConstraint(expected); } #endregion #region DoesNotEndWith /// <summary> /// Returns a constraint that fails if the actual /// value ends with the substring supplied as an argument. /// </summary> public EndsWithConstraint DoesNotEndWith(string expected) { return new ConstraintExpression().Not.EndsWith(expected); } #endregion #region Matches /// <summary> /// Returns a constraint that succeeds if the actual /// value matches the Regex pattern supplied as an argument. /// </summary> public RegexConstraint Matches(string pattern) { return new RegexConstraint(pattern); } /// <summary> /// Returns a constraint that succeeds if the actual /// value matches the Regex pattern supplied as an argument. /// </summary> public RegexConstraint StringMatching(string pattern) { return new RegexConstraint(pattern); } #endregion #region DoesNotMatch /// <summary> /// Returns a constraint that fails if the actual /// value matches the pattern supplied as an argument. /// </summary> public RegexConstraint DoesNotMatch(string pattern) { return new ConstraintExpression().Not.Matches(pattern); } #endregion #region SamePath /// <summary> /// Returns a constraint that tests whether the path provided /// is the same as an expected path after canonicalization. 
        /// </summary>
        public SamePathConstraint SamePath(string expected)
        {
            return new SamePathConstraint(expected);
        }
        #endregion

        #region SamePathOrUnder
        /// <summary>
        /// Returns a constraint that tests whether the path provided
        /// is the same path or under an expected path after canonicalization.
        /// </summary>
        public SamePathOrUnderConstraint SamePathOrUnder(string expected)
        {
            return new SamePathOrUnderConstraint(expected);
        }
        #endregion

        #region InRange
        /// <summary>
        /// Returns a constraint that tests whether the actual value falls
        /// within a specified range.
        /// </summary>
        public RangeConstraint InRange(IComparable from, IComparable to)
        {
            return new RangeConstraint(from, to);
        }
        #endregion
    }
}
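// --------------------------------------------------------------------------
// A hedged sketch showing the kind of constraint objects the ConstraintFactory
// above produces. It only calls members declared on ConstraintFactory and
// stores the results in the return types those members declare; how the
// constraints are ultimately evaluated (for example via Assert.That in a
// standard NUnit setup) depends on the rest of this library and is not shown.
// The class and method names here are hypothetical.
// --------------------------------------------------------------------------
namespace NUnit.Constraints.Constraints
{
    internal static class ConstraintFactorySketch
    {
        internal static void BuildSomeConstraints()
        {
            ConstraintFactory factory = new ConstraintFactory();

            // Simple value constraints.
            EqualConstraint equalsFour = factory.EqualTo(4);
            NullConstraint isNull = factory.Null;
            RangeConstraint oneToTen = factory.InRange(1, 10);

            // String constraints.
            SubstringConstraint containsFoo = factory.StringContaining("foo");
            StartsWithConstraint startsWithBar = factory.StartsWith("bar");

            // Collection constraints.
            UniqueItemsConstraint allUnique = factory.Unique;
            CollectionContainsConstraint hasMember = factory.Member("item");
        }
    }
}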
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Security.Cryptography;

using DeOps.Implementation.Protocol;
using DeOps.Implementation.Protocol.Net;

namespace DeOps.Implementation.Dht
{
    /// <summary>
    /// Summary description for DhtBucket.
    /// </summary>
    public class DhtBucket
    {
        public Dictionary<ulong, List<DhtContact>> ContactMap = new Dictionary<ulong, List<DhtContact>>();
        public List<DhtContact> ContactList = new List<DhtContact>();

        public int Depth;
        public bool Last;

        DhtRouting Routing;

        // dont refresh immediately because searches/self search will take care of it
        public DateTime NextRefresh;

        public DhtBucket(DhtRouting routing, int depth, bool lastBucket)
        {
            Depth = depth;
            Routing = routing;
            Last = lastBucket;

            NextRefresh = Routing.Core.TimeNow.AddMinutes(15);
        }

        public UInt64 GetRandomBucketID()
        {
            UInt64 randomID = Utilities.StrongRandUInt64(Routing.Core.StrongRndGen);
            UInt64 localID = Routing.Network.Local.UserID;

            // ex.. Dht id 00000
            // depth 0, 1...
            // depth 1, 01...
            // depth 2, 001...
            // depth 3 (last), 000...

            // set depth number of bits to same as LocalDhtID
            for (int x = 0; x < Depth; x++)
                Utilities.SetBit(ref randomID, x, Utilities.GetBit(localID, x));

            // if this is the last bucket, keep id the same in the final bit
            bool finalBit = Utilities.GetBit(localID, Depth);
            Utilities.SetBit(ref randomID, Depth, Last ? finalBit : !finalBit);

            return randomID;
        }

        public bool Add(DhtContact newContact)
        {
            // duplicate already checked for in routing.add

            // check if bucket full
            if (ContactList.Count >= Routing.ContactsPerBucket)
                return false;

            // else good to go
            ContactList.Add(newContact);
            Routing.ContactMap[newContact.RoutingID] = newContact;

            return true;
        }
    }

    public class DhtContact : DhtAddress
    {
        new const int PAYLOAD_SIZE = 14;

        const byte Packet_IP = 0x10;
        const byte Packet_Server = 0x20;
        const byte Packet_Client = 0x30;

        public ushort TcpPort;

        public DateTime LastSeen;
        public int Attempts;
        public DateTime NextTry;
        public DateTime NextTryIP;
        public DateTime NextTryProxy; // required because attempts more spaced out
        public ushort Ident;

        public DhtContact()
        {
        }

        public DhtContact(UInt64 user, ushort client, IPAddress address, ushort tcpPort, ushort udpPort)
        {
            UserID = user;
            ClientID = client;
            IP = address;
            TcpPort = tcpPort;
            UdpPort = udpPort;
        }

        public DhtContact(DhtAddress address)
        {
            UserID = address.UserID;
            ClientID = address.ClientID;
            IP = address.IP;
            UdpPort = address.UdpPort;
        }

        // used to add global proxies
        public DhtContact(DhtSource opHost, IPAddress opIP, TunnelAddress client, DhtAddress server)
        {
            UserID = opHost.UserID;
            ClientID = opHost.ClientID;
            IP = opIP;
            TcpPort = opHost.TcpPort;
            UdpPort = opHost.UdpPort;

            TunnelServer = new DhtAddress(server.UserID, server.ClientID, server.IP, server.UdpPort);
            TunnelClient = client;
        }

        public DhtContact(DhtSource source, IPAddress ip)
        {
            UserID = source.UserID;
            ClientID = source.ClientID;
            IP = ip;
            TcpPort = source.TcpPort;
            UdpPort = source.UdpPort;
        }

        public override string ToString()
        {
            return IP.ToString() + ":" + TcpPort.ToString() + ":" + UdpPort.ToString();
        }

        public new void WritePacket(G2Protocol protocol, G2Frame root, byte name)
        {
            byte[] payload = new byte[PAYLOAD_SIZE];

            BitConverter.GetBytes(UserID).CopyTo(payload, 0);
            BitConverter.GetBytes(ClientID).CopyTo(payload, 8);
            BitConverter.GetBytes(UdpPort).CopyTo(payload, 10);
            BitConverter.GetBytes(TcpPort).CopyTo(payload, 12);

            G2Frame address = protocol.WritePacket(root, name, payload);

            protocol.WritePacket(address, Packet_IP, IP.GetAddressBytes());

            if (TunnelServer != null)
                TunnelServer.WritePacket(protocol, address, Packet_Server);

            if (TunnelClient != null)
                protocol.WritePacket(address, Packet_Client, TunnelClient.ToBytes());
        }

        public byte[] Encode(G2Protocol protocol, byte name)
        {
            lock (protocol.WriteSection)
            {
                WritePacket(protocol, null, name);
                return protocol.WriteFinish();
            }
        }

        public static new DhtContact ReadPacket(G2Header root)
        {
            // read payload
            DhtContact contact = new DhtContact();

            contact.UserID = BitConverter.ToUInt64(root.Data, root.PayloadPos);
            contact.ClientID = BitConverter.ToUInt16(root.Data, root.PayloadPos + 8);
            contact.UdpPort = BitConverter.ToUInt16(root.Data, root.PayloadPos + 10);
            contact.TcpPort = BitConverter.ToUInt16(root.Data, root.PayloadPos + 12);

            // read packets
            G2Protocol.ResetPacket(root);

            G2Header child = new G2Header(root.Data);

            while (G2Protocol.ReadNextChild(root, child) == G2ReadResult.PACKET_GOOD)
            {
                if (!G2Protocol.ReadPayload(child))
                    continue;

                switch (child.Name)
                {
                    case Packet_IP:
                        contact.IP = new IPAddress(Utilities.ExtractBytes(child.Data, child.PayloadPos, child.PayloadSize));
                        break;

                    case Packet_Server:
                        contact.TunnelServer = DhtAddress.ReadPacket(child);
                        break;

                    case Packet_Client:
                        contact.TunnelClient = TunnelAddress.FromBytes(child.Data, child.PayloadPos);
                        break;
                }
            }

            return contact;
        }

        public void Alive(DateTime latest)
        {
            if (latest > LastSeen)
            {
                LastSeen = latest;
                Attempts = 0;
            }
        }
    }
}
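// --------------------------------------------------------------------------
// Illustrative sketch only: it restates the prefix logic described in the
// GetRandomBucketID comments above using plain shifts, assuming bit 0 is the
// most significant bit of the 64-bit DHT id and that depth < 64. The real code
// delegates to Utilities.GetBit/SetBit, whose exact bit ordering is defined
// elsewhere in DeOps; the class and method names here are hypothetical.
// --------------------------------------------------------------------------
namespace DeOps.Implementation.Dht
{
    internal static class BucketIdSketch
    {
        // Copy the top 'depth' bits of localId into randomId, then force the
        // next bit to differ from localId (or to match it for the last bucket),
        // mirroring the comments in DhtBucket.GetRandomBucketID.
        internal static ulong ForcePrefix(ulong randomId, ulong localId, int depth, bool lastBucket)
        {
            for (int x = 0; x < depth; x++)
            {
                ulong mask = 1UL << (63 - x);
                randomId = (randomId & ~mask) | (localId & mask);
            }

            ulong nextMask = 1UL << (63 - depth);
            bool localBit = (localId & nextMask) != 0;
            bool targetBit = lastBucket ? localBit : !localBit;

            return targetBit ? (randomId | nextMask) : (randomId & ~nextMask);
        }
    }
}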
using System; using SharpFlame.Domain; using SharpFlame.Mapping; using SharpFlame.Mapping.Tiles; using SharpFlame.Mapping.Tools; namespace SharpFlame.Generators { public sealed class DefaultGenerator { public static clsGeneratorTileset Generator_TilesetArizona; public static clsGeneratorTileset Generator_TilesetUrban; public static clsGeneratorTileset Generator_TilesetRockies; public static UnitTypeBase UnitTypeBaseOilResource; public static UnitTypeBase UnitTypeBaseCommandCentre; public static UnitTypeBase UnitTypeBaseTruck; public static UnitTypeBase UnitTypeBaseFactory; public static UnitTypeBase UnitTypeBaseFactoryModule; public static UnitTypeBase UnitTypeBaseCyborgFactory; public static UnitTypeBase UnitTypeBaseResearchFacility; public static UnitTypeBase UnitTypeBaseResearchModule; public static UnitTypeBase UnitTypeBasePowerGenerator; public static UnitTypeBase UnitTypeBasePowerModule; public static UnitTypeBase UnitTypeBaseDerrick; public static UnitTypeBase UnitTypeBaseBoulder1; public static UnitTypeBase UnitTypeBaseBoulder2; public static UnitTypeBase UnitTypeBaseBoulder3; public static UnitTypeBase UnitTypeBaseRuin1; public static UnitTypeBase UnitTypeBaseRuin3; public static UnitTypeBase UnitTypeBaseRuin4; public static UnitTypeBase UnitTypeBaseRuin5; public static UnitTypeBase UnitTypeBase5Trees; public static UnitTypeBase UnitTypeBase4Trees; public static UnitTypeBase UnitTypeBase1Tree; public static UnitTypeBase UnitTypeBase5TreesSnowy; public static UnitTypeBase UnitTypeBase4TreesSnowy; public static UnitTypeBase UnitTypeBase1TreeSnowy; public static UnitTypeBase UnitTypeBaseHighrise1; public static UnitTypeBase UnitTypeBaseHighrise2; public static UnitTypeBase UnitTypeBaseHighrise3; public static UnitTypeBase UnitTypeBaseHalfHighrise; public static UnitTypeBase UnitTypeBaseHighriseStump1; public static UnitTypeBase UnitTypeBaseHighriseStump2; public static UnitTypeBase UnitTypeBaseBuildingStump; public static UnitTypeBase UnitTypeBaseSmallBuilding1; public static UnitTypeBase UnitTypeBaseSmallBuilding2; public static UnitTypeBase UnitTypeBaseSmallBuilding3; public static UnitTypeBase UnitTypeBaseLogCabin1; public static UnitTypeBase UnitTypeBaseLogCabin2; public static UnitTypeBase UnitTypeBaseLogCabin3; public static UnitTypeBase UnitTypeBaseLogCabin4; public static UnitTypeBase UnitTypeBaseLogCabin5; public static UnitTypeBase UnitTypeBaseCrane; public static UnitTypeBase UnitTypeBaseOilDrum; public static sGenerateMasterTerrainArgs TerrainStyle_Arizona; public static sGenerateMasterTerrainArgs TerrainStyle_Urban; public static sGenerateMasterTerrainArgs TerrainStyle_Rockies; public static UnitTypeBase GetUnitTypeFromCode(string Code) { UnitTypeBase unitTypeBase = default(UnitTypeBase); foreach ( UnitTypeBase tempLoopVar_UnitType in App.ObjectData.UnitTypes ) { unitTypeBase = tempLoopVar_UnitType; string UnitCode = null; if ( unitTypeBase.GetCode(ref UnitCode) ) { if ( UnitCode == Code ) { return unitTypeBase; } } } return null; } public static void CreateGeneratorTilesets() { Generator_TilesetArizona = new clsGeneratorTileset(); Generator_TilesetArizona.Tileset = App.Tileset_Arizona; Generator_TilesetUrban = new clsGeneratorTileset(); Generator_TilesetUrban.Tileset = App.Tileset_Urban; Generator_TilesetRockies = new clsGeneratorTileset(); Generator_TilesetRockies.Tileset = App.Tileset_Rockies; UnitTypeBaseOilResource = GetUnitTypeFromCode("OilResource"); UnitTypeBaseCommandCentre = GetUnitTypeFromCode("A0CommandCentre"); UnitTypeBaseTruck = 
GetUnitTypeFromCode("ConstructionDroid"); UnitTypeBaseFactory = GetUnitTypeFromCode("A0LightFactory"); UnitTypeBaseFactoryModule = GetUnitTypeFromCode("A0FacMod1"); UnitTypeBaseCyborgFactory = GetUnitTypeFromCode("A0CyborgFactory"); UnitTypeBaseResearchFacility = GetUnitTypeFromCode("A0ResearchFacility"); UnitTypeBaseResearchModule = GetUnitTypeFromCode("A0ResearchModule1"); UnitTypeBasePowerGenerator = GetUnitTypeFromCode("A0PowerGenerator"); UnitTypeBasePowerModule = GetUnitTypeFromCode("A0PowMod1"); UnitTypeBaseDerrick = GetUnitTypeFromCode("A0ResourceExtractor"); UnitTypeBaseBoulder1 = GetUnitTypeFromCode("Boulder1"); UnitTypeBaseBoulder2 = GetUnitTypeFromCode("Boulder2"); UnitTypeBaseBoulder3 = GetUnitTypeFromCode("Boulder3"); UnitTypeBaseRuin1 = GetUnitTypeFromCode("Ruin1"); UnitTypeBaseRuin3 = GetUnitTypeFromCode("Ruin3"); UnitTypeBaseRuin4 = GetUnitTypeFromCode("Ruin4"); UnitTypeBaseRuin5 = GetUnitTypeFromCode("Ruin5"); UnitTypeBase5Trees = GetUnitTypeFromCode("Tree1"); UnitTypeBase4Trees = GetUnitTypeFromCode("Tree2"); UnitTypeBase1Tree = GetUnitTypeFromCode("Tree3"); UnitTypeBase5TreesSnowy = GetUnitTypeFromCode("TreeSnow1"); UnitTypeBase4TreesSnowy = GetUnitTypeFromCode("TreeSnow2"); UnitTypeBase1TreeSnowy = GetUnitTypeFromCode("TreeSnow3"); UnitTypeBaseHighrise1 = GetUnitTypeFromCode("building1"); UnitTypeBaseHighrise2 = GetUnitTypeFromCode("building2"); UnitTypeBaseHighrise3 = GetUnitTypeFromCode("building3"); UnitTypeBaseHalfHighrise = GetUnitTypeFromCode("building11"); UnitTypeBaseHighriseStump1 = GetUnitTypeFromCode("building7"); UnitTypeBaseHighriseStump2 = GetUnitTypeFromCode("building8"); UnitTypeBaseBuildingStump = GetUnitTypeFromCode("WreckedBuilding9"); UnitTypeBaseSmallBuilding1 = GetUnitTypeFromCode("building10"); UnitTypeBaseSmallBuilding2 = GetUnitTypeFromCode("building12"); UnitTypeBaseSmallBuilding3 = GetUnitTypeFromCode("WreckedBuilding17"); UnitTypeBaseLogCabin1 = GetUnitTypeFromCode("LogCabin1"); UnitTypeBaseLogCabin2 = GetUnitTypeFromCode("LogCabin2"); UnitTypeBaseLogCabin3 = GetUnitTypeFromCode("LogCabin3"); UnitTypeBaseLogCabin4 = GetUnitTypeFromCode("LogCabin4"); UnitTypeBaseLogCabin5 = GetUnitTypeFromCode("LogCabin5"); UnitTypeBaseCrane = GetUnitTypeFromCode("Crane"); UnitTypeBaseOilDrum = GetUnitTypeFromCode("OilDrum"); Generator_TilesetArizona.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseBoulder1, 1)); Generator_TilesetArizona.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseBoulder2, 1)); Generator_TilesetArizona.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseBoulder3, 1)); Generator_TilesetArizona.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseRuin1, 1)); Generator_TilesetArizona.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseRuin3, 1)); Generator_TilesetArizona.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseRuin4, 1)); Generator_TilesetArizona.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseRuin5, 1)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHighrise1, 3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHighrise2, 3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHighrise3, 3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHalfHighrise, 1)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseSmallBuilding1, 
3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseSmallBuilding2, 3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseSmallBuilding3, 3)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHighriseStump1, 1)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseHighriseStump2, 1)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseBuildingStump, 1)); Generator_TilesetUrban.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseCrane, 2)); Generator_TilesetRockies.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBase5Trees, 1)); Generator_TilesetRockies.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBase4Trees, 1)); Generator_TilesetRockies.ScatteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBase1Tree, 2)); //Tileset_Rockies.ScatteredUnit_Add(New clsWZTileset.sUnitChance(UnitType_5TreesSnowy, 1)) //Tileset_Rockies.ScatteredUnit_Add(New clsWZTileset.sUnitChance(UnitType_4TreesSnowy, 1)) //Tileset_Rockies.ScatteredUnit_Add(New clsWZTileset.sUnitChance(UnitType_1TreeSnowy, 2)) Generator_TilesetRockies.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseLogCabin1, 3)); Generator_TilesetRockies.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseLogCabin2, 1)); Generator_TilesetRockies.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseLogCabin3, 1)); Generator_TilesetRockies.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseLogCabin4, 1)); Generator_TilesetRockies.ClusteredUnit_Add(new clsGeneratorTileset.sUnitChance(UnitTypeBaseLogCabin5, 3)); int Num = 0; //terrain arizona TerrainStyle_Arizona.LayerCount = 5; Num = 0; Array.Resize(ref TerrainStyle_Arizona.Layers, Num + 1); TerrainStyle_Arizona.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Arizona.Layers[Num].TileNum = 48; //red TerrainStyle_Arizona.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Arizona.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Arizona.Layers[Num].HeightMax = 256.0F; TerrainStyle_Arizona.Layers[Num].AvoidLayers = new bool[TerrainStyle_Arizona.LayerCount]; TerrainStyle_Arizona.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Arizona.Layers, Num + 1); TerrainStyle_Arizona.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Arizona.Layers[Num].TileNum = 11; //yellow TerrainStyle_Arizona.Layers[Num].TerrainmapDensity = 0.5F; TerrainStyle_Arizona.Layers[Num].TerrainmapScale = 2.0F; TerrainStyle_Arizona.Layers[Num].HeightMax = 256.0F; TerrainStyle_Arizona.Layers[Num].AvoidLayers = new bool[TerrainStyle_Arizona.LayerCount]; TerrainStyle_Arizona.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Arizona.Layers, Num + 1); TerrainStyle_Arizona.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Arizona.Layers[Num].TileNum = 5; //brown TerrainStyle_Arizona.Layers[Num].TerrainmapDensity = 0.4F; TerrainStyle_Arizona.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Arizona.Layers[Num].HeightMax = 256.0F; TerrainStyle_Arizona.Layers[Num].AvoidLayers = new bool[TerrainStyle_Arizona.LayerCount]; TerrainStyle_Arizona.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Arizona.Layers, Num + 1); TerrainStyle_Arizona.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Arizona.Layers[Num].TileNum = 23; //green 
TerrainStyle_Arizona.Layers[Num].TerrainmapDensity = 0.75F; TerrainStyle_Arizona.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Arizona.Layers[Num].HeightMax = 256.0F; TerrainStyle_Arizona.Layers[Num].AvoidLayers = new bool[TerrainStyle_Arizona.LayerCount]; TerrainStyle_Arizona.Layers[Num].WithinLayer = Num - 1; Num++; Array.Resize(ref TerrainStyle_Arizona.Layers, Num + 1); TerrainStyle_Arizona.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Arizona.Layers[Num].TileNum = 18; //cliff TerrainStyle_Arizona.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Arizona.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Arizona.Layers[Num].HeightMax = 256.0F; TerrainStyle_Arizona.Layers[Num].IsCliff = true; TerrainStyle_Arizona.Layers[Num].AvoidLayers = new bool[TerrainStyle_Arizona.LayerCount]; TerrainStyle_Arizona.Layers[Num].WithinLayer = -1; TerrainStyle_Arizona.Tileset = Generator_TilesetArizona; Generator_TilesetArizona.BorderTextureNum = 18; //terrain urban TerrainStyle_Urban.LayerCount = 6; Num = 0; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Urban.Layers[Num].TileNum = 7; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Urban.Layers[Num].TileNum = 0; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 0.5F; TerrainStyle_Urban.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Urban.Layers[Num].TileNum = 22; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 0.333F; TerrainStyle_Urban.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Urban.Layers[Num].TileNum = 50; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 0.333F; TerrainStyle_Urban.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; //Num += 1 //ReDim Preserve TerrainStyle_Urban.Layers(Num) //TerrainStyle_Urban.Layers(Num) = New clsMap.sGenerateMasterTerrainArgs.clsLayer //TerrainStyle_Urban.Layers(Num).TileNum = 19 //TerrainStyle_Urban.Layers(Num).TerrainmapDensity = 0.25F //TerrainStyle_Urban.Layers(Num).TerrainmapScale = 1.5F //TerrainStyle_Urban.Layers(Num).HeightMax = 256.0F //ReDim TerrainStyle_Urban.Layers(Num).AvoidLayers(TerrainStyle_Urban.LayerCount - 1) //TerrainStyle_Urban.Layers(Num).WithinLayer = -1 Num++; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); 
TerrainStyle_Urban.Layers[Num].TileNum = 51; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 0.4F; TerrainStyle_Urban.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Urban.Layers, Num + 1); TerrainStyle_Urban.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Urban.Layers[Num].TileNum = 70; TerrainStyle_Urban.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Urban.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Urban.Layers[Num].HeightMax = 256.0F; TerrainStyle_Urban.Layers[Num].IsCliff = true; TerrainStyle_Urban.Layers[Num].AvoidLayers = new bool[TerrainStyle_Urban.LayerCount]; TerrainStyle_Urban.Layers[Num].WithinLayer = -1; TerrainStyle_Urban.Tileset = Generator_TilesetUrban; Generator_TilesetUrban.BorderTextureNum = 70; //terrain rockies TerrainStyle_Rockies.LayerCount = 7; Num = 0; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 0; //green TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 53; //brown TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 0.4F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 23; //green & snow TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 0.333F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 1.5F; TerrainStyle_Rockies.Layers[Num].HeightMin = 85.0F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 64; //snow TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 0.5F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 1.0F; TerrainStyle_Rockies.Layers[Num].HeightMin = 85.0F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = Num - 1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 41; //brown & snow TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Rockies.Layers[Num].HeightMin = 170.0F; 
TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = -1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 64; //snow TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 0.875F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 1.0F; TerrainStyle_Rockies.Layers[Num].HeightMin = 170.0F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = Num - 1; Num++; Array.Resize(ref TerrainStyle_Rockies.Layers, Num + 1); TerrainStyle_Rockies.Layers[Num] = new sGenerateMasterTerrainArgs.clsLayer(); TerrainStyle_Rockies.Layers[Num].TileNum = 30; //cliff TerrainStyle_Rockies.Layers[Num].TerrainmapDensity = 1.0F; TerrainStyle_Rockies.Layers[Num].TerrainmapScale = 0.0F; TerrainStyle_Rockies.Layers[Num].HeightMax = 256.0F; TerrainStyle_Rockies.Layers[Num].IsCliff = true; TerrainStyle_Rockies.Layers[Num].AvoidLayers = new bool[TerrainStyle_Rockies.LayerCount]; TerrainStyle_Rockies.Layers[Num].WithinLayer = -1; TerrainStyle_Rockies.Tileset = Generator_TilesetRockies; Generator_TilesetRockies.BorderTextureNum = 30; } } }
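// Hedged illustration (not part of the original generator source): every layer above is built with the
// same Array.Resize / construct / assign sequence. The hypothetical helper below restates that sequence
// once so the pattern is easier to follow. The clsLayer fields (TileNum, TerrainmapDensity,
// TerrainmapScale, HeightMin, HeightMax, IsCliff, AvoidLayers, WithinLayer) are taken from the
// assignments above; the helper class, its name and its parameters are assumptions for illustration,
// and it is assumed to be declared next to the generator code above (the enclosing namespace is not
// visible in this excerpt).
internal static class GeneratorLayerSketch
{
    internal static void AddLayer(ref sGenerateMasterTerrainArgs.clsLayer[] layers, int layerCount,
        int tileNum, float density, float scale, float heightMax,
        float heightMin = 0.0F, bool isCliff = false, int withinLayer = -1)
    {
        // Grow the layer array by one, exactly as the repeated Array.Resize calls above do.
        int num = layers == null ? 0 : layers.Length;
        System.Array.Resize(ref layers, num + 1);

        var layer = new sGenerateMasterTerrainArgs.clsLayer();
        layer.TileNum = tileNum;
        layer.TerrainmapDensity = density;
        layer.TerrainmapScale = scale;
        layer.HeightMin = heightMin;
        layer.HeightMax = heightMax;
        layer.IsCliff = isCliff;
        layer.AvoidLayers = new bool[layerCount]; // no layers avoided by default, as above
        layer.WithinLayer = withinLayer;          // -1 = not constrained to another layer
        layers[num] = layer;
    }
}
// With such a helper, the first Arizona layer above could be written as:
//   GeneratorLayerSketch.AddLayer(ref TerrainStyle_Arizona.Layers, TerrainStyle_Arizona.LayerCount,
//       tileNum: 48, density: 1.0F, scale: 0.0F, heightMax: 256.0F);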
using CollectionManager.Enums; using OsuMemoryDataProvider; using StreamCompanionTypes.DataTypes; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; using OsuMemoryDataProvider.OsuMemoryModels.Abstract; using OsuMemoryDataProvider.OsuMemoryModels.Direct; using StreamCompanion.Common; using StreamCompanion.Common.Helpers; using StreamCompanionTypes; using StreamCompanionTypes.Enums; using StreamCompanionTypes.Interfaces.Consumers; using StreamCompanionTypes.Interfaces.Services; using StreamCompanionTypes.Interfaces.Sources; namespace OsuMemoryEventSource { public class MemoryListener : IOsuEventSource, IDisposable { public EventHandler<IMapSearchArgs> NewOsuEvent { get; set; } private int _lastMapId = -1; private int _currentMapId = -2; private int _lastGameMode = -1; private int _lastRetries = -1; private int _lastTime = -1; private readonly IToken _osuIsRunningToken = OsuMemoryEventSourceBase.LiveTokenSetter("osuIsRunning", 0, TokenType.Live, null, 0); private DateTime _nextReplayRetryAllowedAt = DateTime.MinValue; private OsuMemoryStatus _lastStatus = OsuMemoryStatus.Unknown; private OsuMemoryStatus _lastStatusLog = OsuMemoryStatus.Unknown; private OsuMemoryStatus _currentStatus = OsuMemoryStatus.Unknown; private string _lastMapHash = ""; private int _lastSentMods = -2; private List<MemoryDataProcessor> _memoryDataProcessors; private PatternsDispatcher _patternsDispatcher; private ISettings _settings; private readonly IContextAwareLogger _logger; public MemoryListener(ISettings settings, ISaver saver, IModParser modParser, IContextAwareLogger logger, int clientCount = 1) { _settings = settings; _logger = logger; _settings.SettingUpdated += SettingUpdated; var tournamentMode = clientCount > 1; var mainClientId = tournamentMode ? _settings.Get<int>(OsuMemoryEventSourceBase.DataClientId) : 0; _memoryDataProcessors = Enumerable.Range(0, clientCount) .Select(x => new MemoryDataProcessor(settings, logger, modParser, x == mainClientId, tournamentMode ? $"client_{x}_" : string.Empty)).ToList(); _patternsDispatcher = new PatternsDispatcher(settings, saver); var mainProcessor = _memoryDataProcessors.First(p => p.IsMainProcessor); mainProcessor.TokensUpdated += (sender, status) => _patternsDispatcher.TokensUpdated(status); foreach (var memoryDataProcessor in _memoryDataProcessors) { memoryDataProcessor.ToggleSmoothing(_settings.Get<bool>(Helpers.EnablePpSmoothing)); } } public void Tick(List<StructuredOsuMemoryReader> clientReaders, bool sendEvents) { var reader = clientReaders[0]; var osuData = reader.OsuMemoryAddresses; var canRead = reader.CanRead; _osuIsRunningToken.Value = canRead ? 1 : 0; if (!canRead || !reader.TryRead(osuData.GeneralData)) return; _currentStatus = osuData.GeneralData.OsuStatus; if (_lastStatusLog != _currentStatus) { _lastStatusLog = _currentStatus; //Console.WriteLine("status: {0} {1}", _currentStatus, _currentStatus == OsuMemoryStatus.Unknown ? 
num.ToString() : ""); } if (_currentStatus == OsuMemoryStatus.NotRunning) return; if (_currentStatus == OsuMemoryStatus.Unknown) { _logger.Log($"Unknown memory status: {osuData.GeneralData.RawStatus}", LogLevel.Warning); return; } if (!reader.TryRead(osuData.Beatmap) || !TryGetOsuFileLocation(osuData.Beatmap, out var osuFileLocation)) return; _currentMapId = osuData.Beatmap.Id; OsuStatus status = _currentStatus.Convert(); object rawIsReplay = false; if (status == OsuStatus.Playing && !reader.TryReadProperty(osuData.Player, nameof(Player.IsReplay), out rawIsReplay)) return; var isReplay = (bool)rawIsReplay; status = status == OsuStatus.Playing && isReplay ? OsuStatus.Watching : status; var gameMode = osuData.GeneralData.GameMode; if (gameMode < 0 || gameMode > 3) return; var mapHash = osuData.Beatmap.Md5; var mods = osuData.GeneralData.Mods; if (status == OsuStatus.Playing || status == OsuStatus.Watching) { if (!reader.TryReadProperty(osuData.Player, nameof(Player.Mods), out var rawMods) || rawMods == null) return; mods = ((Mods)rawMods).Value; } else if (status == OsuStatus.ResultsScreen) { if (!reader.TryReadProperty(osuData.ResultsScreen, nameof(ResultsScreen.Mods), out var rawMods) || rawMods == null) return; mods = ((Mods)rawMods).Value; } if (Helpers.IsInvalidCombination((CollectionManager.DataTypes.Mods)mods)) return; var retries = osuData.GeneralData.Retries; var currentTime = osuData.GeneralData.AudioTime; var mapHashDiffers = mapHash != null && _lastMapHash != null && _lastMapHash != mapHash; var mapIdDiffers = _lastMapId != _currentMapId; var memoryStatusDiffers = _lastStatus != _currentStatus; var gameModeDiffers = gameMode != _lastGameMode; var modsDiffer = mods != _lastSentMods; OsuEventType? osuEventType = null; //"good enough" replay retry detection. if (isReplay && _currentStatus == OsuMemoryStatus.Playing && _lastTime > currentTime && DateTime.UtcNow > _nextReplayRetryAllowedAt) { osuEventType = OsuEventType.PlayChange; _nextReplayRetryAllowedAt = DateTime.UtcNow.AddMilliseconds(500); } _lastTime = currentTime; var playInitialized = (status != OsuStatus.Watching && status != OsuStatus.Playing) || mods != -1; if (sendEvents && playInitialized && ( osuEventType.HasValue || mapIdDiffers || memoryStatusDiffers || mapHashDiffers || gameModeDiffers || modsDiffer || retries != _lastRetries ) ) { if (!osuEventType.HasValue || modsDiffer) { osuEventType = mapIdDiffers || mapHashDiffers || gameModeDiffers || modsDiffer ? OsuEventType.MapChange //different mapId/hash/mode/mods(changed stats) = different map : memoryStatusDiffers ? OsuEventType.SceneChange //memory scene(status) change = Scene change : _currentStatus == OsuMemoryStatus.Playing ? OsuEventType.PlayChange // map retry : OsuEventType.MapChange; //bail } _lastMapId = _currentMapId; _lastStatus = _currentStatus; _lastRetries = retries; _lastGameMode = gameMode; _lastMapHash = mapHash; _lastSentMods = mods; var rawString = Retry.RetryMe( () => { var validRead = reader.TryReadProperty(osuData.Beatmap, nameof(CurrentBeatmap.MapString), out var result); return (validRead, (string)result); }, s => (s.validRead, s.Item2), 5) ?? 
string.Empty; NewOsuEvent?.Invoke(this, new MapSearchArgs("OsuMemory",osuEventType.Value) { MapId = _currentMapId, Status = status, Raw = rawString, MapHash = mapHash, PlayMode = (PlayMode)gameMode, Mods = (CollectionManager.DataTypes.Mods)mods, OsuFilePath = osuFileLocation }); } for (int i = 0; i < clientReaders.Count; i++) { _memoryDataProcessors[i].Tick(status, _currentStatus, clientReaders[i]); } } private bool TryGetOsuFileLocation(CurrentBeatmap memoryBeatmap, out string osuFileLocation) { osuFileLocation = null; try { var songsLocation = _settings.GetFullSongsLocation(); if (string.IsNullOrEmpty(memoryBeatmap.FolderName) || string.IsNullOrEmpty(songsLocation)) return false; osuFileLocation = Path.Combine(songsLocation, memoryBeatmap.FolderName.TrimEnd(), memoryBeatmap.OsuFileName); return true; } catch (ArgumentException) { //we have garbage data in either FolderName or OsuFileName return false; } } public void SetNewMap(IMapSearchResult map, CancellationToken cancellationToken) { foreach (var memoryDataProcessor in _memoryDataProcessors) { _ = memoryDataProcessor.SetNewMap(map, cancellationToken); } _patternsDispatcher.SetOutputPatterns(map.OutputPatterns); } public void SetHighFrequencyDataHandlers(List<Lazy<IHighFrequencyDataConsumer>> consumers) { _patternsDispatcher.HighFrequencyDataConsumers = consumers; } public void Dispose() { foreach (var memoryDataProcessor in _memoryDataProcessors) { memoryDataProcessor?.Dispose(); } _settings.SettingUpdated -= SettingUpdated; } private void SettingUpdated(object sender, SettingUpdated settingUpdated) { if (settingUpdated.Name == Helpers.EnablePpSmoothing.Name) { var enableSmoothing = _settings.Get<bool>(Helpers.EnablePpSmoothing); foreach (var memoryDataProcessor in _memoryDataProcessors) { memoryDataProcessor.ToggleSmoothing(enableSmoothing); } } } public Task CreateTokensAsync(IMapSearchResult map, CancellationToken cancellationToken) { var tasks = new List<Task>(); foreach (var memoryDataProcessor in _memoryDataProcessors) { tasks.Add(memoryDataProcessor.CreateTokensAsync(map, cancellationToken)); } return Task.WhenAll(tasks); } } }
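// Hedged illustration (not part of MemoryListener): the osuEventType selection inside Tick() above
// reduces to a short precedence list. The hypothetical helper below restates that precedence as a pure
// function so it can be read (or unit tested) in isolation. OsuEventType and its members come from the
// code above; the class and method names here, and the exact using that provides OsuEventType, are
// assumptions mirroring the imports of the file above.
namespace OsuMemoryEventSource
{
    using StreamCompanionTypes.Enums;

    internal static class OsuEventClassificationSketch
    {
        // Mirrors the nested conditional in MemoryListener.Tick: map identity first, then scene, then retry.
        internal static OsuEventType Classify(bool mapIdDiffers, bool mapHashDiffers, bool gameModeDiffers,
            bool modsDiffer, bool memoryStatusDiffers, bool isPlaying)
        {
            if (mapIdDiffers || mapHashDiffers || gameModeDiffers || modsDiffer)
                return OsuEventType.MapChange;   // different mapId/hash/mode/mods (changed stats) = different map
            if (memoryStatusDiffers)
                return OsuEventType.SceneChange; // memory scene (status) change = scene change
            if (isPlaying)
                return OsuEventType.PlayChange;  // same map, same scene, still playing = map retry
            return OsuEventType.MapChange;       // fallback ("bail" case above)
        }
    }
}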
// Copyright 2019 Esri
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Input;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using System.Threading.Tasks;
using ArcGIS.Core.Geometry;
using ArcGIS.Desktop.Framework.Threading.Tasks;

namespace GeometrySamples
{
    /// <summary>
    /// This sample shows how to convert a multipart geometry into single part
    /// geometries and how to retrieve the outermost ring(s) of a polygon.
    /// </summary>
    /// <remarks>
    /// 1. Download the Community Sample data (see under the 'Resources' section for downloading sample data). The sample data contains a folder called 'C:\Data\Working with Core Geometry and Data' with sample data required for this solution. Make sure that the sample data is unzipped in c:\data and "C:\Data\Working with Core Geometry and Data" is available.
    /// 1. In Visual Studio click the Build menu. Then select Build Solution.
    /// 1. Click the Start button to debug ArcGIS Pro.
    /// 1. In ArcGIS Pro open the project called "Workshop.aprx" in the "C:\Data\Working with Core Geometry and Data" folder. This project and its data are required because it contains data that is attachment enabled.
    /// 1. Digitize a polygon with multiple parts including holes using the sdk_polygons layer.
    /// ![UI](Screenshots/Screen1.png)
    /// 1. Switch to the Add-in tab on the ArcGIS Pro ribbon and click the 'Outermost Ring(s)' button in the ArcGIS Pro SDK Samples group.
    /// 1. The button will inspect the geometries of the polygon features and store the outermost ring(s) of the existing features as new features in the same feature class.
    /// ![UI](Screenshots/Screen2.png)
    /// 1. Use the Undo pull-down to undo the "Create outer ring" operation.
    /// ![UI](Screenshots/Screen3.png)
    /// 1. The create polygon feature operation is undone.
    /// </remarks>
    internal class Module1 : Module
    {
        private static Module1 _this = null;

        /// <summary>
        /// Retrieve the singleton instance to this module here
        /// </summary>
        public static Module1 Current
        {
            get
            {
                return _this ?? (_this = (Module1)FrameworkApplication.FindModule("GeometrySamples_Module"));
            }
        }

        #region Overrides
        /// <summary>
        /// Called by Framework when ArcGIS Pro is closing
        /// </summary>
        /// <returns>False to prevent Pro from closing, otherwise True</returns>
        protected override bool CanUnload()
        {
            //TODO - add your business logic
            //return false to ~cancel~ Application close
            return true;
        }
        #endregion Overrides

        /// <summary>
        /// This method retrieves the outer ring(s) of the input polygon.
        /// </summary>
        /// <param name="inputPolygon">Input Polygon.</param>
        /// <returns>The outermost (exterior, clockwise) ring(s) of the polygon. If the input is null or empty, a null pointer is returned.</returns>
        public Task<Polygon> GetOutermostRingsAsync(Polygon inputPolygon)
        {
            return QueuedTask.Run(() => GetOutermostRings(inputPolygon));
        }

        /// <summary>
        /// This method retrieves the outer ring(s) of the input polygon.
        /// This method must be called on the MCT. Use QueuedTask.Run.
        /// </summary>
        /// <param name="inputPolygon">Input Polygon.</param>
        /// <returns>The outermost (exterior, clockwise) ring(s) of the polygon. If the input is null or empty, a null pointer is returned.</returns>
        /// <remarks>This method must be called on the MCT. Use QueuedTask.Run.</remarks>
        public Polygon GetOutermostRings(Polygon inputPolygon)
        {
            if (inputPolygon == null || inputPolygon.IsEmpty)
                return null;

            PolygonBuilder outerRings = new PolygonBuilder();
            List<Polygon> internalRings = new List<Polygon>();

            // explode the parts of the polygon into a list of individual geometries
            var parts = MultipartToSinglePart(inputPolygon);

            // get an enumeration of clockwise geometries (area > 0) ordered by the area
            var clockwiseParts = parts.Where(geom => ((Polygon)geom).Area > 0).OrderByDescending(geom => ((Polygon)geom).Area);

            // for each of the exterior rings
            foreach (var part in clockwiseParts)
            {
                // add the first (the largest) ring into the internal collection
                if (internalRings.Count == 0)
                    internalRings.Add(part as Polygon);

                // use a flag to indicate whether the current part is within the already selected polygons
                bool isWithin = false;

                foreach (var item in internalRings)
                {
                    if (GeometryEngine.Instance.Within(part, item))
                        isWithin = true;
                }

                // if the current polygon is not within any polygon of the internal collection
                // then it is disjoint and needs to be added to the collection
                if (isWithin == false)
                    internalRings.Add(part as Polygon);
            }

            // now assemble a new polygon geometry based on the internal polygon collection
            foreach (var ring in internalRings)
            {
                outerRings.AddParts(ring.Parts);
            }

            // return the final geometry of the outer rings
            return outerRings.ToGeometry();
        }

        /// <summary>
        /// This method takes an input multipart geometry and breaks the parts into individual standalone geometries.
        /// </summary>
        /// <param name="inputGeometry">The geometry to be exploded into the individual parts.</param>
        /// <returns>An enumeration of individual parts as standalone geometries. The type of geometry is maintained, i.e.
        /// if the input geometry is of type Polyline then each geometry in the return is of type Polyline as well.
        /// If the input geometry is of type Unknown then an empty list is returned.</returns>
        public Task<IEnumerable<Geometry>> MultipartToSinglePartAsync(Geometry inputGeometry)
        {
            return QueuedTask.Run(() => MultipartToSinglePart(inputGeometry));
        }

        /// <summary>
        /// This method takes an input multipart geometry and breaks the parts into individual standalone geometries.
        /// This method must be called on the MCT. Use QueuedTask.Run.
        /// </summary>
        /// <param name="inputGeometry">The geometry to be exploded into the individual parts.</param>
        /// <returns>An enumeration of individual parts as standalone geometries. The type of geometry is maintained, i.e.
        /// if the input geometry is of type Polyline then each geometry in the return is of type Polyline as well.
        /// If the input geometry is of type Unknown then an empty list is returned.</returns>
        /// <remarks>This method must be called on the MCT. Use QueuedTask.Run.</remarks>
        public IEnumerable<Geometry> MultipartToSinglePart(Geometry inputGeometry)
        {
            // list holding the part(s) of the input geometry
            List<Geometry> singleParts = new List<Geometry>();

            // check if the input is a null pointer or if the geometry is empty
            if (inputGeometry == null || inputGeometry.IsEmpty)
                return singleParts;

            // based on the type of geometry, take the parts/points and add them individually into a list
            switch (inputGeometry.GeometryType)
            {
                case GeometryType.Envelope:
                    singleParts.Add(inputGeometry.Clone() as Envelope);
                    break;
                case GeometryType.Multipatch:
                    singleParts.Add(inputGeometry.Clone() as Multipatch);
                    break;
                case GeometryType.Multipoint:
                    var multiPoint = inputGeometry as Multipoint;
                    foreach (var point in multiPoint.Points)
                    {
                        // add each point of the collection as a standalone point into the list
                        singleParts.Add(point);
                    }
                    break;
                case GeometryType.Point:
                    singleParts.Add(inputGeometry.Clone() as MapPoint);
                    break;
                case GeometryType.Polygon:
                    var polygon = inputGeometry as Polygon;
                    foreach (var polygonPart in polygon.Parts)
                    {
                        // use the PolygonBuilder turning the segments into a standalone
                        // polygon instance
                        singleParts.Add(PolygonBuilder.CreatePolygon(polygonPart));
                    }
                    break;
                case GeometryType.Polyline:
                    var polyline = inputGeometry as Polyline;
                    foreach (var polylinePart in polyline.Parts)
                    {
                        // use the PolylineBuilder turning the segments into a standalone
                        // polyline instance
                        singleParts.Add(PolylineBuilder.CreatePolyline(polylinePart));
                    }
                    break;
                case GeometryType.Unknown:
                    break;
                default:
                    break;
            }

            return singleParts;
        }
    }
}
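// Hedged usage sketch (not part of the Esri sample): how add-in code might call the two helpers defined
// above. Module1.Current, GetOutermostRingsAsync and MultipartToSinglePartAsync are the members declared
// above; the sketch class itself, the hard-coded ring coordinates, and the MapPointBuilder/PolygonBuilder
// convenience calls used to build the test polygon are assumptions for illustration only.
namespace GeometrySamples
{
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using ArcGIS.Core.Geometry;

    internal static class Module1UsageSketch
    {
        internal static async Task RunAsync()
        {
            // Build a simple single-part polygon purely as illustrative input.
            var points = new List<MapPoint>
            {
                MapPointBuilder.CreateMapPoint(0, 0),
                MapPointBuilder.CreateMapPoint(0, 10),
                MapPointBuilder.CreateMapPoint(10, 10),
                MapPointBuilder.CreateMapPoint(10, 0)
            };
            Polygon polygon = PolygonBuilder.CreatePolygon(points);

            // The async wrapper above schedules the work on the MCT via QueuedTask.Run,
            // so it can be awaited from the GUI thread.
            Polygon outerRings = await Module1.Current.GetOutermostRingsAsync(polygon);

            // Explode the result into standalone single-part geometries.
            IEnumerable<Geometry> parts = await Module1.Current.MultipartToSinglePartAsync(outerRings);
        }
    }
}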
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Net; using System.Collections.Generic; using Nini.Config; using OpenMetaverse; using OpenSim.Framework; using OpenSim.Framework.Communications; using OpenSim.Framework.Console; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; using OpenSim.Region.Physics.Manager; using OpenSim.Region.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Region.CoreModules.Avatar.Gods; using OpenSim.Region.CoreModules.Asset; using OpenSim.Region.CoreModules.ServiceConnectorsOut.Asset; using OpenSim.Region.CoreModules.ServiceConnectorsOut.Authentication; using OpenSim.Region.CoreModules.ServiceConnectorsOut.Inventory; using OpenSim.Region.CoreModules.ServiceConnectorsOut.Grid; using OpenSim.Region.CoreModules.ServiceConnectorsOut.UserAccounts; using OpenSim.Region.CoreModules.ServiceConnectorsOut.Presence; using OpenSim.Services.Interfaces; using OpenSim.Tests.Common.Mock; using GridRegion = OpenSim.Services.Interfaces.GridRegion; namespace OpenSim.Tests.Common { /// <summary> /// Helpers for setting up scenes. /// </summary> public class SceneHelpers { /// <summary> /// We need a scene manager so that test clients can retrieve a scene when performing teleport tests. 
/// </summary> public SceneManager SceneManager { get; private set; } private AgentCircuitManager m_acm = new AgentCircuitManager(); private ISimulationDataService m_simDataService = OpenSim.Server.Base.ServerUtils.LoadPlugin<ISimulationDataService>("OpenSim.Tests.Common.dll", null); private IEstateDataService m_estateDataService = null; private LocalAssetServicesConnector m_assetService; private LocalAuthenticationServicesConnector m_authenticationService; private LocalInventoryServicesConnector m_inventoryService; private LocalGridServicesConnector m_gridService; private LocalUserAccountServicesConnector m_userAccountService; private LocalPresenceServicesConnector m_presenceService; private CoreAssetCache m_cache; public SceneHelpers() : this(null) {} public SceneHelpers(CoreAssetCache cache) { SceneManager = new SceneManager(); m_assetService = StartAssetService(cache); m_authenticationService = StartAuthenticationService(); m_inventoryService = StartInventoryService(); m_gridService = StartGridService(); m_userAccountService = StartUserAccountService(); m_presenceService = StartPresenceService(); m_inventoryService.PostInitialise(); m_assetService.PostInitialise(); m_userAccountService.PostInitialise(); m_presenceService.PostInitialise(); m_cache = cache; } /// <summary> /// Set up a test scene /// </summary> /// <remarks> /// Automatically starts services, as would the normal runtime. /// </remarks> /// <returns></returns> public TestScene SetupScene() { return SetupScene("Unit test region", UUID.Random(), 1000, 1000); } public TestScene SetupScene(string name, UUID id, uint x, uint y) { return SetupScene(name, id, x, y, new IniConfigSource()); } /// <summary> /// Set up a scene. /// </summary> /// <param name="name">Name of the region</param> /// <param name="id">ID of the region</param> /// <param name="x">X co-ordinate of the region</param> /// <param name="y">Y co-ordinate of the region</param> /// <param name="configSource"></param> /// <returns></returns> public TestScene SetupScene( string name, UUID id, uint x, uint y, IConfigSource configSource) { Console.WriteLine("Setting up test scene {0}", name); // We must set up a console otherwise setup of some modules may fail MainConsole.Instance = new MockConsole(); RegionInfo regInfo = new RegionInfo(x, y, new IPEndPoint(IPAddress.Loopback, 9000), "127.0.0.1"); regInfo.RegionName = name; regInfo.RegionID = id; SceneCommunicationService scs = new SceneCommunicationService(); TestScene testScene = new TestScene( regInfo, m_acm, scs, m_simDataService, m_estateDataService, configSource, null); INonSharedRegionModule godsModule = new GodsModule(); godsModule.Initialise(new IniConfigSource()); godsModule.AddRegion(testScene); // Add scene to services m_assetService.AddRegion(testScene); if (m_cache != null) { m_cache.AddRegion(testScene); m_cache.RegionLoaded(testScene); testScene.AddRegionModule(m_cache.Name, m_cache); } m_assetService.RegionLoaded(testScene); testScene.AddRegionModule(m_assetService.Name, m_assetService); m_authenticationService.AddRegion(testScene); m_authenticationService.RegionLoaded(testScene); testScene.AddRegionModule(m_authenticationService.Name, m_authenticationService); m_inventoryService.AddRegion(testScene); m_inventoryService.RegionLoaded(testScene); testScene.AddRegionModule(m_inventoryService.Name, m_inventoryService); m_gridService.AddRegion(testScene); m_gridService.RegionLoaded(testScene); testScene.AddRegionModule(m_gridService.Name, m_gridService); m_userAccountService.AddRegion(testScene); 
m_userAccountService.RegionLoaded(testScene); testScene.AddRegionModule(m_userAccountService.Name, m_userAccountService); m_presenceService.AddRegion(testScene); m_presenceService.RegionLoaded(testScene); testScene.AddRegionModule(m_presenceService.Name, m_presenceService); testScene.RegionInfo.EstateSettings.EstateOwner = UUID.Random(); testScene.SetModuleInterfaces(); testScene.LandChannel = new TestLandChannel(testScene); testScene.LoadWorldMap(); PhysicsPluginManager physicsPluginManager = new PhysicsPluginManager(); physicsPluginManager.LoadPluginsFromAssembly("Physics/OpenSim.Region.Physics.BasicPhysicsPlugin.dll"); testScene.PhysicsScene = physicsPluginManager.GetPhysicsScene("basicphysics", "ZeroMesher", new IniConfigSource(), "test"); testScene.RegionInfo.EstateSettings = new EstateSettings(); testScene.LoginsEnabled = true; testScene.RegisterRegionWithGrid(); SceneManager.Add(testScene); return testScene; } private static LocalAssetServicesConnector StartAssetService(CoreAssetCache cache) { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.Configs["Modules"].Set("AssetServices", "LocalAssetServicesConnector"); config.AddConfig("AssetService"); config.Configs["AssetService"].Set("LocalServiceModule", "OpenSim.Services.AssetService.dll:AssetService"); config.Configs["AssetService"].Set("StorageProvider", "OpenSim.Tests.Common.dll"); LocalAssetServicesConnector assetService = new LocalAssetServicesConnector(); assetService.Initialise(config); if (cache != null) { IConfigSource cacheConfig = new IniConfigSource(); cacheConfig.AddConfig("Modules"); cacheConfig.Configs["Modules"].Set("AssetCaching", "CoreAssetCache"); cacheConfig.AddConfig("AssetCache"); cache.Initialise(cacheConfig); } return assetService; } private static LocalAuthenticationServicesConnector StartAuthenticationService() { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.AddConfig("AuthenticationService"); config.Configs["Modules"].Set("AuthenticationServices", "LocalAuthenticationServicesConnector"); config.Configs["AuthenticationService"].Set( "LocalServiceModule", "OpenSim.Services.AuthenticationService.dll:PasswordAuthenticationService"); config.Configs["AuthenticationService"].Set("StorageProvider", "OpenSim.Data.Null.dll"); LocalAuthenticationServicesConnector service = new LocalAuthenticationServicesConnector(); service.Initialise(config); return service; } private static LocalInventoryServicesConnector StartInventoryService() { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.AddConfig("InventoryService"); config.Configs["Modules"].Set("InventoryServices", "LocalInventoryServicesConnector"); config.Configs["InventoryService"].Set("LocalServiceModule", "OpenSim.Services.InventoryService.dll:XInventoryService"); config.Configs["InventoryService"].Set("StorageProvider", "OpenSim.Tests.Common.dll"); LocalInventoryServicesConnector inventoryService = new LocalInventoryServicesConnector(); inventoryService.Initialise(config); return inventoryService; } private static LocalGridServicesConnector StartGridService() { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.AddConfig("GridService"); config.Configs["Modules"].Set("GridServices", "LocalGridServicesConnector"); config.Configs["GridService"].Set("StorageProvider", "OpenSim.Data.Null.dll:NullRegionData"); config.Configs["GridService"].Set("LocalServiceModule", "OpenSim.Services.GridService.dll:GridService"); 
config.Configs["GridService"].Set("ConnectionString", "!static"); LocalGridServicesConnector gridService = new LocalGridServicesConnector(); gridService.Initialise(config); return gridService; } /// <summary> /// Start a user account service /// </summary> /// <param name="testScene"></param> /// <returns></returns> private static LocalUserAccountServicesConnector StartUserAccountService() { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.AddConfig("UserAccountService"); config.Configs["Modules"].Set("UserAccountServices", "LocalUserAccountServicesConnector"); config.Configs["UserAccountService"].Set("StorageProvider", "OpenSim.Data.Null.dll"); config.Configs["UserAccountService"].Set( "LocalServiceModule", "OpenSim.Services.UserAccountService.dll:UserAccountService"); LocalUserAccountServicesConnector userAccountService = new LocalUserAccountServicesConnector(); userAccountService.Initialise(config); return userAccountService; } /// <summary> /// Start a presence service /// </summary> /// <param name="testScene"></param> private static LocalPresenceServicesConnector StartPresenceService() { IConfigSource config = new IniConfigSource(); config.AddConfig("Modules"); config.AddConfig("PresenceService"); config.Configs["Modules"].Set("PresenceServices", "LocalPresenceServicesConnector"); config.Configs["PresenceService"].Set("StorageProvider", "OpenSim.Data.Null.dll"); config.Configs["PresenceService"].Set( "LocalServiceModule", "OpenSim.Services.PresenceService.dll:PresenceService"); LocalPresenceServicesConnector presenceService = new LocalPresenceServicesConnector(); presenceService.Initialise(config); return presenceService; } /// <summary> /// Setup modules for a scene using their default settings. /// </summary> /// <param name="scene"></param> /// <param name="modules"></param> public static void SetupSceneModules(Scene scene, params object[] modules) { SetupSceneModules(scene, new IniConfigSource(), modules); } /// <summary> /// Setup modules for a scene. /// </summary> /// <remarks> /// If called directly, then all the modules must be shared modules. /// </remarks> /// <param name="scenes"></param> /// <param name="config"></param> /// <param name="modules"></param> public static void SetupSceneModules(Scene scene, IConfigSource config, params object[] modules) { SetupSceneModules(new Scene[] { scene }, config, modules); } /// <summary> /// Setup modules for a scene using their default settings. /// </summary> /// <param name="scenes"></param> /// <param name="modules"></param> public static void SetupSceneModules(Scene[] scenes, params object[] modules) { SetupSceneModules(scenes, new IniConfigSource(), modules); } /// <summary> /// Setup modules for scenes. /// </summary> /// <remarks> /// If called directly, then all the modules must be shared modules. /// /// We are emulating here the normal calls made to setup region modules /// (Initialise(), PostInitialise(), AddRegion, RegionLoaded()). /// TODO: Need to reuse normal runtime module code. 
/// </remarks> /// <param name="scenes"></param> /// <param name="config"></param> /// <param name="modules"></param> public static void SetupSceneModules(Scene[] scenes, IConfigSource config, params object[] modules) { List<IRegionModuleBase> newModules = new List<IRegionModuleBase>(); foreach (object module in modules) { IRegionModuleBase m = (IRegionModuleBase)module; // Console.WriteLine("MODULE {0}", m.Name); m.Initialise(config); newModules.Add(m); } foreach (IRegionModuleBase module in newModules) { if (module is ISharedRegionModule) ((ISharedRegionModule)module).PostInitialise(); } foreach (IRegionModuleBase module in newModules) { foreach (Scene scene in scenes) { module.AddRegion(scene); scene.AddRegionModule(module.Name, module); } } // RegionLoaded is fired after all modules have been appropriately added to all scenes foreach (IRegionModuleBase module in newModules) foreach (Scene scene in scenes) module.RegionLoaded(scene); foreach (Scene scene in scenes) { scene.SetModuleInterfaces(); } } /// <summary> /// Generate some standard agent connection data. /// </summary> /// <param name="agentId"></param> /// <returns></returns> public static AgentCircuitData GenerateAgentData(UUID agentId) { AgentCircuitData acd = GenerateCommonAgentData(); acd.AgentID = agentId; acd.firstname = "testfirstname"; acd.lastname = "testlastname"; acd.ServiceURLs = new Dictionary<string, object>(); return acd; } /// <summary> /// Generate some standard agent connection data. /// </summary> /// <param name="agentId"></param> /// <returns></returns> public static AgentCircuitData GenerateAgentData(UserAccount ua) { AgentCircuitData acd = GenerateCommonAgentData(); acd.AgentID = ua.PrincipalID; acd.firstname = ua.FirstName; acd.lastname = ua.LastName; acd.ServiceURLs = ua.ServiceURLs; return acd; } private static AgentCircuitData GenerateCommonAgentData() { AgentCircuitData acd = new AgentCircuitData(); // XXX: Sessions must be unique, otherwise one presence can overwrite another in NullPresenceData. acd.SessionID = UUID.Random(); acd.SecureSessionID = UUID.Random(); acd.circuitcode = 123; acd.BaseFolder = UUID.Zero; acd.InventoryFolder = UUID.Zero; acd.startpos = Vector3.Zero; acd.CapsPath = "http://wibble.com"; acd.Appearance = new AvatarAppearance(); return acd; } /// <summary> /// Add a root agent where the details of the agent connection (apart from the id) are unimportant for the test /// </summary> /// <remarks> /// This can be used for tests where there is only one region or where there are multiple non-neighbour regions /// and teleport doesn't take place. /// /// XXX: Use the version of this method that takes the UserAccount structure wherever possible - this will /// make the agent circuit data (e.g. first, lastname) consistent with the user account data. /// </remarks> /// <param name="scene"></param> /// <param name="agentId"></param> /// <returns></returns> public static ScenePresence AddScenePresence(Scene scene, UUID agentId) { return AddScenePresence(scene, GenerateAgentData(agentId)); } /// <summary> /// Add a root agent where the details of the agent connection (apart from the id) are unimportant for the test /// </summary> /// <remarks> /// XXX: Use the version of this method that takes the UserAccount structure wherever possible - this will /// make the agent circuit data (e.g. first, lastname) consistent with the user account data. 
/// </remarks> /// <param name="scene"></param> /// <param name="agentId"></param> /// <param name="sceneManager"></param> /// <returns></returns> public static ScenePresence AddScenePresence(Scene scene, UUID agentId, SceneManager sceneManager) { return AddScenePresence(scene, GenerateAgentData(agentId), sceneManager); } /// <summary> /// Add a root agent. /// </summary> /// <param name="scene"></param> /// <param name="ua"></param> /// <returns></returns> public static ScenePresence AddScenePresence(Scene scene, UserAccount ua) { return AddScenePresence(scene, GenerateAgentData(ua)); } /// <summary> /// Add a root agent. /// </summary> /// <remarks> /// This function /// /// 1) Tells the scene that an agent is coming. Normally, the login service (local if standalone, from the /// userserver if grid) would give initial login data back to the client and separately tell the scene that the /// agent was coming. /// /// 2) Connects the agent with the scene /// /// This function performs actions equivalent with notifying the scene that an agent is /// coming and then actually connecting the agent to the scene. The one step missed out is the very first /// </remarks> /// <param name="scene"></param> /// <param name="agentData"></param> /// <returns></returns> public static ScenePresence AddScenePresence(Scene scene, AgentCircuitData agentData) { return AddScenePresence(scene, agentData, null); } /// <summary> /// Add a root agent. /// </summary> /// <remarks> /// This function /// /// 1) Tells the scene that an agent is coming. Normally, the login service (local if standalone, from the /// userserver if grid) would give initial login data back to the client and separately tell the scene that the /// agent was coming. /// /// 2) Connects the agent with the scene /// /// This function performs actions equivalent with notifying the scene that an agent is /// coming and then actually connecting the agent to the scene. The one step missed out is the very first /// </remarks> /// <param name="scene"></param> /// <param name="agentData"></param> /// <param name="sceneManager"></param> /// <returns></returns> public static ScenePresence AddScenePresence(Scene scene, AgentCircuitData agentData, SceneManager sceneManager) { // We emulate the proper login sequence here by doing things in four stages // Stage 0: login // We need to punch through to the underlying service because scene will not, correctly, let us call it // through it's reference to the LPSC LocalPresenceServicesConnector lpsc = (LocalPresenceServicesConnector)scene.PresenceService; lpsc.m_PresenceService.LoginAgent(agentData.AgentID.ToString(), agentData.SessionID, agentData.SecureSessionID); // Stages 1 & 2 ScenePresence sp = IntroduceClientToScene(scene, sceneManager, agentData, TeleportFlags.ViaLogin); // Stage 3: Complete the entrance into the region. This converts the child agent into a root agent. sp.CompleteMovement(sp.ControllingClient, true); return sp; } /// <summary> /// Introduce an agent into the scene by adding a new client. /// </summary> /// <returns>The scene presence added</returns> /// <param name='sceneManager'> /// Scene manager. Can be null if there is only one region in the test or multiple regions that are not /// neighbours and where no teleporting takes place. 
/// </param> /// <param name='scene'></param> /// <param name='sceneManager></param> /// <param name='agentData'></param> /// <param name='tf'></param> private static ScenePresence IntroduceClientToScene( Scene scene, SceneManager sceneManager, AgentCircuitData agentData, TeleportFlags tf) { string reason; // Stage 1: tell the scene to expect a new user connection if (!scene.NewUserConnection(agentData, (uint)tf, out reason)) Console.WriteLine("NewUserConnection failed: " + reason); // Stage 2: add the new client as a child agent to the scene TestClient client = new TestClient(agentData, scene, sceneManager); scene.AddNewClient(client, PresenceType.User); return scene.GetScenePresence(agentData.AgentID); } public static ScenePresence AddChildScenePresence(Scene scene, UUID agentId) { AgentCircuitData acd = GenerateAgentData(agentId); acd.child = true; // XXX: ViaLogin may not be correct for child agents return IntroduceClientToScene(scene, null, acd, TeleportFlags.ViaLogin); } /// <summary> /// Add a test object /// </summary> /// <param name="scene"></param> /// <returns></returns> public static SceneObjectGroup AddSceneObject(Scene scene) { return AddSceneObject(scene, "Test Object", UUID.Zero); } /// <summary> /// Add a test object /// </summary> /// <param name="scene"></param> /// <param name="name"></param> /// <param name="ownerId"></param> /// <returns></returns> public static SceneObjectGroup AddSceneObject(Scene scene, string name, UUID ownerId) { SceneObjectGroup so = new SceneObjectGroup(CreateSceneObjectPart(name, UUID.Random(), ownerId)); //part.UpdatePrimFlags(false, false, true); //part.ObjectFlags |= (uint)PrimFlags.Phantom; scene.AddNewSceneObject(so, false); return so; } /// <summary> /// Create a scene object part. /// </summary> /// <param name="name"></param> /// <param name="id"></param> /// <param name="ownerId"></param> /// <returns></returns> public static SceneObjectPart CreateSceneObjectPart(string name, UUID id, UUID ownerId) { return new SceneObjectPart( ownerId, PrimitiveBaseShape.Default, Vector3.Zero, Quaternion.Identity, Vector3.Zero) { Name = name, UUID = id, Scale = new Vector3(1, 1, 1) }; } /// <summary> /// Create a scene object but do not add it to the scene. /// </summary> /// <remarks> /// UUID always starts at 00000000-0000-0000-0000-000000000001. For some purposes, (e.g. serializing direct /// to another object's inventory) we do not need a scene unique ID. So it would be better to add the /// UUID when we actually add an object to a scene rather than on creation. /// </remarks> /// <param name="parts">The number of parts that should be in the scene object</param> /// <param name="ownerId"></param> /// <returns></returns> public static SceneObjectGroup CreateSceneObject(int parts, UUID ownerId) { return CreateSceneObject(parts, ownerId, 0x1); } /// <summary> /// Create a scene object but do not add it to the scene. /// </summary> /// <param name="parts">The number of parts that should be in the scene object</param> /// <param name="ownerId"></param> /// <param name="uuidTail"> /// The hexadecimal last part of the UUID for parts created. A UUID of the form "00000000-0000-0000-0000-{0:XD12}" /// will be given to the root part, and incremented for each part thereafter. /// </param> /// <returns></returns> public static SceneObjectGroup CreateSceneObject(int parts, UUID ownerId, int uuidTail) { return CreateSceneObject(parts, ownerId, "", uuidTail); } /// <summary> /// Create a scene object but do not add it to the scene. 
/// </summary> /// <param name="parts"> /// The number of parts that should be in the scene object /// </param> /// <param name="ownerId"></param> /// <param name="partNamePrefix"> /// The prefix to be given to part names. This will be suffixed with "Part<part no>" /// (e.g. mynamePart1 for the root part) /// </param> /// <param name="uuidTail"> /// The hexadecimal last part of the UUID for parts created. A UUID of the form "00000000-0000-0000-0000-{0:XD12}" /// will be given to the root part, and incremented for each part thereafter. /// </param> /// <returns></returns> public static SceneObjectGroup CreateSceneObject(int parts, UUID ownerId, string partNamePrefix, int uuidTail) { string rawSogId = string.Format("00000000-0000-0000-0000-{0:X12}", uuidTail); SceneObjectGroup sog = new SceneObjectGroup( CreateSceneObjectPart(string.Format("{0}Part1", partNamePrefix), new UUID(rawSogId), ownerId)); if (parts > 1) for (int i = 2; i <= parts; i++) sog.AddPart( CreateSceneObjectPart( string.Format("{0}Part{1}", partNamePrefix, i), new UUID(string.Format("00000000-0000-0000-0000-{0:X12}", uuidTail + i - 1)), ownerId)); return sog; } } }
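// Hedged usage sketch (not part of the OpenSim test helpers): the minimal flow a region unit test would
// follow with the SceneHelpers members defined above - build a test scene, add a root agent, add a prim.
// Only members declared above are called; the wrapper class itself and the namespace of TestScene
// (assumed to be OpenSim.Tests.Common.Mock, matching the imports of the file above) are assumptions.
namespace OpenSim.Tests.Common
{
    using OpenMetaverse;
    using OpenSim.Region.Framework.Scenes;
    using OpenSim.Tests.Common.Mock;

    public static class SceneHelpersUsageSketch
    {
        public static void BuildMinimalScene()
        {
            // Constructing the helpers also starts the local asset/auth/inventory/grid/user/presence services.
            SceneHelpers sceneHelpers = new SceneHelpers();

            // Set up the default test region ("Unit test region" at 1000,1000).
            TestScene scene = sceneHelpers.SetupScene();

            // Add a root agent whose connection details (apart from the id) do not matter for the test.
            UUID agentId = UUID.Random();
            ScenePresence sp = SceneHelpers.AddScenePresence(scene, agentId);

            // Add a simple scene object owned by that agent.
            SceneObjectGroup so = SceneHelpers.AddSceneObject(scene, "Test Object", agentId);
        }
    }
}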
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the workspaces-2015-04-08.normal.json service model. */ using System; using System.Collections.Generic; using Amazon.WorkSpaces.Model; using Amazon.WorkSpaces.Model.Internal.MarshallTransformations; using Amazon.Runtime; using Amazon.Runtime.Internal; using Amazon.Runtime.Internal.Auth; using Amazon.Runtime.Internal.Transform; namespace Amazon.WorkSpaces { /// <summary> /// Implementation for accessing WorkSpaces /// /// Amazon WorkSpaces Service /// <para> /// This is the <i>Amazon WorkSpaces API Reference</i>. This guide provides detailed information /// about Amazon WorkSpaces operations, data types, parameters, and errors. /// </para> /// </summary> public partial class AmazonWorkSpacesClient : AmazonServiceClient, IAmazonWorkSpaces { #region Constructors /// <summary> /// Constructs AmazonWorkSpacesClient with the credentials loaded from the application's /// default configuration, and if unsuccessful from the Instance Profile service on an EC2 instance. /// /// Example App.config with credentials set. /// <code> /// &lt;?xml version="1.0" encoding="utf-8" ?&gt; /// &lt;configuration&gt; /// &lt;appSettings&gt; /// &lt;add key="AWSProfileName" value="AWS Default"/&gt; /// &lt;/appSettings&gt; /// &lt;/configuration&gt; /// </code> /// /// </summary> public AmazonWorkSpacesClient() : base(FallbackCredentialsFactory.GetCredentials(), new AmazonWorkSpacesConfig()) { } /// <summary> /// Constructs AmazonWorkSpacesClient with the credentials loaded from the application's /// default configuration, and if unsuccessful from the Instance Profile service on an EC2 instance. /// /// Example App.config with credentials set. /// <code> /// &lt;?xml version="1.0" encoding="utf-8" ?&gt; /// &lt;configuration&gt; /// &lt;appSettings&gt; /// &lt;add key="AWSProfileName" value="AWS Default"/&gt; /// &lt;/appSettings&gt; /// &lt;/configuration&gt; /// </code> /// /// </summary> /// <param name="region">The region to connect.</param> public AmazonWorkSpacesClient(RegionEndpoint region) : base(FallbackCredentialsFactory.GetCredentials(), new AmazonWorkSpacesConfig{RegionEndpoint = region}) { } /// <summary> /// Constructs AmazonWorkSpacesClient with the credentials loaded from the application's /// default configuration, and if unsuccessful from the Instance Profile service on an EC2 instance. /// /// Example App.config with credentials set. 
/// <code> /// &lt;?xml version="1.0" encoding="utf-8" ?&gt; /// &lt;configuration&gt; /// &lt;appSettings&gt; /// &lt;add key="AWSProfileName" value="AWS Default"/&gt; /// &lt;/appSettings&gt; /// &lt;/configuration&gt; /// </code> /// /// </summary> /// <param name="config">The AmazonWorkSpacesClient Configuration Object</param> public AmazonWorkSpacesClient(AmazonWorkSpacesConfig config) : base(FallbackCredentialsFactory.GetCredentials(), config) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Credentials /// </summary> /// <param name="credentials">AWS Credentials</param> public AmazonWorkSpacesClient(AWSCredentials credentials) : this(credentials, new AmazonWorkSpacesConfig()) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Credentials /// </summary> /// <param name="credentials">AWS Credentials</param> /// <param name="region">The region to connect.</param> public AmazonWorkSpacesClient(AWSCredentials credentials, RegionEndpoint region) : this(credentials, new AmazonWorkSpacesConfig{RegionEndpoint = region}) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Credentials and an /// AmazonWorkSpacesClient Configuration object. /// </summary> /// <param name="credentials">AWS Credentials</param> /// <param name="clientConfig">The AmazonWorkSpacesClient Configuration Object</param> public AmazonWorkSpacesClient(AWSCredentials credentials, AmazonWorkSpacesConfig clientConfig) : base(credentials, clientConfig) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID and AWS Secret Key /// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey) : this(awsAccessKeyId, awsSecretAccessKey, new AmazonWorkSpacesConfig()) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID and AWS Secret Key /// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> /// <param name="region">The region to connect.</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey, RegionEndpoint region) : this(awsAccessKeyId, awsSecretAccessKey, new AmazonWorkSpacesConfig() {RegionEndpoint=region}) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID, AWS Secret Key and an /// AmazonWorkSpacesClient Configuration object. 
/// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> /// <param name="clientConfig">The AmazonWorkSpacesClient Configuration Object</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey, AmazonWorkSpacesConfig clientConfig) : base(awsAccessKeyId, awsSecretAccessKey, clientConfig) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID and AWS Secret Key /// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> /// <param name="awsSessionToken">AWS Session Token</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey, string awsSessionToken) : this(awsAccessKeyId, awsSecretAccessKey, awsSessionToken, new AmazonWorkSpacesConfig()) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID and AWS Secret Key /// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> /// <param name="awsSessionToken">AWS Session Token</param> /// <param name="region">The region to connect.</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey, string awsSessionToken, RegionEndpoint region) : this(awsAccessKeyId, awsSecretAccessKey, awsSessionToken, new AmazonWorkSpacesConfig{RegionEndpoint = region}) { } /// <summary> /// Constructs AmazonWorkSpacesClient with AWS Access Key ID, AWS Secret Key and an /// AmazonWorkSpacesClient Configuration object. /// </summary> /// <param name="awsAccessKeyId">AWS Access Key ID</param> /// <param name="awsSecretAccessKey">AWS Secret Access Key</param> /// <param name="awsSessionToken">AWS Session Token</param> /// <param name="clientConfig">The AmazonWorkSpacesClient Configuration Object</param> public AmazonWorkSpacesClient(string awsAccessKeyId, string awsSecretAccessKey, string awsSessionToken, AmazonWorkSpacesConfig clientConfig) : base(awsAccessKeyId, awsSecretAccessKey, awsSessionToken, clientConfig) { } #endregion #region Overrides /// <summary> /// Creates the signer for the service. /// </summary> protected override AbstractAWSSigner CreateSigner() { return new AWS4Signer(); } #endregion #region Dispose /// <summary> /// Disposes the service client. /// </summary> protected override void Dispose(bool disposing) { base.Dispose(disposing); } #endregion #region CreateWorkspaces /// <summary> /// Creates one or more WorkSpaces. /// /// <note> /// <para> /// This operation is asynchronous and returns before the WorkSpaces are created. /// </para> /// </note> /// </summary> /// <param name="request">Container for the necessary parameters to execute the CreateWorkspaces service method.</param> /// /// <returns>The response from the CreateWorkspaces service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.ResourceLimitExceededException"> /// Your resource limits have been exceeded. /// </exception> public CreateWorkspacesResponse CreateWorkspaces(CreateWorkspacesRequest request) { var marshaller = new CreateWorkspacesRequestMarshaller(); var unmarshaller = CreateWorkspacesResponseUnmarshaller.Instance; return Invoke<CreateWorkspacesRequest,CreateWorkspacesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the CreateWorkspaces operation. 
/// </summary> /// /// <param name="request">Container for the necessary parameters to execute the CreateWorkspaces operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndCreateWorkspaces /// operation.</returns> public IAsyncResult BeginCreateWorkspaces(CreateWorkspacesRequest request, AsyncCallback callback, object state) { var marshaller = new CreateWorkspacesRequestMarshaller(); var unmarshaller = CreateWorkspacesResponseUnmarshaller.Instance; return BeginInvoke<CreateWorkspacesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the CreateWorkspaces operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginCreateWorkspaces.</param> /// /// <returns>Returns a CreateWorkspacesResult from WorkSpaces.</returns> public CreateWorkspacesResponse EndCreateWorkspaces(IAsyncResult asyncResult) { return EndInvoke<CreateWorkspacesResponse>(asyncResult); } #endregion #region DescribeWorkspaceBundles /// <summary> /// Obtains information about the WorkSpace bundles that are available to your account /// in the specified region. /// /// /// <para> /// You can filter the results with either the <code>BundleIds</code> parameter, or the /// <code>Owner</code> parameter, but not both. /// </para> /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. /// </para> /// </summary> /// /// <returns>The response from the DescribeWorkspaceBundles service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. /// </exception> public DescribeWorkspaceBundlesResponse DescribeWorkspaceBundles() { var request = new DescribeWorkspaceBundlesRequest(); return DescribeWorkspaceBundles(request); } /// <summary> /// Obtains information about the WorkSpace bundles that are available to your account /// in the specified region. /// /// /// <para> /// You can filter the results with either the <code>BundleIds</code> parameter, or the /// <code>Owner</code> parameter, but not both. /// </para> /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. /// </para> /// </summary> /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaceBundles service method.</param> /// /// <returns>The response from the DescribeWorkspaceBundles service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. 
/// </exception> public DescribeWorkspaceBundlesResponse DescribeWorkspaceBundles(DescribeWorkspaceBundlesRequest request) { var marshaller = new DescribeWorkspaceBundlesRequestMarshaller(); var unmarshaller = DescribeWorkspaceBundlesResponseUnmarshaller.Instance; return Invoke<DescribeWorkspaceBundlesRequest,DescribeWorkspaceBundlesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the DescribeWorkspaceBundles operation. /// </summary> /// /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaceBundles operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndDescribeWorkspaceBundles /// operation.</returns> public IAsyncResult BeginDescribeWorkspaceBundles(DescribeWorkspaceBundlesRequest request, AsyncCallback callback, object state) { var marshaller = new DescribeWorkspaceBundlesRequestMarshaller(); var unmarshaller = DescribeWorkspaceBundlesResponseUnmarshaller.Instance; return BeginInvoke<DescribeWorkspaceBundlesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the DescribeWorkspaceBundles operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginDescribeWorkspaceBundles.</param> /// /// <returns>Returns a DescribeWorkspaceBundlesResult from WorkSpaces.</returns> public DescribeWorkspaceBundlesResponse EndDescribeWorkspaceBundles(IAsyncResult asyncResult) { return EndInvoke<DescribeWorkspaceBundlesResponse>(asyncResult); } #endregion #region DescribeWorkspaceDirectories /// <summary> /// Retrieves information about the AWS Directory Service directories in the region that /// are registered with Amazon WorkSpaces and are available to your account. /// /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. /// </para> /// </summary> /// /// <returns>The response from the DescribeWorkspaceDirectories service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. /// </exception> public DescribeWorkspaceDirectoriesResponse DescribeWorkspaceDirectories() { var request = new DescribeWorkspaceDirectoriesRequest(); return DescribeWorkspaceDirectories(request); } /// <summary> /// Retrieves information about the AWS Directory Service directories in the region that /// are registered with Amazon WorkSpaces and are available to your account. /// /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. 
/// </para> /// </summary> /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaceDirectories service method.</param> /// /// <returns>The response from the DescribeWorkspaceDirectories service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. /// </exception> public DescribeWorkspaceDirectoriesResponse DescribeWorkspaceDirectories(DescribeWorkspaceDirectoriesRequest request) { var marshaller = new DescribeWorkspaceDirectoriesRequestMarshaller(); var unmarshaller = DescribeWorkspaceDirectoriesResponseUnmarshaller.Instance; return Invoke<DescribeWorkspaceDirectoriesRequest,DescribeWorkspaceDirectoriesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the DescribeWorkspaceDirectories operation. /// </summary> /// /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaceDirectories operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndDescribeWorkspaceDirectories /// operation.</returns> public IAsyncResult BeginDescribeWorkspaceDirectories(DescribeWorkspaceDirectoriesRequest request, AsyncCallback callback, object state) { var marshaller = new DescribeWorkspaceDirectoriesRequestMarshaller(); var unmarshaller = DescribeWorkspaceDirectoriesResponseUnmarshaller.Instance; return BeginInvoke<DescribeWorkspaceDirectoriesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the DescribeWorkspaceDirectories operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginDescribeWorkspaceDirectories.</param> /// /// <returns>Returns a DescribeWorkspaceDirectoriesResult from WorkSpaces.</returns> public DescribeWorkspaceDirectoriesResponse EndDescribeWorkspaceDirectories(IAsyncResult asyncResult) { return EndInvoke<DescribeWorkspaceDirectoriesResponse>(asyncResult); } #endregion #region DescribeWorkspaces /// <summary> /// Obtains information about the specified WorkSpaces. /// /// /// <para> /// Only one of the filter parameters, such as <code>BundleId</code>, <code>DirectoryId</code>, /// or <code>WorkspaceIds</code>, can be specified at a time. /// </para> /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. /// </para> /// </summary> /// /// <returns>The response from the DescribeWorkspaces service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. /// </exception> /// <exception cref="Amazon.WorkSpaces.Model.ResourceUnavailableException"> /// The specified resource is not available. 
/// </exception> public DescribeWorkspacesResponse DescribeWorkspaces() { var request = new DescribeWorkspacesRequest(); return DescribeWorkspaces(request); } /// <summary> /// Obtains information about the specified WorkSpaces. /// /// /// <para> /// Only one of the filter parameters, such as <code>BundleId</code>, <code>DirectoryId</code>, /// or <code>WorkspaceIds</code>, can be specified at a time. /// </para> /// /// <para> /// This operation supports pagination with the use of the <code>NextToken</code> request /// and response parameters. If more results are available, the <code>NextToken</code> /// response member contains a token that you pass in the next call to this operation /// to retrieve the next set of items. /// </para> /// </summary> /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaces service method.</param> /// /// <returns>The response from the DescribeWorkspaces service method, as returned by WorkSpaces.</returns> /// <exception cref="Amazon.WorkSpaces.Model.InvalidParameterValuesException"> /// One or more parameter values are not valid. /// </exception> /// <exception cref="Amazon.WorkSpaces.Model.ResourceUnavailableException"> /// The specified resource is not available. /// </exception> public DescribeWorkspacesResponse DescribeWorkspaces(DescribeWorkspacesRequest request) { var marshaller = new DescribeWorkspacesRequestMarshaller(); var unmarshaller = DescribeWorkspacesResponseUnmarshaller.Instance; return Invoke<DescribeWorkspacesRequest,DescribeWorkspacesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the DescribeWorkspaces operation. /// </summary> /// /// <param name="request">Container for the necessary parameters to execute the DescribeWorkspaces operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndDescribeWorkspaces /// operation.</returns> public IAsyncResult BeginDescribeWorkspaces(DescribeWorkspacesRequest request, AsyncCallback callback, object state) { var marshaller = new DescribeWorkspacesRequestMarshaller(); var unmarshaller = DescribeWorkspacesResponseUnmarshaller.Instance; return BeginInvoke<DescribeWorkspacesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the DescribeWorkspaces operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginDescribeWorkspaces.</param> /// /// <returns>Returns a DescribeWorkspacesResult from WorkSpaces.</returns> public DescribeWorkspacesResponse EndDescribeWorkspaces(IAsyncResult asyncResult) { return EndInvoke<DescribeWorkspacesResponse>(asyncResult); } #endregion #region RebootWorkspaces /// <summary> /// Reboots the specified WorkSpaces. /// /// /// <para> /// To be able to reboot a WorkSpace, the WorkSpace must have a <b>State</b> of <code>AVAILABLE</code>, /// <code>IMPAIRED</code>, or <code>INOPERABLE</code>. /// </para> /// <note> /// <para> /// This operation is asynchronous and will return before the WorkSpaces have rebooted. 
/// </para> /// </note> /// </summary> /// <param name="request">Container for the necessary parameters to execute the RebootWorkspaces service method.</param> /// /// <returns>The response from the RebootWorkspaces service method, as returned by WorkSpaces.</returns> public RebootWorkspacesResponse RebootWorkspaces(RebootWorkspacesRequest request) { var marshaller = new RebootWorkspacesRequestMarshaller(); var unmarshaller = RebootWorkspacesResponseUnmarshaller.Instance; return Invoke<RebootWorkspacesRequest,RebootWorkspacesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the RebootWorkspaces operation. /// </summary> /// /// <param name="request">Container for the necessary parameters to execute the RebootWorkspaces operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndRebootWorkspaces /// operation.</returns> public IAsyncResult BeginRebootWorkspaces(RebootWorkspacesRequest request, AsyncCallback callback, object state) { var marshaller = new RebootWorkspacesRequestMarshaller(); var unmarshaller = RebootWorkspacesResponseUnmarshaller.Instance; return BeginInvoke<RebootWorkspacesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the RebootWorkspaces operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginRebootWorkspaces.</param> /// /// <returns>Returns a RebootWorkspacesResult from WorkSpaces.</returns> public RebootWorkspacesResponse EndRebootWorkspaces(IAsyncResult asyncResult) { return EndInvoke<RebootWorkspacesResponse>(asyncResult); } #endregion #region RebuildWorkspaces /// <summary> /// Rebuilds the specified WorkSpaces. /// /// /// <para> /// Rebuilding a WorkSpace is a potentially destructive action that can result in the /// loss of data. Rebuilding a WorkSpace causes the following to occur: /// </para> /// <ul> <li>The system is restored to the image of the bundle that the WorkSpace is /// created from. Any applications that have been installed, or system settings that have /// been made since the WorkSpace was created will be lost.</li> <li>The data drive (D /// drive) is re-created from the last automatic snapshot taken of the data drive. The /// current contents of the data drive are overwritten. Automatic snapshots of the data /// drive are taken every 12 hours, so the snapshot can be as much as 12 hours old.</li> /// </ul> /// <para> /// To be able to rebuild a WorkSpace, the WorkSpace must have a <b>State</b> of <code>AVAILABLE</code> /// or <code>ERROR</code>. /// </para> /// <note> /// <para> /// This operation is asynchronous and will return before the WorkSpaces have been completely /// rebuilt. 
/// </para> /// </note> /// </summary> /// <param name="request">Container for the necessary parameters to execute the RebuildWorkspaces service method.</param> /// /// <returns>The response from the RebuildWorkspaces service method, as returned by WorkSpaces.</returns> public RebuildWorkspacesResponse RebuildWorkspaces(RebuildWorkspacesRequest request) { var marshaller = new RebuildWorkspacesRequestMarshaller(); var unmarshaller = RebuildWorkspacesResponseUnmarshaller.Instance; return Invoke<RebuildWorkspacesRequest,RebuildWorkspacesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the RebuildWorkspaces operation. /// </summary> /// /// <param name="request">Container for the necessary parameters to execute the RebuildWorkspaces operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndRebuildWorkspaces /// operation.</returns> public IAsyncResult BeginRebuildWorkspaces(RebuildWorkspacesRequest request, AsyncCallback callback, object state) { var marshaller = new RebuildWorkspacesRequestMarshaller(); var unmarshaller = RebuildWorkspacesResponseUnmarshaller.Instance; return BeginInvoke<RebuildWorkspacesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the RebuildWorkspaces operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginRebuildWorkspaces.</param> /// /// <returns>Returns a RebuildWorkspacesResult from WorkSpaces.</returns> public RebuildWorkspacesResponse EndRebuildWorkspaces(IAsyncResult asyncResult) { return EndInvoke<RebuildWorkspacesResponse>(asyncResult); } #endregion #region TerminateWorkspaces /// <summary> /// Terminates the specified WorkSpaces. /// /// /// <para> /// Terminating a WorkSpace is a permanent action and cannot be undone. The user's data /// is not maintained and will be destroyed. If you need to archive any user data, contact /// Amazon Web Services before terminating the WorkSpace. /// </para> /// /// <para> /// You can terminate a WorkSpace that is in any state except <code>SUSPENDED</code>. /// </para> /// <note> /// <para> /// This operation is asynchronous and will return before the WorkSpaces have been completely /// terminated. /// </para> /// </note> /// </summary> /// <param name="request">Container for the necessary parameters to execute the TerminateWorkspaces service method.</param> /// /// <returns>The response from the TerminateWorkspaces service method, as returned by WorkSpaces.</returns> public TerminateWorkspacesResponse TerminateWorkspaces(TerminateWorkspacesRequest request) { var marshaller = new TerminateWorkspacesRequestMarshaller(); var unmarshaller = TerminateWorkspacesResponseUnmarshaller.Instance; return Invoke<TerminateWorkspacesRequest,TerminateWorkspacesResponse>(request, marshaller, unmarshaller); } /// <summary> /// Initiates the asynchronous execution of the TerminateWorkspaces operation. 
/// </summary> /// /// <param name="request">Container for the necessary parameters to execute the TerminateWorkspaces operation on AmazonWorkSpacesClient.</param> /// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param> /// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback /// procedure using the AsyncState property.</param> /// /// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndTerminateWorkspaces /// operation.</returns> public IAsyncResult BeginTerminateWorkspaces(TerminateWorkspacesRequest request, AsyncCallback callback, object state) { var marshaller = new TerminateWorkspacesRequestMarshaller(); var unmarshaller = TerminateWorkspacesResponseUnmarshaller.Instance; return BeginInvoke<TerminateWorkspacesRequest>(request, marshaller, unmarshaller, callback, state); } /// <summary> /// Finishes the asynchronous execution of the TerminateWorkspaces operation. /// </summary> /// /// <param name="asyncResult">The IAsyncResult returned by the call to BeginTerminateWorkspaces.</param> /// /// <returns>Returns a TerminateWorkspacesResult from WorkSpaces.</returns> public TerminateWorkspacesResponse EndTerminateWorkspaces(IAsyncResult asyncResult) { return EndInvoke<TerminateWorkspacesResponse>(asyncResult); } #endregion } }
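// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated SDK source above).
// It shows the synchronous and Begin/End asynchronous calling patterns of
// AmazonWorkSpacesClient using only constructors and operations that appear
// in the client above; the chosen region, the Console output, and the
// AmazonWorkSpacesClientUsageSketch/Run names are assumptions made for the
// example, not SDK requirements.
// ---------------------------------------------------------------------------
using System;
using Amazon;
using Amazon.WorkSpaces;
using Amazon.WorkSpaces.Model;

static class AmazonWorkSpacesClientUsageSketch
{
    static void Run()
    {
        // The region is supplied through the config object; credentials are resolved
        // through the profile/fallback search described in the constructor docs above.
        var config = new AmazonWorkSpacesConfig { RegionEndpoint = RegionEndpoint.USWest2 };
        using (var client = new AmazonWorkSpacesClient(config))
        {
            // Synchronous pattern: the parameterless overloads build an empty request.
            DescribeWorkspaceBundlesResponse bundles = client.DescribeWorkspaceBundles();
            DescribeWorkspacesResponse workspaces = client.DescribeWorkspaces();
            Console.WriteLine("Described bundles: {0}; workspaces: {1}", bundles, workspaces);

            // Begin/End (APM) pattern: start the call with no callback, then block on End.
            IAsyncResult ar = client.BeginDescribeWorkspaces(new DescribeWorkspacesRequest(), null, null);
            DescribeWorkspacesResponse asyncResponse = client.EndDescribeWorkspaces(ar);
            Console.WriteLine("Async describe completed: {0}", asyncResponse);
        }
    }
}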
using System; using static OneOf.Functions; namespace OneOf { public struct OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> : IOneOf { readonly T0 _value0; readonly T1 _value1; readonly T2 _value2; readonly T3 _value3; readonly T4 _value4; readonly T5 _value5; readonly T6 _value6; readonly T7 _value7; readonly T8 _value8; readonly int _index; OneOf(int index, T0 value0 = default, T1 value1 = default, T2 value2 = default, T3 value3 = default, T4 value4 = default, T5 value5 = default, T6 value6 = default, T7 value7 = default, T8 value8 = default) { _index = index; _value0 = value0; _value1 = value1; _value2 = value2; _value3 = value3; _value4 = value4; _value5 = value5; _value6 = value6; _value7 = value7; _value8 = value8; } public object Value => _index switch { 0 => _value0, 1 => _value1, 2 => _value2, 3 => _value3, 4 => _value4, 5 => _value5, 6 => _value6, 7 => _value7, 8 => _value8, _ => throw new InvalidOperationException() }; public int Index => _index; public bool IsT0 => _index == 0; public bool IsT1 => _index == 1; public bool IsT2 => _index == 2; public bool IsT3 => _index == 3; public bool IsT4 => _index == 4; public bool IsT5 => _index == 5; public bool IsT6 => _index == 6; public bool IsT7 => _index == 7; public bool IsT8 => _index == 8; public T0 AsT0 => _index == 0 ? _value0 : throw new InvalidOperationException($"Cannot return as T0 as result is T{_index}"); public T1 AsT1 => _index == 1 ? _value1 : throw new InvalidOperationException($"Cannot return as T1 as result is T{_index}"); public T2 AsT2 => _index == 2 ? _value2 : throw new InvalidOperationException($"Cannot return as T2 as result is T{_index}"); public T3 AsT3 => _index == 3 ? _value3 : throw new InvalidOperationException($"Cannot return as T3 as result is T{_index}"); public T4 AsT4 => _index == 4 ? _value4 : throw new InvalidOperationException($"Cannot return as T4 as result is T{_index}"); public T5 AsT5 => _index == 5 ? _value5 : throw new InvalidOperationException($"Cannot return as T5 as result is T{_index}"); public T6 AsT6 => _index == 6 ? _value6 : throw new InvalidOperationException($"Cannot return as T6 as result is T{_index}"); public T7 AsT7 => _index == 7 ? _value7 : throw new InvalidOperationException($"Cannot return as T7 as result is T{_index}"); public T8 AsT8 => _index == 8 ? 
_value8 : throw new InvalidOperationException($"Cannot return as T8 as result is T{_index}"); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T0 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(0, value0: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T1 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(1, value1: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T2 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(2, value2: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T3 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(3, value3: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T4 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(4, value4: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T5 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(5, value5: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T6 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(6, value6: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T7 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(7, value7: t); public static implicit operator OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(T8 t) => new OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8>(8, value8: t); public void Switch(Action<T0> f0, Action<T1> f1, Action<T2> f2, Action<T3> f3, Action<T4> f4, Action<T5> f5, Action<T6> f6, Action<T7> f7, Action<T8> f8) { if (_index == 0 && f0 != null) { f0(_value0); return; } if (_index == 1 && f1 != null) { f1(_value1); return; } if (_index == 2 && f2 != null) { f2(_value2); return; } if (_index == 3 && f3 != null) { f3(_value3); return; } if (_index == 4 && f4 != null) { f4(_value4); return; } if (_index == 5 && f5 != null) { f5(_value5); return; } if (_index == 6 && f6 != null) { f6(_value6); return; } if (_index == 7 && f7 != null) { f7(_value7); return; } if (_index == 8 && f8 != null) { f8(_value8); return; } throw new InvalidOperationException(); } public TResult Match<TResult>(Func<T0, TResult> f0, Func<T1, TResult> f1, Func<T2, TResult> f2, Func<T3, TResult> f3, Func<T4, TResult> f4, Func<T5, TResult> f5, Func<T6, TResult> f6, Func<T7, TResult> f7, Func<T8, TResult> f8) { if (_index == 0 && f0 != null) { return f0(_value0); } if (_index == 1 && f1 != null) { return f1(_value1); } if (_index == 2 && f2 != null) { return f2(_value2); } if (_index == 3 && f3 != null) { return f3(_value3); } if (_index == 4 && f4 != null) { return f4(_value4); } if (_index == 5 && f5 != null) { return f5(_value5); } if (_index == 6 && f6 != null) { return f6(_value6); } if (_index == 7 && f7 != null) { return f7(_value7); } if (_index == 8 && f8 != null) { return f8(_value8); } throw new InvalidOperationException(); } public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT0(T0 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT1(T1 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT2(T2 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT3(T3 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT4(T4 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT5(T5 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT6(T6 input) => input; public static OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> FromT7(T7 input) => input; public static OneOf<T0, T1, T2, 
T3, T4, T5, T6, T7, T8> FromT8(T8 input) => input; public OneOf<TResult, T1, T2, T3, T4, T5, T6, T7, T8> MapT0<TResult>(Func<T0, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => mapFunc(AsT0), 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, TResult, T2, T3, T4, T5, T6, T7, T8> MapT1<TResult>(Func<T1, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => mapFunc(AsT1), 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, TResult, T3, T4, T5, T6, T7, T8> MapT2<TResult>(Func<T2, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => mapFunc(AsT2), 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, TResult, T4, T5, T6, T7, T8> MapT3<TResult>(Func<T3, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => mapFunc(AsT3), 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, T3, TResult, T5, T6, T7, T8> MapT4<TResult>(Func<T4, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => mapFunc(AsT4), 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, T3, T4, TResult, T6, T7, T8> MapT5<TResult>(Func<T5, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => mapFunc(AsT5), 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, T3, T4, T5, TResult, T7, T8> MapT6<TResult>(Func<T6, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => mapFunc(AsT6), 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, T3, T4, T5, T6, TResult, T8> MapT7<TResult>(Func<T7, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => mapFunc(AsT7), 8 => AsT8, _ => throw new InvalidOperationException() }; } public OneOf<T0, T1, T2, T3, T4, T5, T6, T7, TResult> MapT8<TResult>(Func<T8, TResult> mapFunc) { if (mapFunc == null) { throw new ArgumentNullException(nameof(mapFunc)); } return _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => mapFunc(AsT8), _ => throw new InvalidOperationException() }; } public bool TryPickT0(out T0 value, out OneOf<T1, T2, T3, T4, T5, T6, T7, T8> remainder) { value = IsT0 ? 
AsT0 : default; remainder = _index switch { 0 => default, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT0; } public bool TryPickT1(out T1 value, out OneOf<T0, T2, T3, T4, T5, T6, T7, T8> remainder) { value = IsT1 ? AsT1 : default; remainder = _index switch { 0 => AsT0, 1 => default, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT1; } public bool TryPickT2(out T2 value, out OneOf<T0, T1, T3, T4, T5, T6, T7, T8> remainder) { value = IsT2 ? AsT2 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => default, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT2; } public bool TryPickT3(out T3 value, out OneOf<T0, T1, T2, T4, T5, T6, T7, T8> remainder) { value = IsT3 ? AsT3 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => default, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT3; } public bool TryPickT4(out T4 value, out OneOf<T0, T1, T2, T3, T5, T6, T7, T8> remainder) { value = IsT4 ? AsT4 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => default, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT4; } public bool TryPickT5(out T5 value, out OneOf<T0, T1, T2, T3, T4, T6, T7, T8> remainder) { value = IsT5 ? AsT5 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => default, 6 => AsT6, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT5; } public bool TryPickT6(out T6 value, out OneOf<T0, T1, T2, T3, T4, T5, T7, T8> remainder) { value = IsT6 ? AsT6 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => default, 7 => AsT7, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT6; } public bool TryPickT7(out T7 value, out OneOf<T0, T1, T2, T3, T4, T5, T6, T8> remainder) { value = IsT7 ? AsT7 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => default, 8 => AsT8, _ => throw new InvalidOperationException() }; return this.IsT7; } public bool TryPickT8(out T8 value, out OneOf<T0, T1, T2, T3, T4, T5, T6, T7> remainder) { value = IsT8 ? 
AsT8 : default; remainder = _index switch { 0 => AsT0, 1 => AsT1, 2 => AsT2, 3 => AsT3, 4 => AsT4, 5 => AsT5, 6 => AsT6, 7 => AsT7, 8 => default, _ => throw new InvalidOperationException() }; return this.IsT8; } bool Equals(OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> other) => _index == other._index && _index switch { 0 => Equals(_value0, other._value0), 1 => Equals(_value1, other._value1), 2 => Equals(_value2, other._value2), 3 => Equals(_value3, other._value3), 4 => Equals(_value4, other._value4), 5 => Equals(_value5, other._value5), 6 => Equals(_value6, other._value6), 7 => Equals(_value7, other._value7), 8 => Equals(_value8, other._value8), _ => false }; public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) { return false; } return obj is OneOf<T0, T1, T2, T3, T4, T5, T6, T7, T8> o && Equals(o); } public override string ToString() => _index switch { 0 => FormatValue(_value0), 1 => FormatValue(_value1), 2 => FormatValue(_value2), 3 => FormatValue(_value3), 4 => FormatValue(_value4), 5 => FormatValue(_value5), 6 => FormatValue(_value6), 7 => FormatValue(_value7), 8 => FormatValue(_value8), _ => throw new InvalidOperationException("Unexpected index, which indicates a problem in the OneOf codegen.") }; public override int GetHashCode() { unchecked { int hashCode = _index switch { 0 => _value0?.GetHashCode(), 1 => _value1?.GetHashCode(), 2 => _value2?.GetHashCode(), 3 => _value3?.GetHashCode(), 4 => _value4?.GetHashCode(), 5 => _value5?.GetHashCode(), 6 => _value6?.GetHashCode(), 7 => _value7?.GetHashCode(), 8 => _value8?.GetHashCode(), _ => 0 } ?? 0; return (hashCode*397) ^ _index; } } } }
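// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated OneOf source above).
// It shows how the nine-case OneOf struct is typically consumed via implicit
// conversion, Match, and TryPickT0; the concrete type arguments and the
// Nine/Describe/Demo names are assumptions made for the example.
// ---------------------------------------------------------------------------
using System;
using Nine = OneOf.OneOf<int, string, double, bool, char, long, float, decimal, byte>;

static class OneOfUsageSketch
{
    // Exhaustive handling: Match requires one delegate per case and returns a result.
    static string Describe(Nine value) =>
        value.Match(
            i => "int " + i,
            s => "string " + s,
            d => "double " + d,
            b => "bool " + b,
            c => "char " + c,
            l => "long " + l,
            f => "float " + f,
            m => "decimal " + m,
            y => "byte " + y);

    static void Demo()
    {
        Nine v = 42;                      // implicit conversion selects the T0 (int) case, so Index == 0
        Console.WriteLine(Describe(v));   // prints "int 42"

        // TryPickT0 splits the value into the T0 case and an eight-case remainder.
        if (v.TryPickT0(out int picked, out var remainder))
        {
            Console.WriteLine(picked);    // 42; 'remainder' is default here because IsT0 is true
        }
    }
}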
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== /*============================================================ ** ** Class: SortedList ** ** Purpose: A generic sorted list. ** ** Date: January 28, 2003 ** ===========================================================*/ namespace System.Collections.Generic { using System; using System.Security.Permissions; using System.Diagnostics; // The SortedList class implements a generic sorted list of keys // and values. Entries in a sorted list are sorted by their keys and // are accessible both by key and by index. The keys of a sorted list // can be ordered either according to a specific IComparer // implementation given when the sorted list is instantiated, or // according to the IComparable implementation provided by the keys // themselves. In either case, a sorted list does not allow entries // with duplicate or null keys. // // A sorted list internally maintains two arrays that store the keys and // values of the entries. The capacity of a sorted list is the allocated // length of these internal arrays. As elements are added to a sorted list, the // capacity of the sorted list is automatically increased as required by // reallocating the internal arrays. The capacity is never automatically // decreased, but users can call TrimExcess or set // Capacity explicitly. // // The GetKeyList and GetValueList methods of a sorted list // provide access to the keys and values of the sorted list in the form of // List implementations. The List objects returned by these // methods are aliases for the underlying sorted list, so modifications // made to those lists are directly reflected in the sorted list, and vice // versa. // // The SortedList class provides a convenient way to create a sorted // copy of another dictionary, such as a Hashtable. For example: // // Hashtable h = new Hashtable(); // h.Add(...); // h.Add(...); // ... // SortedList s = new SortedList(h); // // The last line above creates a sorted list that contains a copy of the keys // and values stored in the hashtable. In this particular example, the keys // will be ordered according to the IComparable interface, which they // all must implement. To impose a different ordering, SortedList also // has a constructor that allows a specific IComparer implementation to // be specified. // [DebuggerTypeProxy(typeof(System_DictionaryDebugView<,>))] [DebuggerDisplay("Count = {Count}")] #if !FEATURE_NETCORE [Serializable()] #endif [System.Runtime.InteropServices.ComVisible(false)] public class SortedList<TKey, TValue> : IDictionary<TKey, TValue>, System.Collections.IDictionary, IReadOnlyDictionary<TKey, TValue> { private TKey[] keys; private TValue[] values; private int _size; private int version; private IComparer<TKey> comparer; private KeyList keyList; private ValueList valueList; #if !FEATURE_NETCORE [NonSerialized] #endif private Object _syncRoot; static TKey[] emptyKeys = new TKey[0]; static TValue[] emptyValues = new TValue[0]; private const int _defaultCapacity = 4; // Constructs a new sorted list. The sorted list is initially empty and has // a capacity of zero. Upon adding the first element to the sorted list the // capacity is increased to _defaultCapacity, and then increased in multiples of two as // required. The elements of the sorted list are ordered according to the // IComparable interface, which must be implemented by the keys of // all entries added to the sorted list.
public SortedList() { keys = emptyKeys; values = emptyValues; _size = 0; comparer = Comparer<TKey>.Default; } // Constructs a new sorted list. The sorted list is initially empty and has // a capacity of zero. Upon adding the first element to the sorted list the // capacity is increased to _defaultCapacity, and then increased in multiples of two as // required. The elements of the sorted list are ordered according to the // IComparable interface, which must be implemented by the keys of // all entries added to the sorted list. // public SortedList(int capacity) { if (capacity < 0) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity, ExceptionResource.ArgumentOutOfRange_NeedNonNegNumRequired); keys = new TKey[capacity]; values = new TValue[capacity]; comparer = Comparer<TKey>.Default; } // Constructs a new sorted list with a given IComparer // implementation. The sorted list is initially empty and has a capacity of // zero. Upon adding the first element to the sorted list the capacity is // increased to _defaultCapacity, and then increased in multiples of two as required. The // elements of the sorted list are ordered according to the given // IComparer implementation. If comparer is null, the // elements are compared to each other using the IComparable // interface, which in that case must be implemented by the keys of all // entries added to the sorted list. // public SortedList(IComparer<TKey> comparer) : this() { if (comparer != null) { this.comparer = comparer; } } // Constructs a new sorted list with a given IComparer // implementation and a given initial capacity. The sorted list is // initially empty, but will have room for the given number of elements // before any reallocations are required. The elements of the sorted list // are ordered according to the given IComparer implementation. If // comparer is null, the elements are compared to each other using // the IComparable interface, which in that case must be implemented // by the keys of all entries added to the sorted list. // public SortedList(int capacity, IComparer<TKey> comparer) : this(comparer) { Capacity = capacity; } // Constructs a new sorted list containing a copy of the entries in the // given dictionary. The elements of the sorted list are ordered according // to the IComparable interface, which must be implemented by the // keys of all entries in the given dictionary as well as keys // subsequently added to the sorted list. // public SortedList(IDictionary<TKey, TValue> dictionary) : this(dictionary, null) { } // Constructs a new sorted list containing a copy of the entries in the // given dictionary. The elements of the sorted list are ordered according // to the given IComparer implementation. If comparer is // null, the elements are compared to each other using the // IComparable interface, which in that case must be implemented // by the keys of all entries in the given dictionary as well as keys // subsequently added to the sorted list. // public SortedList(IDictionary<TKey, TValue> dictionary, IComparer<TKey> comparer) : this((dictionary != null ? dictionary.Count : 0), comparer) { if (dictionary==null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.dictionary); dictionary.Keys.CopyTo(keys, 0); dictionary.Values.CopyTo(values, 0); Array.Sort<TKey, TValue>(keys, values, comparer); _size = dictionary.Count; } // Adds an entry with the given key and value to this sorted list. An // ArgumentException is thrown if the key is already present in the sorted list.
// public void Add(TKey key, TValue value) { if (key == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); int i = Array.BinarySearch<TKey>(keys, 0, _size, key, comparer); if (i >= 0) ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_AddingDuplicate); Insert(~i, key, value); } void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> keyValuePair) { Add(keyValuePair.Key, keyValuePair.Value); } bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> keyValuePair) { int index = IndexOfKey(keyValuePair.Key); if( index >= 0 && EqualityComparer<TValue>.Default.Equals(values[index], keyValuePair.Value)) { return true; } return false; } bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> keyValuePair) { int index = IndexOfKey(keyValuePair.Key); if( index >= 0 && EqualityComparer<TValue>.Default.Equals(values[index], keyValuePair.Value)) { RemoveAt(index); return true; } return false; } // Returns the capacity of this sorted list. The capacity of a sorted list // represents the allocated length of the internal arrays used to store the // keys and values of the list, and thus also indicates the maximum number // of entries the list can contain before a reallocation of the internal // arrays is required. // public int Capacity { get { return keys.Length; } set { if (value != keys.Length) { if (value < _size) { ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.value, ExceptionResource.ArgumentOutOfRange_SmallCapacity); } if (value > 0) { TKey[] newKeys = new TKey[value]; TValue[] newValues = new TValue[value]; if (_size > 0) { Array.Copy(keys, 0, newKeys, 0, _size); Array.Copy(values, 0, newValues, 0, _size); } keys = newKeys; values = newValues; } else { keys = emptyKeys; values = emptyValues; } } } } public IComparer<TKey> Comparer { get { return comparer; } } void System.Collections.IDictionary.Add(Object key, Object value) { if (key == null) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); } ThrowHelper.IfNullAndNullsAreIllegalThenThrow<TValue>(value, ExceptionArgument.value); try { TKey tempKey = (TKey)key; try { Add(tempKey, (TValue)value); } catch (InvalidCastException) { ThrowHelper.ThrowWrongValueTypeArgumentException(value, typeof(TValue)); } } catch (InvalidCastException) { ThrowHelper.ThrowWrongKeyTypeArgumentException(key, typeof(TKey)); } } // Returns the number of entries in this sorted list. // public int Count { get { return _size; } } // Returns a collection representing the keys of this sorted list. This // method returns the same object as GetKeyList, but typed as an // ICollection instead of an IList. // public IList<TKey> Keys { get { return GetKeyListHelper(); } } ICollection<TKey> IDictionary<TKey,TValue>.Keys { get { return GetKeyListHelper(); } } System.Collections.ICollection System.Collections.IDictionary.Keys { get { return GetKeyListHelper(); } } IEnumerable<TKey> IReadOnlyDictionary<TKey,TValue>.Keys { get { return GetKeyListHelper(); } } // Returns a collection representing the values of this sorted list. This // method returns the same object as GetValueList, but typed as an // ICollection instead of an IList. 
// public IList<TValue> Values { get { return GetValueListHelper(); } } ICollection<TValue> IDictionary<TKey,TValue>.Values { get { return GetValueListHelper(); } } System.Collections.ICollection System.Collections.IDictionary.Values { get { return GetValueListHelper(); } } IEnumerable<TValue> IReadOnlyDictionary<TKey,TValue>.Values { get { return GetValueListHelper(); } } private KeyList GetKeyListHelper() { if (keyList == null) keyList = new KeyList(this); return keyList; } private ValueList GetValueListHelper() { if (valueList == null) valueList = new ValueList(this); return valueList; } bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly { get { return false; } } bool System.Collections.IDictionary.IsReadOnly { get { return false; } } bool System.Collections.IDictionary.IsFixedSize { get { return false; } } bool System.Collections.ICollection.IsSynchronized { get { return false; } } // Synchronization root for this object. Object System.Collections.ICollection.SyncRoot { get { if( _syncRoot == null) { System.Threading.Interlocked.CompareExchange(ref _syncRoot, new Object(), null); } return _syncRoot; } } // Removes all entries from this sorted list. public void Clear() { // clear does not change the capacity version++; // Don't need to doc this but we clear the elements so that the gc can reclaim the references. Array.Clear(keys, 0, _size); Array.Clear(values, 0, _size); _size = 0; } bool System.Collections.IDictionary.Contains(Object key) { if( IsCompatibleKey(key)) { return ContainsKey((TKey) key); } return false; } // Checks if this sorted list contains an entry with the given key. // public bool ContainsKey(TKey key) { return IndexOfKey(key) >= 0; } // Checks if this sorted list contains an entry with the given value. The // values of the entries of the sorted list are compared to the given value // using the Object.Equals method. This method performs a linear // search and is substantially slower than the Contains // method. // public bool ContainsValue(TValue value) { return IndexOfValue(value) >= 0; } // Copies the values in this SortedList to an array. 
void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex) { if (array == null) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array); } if (arrayIndex < 0 || arrayIndex > array.Length) { ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.arrayIndex, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); } if (array.Length - arrayIndex < Count) { ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall); } for (int i = 0; i < Count; i++) { KeyValuePair<TKey, TValue> entry = new KeyValuePair<TKey, TValue>(keys[i],values[i]); array[arrayIndex + i] = entry; } } void System.Collections.ICollection.CopyTo(Array array, int arrayIndex) { if (array == null) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array); } if (array.Rank != 1) { ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported); } if( array.GetLowerBound(0) != 0 ) { ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_NonZeroLowerBound); } if (arrayIndex < 0 || arrayIndex > array.Length) { ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.arrayIndex, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum); } if (array.Length - arrayIndex < Count) { ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall); } KeyValuePair<TKey, TValue>[] keyValuePairArray = array as KeyValuePair<TKey, TValue>[]; if (keyValuePairArray != null) { for (int i = 0; i < Count; i++) { keyValuePairArray[i + arrayIndex] = new KeyValuePair<TKey, TValue>(keys[i],values[i]); } } else { object[] objects = array as object[]; if( objects == null) { ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType); } try { for (int i = 0; i < Count; i++) { objects[i + arrayIndex] = new KeyValuePair<TKey, TValue>(keys[i],values[i]); } } catch(ArrayTypeMismatchException) { ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType); } } } private const int MaxArrayLength = 0X7FEFFFFF; // Ensures that the capacity of this sorted list is at least the given // minimum value. If the current capacity of the list is less than // min, the capacity is increased to twice the current capacity or // to min, whichever is larger. private void EnsureCapacity(int min) { int newCapacity = keys.Length == 0? _defaultCapacity: keys.Length * 2; // Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow. // Note that this check works even when keys.Length * 2 overflowed thanks to the (uint) cast if ((uint)newCapacity > MaxArrayLength) newCapacity = MaxArrayLength; if (newCapacity < min) newCapacity = min; Capacity = newCapacity; } // Returns the value of the entry at the given index.
// private TValue GetByIndex(int index) { if (index < 0 || index >= _size) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index); return values[index]; } public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator() { return new Enumerator(this, Enumerator.KeyValuePair); } IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator() { return new Enumerator(this, Enumerator.KeyValuePair); } System.Collections.IDictionaryEnumerator System.Collections.IDictionary.GetEnumerator() { return new Enumerator(this, Enumerator.DictEntry); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return new Enumerator(this, Enumerator.KeyValuePair); } // Returns the key of the entry at the given index. // private TKey GetKey(int index) { if (index < 0 || index >= _size) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index); return keys[index]; } // Gets or sets the value associated with the given key. If an entry with the // given key is not found, the getter throws a KeyNotFoundException and the // setter adds a new entry. // public TValue this[TKey key] { get { int i = IndexOfKey(key); if (i >= 0) return values[i]; ThrowHelper.ThrowKeyNotFoundException(); return default(TValue); } set { if (((Object) key) == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); int i = Array.BinarySearch<TKey>(keys, 0, _size, key, comparer); if (i >= 0) { values[i] = value; version++; return; } Insert(~i, key, value); } } Object System.Collections.IDictionary.this[Object key] { get { if( IsCompatibleKey(key)) { int i = IndexOfKey((TKey)key); if (i >= 0) { return values[i]; } } return null; } set { if(!IsCompatibleKey(key)) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); } ThrowHelper.IfNullAndNullsAreIllegalThenThrow<TValue>(value, ExceptionArgument.value); try { TKey tempKey = (TKey)key; try { this[tempKey] = (TValue)value; } catch (InvalidCastException) { ThrowHelper.ThrowWrongValueTypeArgumentException(value, typeof(TValue)); } } catch (InvalidCastException) { ThrowHelper.ThrowWrongKeyTypeArgumentException(key, typeof(TKey)); } } } // Returns the index of the entry with a given key in this sorted list. The // key is located through a binary search, and thus the average execution // time of this method is proportional to Log2(size), where // size is the size of this sorted list. The returned value is -1 if // the given key does not occur in this sorted list. Null is an invalid // key value. // public int IndexOfKey(TKey key) { if (key == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); int ret = Array.BinarySearch<TKey>(keys, 0, _size, key, comparer); return ret >=0 ? ret : -1; } // Returns the index of the first occurrence of an entry with a given value // in this sorted list. The entry is located through a linear search, and // thus the average execution time of this method is proportional to the // size of this sorted list. The elements of the list are compared to the // given value using the Object.Equals method. // public int IndexOfValue(TValue value) { return Array.IndexOf(values, value, 0, _size); } // Inserts an entry with a given key and value at a given index.
private void Insert(int index, TKey key, TValue value) { if (_size == keys.Length) EnsureCapacity(_size + 1); if (index < _size) { Array.Copy(keys, index, keys, index + 1, _size - index); Array.Copy(values, index, values, index + 1, _size - index); } keys[index] = key; values[index] = value; _size++; version++; } public bool TryGetValue(TKey key, out TValue value) { int i = IndexOfKey(key); if (i >= 0) { value =values[i]; return true; } value = default(TValue); return false; } // Removes the entry at the given index. The size of the sorted list is // decreased by one. // public void RemoveAt(int index) { if (index < 0 || index >= _size) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index); _size--; if (index < _size) { Array.Copy(keys, index + 1, keys, index, _size - index); Array.Copy(values, index + 1, values, index, _size - index); } keys[_size] = default(TKey); values[_size] = default(TValue); version++; } // Removes an entry from this sorted list. If an entry with the specified // key exists in the sorted list, it is removed. An ArgumentException is // thrown if the key is null. // public bool Remove(TKey key) { int i = IndexOfKey(key); if (i >= 0) RemoveAt(i); return i >= 0; } void System.Collections.IDictionary.Remove(Object key) { if( IsCompatibleKey(key)) { Remove((TKey) key); } } // Sets the capacity of this sorted list to the size of the sorted list. // This method can be used to minimize a sorted list's memory overhead once // it is known that no new elements will be added to the sorted list. To // completely clear a sorted list and release all memory referenced by the // sorted list, execute the following statements: // // SortedList.Clear(); // SortedList.TrimExcess(); // public void TrimExcess() { int threshold = (int)(((double)keys.Length) * 0.9); if( _size < threshold ) { Capacity = _size; } } private static bool IsCompatibleKey(object key) { if( key == null) { ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); } return (key is TKey); } /// <include file='doc\SortedList.uex' path='docs/doc[@for="SortedListEnumerator"]/*' /> #if !FEATURE_NETCORE [Serializable()] #endif private struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>, System.Collections.IDictionaryEnumerator { private SortedList<TKey, TValue> _sortedList; private TKey key; private TValue value; private int index; private int version; private int getEnumeratorRetType; // What should Enumerator.Current return? 
internal const int KeyValuePair = 1; internal const int DictEntry = 2; internal Enumerator(SortedList<TKey, TValue> sortedList, int getEnumeratorRetType) { this._sortedList = sortedList; this.index = 0; version = _sortedList.version; this.getEnumeratorRetType = getEnumeratorRetType; key = default(TKey); value = default(TValue); } public void Dispose() { index = 0; key = default(TKey); value = default(TValue); } Object System.Collections.IDictionaryEnumerator.Key { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } return key; } } public bool MoveNext() { if (version != _sortedList.version) ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); if ( (uint)index < (uint)_sortedList.Count ) { key = _sortedList.keys[index]; value = _sortedList.values[index]; index++; return true; } index = _sortedList.Count + 1; key = default(TKey); value = default(TValue); return false; } DictionaryEntry System.Collections.IDictionaryEnumerator.Entry { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } return new DictionaryEntry(key, value); } } public KeyValuePair<TKey, TValue> Current { get { return new KeyValuePair<TKey, TValue>(key, value); } } Object System.Collections.IEnumerator.Current { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } if (getEnumeratorRetType == DictEntry) { return new System.Collections.DictionaryEntry(key, value); } else { return new KeyValuePair<TKey, TValue>(key, value); } } } Object System.Collections.IDictionaryEnumerator.Value { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } return value; } } void System.Collections.IEnumerator.Reset() { if (version != _sortedList.version) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } index = 0; key = default(TKey); value = default(TValue); } } #if !FEATURE_NETCORE [Serializable()] #endif private sealed class SortedListKeyEnumerator : IEnumerator<TKey>, System.Collections.IEnumerator { private SortedList<TKey, TValue> _sortedList; private int index; private int version; private TKey currentKey; internal SortedListKeyEnumerator(SortedList<TKey, TValue> sortedList) { _sortedList = sortedList; version = sortedList.version; } public void Dispose() { index = 0; currentKey = default(TKey); } public bool MoveNext() { if (version != _sortedList.version) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } if ( (uint)index < (uint)_sortedList.Count) { currentKey = _sortedList.keys[index]; index++; return true; } index = _sortedList.Count + 1; currentKey = default(TKey); return false; } public TKey Current { get { return currentKey; } } Object System.Collections.IEnumerator.Current { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } return currentKey; } } void System.Collections.IEnumerator.Reset() { if (version != _sortedList.version) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } index = 0; currentKey = 
default(TKey); } } #if !FEATURE_NETCORE [Serializable()] #endif private sealed class SortedListValueEnumerator : IEnumerator<TValue>, System.Collections.IEnumerator { private SortedList<TKey, TValue> _sortedList; private int index; private int version; private TValue currentValue; internal SortedListValueEnumerator(SortedList<TKey, TValue> sortedList) { _sortedList = sortedList; version = sortedList.version; } public void Dispose() { index = 0; currentValue = default(TValue); } public bool MoveNext() { if (version != _sortedList.version) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } if ( (uint)index < (uint)_sortedList.Count) { currentValue = _sortedList.values[index]; index++; return true; } index = _sortedList.Count + 1; currentValue = default(TValue); return false; } public TValue Current { get { return currentValue; } } Object System.Collections.IEnumerator.Current { get { if( index == 0 || (index == _sortedList.Count + 1) ) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen); } return currentValue; } } void System.Collections.IEnumerator.Reset() { if (version != _sortedList.version) { ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion); } index = 0; currentValue = default(TValue); } } [DebuggerTypeProxy(typeof(System_DictionaryKeyCollectionDebugView<,>))] [DebuggerDisplay("Count = {Count}")] #if !FEATURE_NETCORE [Serializable()] #endif private sealed class KeyList : IList<TKey>, System.Collections.ICollection { private SortedList<TKey, TValue> _dict; internal KeyList(SortedList<TKey, TValue> dictionary) { this._dict = dictionary; } public int Count { get { return _dict._size; } } public bool IsReadOnly { get { return true; } } bool System.Collections.ICollection.IsSynchronized { get { return false; } } Object System.Collections.ICollection.SyncRoot { get { return ((ICollection)_dict).SyncRoot; } } public void Add(TKey key) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public void Clear() { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public bool Contains(TKey key) { return _dict.ContainsKey(key); } public void CopyTo(TKey[] array, int arrayIndex) { // defer error checking to Array.Copy Array.Copy(_dict.keys, 0, array, arrayIndex, _dict.Count); } void System.Collections.ICollection.CopyTo(Array array, int arrayIndex) { if (array != null && array.Rank != 1) ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported); try { // defer error checking to Array.Copy Array.Copy(_dict.keys, 0, array, arrayIndex, _dict.Count); } catch(ArrayTypeMismatchException){ ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType); } } public void Insert(int index, TKey value) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public TKey this[int index] { get { return _dict.GetKey(index); } set { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_KeyCollectionSet); } } public IEnumerator<TKey> GetEnumerator() { return new SortedListKeyEnumerator(_dict); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return new SortedListKeyEnumerator(_dict); } public int IndexOf(TKey key) { if (((Object) key) == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key); int i = Array.BinarySearch<TKey>(_dict.keys, 0, 
_dict.Count, key, _dict.comparer); if (i >= 0) return i; return -1; } public bool Remove(TKey key) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); return false; } public void RemoveAt(int index) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } } [DebuggerTypeProxy(typeof(System_DictionaryValueCollectionDebugView<,>))] [DebuggerDisplay("Count = {Count}")] #if !FEATURE_NETCORE [Serializable()] #endif private sealed class ValueList : IList<TValue>, System.Collections.ICollection { private SortedList<TKey, TValue> _dict; internal ValueList(SortedList<TKey, TValue> dictionary) { this._dict = dictionary; } public int Count { get { return _dict._size; } } public bool IsReadOnly { get { return true; } } bool System.Collections.ICollection.IsSynchronized { get { return false; } } Object System.Collections.ICollection.SyncRoot { get { return ((ICollection)_dict).SyncRoot; } } public void Add(TValue key) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public void Clear() { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public bool Contains(TValue value) { return _dict.ContainsValue(value); } public void CopyTo(TValue[] array, int arrayIndex) { // defer error checking to Array.Copy Array.Copy(_dict.values, 0, array, arrayIndex, _dict.Count); } void System.Collections.ICollection.CopyTo(Array array, int arrayIndex) { if (array != null && array.Rank != 1) ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported); try { // defer error checking to Array.Copy Array.Copy(_dict.values, 0, array, arrayIndex, _dict.Count); } catch(ArrayTypeMismatchException){ ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType); } } public void Insert(int index, TValue value) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } public TValue this[int index] { get { return _dict.GetByIndex(index); } set { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } } public IEnumerator<TValue> GetEnumerator() { return new SortedListValueEnumerator(_dict); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return new SortedListValueEnumerator(_dict); } public int IndexOf(TValue value) { return Array.IndexOf(_dict.values, value, 0, _dict.Count); } public bool Remove(TValue value) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); return false; } public void RemoveAt(int index) { ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_SortedListNestedWrite); } } } }
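// Minimal usage sketch for the SortedList<TKey, TValue> members defined above
// (TryGetValue, Remove/RemoveAt, TrimExcess, and the read-only Keys/Values
// wrappers), exercised through the public surface of the type. This is an
// illustrative example, not part of the original source file.
using System;
using System.Collections.Generic;

static class SortedListUsageSketch
{
    static void Main()
    {
        SortedList<string, int> list = new SortedList<string, int>();
        list.Add("b", 2);
        list.Add("a", 1);
        list.Add("c", 3);

        // TryGetValue avoids the KeyNotFoundException thrown by the indexer.
        int value;
        if (list.TryGetValue("b", out value))
            Console.WriteLine(value);        // 2

        list.Remove("c");                    // remove by key
        list.RemoveAt(0);                    // remove by sorted index ("a")

        // Keys/Values are read-only views backed by the list's internal arrays.
        foreach (string key in list.Keys)
            Console.WriteLine(key);          // "b"

        // Release the unused tail of the internal arrays once the list is stable.
        list.TrimExcess();
    }
}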
using System; using System.Collections; using System.ComponentModel; using System.Drawing; using System.Data; using System.Windows.Forms; using System.Drawing.Drawing2D; namespace Jovian.UI.Controls { public class ForkLayerEditor : System.Windows.Forms.Control, ImageResolveTarget { //public ForkDOM.GuiSystem ParentSystem; public void NudgeSelection(int dx, int dy) { if (SelectionBuffer == null) return; foreach (LayerGuiControlWrapper cgcw in SelectionBuffer) { cgcw.dLeft += dx; cgcw.dRight += dx; cgcw.dTop += dy; cgcw.dBottom += dy; cgcw.Commit(1, false); inv(); } } public void ReSelect() { if (SelectionBuffer == null) { selRecv.OnSelect(fgm._MyLayer); } else { SendSelectionBuffer(); } } #region Memory Model private System.ComponentModel.Container components = null; LayerGuiModel fgm; private ILayerGuiSelectionReciever selRecv = null; private InterfaceStatus STATUS = new InterfaceStatus(); private ArrayList UberSelection = new ArrayList(); #endregion #region Action Menu at 11 private MenuItem CreateMI(Jovian.UI.Actions.IAction a, int mode) { MenuItem mi = new MenuItem(); mi.Text = a.Name(); mi.Tag = a; if (mode == 0) mi.Click += new EventHandler(mi_Click0); if (mode == 1) mi.Click += new EventHandler(mi_Click1); if (mode == 2) mi.Click += new EventHandler(mi_Click2); return mi; } void mi_Click0(object sender, EventArgs e) { MenuItem mi = sender as MenuItem; Jovian.UI.Actions.IAction a = mi.Tag as Jovian.UI.Actions.IAction; if (fgm._MyLayer != null) { fgm.StartMovementActivity(); a.ActLayer(fgm._MyLayer); fgm.StopMovementAndRecord(); } ReGenerate(); } void mi_Click1(object sender, EventArgs e) { MenuItem mi = sender as MenuItem; Jovian.UI.Actions.IAction a = mi.Tag as Jovian.UI.Actions.IAction; ArrayList CS = new ArrayList(); if (SelectionBuffer != null) { if (SelectionBuffer.Count > 0) { foreach (LayerGuiControlWrapper cgcw in SelectionBuffer) CS.Add(cgcw._Control); fgm.StartMovementActivity(); a.ActSet(fgm._MyLayer, CS); fgm.StopMovementAndRecord(); ReGenerate(); foreach (LayerGuiControlWrapper lgcw in fgm.Doc_Controls()) { if (CS.Contains(lgcw._Control)) { lgcw.Selected = true; SelectionBuffer.Add(lgcw); SendSelectionBuffer(); } } InvCalled = false; inv(); } } } void mi_Click2(object sender, EventArgs e) { MenuItem mi = sender as MenuItem; Jovian.UI.Actions.IAction a = mi.Tag as Jovian.UI.Actions.IAction; if (HoveringOver != null) { fgm.StartMovementActivity(); a.ActControl(fgm._MyLayer, HoveringOver._Control, mouse_x, mouse_y); fgm.StopMovementAndRecord(); } ReGenerate(); } public void FireActionMenu(int x, int y) { ForcePickHover = true; ContextMenu cm = new ContextMenu(); //InterfaceImplementors ArrayList Actions = ZenReflection.InterfaceImplementors("Jovian.UI.Actions.IAction"); Actions.Sort(new Jovian.UI.Actions.CompAction()); MenuItem Bucket_Layer; MenuItem Bucket_Set; MenuItem Bucket_Control; Bucket_Layer = new MenuItem(); Bucket_Layer.Text = "Layer Operations"; Bucket_Layer.Tag = "LAY"; cm.MenuItems.Add(Bucket_Layer); foreach (Jovian.UI.Actions.IAction Act in Actions) { if (Act.CanActOnLayer()) Bucket_Layer.MenuItems.Add(CreateMI(Act, 0)); } if (this.SelectionBuffer.Count > 0) { Bucket_Set = new MenuItem(); Bucket_Set.Text = "Set Operations (" + this.SelectionBuffer.Count + ")"; Bucket_Set.Tag = "SET"; cm.MenuItems.Add(Bucket_Set); foreach (Jovian.UI.Actions.IAction Act in Actions) { if (Act.CanActOnSet()) Bucket_Set.MenuItems.Add(CreateMI(Act, 1)); } } if (HoveringOver != null) { Bucket_Control = new MenuItem(); Bucket_Control.Text = "Control Operations"; Bucket_Control.Tag = 
"CTL"; cm.MenuItems.Add(Bucket_Control); foreach (Jovian.UI.Actions.IAction Act in Actions) { if (Act.CanActOnControl(HoveringOver._Control)) Bucket_Control.MenuItems.Add(CreateMI(Act, 2)); } ArrayList Editors = ZenReflection.InterfaceImplementors("Jovian.UI.ITextEdit"); MenuItem Bucket_Editors = new MenuItem(); Bucket_Editors.Text = "Text Editing"; int cnt = 0; foreach (ITextEdit ite in Editors) { if (ite.canedit(HoveringOver._Control)) { MenuItem miz = new MenuItem(); miz.Text = ite.name(); miz.Tag = ite.instance(HoveringOver._Control); miz.Click += new EventHandler(miz_Click); Bucket_Editors.MenuItems.Add(miz); cnt++; } } if (cnt > 0) { cm.MenuItems.Add(Bucket_Editors); } } cm.Show(this, new Point(x, y)); } ArrayList MakeSureKillOnExit = new ArrayList(); void miz_Click(object sender, EventArgs e) { MenuItem miz = sender as MenuItem; EditTextField etf = new EditTextField(miz.Tag as ITextEdit, MakeSureKillOnExit); etf.Show(ParentWindow.MdiParent); MakeSureKillOnExit.Add(etf); } public void save() { foreach (EditTextField etf in MakeSureKillOnExit) etf.save(); fgm.saveUNDO(); } public void Cleanup() { ArrayList L = new ArrayList(); L.AddRange(MakeSureKillOnExit); foreach (EditTextField etf in L) etf.Close(); } #endregion #region Drag and Drop bool SuperInvalidate = false; bool SuperAwesomeCommit = false; public void Add(Jovian.Data.Control C) { fgm.AddToModel(C); inv(); SuperInvalidate = true; SuperAwesomeCommit = true; } protected override void OnDragDrop(DragEventArgs drgevent) { object x = drgevent.Data.GetData(drgevent.Data.GetFormats()[0]); if (x is Jovian.Data.Control) { xmljr.XmlJrWriter writ = new xmljr.XmlJrWriter(null); (x as Jovian.Data.Control).WriteXML(writ); string xml = writ.GetXML(); Jovian.Data.Control c = Jovian.Data.Jovian_Data.BuildObjectTable(xmljr.XmlJrDom.Read(xml, null), null).LookUpObject(1) as Jovian.Data.Control; Rectangle R = this.RectangleToScreen(this.ClientRectangle); PointF P = Screen2Dom(drgevent.X - R.X, drgevent.Y - R.Y); c.X = (int)P.X; c.Y = (int)P.Y; Add(c); } /* // DRAGDROP if (x is Creators.CreationLink) { Creators.CreationLink cl = x as Creators.CreationLink; Rectangle R = this.RectangleToScreen(this.ClientRectangle); PointF P = Screen2Dom(drgevent.X - R.X, drgevent.Y - R.Y); fgm.ExecuteCreatorLink(cl, (int)P.X, (int)P.Y); inv(); SuperInvalidate = true; } */ //Creators.CreationLink cl = drgevent.Data as //string[] s = drgevent.Data.GetFormats(); //drgevent.AllowedEffect = DragDropEffects.Copy; //drgevent.Effect = DragDropEffects.All; // base.OnDragDrop(drgevent); } protected override void OnDragLeave(EventArgs e) { // base.OnDragLeave(e); } protected override void OnDragEnter(DragEventArgs drgevent) { //drgevent.AllowedEffect = DragDropEffects.Copy; drgevent.Effect = DragDropEffects.All; // base.OnDragEnter(drgevent); } protected override void OnDragOver(DragEventArgs drgevent) { //drgevent.AllowedEffect = DragDropEffects.Copy; drgevent.Effect = DragDropEffects.All; // base.OnDragOver(drgevent); } #endregion Form ParentWindow; #region Comms and Init public void SetReciever(ILayerGuiSelectionReciever sr) { selRecv = sr; sr.OnSelect(fgm._MyLayer); } private void SendSelectionBuffer() { if (selRecv == null) return; if (SelectionBuffer.Count == 0) { selRecv.OnSelect(fgm._MyLayer); return; } if (SelectionBuffer.Count == 1) selRecv.OnSelect((SelectionBuffer[0] as LayerGuiControlWrapper)._Control); else { Jovian.Data.Control[] oa = new Jovian.Data.Control[SelectionBuffer.Count]; for (int k = 0; k < SelectionBuffer.Count; k++) oa[k] = (SelectionBuffer[k] as 
LayerGuiControlWrapper)._Control; selRecv.OnSelect(oa); } foreach (LayerGuiControlWrapper c in SelectionBuffer) { c.SmartH = -1; c.SmartV = -1; } fgm.PrepareSmartGuides(); } public void Sync() { fgm.Sync(); inv(); } public void OnResolution() { inv(); } ImageResolutionCache Cache; ImageResolver Resolve; public void OnImageInfo(string image, int w, int h) { Cache.Add(image, w, h); } public ForkLayerEditor()//Fork.Layer C, jkWindow Wnd, SubFormEditor Owner) { Resolve = new NullResolver(); Cache = new ImageResolutionCache(Resolve); Viewing_TranslateX = 0; Viewing_TranslateY = 0; Viewing_Scaling = 1; InitializeComponent(); fgm = LayerGuiModel.Generate(); } public void Init(Jovian.Data.Layer L, Form P, ImageResolver ir) { Resolve = ir; Cache = new ImageResolutionCache(ir); ParentWindow = P; fgm = LayerGuiModel.Generate(L); TranslateDomByScreen(0, 0); } public void ReGenerate() { SelectionBuffer.Clear(); /* fgm = LayerGuiModel.Generate(fgm._MyLayer); */ Sync(); fgm.Rebuild(); SendSelectionBuffer(); inv(); } #endregion #region Lazy Invalidation private bool InvCalled = false; private void inv() { // Console.WriteLine("INV() called"); if (!InvCalled) { InvCalled = true; this.Invalidate(); } } private void inv(Rectangle R) { // Console.WriteLine("INV() called"); if (!InvCalled) { InvCalled = true; this.Invalidate(R); } } #endregion #region Space Operation private float Viewing_TranslateX; private float Viewing_TranslateY; private float Viewing_Scaling; private PointF Dom2Screen(int x, int y) { float a = (x - Viewing_TranslateX) * Viewing_Scaling; float b = (y - Viewing_TranslateY) * Viewing_Scaling; return new PointF(a, b); } private PointF Screen2Dom(float x, float y) { return new PointF(x / Viewing_Scaling + Viewing_TranslateX, y / Viewing_Scaling + Viewing_TranslateY); } private RectangleF Dom2Screen(Rectangle F) { PointF A = Dom2Screen(F.X, F.Y); PointF B = Dom2Screen(F.X + F.Width, F.Y + F.Height); return new RectangleF(A, new SizeF(B.X - A.X, B.Y - A.Y)); } public int mouse_x = 0; public int mouse_y = 0; private void TranslateDomByScreen(int dx, int dy) { Viewing_TranslateX -= dx / Viewing_Scaling; Viewing_TranslateY -= dy / Viewing_Scaling; int Gutter = fgm.Doc_Gutter; PointF M = Dom2Screen(fgm.Doc_Width() + 2 * Gutter, 2 * fgm.Doc_Height() + Gutter * 2); if (M.X <= this.Width) Viewing_TranslateX = (fgm.Doc_Width() + Gutter * 2 - this.Width / Viewing_Scaling); if (M.Y <= this.Height) Viewing_TranslateY = (fgm.Doc_Height() + Gutter * 2 - this.Height / Viewing_Scaling); PointF O = Dom2Screen(-Gutter, -Gutter); if (O.X >= 0) Viewing_TranslateX = -Gutter; if (O.Y >= 0) Viewing_TranslateY = -Gutter; if (Math.Abs(dx) + Math.Abs(dy) > 0) inv(); } protected override void OnMouseWheel(MouseEventArgs e) { float dz = e.Delta / 600.0f; float oldScaling = Viewing_Scaling; PointF oldCenter = Screen2Dom(mouse_x, mouse_y); Viewing_Scaling = Math.Max(0.2f, Math.Min(Viewing_Scaling + dz, 10.0f)); PointF newCenter = Screen2Dom(mouse_x, mouse_y); TranslateDomByScreen((int)(newCenter.X - oldCenter.X), (int)(newCenter.Y - oldCenter.Y)); inv(); } #endregion #region Left Mouse State Model LayerSelectionBufferOperation SelectBufferOp; ArrayList SelectionBuffer = new ArrayList(); bool kShiftDown = false; bool kCtrlDown = false; bool kAltDown = false; bool bRubber = false; PointF pRubberStart; PointF pRubberEnd; int SelectGuide = -1; int GetWidgetDim(LayerGuiControlWrapper c) { if (Viewing_Scaling < 1) return 2; if (Viewing_Scaling < 2) return 3; return 4; } IDimensionChange idc = null; public void 
SetIDC(IDimensionChange i) { idc = i; } public void RaiseIDC() { if (idc == null) return; idc.onDimensionChange(); } public void OnBar(int x, int y) { Viewing_TranslateX = x; Viewing_TranslateY = y; inv(); } public void SyncHBar(HScrollBar v) { int sz = (int)( this.ClientSize.Width); // - this.ClientSize.Height / Viewing_Scaling v.Minimum = 0; v.Maximum = (int)(22 + fgm.Doc_Width()); v.SmallChange = 1; v.LargeChange = (int)((this.ClientSize.Width) / Viewing_Scaling); if (v.LargeChange >= v.Maximum) { v.Enabled = false; } else { v.Enabled = true; } int y = (int) (Viewing_TranslateX); if (y > v.Maximum) y = v.Maximum; if (y < 0) y = 0; if (y >= 0 && y < v.Maximum) { if (v.Value != y) v.Value = y; } } public void SyncVBar(VScrollBar v) { int sz = (int)( this.ClientSize.Height); // - this.ClientSize.Height / Viewing_Scaling v.Minimum = 0; v.Maximum = (int)(22 + fgm.Doc_Height()); v.SmallChange = 1; v.LargeChange = (int)((this.ClientSize.Height) / Viewing_Scaling); if (v.LargeChange >= v.Maximum) { v.Enabled = false; } else { v.Enabled = true; } int y = (int) (Viewing_TranslateY); if (y > v.Maximum) y = v.Maximum; if (y < 0) y = 0; if (y >= 0 && y < v.Maximum) { if (v.Value != y) v.Value = y; } } PointF CopyDom = new PointF(0, 0); bool ForcePickHover = false; void LeftDown(int x, int y) { CopyDom = Screen2Dom(x, y); fgm.StartMovementActivity(); SelectGuide = -1; if (x < Dom2Screen(1, 0).X) { int[] domY = fgm.GetSplits(); for (int k = 0; k < domY.Length; k++) { if (Math.Abs(y - Dom2Screen(0, domY[k]).Y) < 6) { SelectGuide = k; } } if (SelectGuide >= 0) return; } if (!this.Focused || ForcePickHover) { this.Focus(); if (SelectionBuffer.Count == 0) PickHover(false, x, y); ForcePickHover = false; } SelectBufferOp = LayerSelectionBufferOperation.None; bool KillSelectionFlagOnAll = false; if (SelectionBuffer.Count > 0) { bool ClearAll = false; foreach (LayerGuiControlWrapper cw in fgm.Doc_Controls()) { if (!cw.Selected) { PointF A = Dom2Screen(cw.Left, cw.Top); PointF B = Dom2Screen(cw.Right, cw.Bottom); if (A.X <= x && x <= B.X) { if (A.Y <= y && y <= B.Y) { ClearAll = true; } } } } if (ClearAll) { foreach (LayerGuiControlWrapper cw in SelectionBuffer) { int wd = GetWidgetDim(cw); PointF A = Dom2Screen(cw.Left, cw.Top); PointF B = Dom2Screen(cw.Right, cw.Bottom); if (A.X - wd <= x && x <= B.X + wd) { if (A.Y - wd <= y && y <= B.Y + wd) { ClearAll = false; } } } } ArrayList L = new ArrayList(); foreach (LayerGuiControlWrapper c in SelectionBuffer) { if (!c._Control.DesignLocked && !ClearAll) L.Add(c); else c.Selected = false; } SelectionBuffer = L; // Did I click on something outside } if (SelectionBuffer.Count > 0) { bool SelectInside = false; foreach (LayerGuiControlWrapper c in SelectionBuffer) { PointF A = Dom2Screen(c.Left, c.Top); PointF B = Dom2Screen(c.Right, c.Bottom); int w = c.Right - c.Left; int h = c.Bottom - c.Top; PointF OneThirdAB = Dom2Screen(c.Left + w / 4, c.Top + h / 4); PointF TwoThirdAB = Dom2Screen(c.Left + (3 * w) / 4, c.Top + (3 * h) / 4); int WidgetDim = GetWidgetDim(c); bool OldSelectInside = SelectInside; if (A.X <= x && x <= B.X) { if (A.Y <= y && y <= B.Y) { SelectInside = true; } } if (!OldSelectInside && SelectInside) SelectBufferOp = LayerSelectionBufferOperation.Drag; RectangleF R; if (c.CanResizeH) { R = new RectangleF(A.X - WidgetDim, OneThirdAB.Y, 2 * WidgetDim, TwoThirdAB.Y - OneThirdAB.Y); if (R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeW; R = new RectangleF(B.X - WidgetDim, OneThirdAB.Y, 2 * WidgetDim, TwoThirdAB.Y - OneThirdAB.Y); if 
(R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeE; } if (c.CanResizeV) { R = new RectangleF(OneThirdAB.X, A.Y - WidgetDim, TwoThirdAB.X - OneThirdAB.X, 2 * WidgetDim); if (R.Contains(x, y)) { SelectBufferOp = LayerSelectionBufferOperation.ResizeN; } R = new RectangleF(OneThirdAB.X, B.Y - WidgetDim, TwoThirdAB.X - OneThirdAB.X, 2 * WidgetDim); if (R.Contains(x, y)) { SelectBufferOp = LayerSelectionBufferOperation.ResizeS; } } if (c.CanResizeH && c.CanResizeV) { R = new RectangleF(A.X - WidgetDim, A.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); if (R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeNW; R = new RectangleF(B.X - WidgetDim, A.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); if (R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeNE; R = new RectangleF(B.X - WidgetDim, B.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); if (R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeSE; R = new RectangleF(A.X - WidgetDim, B.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); if (R.Contains(x, y)) SelectBufferOp = LayerSelectionBufferOperation.ResizeSW; } if (SelectBufferOp != LayerSelectionBufferOperation.None) { SelectInside = true; } else { } } if (!SelectInside && kShiftDown && HoveringOver != null) { HoveringOver.Selected = true; SelectionBuffer.Add(HoveringOver); SendSelectionBuffer(); SelectBufferOp = LayerSelectionBufferOperation.Drag; } else { if (!SelectInside) { KillSelectionFlagOnAll = true; } if (KillSelectionFlagOnAll) { SelectionBuffer.Clear(); foreach (LayerGuiControlWrapper w in fgm.Doc_Controls()) w.Selected = false; SendSelectionBuffer(); KillSelectionFlagOnAll = false; } } } if (SelectionBuffer.Count == 0) { if (HoveringOver != null) { HoveringOver.Selected = true; SelectionBuffer.Add(HoveringOver); SelectBufferOp = LayerSelectionBufferOperation.Drag; SendSelectionBuffer(); } else { KillSelectionFlagOnAll = true; bRubber = true; pRubberStart = Screen2Dom(x, y); pRubberEnd = Screen2Dom(x, y); } } if (KillSelectionFlagOnAll) { SelectionBuffer.Clear(); foreach (LayerGuiControlWrapper w in fgm.Doc_Controls()) w.Selected = false; SendSelectionBuffer(); } inv(); } Rectangle RectUnion(Rectangle R, Rectangle J, int grow) { int mL = Math.Min(R.Left, J.Left) - grow; int mR = Math.Max(R.Right, J.Right) + grow; int mT = Math.Min(R.Top, J.Top) - grow; int mB = Math.Max(R.Bottom, J.Bottom) + grow; return new Rectangle(mL, mT, mR - mL, mB - mT); } void LeftDrag(int x, int y, int dx, int dy) { if (SelectGuide >= 0) { (fgm.GetSplits())[SelectGuide] += (int)((float)(dy / Viewing_Scaling)); inv(); return; } if (SelectionBuffer.Count > 0) { if (SelectBufferOp != LayerSelectionBufferOperation.None) { int s = fgm.GetIdealGrid(1.0f / Viewing_Scaling); Rectangle R = new Rectangle(1, 1, 1, 1); int UnionCount = 0; bool sendinv = false; foreach (LayerGuiControlWrapper c in SelectionBuffer) { //c.GetRectOnGrid(s, !kShiftDown&& SelectionBuffer.Count == 1); int dG = 16; Rectangle J = c.GetRectOnGrid(s, !kShiftDown && SelectionBuffer.Count == 1); STATUS.X = J.X; STATUS.Y = J.Y; STATUS.W = J.Width; STATUS.H = J.Height; PointF A = Dom2Screen(J.X, J.Y); PointF B = Dom2Screen(J.X + J.Width, J.Y + J.Height); J = new Rectangle((int)A.X - dG, (int)A.Y - dG, (int)(B.X - A.X) + 2 * dG, (int)(B.Y - A.Y) + 2 * dG); if (UnionCount == 0) R = J; R = RectUnion(R, J, 5); UnionCount++; float speed = 1.0f; if (kCtrlDown) speed = 0.1f; if (c.Apply(SelectBufferOp, speed * (float)(dx / Viewing_Scaling), speed * (float)(dy / Viewing_Scaling))) sendinv 
= true; J = c.GetRectOnGrid(s, !kShiftDown && SelectionBuffer.Count == 1); A = Dom2Screen(J.X, J.Y); B = Dom2Screen(J.X + J.Width, J.Y + J.Height); J = new Rectangle((int)A.X - dG, (int)A.Y - dG, (int)(B.X - A.X) + 2 * dG, (int)(B.Y - A.Y) + 2 * dG); R = RectUnion(R, J, 5); UnionCount++; } if (sendinv) { selRecv.UpdateStatus(STATUS); //R = new Rectangle(1, 1, 10, 10); ///Invalidate(R); if (Math.Max(dx, dy) > 15) inv(); else { if (R.IntersectsWith(this.ClientRectangle)) inv(R); } // inv(); } } } if (bRubber) { pRubberEnd = Screen2Dom(x, y); inv(); } } void LeftCommit() { if (SelectionBuffer.Count > 0) { foreach (LayerGuiControlWrapper c in SelectionBuffer) { c.Commit(fgm.GetIdealGrid(1.0f / Viewing_Scaling), !kShiftDown && SelectionBuffer.Count == 1); } inv(); } if (bRubber) { int x = (int)Math.Min(pRubberStart.X, pRubberEnd.X); int y = (int)Math.Min(pRubberStart.Y, pRubberEnd.Y); int w = (int)Math.Max(pRubberStart.X, pRubberEnd.X) - x; int h = (int)Math.Max(pRubberStart.Y, pRubberEnd.Y) - y; ArrayList Merge = fgm.GetBatchControls(x, y, w, h); if (!kShiftDown) { SelectionBuffer.Clear(); foreach (LayerGuiControlWrapper cw in fgm.Doc_Controls()) cw.Selected = false; } if (Merge.Count > 0) { foreach (LayerGuiControlWrapper cw in Merge) { cw.Selected = true; SelectionBuffer.Add(cw); } SendSelectionBuffer(); } bRubber = false; inv(); } fgm.StopMovementAndRecord(); } void LeftFailed() { fgm.StopMovementAndDestory(); if (SelectionBuffer.Count > 0) { foreach (LayerGuiControlWrapper c in SelectionBuffer) { c.Rollback(); } inv(); } } #endregion #region Selectall / Delete Selection void SelectAll() { SelectionBuffer.Clear(); foreach (LayerGuiControlWrapper w in fgm.Doc_Controls()) { SelectionBuffer.Add(w); w.Selected = true; } SendSelectionBuffer(); inv(); } void DeleteSelection() { fgm.DeleteSelection(SelectionBuffer); SelectionBuffer.Clear(); SendSelectionBuffer(); inv(); } #endregion public bool NeedRenderCall = false; #region Mouse/Keyboard State Model (simplified) private static int CopyPasteBufferMinX = 0; private static int CopyPasteBufferMinY = 0; private static ArrayList CopyPasteBufferLocal = null; private static bool DeletedOnPaste = false; protected override void OnKeyDown(KeyEventArgs e) { if (e.KeyData == (Keys.A | Keys.Control)) { SelectAll(); } if (e.KeyData == (Keys.Z | Keys.Control)) { fgm.Undo(); fgm.Sync(); inv(); } if (e.KeyData == (Keys.Y | Keys.Control)) { fgm.Redo(); fgm.Sync(); inv(); } if (e.KeyData == (Keys.U | Keys.Control)) { foreach (LayerGuiControlWrapper w in SelectionBuffer) { w.Selected = false; w.UberSelected = true; UberSelection.Add(w); } SelectionBuffer.Clear(); SendSelectionBuffer(); inv(); } if (e.KeyData == (Keys.T | Keys.Control)) { foreach (LayerGuiControlWrapper w in UberSelection) { w.UberSelected = false; w.Selected = true; SelectionBuffer.Add(w); } UberSelection.Clear(); SendSelectionBuffer(); inv(); } if (e.KeyData == Keys.Alt) { kAltDown = true; } if (e.KeyData == (Keys.C | Keys.Control)) { //Clipboard.SetData( if (SelectionBuffer.Count > 0) { CopyPasteBufferMinX = fgm._MyLayer.Width; CopyPasteBufferMinY = fgm._MyLayer.Height; ArrayList L = new ArrayList(); foreach (LayerGuiControlWrapper w in SelectionBuffer) { CopyPasteBufferMinX = Math.Min(w._Control.X, CopyPasteBufferMinX); CopyPasteBufferMinY = Math.Min(w._Control.Y, CopyPasteBufferMinY); xmljr.XmlJrWriter writ = new xmljr.XmlJrWriter(null); w._Control.WriteXML(writ); object cc = Jovian.Data.Jovian_Data.BuildObjectTable(xmljr.XmlJrDom.Read(writ.GetXML(), null), null).LookUpObject(1); L.Add(cc); } 
DeletedOnPaste = false; CopyPasteBufferLocal = L; } // Copy Current Selection } if (e.KeyData == (Keys.X | Keys.Control)) { // Copy Current Selection } if (e.KeyData == (Keys.V | Keys.Control)) { if (CopyPasteBufferLocal != null) { ArrayList NL = new ArrayList(); foreach (Jovian.Data.Control C in CopyPasteBufferLocal) { C.X -= (int)(CopyPasteBufferMinX - CopyDom.X); C.Y -= (int)(CopyPasteBufferMinY - CopyDom.Y); int ugk = C.UndoGenKey; C.UndoGenKey = 0; xmljr.XmlJrWriter writ = new xmljr.XmlJrWriter(null); C.WriteXML(writ); Jovian.Data.Control CC = Jovian.Data.Jovian_Data.BuildObjectTable(xmljr.XmlJrDom.Read(writ.GetXML(), null), null).LookUpObject(1) as Jovian.Data.Control; C.UndoGenKey = ugk; Add(CC); } } if (DeletedOnPaste) { CopyPasteBufferLocal = null; } // Copy Current Selection } if (e.KeyData == Keys.Delete || e.KeyData == Keys.Back) { DeleteSelection(); } if (e.Shift || e.KeyData == Keys.Shift || e.KeyData == Keys.ShiftKey) { kShiftDown = true; } if (e.Control || e.KeyData == Keys.Control || e.KeyData == Keys.ControlKey) { kCtrlDown = true; } if (e.KeyData == (Keys.Left | Keys.Control)) { fgm.StartMovementActivity(); NudgeSelection(-10, 0); fgm.StopMovementAndRecord(); } if (e.KeyData == (Keys.Right | Keys.Control)) { fgm.StartMovementActivity(); NudgeSelection( 10, 0); fgm.StopMovementAndRecord(); } if (e.KeyData == (Keys.Up | Keys.Control)) { fgm.StartMovementActivity(); NudgeSelection(0, -10); fgm.StopMovementAndRecord(); } if (e.KeyData == (Keys.Down | Keys.Control)) { fgm.StartMovementActivity(); NudgeSelection(0, 10); fgm.StopMovementAndRecord(); } if (e.KeyCode == Keys.R) { Viewing_Scaling = 1.0f; inv(); } //kCtrlDown } protected override bool ProcessDialogKey(Keys keyData) { fgm.StartMovementActivity(); int ds = 1; if (kCtrlDown) ds = 10; if (keyData == Keys.Left) { NudgeSelection(-ds, 0); } if (keyData == Keys.Right) { NudgeSelection(ds, 0); } if (keyData == Keys.Up) { NudgeSelection(0, -ds); } if (keyData == Keys.Down) { NudgeSelection(0, ds); } fgm.StopMovementAndRecord(); return base.ProcessDialogKey(keyData); } protected override void OnKeyUp(KeyEventArgs e) { if (e.KeyData == Keys.Escape) { if (bLeftDown) { bLeftDown = false; LeftFailed(); } } if (e.KeyData == Keys.Alt) { kAltDown = false; } if (e.Shift || e.KeyData == Keys.Shift || e.KeyData == Keys.ShiftKey) { kShiftDown = false; } if (e.Control || e.KeyData == Keys.Control || e.KeyData == Keys.ControlKey) { kCtrlDown = false; } } bool bPanning = false; bool bLeftDown = false; protected override void OnMouseDown(MouseEventArgs e) { mouse_x = e.X; mouse_y = e.Y; this.Focus(); if (e.Button == MouseButtons.Left && !kCtrlDown && !kAltDown) { bLeftDown = true; LeftDown(e.X, e.Y); } if (e.Button == MouseButtons.Middle || (e.Button == MouseButtons.Left && (kCtrlDown || kAltDown) )) { bPanning = true; } if (e.Button == MouseButtons.Right) { FireActionMenu(e.X, e.Y); } } private void PickHover(bool skipHover, int x, int y) { if (!skipHover) { PointF p = Screen2Dom(x, y); LayerGuiControlWrapper nextHover = null; nextHover = fgm.GetSingleControl((int)p.X, (int)p.Y); if (nextHover != HoveringOver) { HoveringOver = nextHover; inv(); } } } protected override void OnMouseMove(MouseEventArgs e) { bool skipHover = false; if (bPanning) { skipHover = true; TranslateDomByScreen(e.X - mouse_x, e.Y - mouse_y); } if (bLeftDown) { skipHover = true; LeftDrag(e.X, e.Y, e.X - mouse_x, e.Y - mouse_y); } PickHover(skipHover, e.X, e.Y); mouse_x = e.X; mouse_y = e.Y; } protected override void OnMouseUp(MouseEventArgs e) { if (e.Button == 
MouseButtons.Left) { if (bLeftDown) { LeftCommit(); } if (bPanning && (kCtrlDown || kAltDown)) { bPanning = false; } bLeftDown = false; } if (e.Button == MouseButtons.Middle) { bPanning = false; } } #endregion #region Document Rendering LayerGuiControlWrapper HoveringOver = null; private void DrawDocument(Graphics g) { PointF A = Dom2Screen(0, 0); PointF B = Dom2Screen(fgm.Doc_Width(), fgm.Doc_Height()); g.FillRectangle(fgm.Doc_Shadow, A.X + 4, A.Y + 4, B.X - A.X, B.Y - A.Y); g.FillRectangle(fgm.Doc_Fill, A.X, A.Y, B.X - A.X, B.Y - A.Y); g.DrawRectangle(fgm.Doc_Border, A.X, A.Y, B.X - A.X, B.Y - A.Y); int[] S = fgm.GetSplits(); for (int k = 0; k < S.Length; k++) { PointF p = Dom2Screen(0, S[k]); g.DrawLine(new Pen(Color.LightPink), 0, p.Y, this.Width, p.Y); g.FillRectangle(new SolidBrush(Color.Orange), 0, p.Y - 3, 16, 6); g.DrawRectangle(new Pen(Color.Black), 0, p.Y - 3, 16, 6); } int w = fgm.Doc_Width(); int h = fgm.Doc_Height(); Pen cR = fgm.GetGridPen(1.0f / Viewing_Scaling); Pen cR10 = fgm.GetGridPen10(1.0f / Viewing_Scaling); int gi = fgm.GetIdealGrid(1.0f / Viewing_Scaling); PointF zp = Dom2Screen((int)CopyDom.X, (int)CopyDom.Y); g.DrawLine(fgm.Doc_Cursor, (int)(zp.X ),(int)(zp.Y - 2),(int)(zp.X ),(int)(zp.Y + 2)); g.DrawLine(fgm.Doc_Cursor, (int)(zp.X - 2),(int)(zp.Y ),(int)(zp.X + 2),(int)(zp.Y)); int o10 = 1; for (int x = gi; x < w; x += gi) { A = Dom2Screen(x, 0); B = Dom2Screen(x, h); g.DrawLine(cR, A, B); if (o10 % 10 == 0) g.DrawLine(cR10, A, B); o10++; } o10 = 1; for (int y = gi; y < h; y += gi) { A = Dom2Screen(0, y); B = Dom2Screen(w, y); g.DrawLine(cR, A, B); if (o10 % 10 == 0) g.DrawLine(cR10, A, B); o10++; } ArrayList set = fgm.Doc_Controls(); foreach (LayerGuiControlWrapper c in set) { if (!c.Selected) DrawControl(g, c, c == HoveringOver); } foreach (LayerGuiControlWrapper c in set) { if (c.Selected) DrawControl(g, c, c == HoveringOver); } if (NeedRenderCall) { Resolve.rcu(); NeedRenderCall = false; } } #endregion #region Control Rendering private Font _FontTip = new Font("Arial", 7); private void DrawControl(Graphics g, LayerGuiControlWrapper c, bool hover) { if (c._Control.Width <= 0) c._Control.Width = 1; if (c._Control.Height <= 0) c._Control.Height = 1; if (c._Control is Jovian.Data.Picture) { ImageInformation ii = Cache.Get((c._Control as Jovian.Data.Picture).Image); if (ii != null) c.SetMins(ii.Width, ii.Height); /* if (ii != null) { c._Control.Width = ii.Width; c._Control.Height = ii.Height; c.dBottom = c.dTop; c.dRight = c.dLeft; c.Right = c.Left + ii.Width; c.Bottom = c.Top + ii.Height; } */ } int s = fgm.GetIdealGrid(1.0f / Viewing_Scaling); Rectangle Rz = c.GetRectOnGrid(s, !kShiftDown && SelectionBuffer.Count == 1); int x = Rz.X; int y = Rz.Y; int w = Rz.Width; int h = Rz.Height; PointF A = Dom2Screen(x, y); PointF B = Dom2Screen(x + w, y + h); int SmartLineOffSet = 15; Rectangle BoxBounds = new Rectangle((int)A.X, (int)A.Y, (int)(B.X - A.X), (int)(B.Y - A.Y)); if (c.Selected) { RectangleF BX = Dom2Screen(Rz); if (c.SmartH >= 0) { PointF p = Dom2Screen(c.SmartH, 0); //BoxBounds.Y - SmartLineOffSet // Console.WriteLine(":" + BoxBounds.Y + " , " + BoxBounds.Height); g.DrawLine(new Pen(Color.Orange), p.X, BoxBounds.Y - SmartLineOffSet, p.X, BoxBounds.Y + BoxBounds.Height + SmartLineOffSet); } if (c.SmartV >= 0) { PointF p = Dom2Screen(0, c.SmartV); g.DrawLine(new Pen(Color.Orange), BoxBounds.X - SmartLineOffSet, p.Y, BoxBounds.X + BoxBounds.Width + SmartLineOffSet, p.Y); } } /* int x = (int)( Math.Round( c.Left + c.dLeft, 1) ); int y = (int)( Math.Round( c.Top 
+ c.dTop, 1) ); int w = (int)( Math.Round(c.Right + c.dRight, 1)) - x; int h = (int)( Math.Round( c.Bottom + c.dBottom, 1) ) - y; */ // float fs = s; // x = ((int)Math.Round(x / fs)) * s; // y = ((int)Math.Round(y / fs)) * s; int WidgetDim = GetWidgetDim(c); PointF MidAB = Dom2Screen(x + w / 2, y + h / 2); PointF OneThirdAB = Dom2Screen(x + w / 4, y + h / 4); PointF TwoThirdAB = Dom2Screen(x + (3 * w) / 4, y + (3 * h) / 4); if (c.Render(g, hover, BoxBounds, "C:\\jk_css_cache\\")) { NeedRenderCall = true; } if (c.RenderFrame) { Pen fborder = fgm.Doc_ControlFrame_Normal; if (hover) { fborder = fgm.Doc_ControlFrame_Hover; g.DrawString(Jovian.UI.Layers.ControlTitle.GetTitle(c._Control), _FontTip, new SolidBrush(Color.Red), new PointF(A.X + 2, A.Y + 2)); } if (c.Selected) fborder = fgm.Doc_ControlFrame_Selected; if (c.UberSelected) fborder = fgm.Doc_ControlFrame_UberSelected; g.DrawRectangle(fborder, BoxBounds); } if (c.Selected && !c._Control._DesignLocked) { RectangleF R; if (c.CanResizeH) { R = new RectangleF(A.X - WidgetDim, OneThirdAB.Y, 2 * WidgetDim, TwoThirdAB.Y - OneThirdAB.Y); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); R = new RectangleF(B.X - WidgetDim, OneThirdAB.Y, 2 * WidgetDim, TwoThirdAB.Y - OneThirdAB.Y); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); } if (c.CanResizeV) { R = new RectangleF(OneThirdAB.X, A.Y - WidgetDim, TwoThirdAB.X - OneThirdAB.X, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); R = new RectangleF(OneThirdAB.X, B.Y - WidgetDim, TwoThirdAB.X - OneThirdAB.X, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); } if (c.CanResizeH && c.CanResizeV) { R = new RectangleF(A.X - WidgetDim, A.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); R = new RectangleF(B.X - WidgetDim, A.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); R = new RectangleF(B.X - WidgetDim, B.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); R = new RectangleF(A.X - WidgetDim, B.Y - WidgetDim, 2 * WidgetDim, 2 * WidgetDim); g.FillRectangle(fgm.Widget_Fill, R); g.DrawRectangle(fgm.Widget_Border, R.X, R.Y, R.Width, R.Height); } } } #endregion #region Double Buffer + OnPaint private Bitmap offScreenBmp = null; private Graphics offScreenDC = null; int ForceLimit = 0; protected override void OnPaint(PaintEventArgs pe) { if (fgm._MyLayer == null) { pe.Graphics.Clear(Color.White); return; } InvCalled = false; try { TranslateDomByScreen(0, 0); } catch (Exception e) { Console.WriteLine("ER:" + e.ToString()); } if (offScreenBmp == null) { offScreenBmp = new Bitmap(this.Width, this.Height); offScreenDC = Graphics.FromImage(offScreenBmp); } else if (offScreenBmp.Width != this.Width || offScreenBmp.Height != this.Height) { offScreenBmp = new Bitmap(this.Width, this.Height); offScreenDC = Graphics.FromImage(offScreenBmp); offScreenDC.SmoothingMode = SmoothingMode.HighSpeed; offScreenDC.CompositingQuality = CompositingQuality.HighSpeed; offScreenDC.InterpolationMode = InterpolationMode.Low; } offScreenDC.Clear(Color.LightGray); offScreenDC.Clip = new Region(pe.ClipRectangle); 
DrawDocument(offScreenDC); if (SuperInvalidate) { SuperInvalidate = false; offScreenDC.Clear(Color.LightGray); DrawDocument(offScreenDC); } if (bRubber) { PointF s1 = Dom2Screen((int)Math.Min(pRubberStart.X, pRubberEnd.X), (int)Math.Min(pRubberStart.Y, pRubberEnd.Y)); PointF s2 = Dom2Screen((int)Math.Max(pRubberStart.X, pRubberEnd.X), (int)Math.Max(pRubberStart.Y, pRubberEnd.Y)); offScreenDC.DrawRectangle(new Pen(Color.Red), (int)s1.X, (int)s1.Y, (int)(s2.X - s1.X), (int)(s2.Y - s1.Y)); } if (SuperAwesomeCommit) { ArrayList set = fgm.Doc_Controls(); foreach (LayerGuiControlWrapper c in set) { c.Commit(1, false); } SuperAwesomeCommit = false; } pe.Graphics.DrawImage(offScreenBmp, 0, 0); RaiseIDC(); } protected override void OnPaintBackground(PaintEventArgs pevent) { } #endregion #region Dispose protected override void Dispose(bool disposing) { if (disposing) { if (components != null) components.Dispose(); } base.Dispose(disposing); } #endregion #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.SuspendLayout(); // // ForkLayerEditor // this.AllowDrop = true; this.ResumeLayout(false); } #endregion } }
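// Illustrative, self-contained sketch of the viewport math used by
// Dom2Screen/Screen2Dom/OnMouseWheel in ForkLayerEditor above. Field and
// method names are borrowed from the control, but this is a simplified
// assumption-level illustration: it omits the document-bounds clamping done
// by TranslateDomByScreen and is not the control's actual code path.
using System;
using System.Drawing;

class ViewportMathSketch
{
    // Pan offset in document units and zoom factor (1 = 100%).
    public float TranslateX;
    public float TranslateY;
    public float Scaling = 1.0f;

    // Document -> screen: remove the pan offset, then scale.
    public PointF Dom2Screen(float x, float y)
    {
        return new PointF((x - TranslateX) * Scaling, (y - TranslateY) * Scaling);
    }

    // Screen -> document: exact inverse of Dom2Screen.
    public PointF Screen2Dom(float x, float y)
    {
        return new PointF(x / Scaling + TranslateX, y / Scaling + TranslateY);
    }

    // Zoom about the cursor: clamp the new scale to the control's 0.2x..10x
    // range, then pan so the document point under the cursor stays put.
    public void ZoomAt(int mouseX, int mouseY, float delta)
    {
        PointF before = Screen2Dom(mouseX, mouseY);
        Scaling = Math.Max(0.2f, Math.Min(Scaling + delta, 10.0f));
        PointF after = Screen2Dom(mouseX, mouseY);
        TranslateX -= after.X - before.X;
        TranslateY -= after.Y - before.Y;
    }
}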
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ using System; using System.ComponentModel.Design; using System.ComponentModel.Design.Serialization; using System.Globalization; using System.Runtime.InteropServices; using Microsoft.VisualStudio; using Microsoft.VisualStudio.Designer.Interfaces; using Microsoft.VisualStudio.OLE.Interop; using Microsoft.VisualStudio.Shell; using Microsoft.VisualStudio.Shell.Interop; using Microsoft.VisualStudio.TextManager.Interop; using IOleServiceProvider = Microsoft.VisualStudio.OLE.Interop.IServiceProvider; using Microsoft.MultiverseInterfaceStudio.Services; using System.Windows.Forms; namespace Microsoft.MultiverseInterfaceStudio.FrameXml { [ComVisible(true)] [Guid(GuidStrings.FrameXmlEditorFactory)] public class FrameXmlEditorFactory : IVsEditorFactory { private FrameXmlEditorPackage package; private ServiceProvider serviceProvider; public FrameXmlEditorFactory(FrameXmlEditorPackage package) { if (package == null) throw new ArgumentNullException("package"); this.package = package; } public virtual int SetSite(IOleServiceProvider serviceProvider) { this.serviceProvider = new ServiceProvider(serviceProvider); return VSConstants.S_OK; } public virtual object GetService(Type serviceType) { return serviceProvider.GetService(serviceType); } public int MapLogicalView(ref Guid logicalView, out string physicalView) { // Initialize out parameter physicalView = null; bool isSupportedView = false; // Determine the physical view if (VSConstants.LOGVIEWID_Primary == logicalView) { physicalView = "Design"; isSupportedView = true; } else if (VSConstants.LOGVIEWID_Designer == logicalView) { physicalView = "Design"; isSupportedView = true; } else if (VSConstants.LOGVIEWID_TextView == logicalView) { isSupportedView = true; } else if (VSConstants.LOGVIEWID_Code == logicalView) { isSupportedView = true; } return isSupportedView ? 
VSConstants.S_OK : VSConstants.E_NOTIMPL; } public int Close() { return VSConstants.S_OK; } public virtual int CreateEditorInstance( uint createEditorFlags, string documentMoniker, string physicalView, IVsHierarchy hierarchy, uint itemid, IntPtr docDataExisting, out IntPtr docView, out IntPtr docData, out string editorCaption, out Guid commandUIGuid, out int createDocumentWindowFlags) { // Initialize out parameters docView = IntPtr.Zero; docData = IntPtr.Zero; commandUIGuid = Guids.FrameXmlEditorFactory; createDocumentWindowFlags = 0; editorCaption = null; // Validate inputs if ((createEditorFlags & (VSConstants.CEF_OPENFILE | VSConstants.CEF_SILENT)) == 0) return VSConstants.E_INVALIDARG; // Get the text buffer IVsTextLines textLines = GetTextBuffer(docDataExisting, documentMoniker); // Assign docData IntPtr to either existing docData or the new text buffer if (docDataExisting != IntPtr.Zero) { docData = docDataExisting; Marshal.AddRef(docData); } else { docData = Marshal.GetIUnknownForObject(textLines); } try { docView = CreateDocumentView(physicalView, hierarchy, itemid, textLines, out editorCaption, out commandUIGuid, documentMoniker); } finally { if (docView == IntPtr.Zero) { if (docDataExisting != docData && docData != IntPtr.Zero) { // Cleanup the instance of the docData that we have addref'ed Marshal.Release(docData); docData = IntPtr.Zero; } } } return VSConstants.S_OK; } private IVsTextLines GetTextBuffer(IntPtr docDataExisting, string documentMoniker) { IVsTextLines textLines; if (docDataExisting == IntPtr.Zero) { // Create a new IVsTextLines buffer textLines = this.CreateInstance<IVsTextLines, VsTextBufferClass>(); // set the buffer's site ((IObjectWithSite)textLines).SetSite(serviceProvider.GetService(typeof(IOleServiceProvider))); // Fcuk COM Guid GUID_VsBufferMoniker = typeof(IVsUserData).GUID; // Explicitly load the data through IVsPersistDocData ((IVsPersistDocData)textLines).LoadDocData(documentMoniker); } else { // Use the existing text buffer object dataObject = Marshal.GetObjectForIUnknown(docDataExisting); textLines = dataObject as IVsTextLines; if (textLines == null) { // Try get the text buffer from textbuffer provider IVsTextBufferProvider textBufferProvider = dataObject as IVsTextBufferProvider; if (textBufferProvider != null) textBufferProvider.GetTextBuffer(out textLines); } if (textLines == null) { // Unknown docData type then, so we have to force VS to close the other editor. 
ErrorHandler.ThrowOnFailure(VSConstants.VS_E_INCOMPATIBLEDOCDATA); } } return textLines; } private IntPtr CreateDocumentView(string physicalView, IVsHierarchy hierarchy, uint itemid, IVsTextLines textLines, out string editorCaption, out Guid cmdUI, string documentPath) { //Init out params editorCaption = String.Empty; cmdUI = Guid.Empty; if (String.IsNullOrEmpty(physicalView)) { // create code window as default physical view return this.CreateCodeView(textLines, ref editorCaption, ref cmdUI); } else if (String.Compare(physicalView, "design", true, CultureInfo.InvariantCulture) == 0) { try { // Create Form view return this.CreateDesignerView(hierarchy, itemid, textLines, ref editorCaption, ref cmdUI, documentPath); } catch(InvalidOperationException ex) { var message = String.Format(VSPackage.OPEN_CODE_EDITOR, ex.Message); var openCodeEditor = MessageBox.Show(message, null, MessageBoxButtons.YesNo, MessageBoxIcon.Warning) == DialogResult.Yes; if (openCodeEditor) { // Create Code view instead return this.CreateCodeView(textLines, ref editorCaption, ref cmdUI); } else { throw; } } } // We couldn't create the view // Return special error code so VS can try another editor factory. ErrorHandler.ThrowOnFailure(VSConstants.VS_E_UNSUPPORTEDFORMAT); return IntPtr.Zero; } private IntPtr CreateDesignerView(IVsHierarchy hierarchy, uint itemid, IVsTextLines textLines, ref string editorCaption, ref Guid cmdUI, string documentMoniker) { // Request the Designer Service IVSMDDesignerService designerService = (IVSMDDesignerService)GetService(typeof(IVSMDDesignerService)); try { // Get the service provider IOleServiceProvider provider = serviceProvider.GetService(typeof(IOleServiceProvider)) as IOleServiceProvider; // Create loader for the designer FrameXmlDesignerLoader designerLoader = new FrameXmlDesignerLoader(textLines, documentMoniker, itemid); // Create the designer using the provider and the loader IVSMDDesigner designer = designerService.CreateDesigner(provider, designerLoader); #if !HIDE_FRAME_XML_PANE // Retrieve the design surface DesignSurface designSurface = (DesignSurface)designer; // Create pane with this surface FrameXmlPane frameXmlPane = new FrameXmlPane(designSurface); designerLoader.InitializeFrameXmlPane(frameXmlPane); // Get command guid from designer cmdUI = frameXmlPane.CommandGuid; editorCaption = " [Design]"; // Return FrameXmlPane return Marshal.GetIUnknownForObject(frameXmlPane); #else object view = designer.View; cmdUI = designer.CommandGuid; editorCaption = " [Design]"; designerLoader.InitializeFrameXmlPane(null); // Return view return Marshal.GetIUnknownForObject(view); #endif } catch (Exception ex) { // Just rethrow for now throw; } } private IntPtr CreateCodeView(IVsTextLines textLines, ref string editorCaption, ref Guid cmdUI) { IVsCodeWindow window = this.CreateInstance<IVsCodeWindow, VsCodeWindowClass>(); ErrorHandler.ThrowOnFailure(window.SetBuffer(textLines)); ErrorHandler.ThrowOnFailure(window.SetBaseEditorCaption(null)); ErrorHandler.ThrowOnFailure(window.GetEditorCaption(READONLYSTATUS.ROSTATUS_Unknown, out editorCaption)); cmdUI = VSConstants.GUID_TextEditorFactory; return Marshal.GetIUnknownForObject(window); } private TInterface CreateInstance<TInterface, TClass>() where TInterface : class where TClass : class { Guid clsid = typeof(TClass).GUID; Guid riid = typeof(TInterface).GUID; return (TInterface)package.CreateInstance(ref clsid, ref riid, typeof(TInterface)); } } }
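// Illustrative sketch of how a factory like the one above is typically hooked
// up. The real FrameXmlEditorPackage lives elsewhere in the project; the
// skeleton below is a hypothetical reconstruction (deriving from Package and
// registering the factory in Initialize), not a copy of the actual class, and
// the usual ProvideEditorExtension/ProvideEditorLogicalView attributes on the
// package are assumed and omitted here.
using Microsoft.VisualStudio.Shell;

namespace Microsoft.MultiverseInterfaceStudio.FrameXml
{
    public class FrameXmlEditorPackage : Package   // hypothetical skeleton
    {
        protected override void Initialize()
        {
            base.Initialize();

            // The package takes ownership of the factory; Visual Studio then
            // calls MapLogicalView and CreateEditorInstance on it whenever a
            // document mapped to this factory is opened.
            RegisterEditorFactory(new FrameXmlEditorFactory(this));
        }
    }
}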
using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using Nmap.Properties; namespace Nmap.Internal { internal static class Error { #region Public Methods public static void ArgumentNullException_IfNull(object argument, string argumentName) { if (argument == null) { throw new ArgumentNullException(string.Format(Resources.ArgumentIsNull1, argumentName ?? string.Empty)); } } public static void ArgumentException_IfNotSimple(Type argumentType, string argumentName) { if (argumentType == null || !ReflectionHelper.IsSimple(argumentType)) { throw new ArgumentException(string.Format(Resources.NotSimple1, argumentName ?? string.Empty)); } } public static void ArgumentException_IfSimpleOrSimpleEnumerable(Type argumentType, string argumentName) { if (argumentType == null || ReflectionHelper.IsSimple(argumentType) || ReflectionHelper.IsSimpleEnumerable(argumentType)) { throw new ArgumentException(string.Format(Resources.SimpleOrSimpleEnumerable1, argumentName)); } } public static void ArgumentException_IfNotOneLevelMemberExpression(Expression expression) { var lambdaExpression = expression as LambdaExpression; if (lambdaExpression == null) { throw new ArgumentException(Resources.NotOneLevelMemberExpression); } Expression body = lambdaExpression.Body; if (body.NodeType == ExpressionType.MemberAccess || body.NodeType == ExpressionType.Convert) { if (!(body.ToString().Count((char c) => c == '.') <= 1)) { throw new ArgumentException(Resources.NotOneLevelMemberExpression); } } else { throw new ArgumentException(Resources.NotOneLevelMemberExpression); } } public static void MapValidationException_IfTypeMapDuplicated(TypeMapBase map, IEnumerable<TypeMapBase> maps) { if (maps.Count((TypeMapBase m) => m.Equals(map)) > 1) { throw new MapValidationException(string.Format(Resources.TypeMapDuplicated1, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfPropertyMapDuplicated(TypeMapBase typeMap, PropertyMapBase propertyMap, IEnumerable<PropertyMapBase> propertyMaps) { if (propertyMaps.Count((PropertyMapBase m) => m.SourcePropertyInfo == propertyMap.SourcePropertyInfo || m.DestinationPropertyInfo == propertyMap.DestinationPropertyInfo) > 1) { throw new MapValidationException(string.Format(Resources.PropertyMapDuplicated2, (propertyMap != null) ? propertyMap.ToString() : string.Empty, (typeMap != null) ? typeMap.ToString() : string.Empty), null); } } public static void MapValidationException_IfTypeMapIsNotForComplexTypes(TypeMapBase map) { if (map == null || !ReflectionHelper.IsComplex(map.SourceType) || !ReflectionHelper.IsComplex(map.DestinationType)) { throw new MapValidationException(string.Format(Resources.TypeMapIsNotForComplexTypes1, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfInheritanceMapIsNotForDerivedTypes(TypeMapBase map, PropertyMapBase propertyMap, TypeMapBase inheritanceMap) { Type baseType = ReflectionHelper.IsComplexEnumerable(propertyMap.SourcePropertyInfo.PropertyType) ? ReflectionHelper.GetEnumerableItemType(propertyMap.SourcePropertyInfo.PropertyType) : propertyMap.SourcePropertyInfo.PropertyType; Type type = (propertyMap.DestinationPropertyInfo != null) ? (ReflectionHelper.IsComplexEnumerable(propertyMap.DestinationPropertyInfo.PropertyType) ? 
ReflectionHelper.GetEnumerableItemType(propertyMap.DestinationPropertyInfo.PropertyType) : propertyMap.DestinationPropertyInfo.PropertyType) : null; if (!ReflectionHelper.IsAssignable(baseType, inheritanceMap.SourceType) || (type != null && !ReflectionHelper.IsAssignable(type, inheritanceMap.DestinationType)) || (type == null && map.DestinationType != inheritanceMap.DestinationType)) { throw new MapValidationException(string.Format(Resources.InheritanceMapIsNotForDerivedTypes3, (inheritanceMap != null) ? inheritanceMap.ToString() : string.Empty, (propertyMap != null) ? propertyMap.ToString() : string.Empty, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfInheritanceMapDuplicated(TypeMapBase map, IEnumerable<TypeMapBase> maps) { if (maps.Count((TypeMapBase m) => m.SourceType == map.SourceType || m.DestinationType == map.DestinationType) > 1) { throw new MapValidationException(string.Format(Resources.InheritanceMapDuplicated1, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfTypeMapHasMapperAndPropertyMaps(TypeMapBase map, Action<object, object, TypeMappingContext> mapper, IEnumerable<PropertyMapBase> propertyMaps) { if (mapper != null && propertyMaps != null && propertyMaps.Count<PropertyMapBase>() > 0) { throw new MapValidationException(string.Format(Resources.TypeMapHasMapperAndMaps1, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfPropertyMapHasMapperAndInheritanceMapsOrNothing(TypeMapBase typeMap, PropertyMapBase propertyMap, Action<object, object, TypeMappingContext> mapper, IEnumerable<TypeMapBase> inheritanceMaps) { if ((mapper != null && inheritanceMaps != null && inheritanceMaps.Count<TypeMapBase>() > 0) || (mapper == null && (inheritanceMaps == null || inheritanceMaps.Count<TypeMapBase>() == 0))) { throw new MapValidationException(string.Format(Resources.PropertyMapHasMapperAndInheritanceMapsOrNothing2, (propertyMap != null) ? propertyMap.ToString() : string.Empty, (typeMap != null) ? typeMap.ToString() : string.Empty), null); } } public static void MapValidationException_IfTypeMapperOrTypeUnMapperIsNotDefined(ReversiveTypeMap map) { if ((map.Mapper == null && map.UnMapper != null) || (map.Mapper != null && map.UnMapper == null)) { throw new MapValidationException(string.Format(Resources.TypeMapperOrTypeUnMapperIsNotDefined1, (map != null) ? map.ToString() : string.Empty), null); } } public static void MapValidationException_IfPropertyMapperOrPropertyUnMapperIsNotDefined( TypeMapBase typeMap, ReversivePropertyMap propertyMap) { if ((propertyMap.Mapper == null && propertyMap.UnMapper != null) || (propertyMap.Mapper != null && propertyMap.UnMapper == null)) { throw new MapValidationException(string.Format(Resources.PropertyMapperOrPropertyUnMapperIsNotDefined2, (propertyMap != null) ? propertyMap.ToString() : string.Empty, (typeMap != null) ? typeMap.ToString() : string.Empty), null); } } public static void MapValidationException_IfPropertyMapIsNotForBothComplexEnumerableOrComplexTypes( TypeMapBase typeMap, PropertyMapBase propertyMap) { Type propertyType = propertyMap.SourcePropertyInfo.PropertyType; Type type = (propertyMap.DestinationPropertyInfo != null) ? 
propertyMap.DestinationPropertyInfo.PropertyType : null; if ((type == null && (ReflectionHelper.IsComplexEnumerable(propertyType) || ReflectionHelper.IsSimple(propertyType))) || (type != null && ((ReflectionHelper.IsComplexEnumerable(propertyType) && ReflectionHelper.IsComplex(type)) || (ReflectionHelper.IsComplex(propertyType) && ReflectionHelper.IsComplexEnumerable(type)) || ReflectionHelper.IsSimple(propertyType) || ReflectionHelper.IsSimple(type)))) { throw new MapValidationException(string.Format(Resources.PropertyMapIsNotForBothComplexEnumerableOrComplexTypes2, (propertyMap != null) ? propertyMap.ToString() : string.Empty, (typeMap != null) ? typeMap.ToString() : string.Empty), null); } } public static void MapValidationException_TypeMapIsNotSupported(TypeMapBase map) { throw new MapValidationException(string.Format(Resources.TypeMapIsNotSupported1, (map != null) ? map.ToString() : string.Empty), null); } public static void MapValidationException_PropertyMapIsNotSupported(PropertyMapBase map) { throw new MapValidationException(string.Format(Resources.PropertyMapIsNotSupported1, (map != null) ? map.ToString() : string.Empty), null); } public static void MappingException_IfMapperIsNull(Action<object, object, TypeMappingContext> mapper, Type fromType) { if (mapper == null) { throw new MappingException(string.Format(Resources.MapperNotFound1, (fromType != null) ? fromType.FullName : string.Empty), null); } } #endregion } }
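// Illustrative call pattern for the guard helpers above. The class, method,
// and parameter names below are hypothetical; they only show how a public
// mapping API would validate its inputs up front and let Error throw the
// localized exceptions.
namespace Nmap.Internal
{
    internal static class ErrorUsageSketch
    {
        public static void ConfigureMap(object source, System.Type destinationType)
        {
            // Null checks first, then shape checks; each helper throws when its
            // condition is violated and is a no-op otherwise.
            Error.ArgumentNullException_IfNull(source, "source");
            Error.ArgumentNullException_IfNull(destinationType, "destinationType");
            Error.ArgumentException_IfSimpleOrSimpleEnumerable(destinationType, "destinationType");
        }
    }
}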
using System; using System.Text; using System.Data; using System.Data.SqlClient; using System.Data.Common; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Xml; using System.Xml.Serialization; using SubSonic; using SubSonic.Utilities; namespace DalSic { /// <summary> /// Strongly-typed collection for the AprFactorRiesgoPsicosocial class. /// </summary> [Serializable] public partial class AprFactorRiesgoPsicosocialCollection : ActiveList<AprFactorRiesgoPsicosocial, AprFactorRiesgoPsicosocialCollection> { public AprFactorRiesgoPsicosocialCollection() {} /// <summary> /// Filters an existing collection based on the set criteria. This is an in-memory filter /// Thanks to developingchris for this! /// </summary> /// <returns>AprFactorRiesgoPsicosocialCollection</returns> public AprFactorRiesgoPsicosocialCollection Filter() { for (int i = this.Count - 1; i > -1; i--) { AprFactorRiesgoPsicosocial o = this[i]; foreach (SubSonic.Where w in this.wheres) { bool remove = false; System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName); if (pi.CanRead) { object val = pi.GetValue(o, null); switch (w.Comparison) { case SubSonic.Comparison.Equals: if (!val.Equals(w.ParameterValue)) { remove = true; } break; } } if (remove) { this.Remove(o); break; } } } return this; } } /// <summary> /// This is an ActiveRecord class which wraps the APR_FactorRiesgoPsicosocial table. /// </summary> [Serializable] public partial class AprFactorRiesgoPsicosocial : ActiveRecord<AprFactorRiesgoPsicosocial>, IActiveRecord { #region .ctors and Default Settings public AprFactorRiesgoPsicosocial() { SetSQLProps(); InitSetDefaults(); MarkNew(); } private void InitSetDefaults() { SetDefaults(); } public AprFactorRiesgoPsicosocial(bool useDatabaseDefaults) { SetSQLProps(); if(useDatabaseDefaults) ForceDefaults(); MarkNew(); } public AprFactorRiesgoPsicosocial(object keyID) { SetSQLProps(); InitSetDefaults(); LoadByKey(keyID); } public AprFactorRiesgoPsicosocial(string columnName, object columnValue) { SetSQLProps(); InitSetDefaults(); LoadByParam(columnName,columnValue); } protected static void SetSQLProps() { GetTableSchema(); } #endregion #region Schema and Query Accessor public static Query CreateQuery() { return new Query(Schema); } public static TableSchema.Table Schema { get { if (BaseSchema == null) SetSQLProps(); return BaseSchema; } } private static void GetTableSchema() { if(!IsSchemaInitialized) { //Schema declaration TableSchema.Table schema = new TableSchema.Table("APR_FactorRiesgoPsicosocial", TableType.Table, DataService.GetInstance("sicProvider")); schema.Columns = new TableSchema.TableColumnCollection(); schema.SchemaName = @"dbo"; //columns TableSchema.TableColumn colvarIdFactorRiesgoPsicosocial = new TableSchema.TableColumn(schema); colvarIdFactorRiesgoPsicosocial.ColumnName = "idFactorRiesgoPsicosocial"; colvarIdFactorRiesgoPsicosocial.DataType = DbType.Int32; colvarIdFactorRiesgoPsicosocial.MaxLength = 0; colvarIdFactorRiesgoPsicosocial.AutoIncrement = true; colvarIdFactorRiesgoPsicosocial.IsNullable = false; colvarIdFactorRiesgoPsicosocial.IsPrimaryKey = true; colvarIdFactorRiesgoPsicosocial.IsForeignKey = false; colvarIdFactorRiesgoPsicosocial.IsReadOnly = false; colvarIdFactorRiesgoPsicosocial.DefaultSetting = @""; colvarIdFactorRiesgoPsicosocial.ForeignKeyTableName = ""; schema.Columns.Add(colvarIdFactorRiesgoPsicosocial); TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema); 
colvarNombre.ColumnName = "nombre"; colvarNombre.DataType = DbType.AnsiString; colvarNombre.MaxLength = 50; colvarNombre.AutoIncrement = false; colvarNombre.IsNullable = false; colvarNombre.IsPrimaryKey = false; colvarNombre.IsForeignKey = false; colvarNombre.IsReadOnly = false; colvarNombre.DefaultSetting = @""; colvarNombre.ForeignKeyTableName = ""; schema.Columns.Add(colvarNombre); BaseSchema = schema; //add this schema to the provider //so we can query it later DataService.Providers["sicProvider"].AddSchema("APR_FactorRiesgoPsicosocial",schema); } } #endregion #region Props [XmlAttribute("IdFactorRiesgoPsicosocial")] [Bindable(true)] public int IdFactorRiesgoPsicosocial { get { return GetColumnValue<int>(Columns.IdFactorRiesgoPsicosocial); } set { SetColumnValue(Columns.IdFactorRiesgoPsicosocial, value); } } [XmlAttribute("Nombre")] [Bindable(true)] public string Nombre { get { return GetColumnValue<string>(Columns.Nombre); } set { SetColumnValue(Columns.Nombre, value); } } #endregion //no foreign key tables defined (0) //no ManyToMany tables defined (0) #region ObjectDataSource support /// <summary> /// Inserts a record, can be used with the Object Data Source /// </summary> public static void Insert(string varNombre) { AprFactorRiesgoPsicosocial item = new AprFactorRiesgoPsicosocial(); item.Nombre = varNombre; if (System.Web.HttpContext.Current != null) item.Save(System.Web.HttpContext.Current.User.Identity.Name); else item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name); } /// <summary> /// Updates a record, can be used with the Object Data Source /// </summary> public static void Update(int varIdFactorRiesgoPsicosocial,string varNombre) { AprFactorRiesgoPsicosocial item = new AprFactorRiesgoPsicosocial(); item.IdFactorRiesgoPsicosocial = varIdFactorRiesgoPsicosocial; item.Nombre = varNombre; item.IsNew = false; if (System.Web.HttpContext.Current != null) item.Save(System.Web.HttpContext.Current.User.Identity.Name); else item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name); } #endregion #region Typed Columns public static TableSchema.TableColumn IdFactorRiesgoPsicosocialColumn { get { return Schema.Columns[0]; } } public static TableSchema.TableColumn NombreColumn { get { return Schema.Columns[1]; } } #endregion #region Columns Struct public struct Columns { public static string IdFactorRiesgoPsicosocial = @"idFactorRiesgoPsicosocial"; public static string Nombre = @"nombre"; } #endregion #region Update PK Collections #endregion #region Deep Save #endregion } }
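// A minimal usage sketch for the generated ActiveRecord class above, using only members that
// appear in this file (the constructors, the Nombre property, the static Insert/Update helpers).
// The parameterless Save() and the load-by-key behaviour come from the SubSonic ActiveRecord<T>
// base class and are assumed here rather than shown above; the sample values are placeholders.
using DalSic;

static class AprFactorRiesgoPsicosocialUsageSketch
{
    internal static void Demo()
    {
        // Create and persist a new row (Save() is inherited from ActiveRecord<T>).
        var factor = new AprFactorRiesgoPsicosocial();
        factor.Nombre = "Carga mental";
        factor.Save();

        // Load an existing row by primary key and update it.
        var existing = new AprFactorRiesgoPsicosocial(1); // keyID constructor shown above
        existing.Nombre = "Carga mental (rev.)";
        existing.Save();

        // The ObjectDataSource-style helpers above wrap the same insert/update operations.
        AprFactorRiesgoPsicosocial.Update(1, "Carga mental (rev.)");
    }
}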
// // Copyright (c) Microsoft Corporation. All rights reserved. // namespace Microsoft.Zelig.Emulation.ArmProcessor.FlashMemory { using System; using System.Collections.Generic; using EncDef = Microsoft.Zelig.TargetModel.ArmProcessor.EncodingDefinition; using ElementTypes = Microsoft.Zelig.MetaData.ElementTypes; //--// public class S29WS128N : Simulator.MemoryHandler { const uint Sector16KWord = 16 * 1024 * sizeof(ushort); const uint Sector64KWord = 64 * 1024 * sizeof(ushort); const uint ChipSize = 16 * 1024 * 1024; //--// static ushort[] sequence_CFI = new ushort[] { 0x555, 0x0098 }; static ushort[] sequence_PROGRAM = new ushort[] { 0x555, 0x00AA , 0x2AA, 0x0055 , 0x555, 0x00A0 }; static ushort[] sequence_ERASE = new ushort[] { 0x555, 0x00AA , 0x2AA, 0x0055 , 0x555, 0x0080 , 0x555, 0x00AA , 0x2AA, 0x0055 }; enum Mode { Normal , Query , Program, Erase , } class SequenceTracker { // // State // internal ushort[] m_sequence; internal int m_pos; internal ulong m_lastWrite; // // Constructor Methods // internal SequenceTracker( ushort[] sequence ) { m_sequence = sequence; m_pos = 0; } // // Helper Methods // internal void Reset() { m_pos = 0; } internal void Advance( uint relativeAddress , ushort value , Simulator owner ) { // // If the writes are too spaced out, abort. // if(m_pos > 0 && owner.ClockTicks - m_lastWrite > 5 * 30) { Reset(); return; } if(IsMatch( relativeAddress, value, m_sequence, m_pos ) == false) { Reset(); return; } m_pos += 2; m_lastWrite = owner.ClockTicks; } private static bool IsMatch( uint relativeAddress , ushort value , ushort[] sequence , int pos ) { return (sequence[pos ] == relativeAddress / sizeof(ushort) && sequence[pos+1] == value ); } // // Access Methods // internal bool IsRecognized { get { return m_pos == m_sequence.Length; } } } class BankState { static ushort[] query_results = new ushort[] { 0x10, 0x0051, // 0x11, 0x0052, // Query Unique ASCII string "QRY" 0x12, 0x0059, // 0x13, 0x0002, // Primary OEM Command Set 0x14, 0x0000, // 0x15, 0x0040, // Address for Primary Extended Table 0x16, 0x0000, // 0x17, 0x0000, // Alternate OEM Command Set 0x18, 0x0000, // 0x2C, 0x0003, // Number of Erase Block Regions within device. 
0x2D, 0x0003, // Erase Block Region 1 Information 0x2E, 0x0000, // 0x2F, 0x0080, // 0x30, 0x0000, // 0x31, 0x007D, // Erase Block Region 2 Information 0x32, 0x0000, // 0x33, 0x0000, // 0x34, 0x0002, // 0x35, 0x0003, // Erase Block Region 3 Information 0x36, 0x0000, // 0x37, 0x0080, // 0x38, 0x0000, // }; const ushort resetCommand = 0x00F0; const ushort eraseCommand = 0x0030; const ushort c_DQ7 = (ushort)(1u << 7); const ushort c_DQ6 = (ushort)(1u << 6); const ushort c_DQ3 = (ushort)(1u << 3); const ushort c_DQ2 = (ushort)(1u << 2); // // State // internal uint m_addressStart; internal uint m_addressEnd; internal uint[] m_sectorSizes; internal Mode m_mode; internal ushort m_lastStatusValue; internal ulong m_timer; internal int m_erasingSectorIndex; internal uint m_programmingAddress; internal ushort m_programmingValue; // // Constructor Methods // internal BankState( uint addressStart , params uint[] sectorDefinition ) { uint totalSize = 0; uint totalSectors = 0; for(int i = 0; i < sectorDefinition.Length; i += 2) { uint sectorSize = sectorDefinition[i ]; uint sectorNum = sectorDefinition[i+1]; totalSectors += sectorNum; totalSize += sectorNum * sectorSize; } m_addressStart = addressStart; m_addressEnd = addressStart + totalSize; m_sectorSizes = new uint[totalSectors]; m_mode = Mode.Normal; uint offset = 0; for(int i = 0; i < sectorDefinition.Length; i += 2) { uint sectorSize = sectorDefinition[i ]; uint sectorNum = sectorDefinition[i+1]; while(sectorNum-- > 0) { m_sectorSizes[offset++] = sectorSize; } } } // // Helper Methods // internal void HandleWrite( Simulator owner , uint address , ushort value ) { switch(m_mode) { case Mode.Normal: break; case Mode.Query: if(address == 0 && value == resetCommand) { m_mode = Mode.Normal; } break; case Mode.Program: m_programmingAddress = address; m_programmingValue = value; m_timer = owner.ClockTicks + 10 * 30; // BUGBUG: We need a way to convert from Ticks to Time. m_lastStatusValue = (ushort)(~value & c_DQ7); // DQ7# break; case Mode.Erase: uint offset = address - m_addressStart; for(int i = 0; i < m_sectorSizes.Length; i++) { if(offset < m_sectorSizes[i]) { m_erasingSectorIndex = i; break; } offset -= m_sectorSizes[i]; } //m_timer = owner.ClockTicks + 400 * 1000 * 30; // BUGBUG: We need a way to convert from Ticks to Time. m_timer = owner.ClockTicks + 40 * 30; // BUGBUG: We need a way to convert from Ticks to Time. m_lastStatusValue = c_DQ3; break; } } internal bool HandleRead( Simulator owner , uint address , out ushort value ) { switch(m_mode) { case Mode.Normal: break; case Mode.Query: for(int i = 0; i < query_results.Length; i += 2) { if(query_results[i] == address / sizeof(ushort)) { value = query_results[i+1]; return true; } } value = 0xFFFF; return true; case Mode.Program: if(owner.ClockTicks < m_timer) { value = m_lastStatusValue; m_lastStatusValue ^= c_DQ6; // Toggle DQ6. return true; } m_mode = Mode.Normal; break; case Mode.Erase: if(owner.ClockTicks < m_timer) { value = m_lastStatusValue; m_lastStatusValue ^= c_DQ6 | c_DQ2; // Toggle DQ6 and DQ2. 
return true; } m_mode = Mode.Normal; break; } value = 0xFFFF; return false; } internal bool FindSector( uint address , out uint sectorStart , out uint sectorEnd ) { if(m_addressStart <= address && address < m_addressEnd) { uint sector = m_addressStart; foreach(uint sectorSize in m_sectorSizes) { uint sectorNext = sector + sectorSize; if(sector <= address && address < sectorNext) { sectorStart = sector; sectorEnd = sectorNext; return true; } sector = sectorNext; } } sectorStart = 0; sectorEnd = 0; return false; } } // // State // BankState[] m_banks; Mode m_mode; SequenceTracker m_sequenceTracker_CFI; SequenceTracker m_sequenceTracker_Program; SequenceTracker m_sequenceTracker_Erase; // // Constructor Methods // public S29WS128N() { m_banks = new BankState[16]; m_mode = Mode.Normal; // // Bank 0 // m_banks[0] = new BankState( 0, Sector16KWord, 4, Sector64KWord, 7 ); // // Bank 1-14 // for(int bank = 1; bank <= 14; bank++) { m_banks[bank] = new BankState( m_banks[bank-1].m_addressEnd, Sector64KWord, 8 ); } // // Bank 15 // m_banks[15] = new BankState( m_banks[14].m_addressEnd, Sector64KWord, 7, Sector16KWord, 4 ); //--// m_sequenceTracker_CFI = new SequenceTracker( sequence_CFI ); m_sequenceTracker_Program = new SequenceTracker( sequence_PROGRAM ); m_sequenceTracker_Erase = new SequenceTracker( sequence_ERASE ); } // // Helper Methods // public override void Initialize( Simulator owner , ulong rangeLength , uint rangeWidth , uint readLatency , uint writeLatency ) { base.Initialize( owner, rangeLength, rangeWidth, readLatency, writeLatency ); // // Erase the whole chip. // for(int i = 0; i < m_target.Length; i++) { m_target[i] = 0xFFFFFFFF; } } public override uint Read( uint address , uint relativeAddress , TargetAdapterAbstractionLayer.MemoryAccessType kind ) { if(m_owner.AreTimingUpdatesEnabled) { BankState bank; uint sectorStart; uint sectorEnd; if(FindSector( relativeAddress, out bank, out sectorStart, out sectorEnd )) { ushort value; if(bank.HandleRead( m_owner, relativeAddress, out value )) { UpdateClockTicksForLoad( address, kind ); return value; } } } return base.Read( address, relativeAddress, kind ); } public override void Write( uint address , uint relativeAddress , uint value , TargetAdapterAbstractionLayer.MemoryAccessType kind ) { if(m_owner.AreTimingUpdatesEnabled) { BankState bank; uint sectorStart; uint sectorEnd; if(kind != TargetAdapterAbstractionLayer.MemoryAccessType.UINT16) { throw new TargetAdapterAbstractionLayer.BusErrorException( address, kind ); } UpdateClockTicksForStore( address, kind ); ushort valueShort = (ushort)value; if(m_mode == Mode.Normal) { m_sequenceTracker_CFI .Advance( relativeAddress, valueShort, m_owner ); m_sequenceTracker_Program.Advance( relativeAddress, valueShort, m_owner ); m_sequenceTracker_Erase .Advance( relativeAddress, valueShort, m_owner ); if(m_sequenceTracker_CFI.IsRecognized) { foreach(var bank2 in m_banks) { bank2.m_mode = Mode.Query; } m_mode = Mode.Query; return; } if(m_sequenceTracker_Program.IsRecognized) { m_mode = Mode.Program; return; } if(m_sequenceTracker_Erase.IsRecognized) { m_mode = Mode.Erase; return; } } if(m_mode != Mode.Normal) { m_sequenceTracker_CFI .Reset(); m_sequenceTracker_Program.Reset(); m_sequenceTracker_Erase .Reset(); if(FindSector( relativeAddress, out bank, out sectorStart, out sectorEnd )) { bank.m_mode = m_mode; bank.HandleWrite( m_owner, relativeAddress, valueShort ); switch(m_mode) { case Mode.Query: if(bank.m_mode == Mode.Normal) { foreach(var bank2 in m_banks) { bank2.m_mode = Mode.Normal; } m_mode 
= Mode.Normal; } break; case Mode.Program: ushort lastValueShort = (ushort)base.Read( address, relativeAddress, kind ); // // Only update the flash if the new value doesn't include any 0 -> 1 transitions, which can only happen through an erase. // if((~lastValueShort & valueShort) == 0) { base.Write( address, relativeAddress, value, kind ); } m_mode = Mode.Normal; break; case Mode.Erase: while(sectorStart < sectorEnd) { base.Write( sectorStart, sectorStart, 0xFFFF, TargetAdapterAbstractionLayer.MemoryAccessType.UINT16 ); sectorStart += sizeof(ushort); } m_mode = Mode.Normal; break; } } } return; } base.Write( address, relativeAddress, value, kind ); } //--// bool FindSector( uint address , out BankState bank , out uint sectorStart , out uint sectorEnd ) { foreach(BankState bank2 in m_banks) { if(bank2.FindSector( address, out sectorStart, out sectorEnd )) { bank = bank2; return true; } } bank = null; sectorStart = 0; sectorEnd = 0; return false; } } }
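// The memory handler above recognizes Spansion/AMD-style command cycles: SequenceTracker
// compares relativeAddress / sizeof(ushort) against the word addresses in sequence_CFI,
// sequence_PROGRAM and sequence_ERASE (0x555 / 0x2AA unlock cycles). The sketch below lists
// the byte-offset/value writes a driver would issue to trigger a word program or a sector
// erase; it is a standalone illustration with hypothetical helper names, not part of the
// simulator. (CFI query entry is a single write of 0x0098 to word address 0x555.)
using System.Collections.Generic;

static class S29WS128NCommandSketch
{
    // Word program: three unlock/command cycles, then the (address, data) write itself,
    // which BankState.HandleWrite services while the bank is in Mode.Program.
    internal static IEnumerable<(uint byteOffset, ushort value)> ProgramWord(uint byteOffset, ushort data)
    {
        yield return (0x555u * sizeof(ushort), 0x00AA);
        yield return (0x2AAu * sizeof(ushort), 0x0055);
        yield return (0x555u * sizeof(ushort), 0x00A0);
        yield return (byteOffset, data);
    }

    // Sector erase: five unlock/command cycles, then eraseCommand (0x0030) written to any
    // address inside the sector to be erased (the handler uses the address to pick the sector).
    internal static IEnumerable<(uint byteOffset, ushort value)> EraseSector(uint anyByteOffsetInSector)
    {
        yield return (0x555u * sizeof(ushort), 0x00AA);
        yield return (0x2AAu * sizeof(ushort), 0x0055);
        yield return (0x555u * sizeof(ushort), 0x0080);
        yield return (0x555u * sizeof(ushort), 0x00AA);
        yield return (0x2AAu * sizeof(ushort), 0x0055);
        yield return (anyByteOffsetInSector, 0x0030);
    }
}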
#region License /* * All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ #endregion using System; using System.Globalization; using Quartz.Spi; namespace Quartz.Impl.Triggers { /// <summary> /// The base abstract class to be extended by all triggers. /// </summary> /// <remarks> /// <para> /// <see cref="ITrigger" />s have a name and group associated with them, which /// should uniquely identify them within a single <see cref="IScheduler" />. /// </para> /// /// <para> /// <see cref="ITrigger" />s are the 'mechanism' by which <see cref="IJob" /> s /// are scheduled. Many <see cref="ITrigger" /> s can point to the same <see cref="IJob" />, /// but a single <see cref="ITrigger" /> can only point to one <see cref="IJob" />. /// </para> /// /// <para> /// Triggers can 'send' parameters/data to <see cref="IJob" />s by placing contents /// into the <see cref="JobDataMap" /> on the <see cref="ITrigger" />. /// </para> /// </remarks> /// <seealso cref="ISimpleTrigger" /> /// <seealso cref="ICronTrigger" /> /// <seealso cref="IDailyTimeIntervalTrigger" /> /// <seealso cref="JobDataMap" /> /// <seealso cref="IJobExecutionContext" /> /// <author>James House</author> /// <author>Sharada Jambula</author> /// <author>Marko Lahma (.NET)</author> [Serializable] public abstract class AbstractTrigger : IOperableTrigger, IEquatable<AbstractTrigger> { private string name; private string group = SchedulerConstants.DefaultGroup; private string jobName; private string jobGroup = SchedulerConstants.DefaultGroup; private string description; private JobDataMap jobDataMap; private string calendarName; private string fireInstanceId; private int misfireInstruction = Quartz.MisfireInstruction.InstructionNotSet; private DateTimeOffset? endTimeUtc; private DateTimeOffset startTimeUtc; private int priority = TriggerConstants.DefaultPriority; [NonSerialized] // we have the key in string fields private TriggerKey key; /// <summary> /// Get or sets the name of this <see cref="ITrigger" />. /// </summary> /// <exception cref="ArgumentException">If name is null or empty.</exception> public virtual string Name { get { return name; } set { if (value == null || value.Trim().Length == 0) { throw new ArgumentException("Trigger name cannot be null or empty."); } name = value; key = null; } } /// <summary> /// Get the group of this <see cref="ITrigger" />. If <see langword="null" />, Scheduler.DefaultGroup will be used. /// </summary> /// <exception cref="ArgumentException"> /// if group is an empty string. /// </exception> public virtual string Group { get { return group; } set { if (value != null && value.Trim().Length == 0) { throw new ArgumentException("Group name cannot be an empty string."); } if (value == null) { value = SchedulerConstants.DefaultGroup; } group = value; key = null; } } /// <summary> /// Get or set the name of the associated <see cref="IJobDetail" />. /// </summary> /// <exception cref="ArgumentException"> /// if jobName is null or empty. 
/// </exception> public virtual string JobName { get { return jobName; } set { if (value == null || value.Trim().Length == 0) { throw new ArgumentException("Job name cannot be null or empty."); } jobName = value; } } /// <summary> /// Gets or sets the name of the associated <see cref="IJobDetail" />'s /// group. If set with <see langword="null" />, Scheduler.DefaultGroup will be used. /// </summary> /// <exception cref="ArgumentException"> ArgumentException /// if group is an empty string. /// </exception> public virtual string JobGroup { get { return jobGroup; } set { if (value != null && value.Trim().Length == 0) { throw new ArgumentException("Group name cannot be null or empty."); } if (value == null) { value = SchedulerConstants.DefaultGroup; } jobGroup = value; } } /// <summary> /// Returns the 'full name' of the <see cref="ITrigger" /> in the format /// "group.name". /// </summary> public virtual string FullName { get { return group + "." + name; } } /// <summary> /// Gets the key. /// </summary> /// <value>The key.</value> public virtual TriggerKey Key { get { if(key == null) { key = new TriggerKey(Name, Group); } return key; } set { Name = value.Name; Group = value.Group; key = value; } } public JobKey JobKey { set { JobName = value.Name; JobGroup = value.Group; } get { if (JobName == null) { return null; } return new JobKey(JobName, JobGroup); } } /// <summary> /// Returns the 'full name' of the <see cref="IJob" /> that the <see cref="ITrigger" /> /// points to, in the format "group.name". /// </summary> public virtual string FullJobName { get { return jobGroup + "." + jobName; } } public TriggerBuilder GetTriggerBuilder() { return TriggerBuilder.Create() .ForJob(JobKey) .ModifiedByCalendar(CalendarName) .UsingJobData(JobDataMap) .WithDescription(Description) .EndAt(EndTimeUtc) .WithIdentity(Key) .WithPriority(Priority) .StartAt(StartTimeUtc) .WithSchedule(GetScheduleBuilder()); } public abstract IScheduleBuilder GetScheduleBuilder(); /// <summary> /// Get or set the description given to the <see cref="ITrigger" /> instance by /// its creator (if any). /// </summary> public virtual string Description { get { return description; } set { description = value; } } /// <summary> /// Get or set the <see cref="ICalendar" /> with the given name with /// this Trigger. Use <see langword="null" /> when setting to dis-associate a Calendar. /// </summary> public virtual string CalendarName { get { return calendarName; } set { calendarName = value; } } /// <summary> /// Get or set the <see cref="JobDataMap" /> that is associated with the /// <see cref="ITrigger" />. /// <para> /// Changes made to this map during job execution are not re-persisted, and /// in fact typically result in an illegal state. /// </para> /// </summary> public virtual JobDataMap JobDataMap { get { if (jobDataMap == null) { jobDataMap = new JobDataMap(); } return jobDataMap; } set { jobDataMap = value; } } /// <summary> /// Returns the last UTC time at which the <see cref="ITrigger" /> will fire, if /// the Trigger will repeat indefinitely, null will be returned. /// <para> /// Note that the return time *may* be in the past. /// </para> /// </summary> public abstract DateTimeOffset? 
FinalFireTimeUtc { get; } /// <summary> /// Get or set the instruction the <see cref="IScheduler" /> should be given for /// handling misfire situations for this <see cref="ITrigger" />- the /// concrete <see cref="ITrigger" /> type that you are using will have /// defined a set of additional MISFIRE_INSTRUCTION_XXX /// constants that may be passed to this method. /// <para> /// If not explicitly set, the default value is <see cref="Quartz.MisfireInstruction.InstructionNotSet" />. /// </para> /// </summary> /// <seealso cref="Quartz.MisfireInstruction.InstructionNotSet" /> /// <seealso cref="UpdateAfterMisfire" /> /// <seealso cref="ISimpleTrigger" /> /// <seealso cref="ICronTrigger" /> public virtual int MisfireInstruction { get { return misfireInstruction; } set { if (!ValidateMisfireInstruction(value)) { throw new ArgumentException("The misfire instruction code is invalid for this type of trigger."); } misfireInstruction = value; } } /// <summary> /// This method should not be used by the Quartz client. /// </summary> /// <remarks> /// Usable by <see cref="IJobStore" /> /// implementations, in order to facilitate 'recognizing' instances of fired /// <see cref="ITrigger" /> s as their jobs complete execution. /// </remarks> public virtual string FireInstanceId { get { return fireInstanceId; } set { fireInstanceId = value; } } public abstract void SetNextFireTimeUtc(DateTimeOffset? nextFireTime); public abstract void SetPreviousFireTimeUtc(DateTimeOffset? previousFireTime); /// <summary> /// Returns the previous time at which the <see cref="ITrigger" /> fired. /// If the trigger has not yet fired, <see langword="null" /> will be returned. /// </summary> public abstract DateTimeOffset? GetPreviousFireTimeUtc(); /// <summary> /// Gets and sets the date/time on which the trigger must stop firing. This /// defines the final boundary for trigger firings &#x8212; the trigger will /// not fire after this date and time. If this value is null, no end time /// boundary is assumed, and the trigger can continue indefinitely. /// </summary> public virtual DateTimeOffset? EndTimeUtc { get { return endTimeUtc; } set { DateTimeOffset sTime = StartTimeUtc; if (value.HasValue && (sTime > value.Value)) { throw new ArgumentException("End time cannot be before start time"); } endTimeUtc = value; } } /// <summary> /// The time at which the trigger's scheduling should start. May or may not /// be the first actual fire time of the trigger, depending upon the type of /// trigger and the settings of the other properties of the trigger. However /// the first actual fire time will not be before this date. /// </summary> /// <remarks> /// Setting a value in the past may cause a new trigger to compute a first /// fire time that is in the past, which may cause an immediate misfire /// of the trigger. /// </remarks> public virtual DateTimeOffset StartTimeUtc { get { return startTimeUtc; } set { if (EndTimeUtc.HasValue && EndTimeUtc.Value < value) { throw new ArgumentException("End time cannot be before start time"); } if (!HasMillisecondPrecision) { // round off millisecond... startTimeUtc = value.AddMilliseconds(-value.Millisecond); } else { startTimeUtc = value; } } } /// <summary> /// Tells whether this Trigger instance can handle events /// in millisecond precision. /// </summary> public abstract bool HasMillisecondPrecision { get; } /// <summary> /// Create a <see cref="ITrigger" /> with no specified name, group, or <see cref="IJobDetail" />.
/// </summary> /// <remarks> /// Note that the <see cref="Name" />, <see cref="Group" /> and /// the <see cref="JobName" /> and <see cref="JobGroup" /> properties /// must be set before the <see cref="ITrigger" /> can be placed into a /// <see cref="IScheduler" />. /// </remarks> protected AbstractTrigger() { // do nothing... } /// <summary> /// Create a <see cref="ITrigger" /> with the given name, and default group. /// </summary> /// <remarks> /// Note that the <see cref="JobName" /> and <see cref="JobGroup" /> /// properties must be set before the <see cref="ITrigger" /> /// can be placed into a <see cref="IScheduler" />. /// </remarks> /// <param name="name">The name.</param> protected AbstractTrigger(string name) : this(name, null) { } /// <summary> /// Create a <see cref="ITrigger" /> with the given name, and group. /// </summary> /// <remarks> /// Note that the <see cref="JobName" /> and <see cref="JobGroup" /> /// properties must be set before the <see cref="ITrigger" /> /// can be placed into a <see cref="IScheduler" />. /// </remarks> /// <param name="name">The name.</param> /// <param name="group">if <see langword="null" />, Scheduler.DefaultGroup will be used.</param> protected AbstractTrigger(string name, string group) { Name = name; Group = group; } /// <summary> /// Create a <see cref="ITrigger" /> with the given name, and group. /// </summary> /// <param name="name">The name.</param> /// <param name="group">if <see langword="null" />, Scheduler.DefaultGroup will be used.</param> /// <param name="jobName">Name of the job.</param> /// <param name="jobGroup">The job group.</param> /// <exception cref="ArgumentException"> ArgumentException /// if name is null or empty, or the group is an empty string. /// </exception> protected AbstractTrigger(string name, string group, string jobName, string jobGroup) { Name = name; Group = group; JobName = jobName; JobGroup = jobGroup; } /// <summary> /// The priority of a <see cref="ITrigger" /> acts as a tie breaker such that if /// two <see cref="ITrigger" />s have the same scheduled fire time, then Quartz /// will do its best to give the one with the higher priority first access /// to a worker thread. /// </summary> /// <remarks> /// If not explicitly set, the default value is <i>5</i>. /// </remarks> /// <returns></returns> /// <see cref="TriggerConstants.DefaultPriority" /> public virtual int Priority { get { return priority; } set { priority = value; } } /// <summary> /// This method should not be used by the Quartz client. /// </summary> /// <remarks> /// Called when the <see cref="IScheduler" /> has decided to 'fire' /// the trigger (Execute the associated <see cref="IJob" />), in order to /// give the <see cref="ITrigger" /> a chance to update itself for its next /// triggering (if any). /// </remarks> /// <seealso cref="JobExecutionException" /> public abstract void Triggered(ICalendar cal); /// <summary> /// This method should not be used by the Quartz client. /// </summary> /// <remarks> /// <para> /// Called by the scheduler at the time a <see cref="ITrigger" /> is first /// added to the scheduler, in order to have the <see cref="ITrigger" /> /// compute its first fire time, based on any associated calendar. /// </para> /// /// <para> /// After this method has been called, <see cref="GetNextFireTimeUtc" /> /// should return a valid answer. 
/// </para> /// </remarks> /// <returns> /// The first time at which the <see cref="ITrigger" /> will be fired /// by the scheduler, which is also the same value <see cref="GetNextFireTimeUtc" /> /// will return (until after the first firing of the <see cref="ITrigger" />). /// </returns> public abstract DateTimeOffset? ComputeFirstFireTimeUtc(ICalendar cal); /// <summary> /// This method should not be used by the Quartz client. /// </summary> /// <remarks> /// Called after the <see cref="IScheduler" /> has executed the /// <see cref="IJobDetail" /> associated with the <see cref="ITrigger" /> /// in order to get the final instruction code from the trigger. /// </remarks> /// <param name="context"> /// is the <see cref="IJobExecutionContext" /> that was used by the /// <see cref="IJob" />'s<see cref="IJob.Execute" /> method.</param> /// <param name="result">is the <see cref="JobExecutionException" /> thrown by the /// <see cref="IJob" />, if any (may be null). /// </param> /// <returns> /// One of the <see cref="SchedulerInstruction"/> members. /// </returns> /// <seealso cref="SchedulerInstruction" /> /// <seealso cref="Triggered" /> public virtual SchedulerInstruction ExecutionComplete(IJobExecutionContext context, JobExecutionException result) { if (result != null && result.RefireImmediately) { return SchedulerInstruction.ReExecuteJob; } if (result != null && result.UnscheduleFiringTrigger) { return SchedulerInstruction.SetTriggerComplete; } if (result != null && result.UnscheduleAllTriggers) { return SchedulerInstruction.SetAllJobTriggersComplete; } if (!GetMayFireAgain()) { return SchedulerInstruction.DeleteTrigger; } return SchedulerInstruction.NoInstruction; } /// <summary> /// Used by the <see cref="IScheduler" /> to determine whether or not /// it is possible for this <see cref="ITrigger" /> to fire again. /// <para> /// If the returned value is <see langword="false" /> then the <see cref="IScheduler" /> /// may remove the <see cref="ITrigger" /> from the <see cref="IJobStore" />. /// </para> /// </summary> public abstract bool GetMayFireAgain(); /// <summary> /// Returns the next time at which the <see cref="ITrigger" /> is scheduled to fire. If /// the trigger will not fire again, <see langword="null" /> will be returned. Note that /// the time returned can possibly be in the past, if the time that was computed /// for the trigger to next fire has already arrived, but the scheduler has not yet /// been able to fire the trigger (which would likely be due to lack of resources /// e.g. threads). /// </summary> ///<remarks> /// The value returned is not guaranteed to be valid until after the <see cref="ITrigger" /> /// has been added to the scheduler. /// </remarks> /// <returns></returns> public abstract DateTimeOffset? GetNextFireTimeUtc(); /// <summary> /// Returns the next time at which the <see cref="ITrigger" /> will fire, /// after the given time. If the trigger will not fire after the given time, /// <see langword="null" /> will be returned. /// </summary> public abstract DateTimeOffset? GetFireTimeAfter(DateTimeOffset? afterTime); /// <summary> /// Validates the misfire instruction. /// </summary> /// <param name="misfireInstruction">The misfire instruction.</param> /// <returns></returns> protected abstract bool ValidateMisfireInstruction(int misfireInstruction); /// <summary> /// This method should not be used by the Quartz client. /// <para> /// To be implemented by the concrete classes that extend this class. 
/// </para> /// <para> /// The implementation should update the <see cref="ITrigger" />'s state /// based on the MISFIRE_INSTRUCTION_XXX that was selected when the <see cref="ITrigger" /> /// was created. /// </para> /// </summary> public abstract void UpdateAfterMisfire(ICalendar cal); /// <summary> /// This method should not be used by the Quartz client. /// <para> /// The implementation should update the <see cref="ITrigger" />'s state /// based on the given new version of the associated <see cref="ICalendar" /> /// (the state should be updated so that its next fire time is appropriate /// given the Calendar's new settings). /// </para> /// </summary> /// <param name="cal"> </param> /// <param name="misfireThreshold"></param> public abstract void UpdateWithNewCalendar(ICalendar cal, TimeSpan misfireThreshold); /// <summary> /// Validates whether the properties of the <see cref="IJobDetail" /> are /// valid for submission into a <see cref="IScheduler" />. /// </summary> public virtual void Validate() { if (name == null) { throw new SchedulerException("Trigger's name cannot be null"); } if (group == null) { throw new SchedulerException("Trigger's group cannot be null"); } if (jobName == null) { throw new SchedulerException("Trigger's related Job's name cannot be null"); } if (jobGroup == null) { throw new SchedulerException("Trigger's related Job's group cannot be null"); } } /// <summary> /// Gets a value indicating whether this instance has additional properties /// that should be considered when for example saving to database. /// </summary> /// <remarks> /// If trigger implementation has additional properties that need to be saved /// with base properties you need to make your class override this property with value true. /// Returning true will effectively mean that ADOJobStore needs to serialize /// this trigger instance to make sure additional properties are also saved. /// </remarks> /// <value> /// <c>true</c> if this instance has additional properties; otherwise, <c>false</c>. /// </value> public virtual bool HasAdditionalProperties { get { return false; } } /// <summary> /// Return a simple string representation of this object. /// </summary> public override string ToString() { return string.Format( CultureInfo.InvariantCulture, "Trigger '{0}': triggerClass: '{1}' calendar: '{2}' misfireInstruction: {3} nextFireTime: {4}", FullName, GetType().FullName, CalendarName, MisfireInstruction, GetNextFireTimeUtc()); } /// <summary> /// Compare the next fire time of this <see cref="ITrigger" /> to that of /// another by comparing their keys, or in other words, sorts them /// according to the natural (i.e. alphabetical) order of their keys. /// </summary> /// <param name="other"></param> /// <returns></returns> public virtual int CompareTo(ITrigger other) { if ((other == null || other.Key == null) && Key == null) { return 0; } if (other == null || other.Key == null) { return -1; } if (Key == null) { return 1; } return Key.CompareTo(other.Key); } /// <summary> /// Determines whether the specified <see cref="T:System.Object"></see> is equal to the current <see cref="T:System.Object"></see>. /// </summary> /// <param name="obj">The <see cref="T:System.Object"></see> to compare with the current <see cref="T:System.Object"></see>.</param> /// <returns> /// true if the specified <see cref="T:System.Object"></see> is equal to the current <see cref="T:System.Object"></see>; otherwise, false.
/// </returns> public override bool Equals(object obj) { return Equals(obj as AbstractTrigger); } /// <summary> /// Trigger equality is based upon the equality of the TriggerKey. /// </summary> /// <param name="trigger"></param> /// <returns>true if the key of this Trigger equals that of the given Trigger</returns> public virtual bool Equals(AbstractTrigger trigger) { if (trigger == null) { return false; } if (trigger.Key == null || Key == null) { return false; } return Key.Equals(trigger.Key); } /// <summary> /// Serves as a hash function for a particular type. <see cref="M:System.Object.GetHashCode"></see> is suitable for use in hashing algorithms and data structures like a hash table. /// </summary> /// <returns> /// A hash code for the current <see cref="T:System.Object"></see>. /// </returns> public override int GetHashCode() { if (Key == null) { return base.GetHashCode(); } return Key.GetHashCode(); } /// <summary> /// Creates a new object that is a copy of the current instance. /// </summary> /// <returns> /// A new object that is a copy of this instance. /// </returns> public virtual object Clone() { AbstractTrigger copy; try { copy = (AbstractTrigger)MemberwiseClone(); // Shallow copy the jobDataMap. Note that this means that if a user // modifies a value object in this map from the cloned Trigger // they will also be modifying this Trigger. if (jobDataMap != null) { copy.jobDataMap = (JobDataMap)jobDataMap.Clone(); } } catch (Exception ex) { throw new Exception("Not Cloneable.", ex); } return copy; } } }
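// GetTriggerBuilder() above round-trips a trigger's identity, job, schedule, priority, calendar,
// job data and start/end times through Quartz's fluent TriggerBuilder. A minimal usage sketch,
// assuming the standard public Quartz.NET builder API; the identities and the 10-second repeat
// schedule are placeholder values, not anything defined in this file.
using Quartz;

static class AbstractTriggerBuilderSketch
{
    internal static ITrigger BuildSampleTrigger()
    {
        return TriggerBuilder.Create()
            .WithIdentity("sampleTrigger", "sampleGroup")    // TriggerKey -> Name / Group
            .ForJob("sampleJob", "sampleGroup")              // JobKey -> JobName / JobGroup
            .StartNow()                                      // StartTimeUtc
            .WithPriority(10)                                // Priority (default is 5)
            .WithSimpleSchedule(s => s.WithIntervalInSeconds(10).RepeatForever())
            .Build();                                        // yields a concrete AbstractTrigger subclass
    }
}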
/* * Copyright 2021 Google LLC All Rights Reserved. * Use of this source code is governed by a BSD-style * license that can be found in the LICENSE file or at * https://developers.google.com/open-source/licenses/bsd */ // <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/type/timeofday.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Type { /// <summary>Holder for reflection information generated from google/type/timeofday.proto</summary> public static partial class TimeofdayReflection { #region Descriptor /// <summary>File descriptor for google/type/timeofday.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static TimeofdayReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Chtnb29nbGUvdHlwZS90aW1lb2ZkYXkucHJvdG8SC2dvb2dsZS50eXBlIksK", "CVRpbWVPZkRheRINCgVob3VycxgBIAEoBRIPCgdtaW51dGVzGAIgASgFEg8K", "B3NlY29uZHMYAyABKAUSDQoFbmFub3MYBCABKAVCbAoPY29tLmdvb2dsZS50", "eXBlQg5UaW1lT2ZEYXlQcm90b1ABWj5nb29nbGUuZ29sYW5nLm9yZy9nZW5w", "cm90by9nb29nbGVhcGlzL3R5cGUvdGltZW9mZGF5O3RpbWVvZmRhefgBAaIC", "A0dUUGIGcHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Type.TimeOfDay), global::Google.Type.TimeOfDay.Parser, new[]{ "Hours", "Minutes", "Seconds", "Nanos" }, null, null, null, null) })); } #endregion } #region Messages /// <summary> /// Represents a time of day. The date and time zone are either not significant /// or are specified elsewhere. An API may choose to allow leap seconds. Related /// types are [google.type.Date][google.type.Date] and /// `google.protobuf.Timestamp`. 
/// </summary> public sealed partial class TimeOfDay : pb::IMessage<TimeOfDay> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<TimeOfDay> _parser = new pb::MessageParser<TimeOfDay>(() => new TimeOfDay()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<TimeOfDay> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Type.TimeofdayReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public TimeOfDay() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public TimeOfDay(TimeOfDay other) : this() { hours_ = other.hours_; minutes_ = other.minutes_; seconds_ = other.seconds_; nanos_ = other.nanos_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public TimeOfDay Clone() { return new TimeOfDay(this); } /// <summary>Field number for the "hours" field.</summary> public const int HoursFieldNumber = 1; private int hours_; /// <summary> /// Hours of day in 24 hour format. Should be from 0 to 23. An API may choose /// to allow the value "24:00:00" for scenarios like business closing time. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int Hours { get { return hours_; } set { hours_ = value; } } /// <summary>Field number for the "minutes" field.</summary> public const int MinutesFieldNumber = 2; private int minutes_; /// <summary> /// Minutes of hour of day. Must be from 0 to 59. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int Minutes { get { return minutes_; } set { minutes_ = value; } } /// <summary>Field number for the "seconds" field.</summary> public const int SecondsFieldNumber = 3; private int seconds_; /// <summary> /// Seconds of minutes of the time. Must normally be from 0 to 59. An API may /// allow the value 60 if it allows leap-seconds. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int Seconds { get { return seconds_; } set { seconds_ = value; } } /// <summary>Field number for the "nanos" field.</summary> public const int NanosFieldNumber = 4; private int nanos_; /// <summary> /// Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. 
/// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int Nanos { get { return nanos_; } set { nanos_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as TimeOfDay); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(TimeOfDay other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Hours != other.Hours) return false; if (Minutes != other.Minutes) return false; if (Seconds != other.Seconds) return false; if (Nanos != other.Nanos) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; if (Hours != 0) hash ^= Hours.GetHashCode(); if (Minutes != 0) hash ^= Minutes.GetHashCode(); if (Seconds != 0) hash ^= Seconds.GetHashCode(); if (Nanos != 0) hash ^= Nanos.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else if (Hours != 0) { output.WriteRawTag(8); output.WriteInt32(Hours); } if (Minutes != 0) { output.WriteRawTag(16); output.WriteInt32(Minutes); } if (Seconds != 0) { output.WriteRawTag(24); output.WriteInt32(Seconds); } if (Nanos != 0) { output.WriteRawTag(32); output.WriteInt32(Nanos); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { if (Hours != 0) { output.WriteRawTag(8); output.WriteInt32(Hours); } if (Minutes != 0) { output.WriteRawTag(16); output.WriteInt32(Minutes); } if (Seconds != 0) { output.WriteRawTag(24); output.WriteInt32(Seconds); } if (Nanos != 0) { output.WriteRawTag(32); output.WriteInt32(Nanos); } if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; if (Hours != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Hours); } if (Minutes != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Minutes); } if (Seconds != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Seconds); } if (Nanos != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Nanos); } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(TimeOfDay other) { if (other == null) { return; } if 
(other.Hours != 0) { Hours = other.Hours; } if (other.Minutes != 0) { Minutes = other.Minutes; } if (other.Seconds != 0) { Seconds = other.Seconds; } if (other.Nanos != 0) { Nanos = other.Nanos; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 8: { Hours = input.ReadInt32(); break; } case 16: { Minutes = input.ReadInt32(); break; } case 24: { Seconds = input.ReadInt32(); break; } case 32: { Nanos = input.ReadInt32(); break; } } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; case 8: { Hours = input.ReadInt32(); break; } case 16: { Minutes = input.ReadInt32(); break; } case 24: { Seconds = input.ReadInt32(); break; } case 32: { Nanos = input.ReadInt32(); break; } } } } #endif } #endregion } #endregion Designer generated code
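// A minimal round-trip sketch for the generated TimeOfDay message above, using the standard
// Google.Protobuf runtime helpers (MessageExtensions.ToByteArray and Parser.ParseFrom). The
// field values are sample data only.
using Google.Protobuf;
using Google.Type;

static class TimeOfDayRoundTripSketch
{
    internal static TimeOfDay RoundTrip()
    {
        var closingTime = new TimeOfDay { Hours = 17, Minutes = 30, Seconds = 0, Nanos = 0 };

        // Serialize to the binary wire format and parse it back.
        byte[] wire = closingTime.ToByteArray();
        TimeOfDay parsed = TimeOfDay.Parser.ParseFrom(wire);

        // Equality is field-by-field, as implemented in Equals(TimeOfDay) above.
        System.Diagnostics.Debug.Assert(parsed.Equals(closingTime));
        return parsed;
    }
}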
// // This file was generated by the BinaryNotes compiler. // See http://bnotes.sourceforge.net // Any modifications to this file will be lost upon recompilation of the source ASN.1. // using System.Collections.Generic; using GSF.ASN1; using GSF.ASN1.Attributes; using GSF.ASN1.Coders; namespace GSF.MMS.Model { [ASN1PreparedElement] [ASN1Choice(Name = "ControlElement")] public class ControlElement : IASN1PreparedElement { private static readonly IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(ControlElement)); private BeginDomainDefSequenceType beginDomainDef_; private bool beginDomainDef_selected; private ContinueDomainDefSequenceType continueDomainDef_; private bool continueDomainDef_selected; private Identifier endDomainDef_; private bool endDomainDef_selected; private PiDefinitionSequenceType piDefinition_; private bool piDefinition_selected; [ASN1Element(Name = "beginDomainDef", IsOptional = false, HasTag = true, Tag = 0, HasDefaultValue = false)] public BeginDomainDefSequenceType BeginDomainDef { get { return beginDomainDef_; } set { selectBeginDomainDef(value); } } [ASN1Element(Name = "continueDomainDef", IsOptional = false, HasTag = true, Tag = 1, HasDefaultValue = false)] public ContinueDomainDefSequenceType ContinueDomainDef { get { return continueDomainDef_; } set { selectContinueDomainDef(value); } } [ASN1Element(Name = "endDomainDef", IsOptional = false, HasTag = true, Tag = 2, HasDefaultValue = false)] public Identifier EndDomainDef { get { return endDomainDef_; } set { selectEndDomainDef(value); } } [ASN1Element(Name = "piDefinition", IsOptional = false, HasTag = true, Tag = 3, HasDefaultValue = false)] public PiDefinitionSequenceType PiDefinition { get { return piDefinition_; } set { selectPiDefinition(value); } } public void initWithDefaults() { } public IASN1PreparedElementData PreparedData { get { return preparedData; } } public bool isBeginDomainDefSelected() { return beginDomainDef_selected; } public void selectBeginDomainDef(BeginDomainDefSequenceType val) { beginDomainDef_ = val; beginDomainDef_selected = true; continueDomainDef_selected = false; endDomainDef_selected = false; piDefinition_selected = false; } public bool isContinueDomainDefSelected() { return continueDomainDef_selected; } public void selectContinueDomainDef(ContinueDomainDefSequenceType val) { continueDomainDef_ = val; continueDomainDef_selected = true; beginDomainDef_selected = false; endDomainDef_selected = false; piDefinition_selected = false; } public bool isEndDomainDefSelected() { return endDomainDef_selected; } public void selectEndDomainDef(Identifier val) { endDomainDef_ = val; endDomainDef_selected = true; beginDomainDef_selected = false; continueDomainDef_selected = false; piDefinition_selected = false; } public bool isPiDefinitionSelected() { return piDefinition_selected; } public void selectPiDefinition(PiDefinitionSequenceType val) { piDefinition_ = val; piDefinition_selected = true; beginDomainDef_selected = false; continueDomainDef_selected = false; endDomainDef_selected = false; } [ASN1PreparedElement] [ASN1Sequence(Name = "beginDomainDef", IsSet = false)] public class BeginDomainDefSequenceType : IASN1PreparedElement { private static IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(BeginDomainDefSequenceType)); private ICollection<MMSString> capabilities_; private Identifier domainName_; private LoadData loadData_; private bool loadData_present; private bool sharable_; [ASN1Element(Name = 
"domainName", IsOptional = false, HasTag = true, Tag = 1, HasDefaultValue = false)] public Identifier DomainName { get { return domainName_; } set { domainName_ = value; } } [ASN1SequenceOf(Name = "capabilities", IsSetOf = false)] [ASN1Element(Name = "capabilities", IsOptional = false, HasTag = true, Tag = 2, HasDefaultValue = false)] public ICollection<MMSString> Capabilities { get { return capabilities_; } set { capabilities_ = value; } } [ASN1Boolean(Name = "")] [ASN1Element(Name = "sharable", IsOptional = false, HasTag = true, Tag = 3, HasDefaultValue = false)] public bool Sharable { get { return sharable_; } set { sharable_ = value; } } [ASN1Element(Name = "loadData", IsOptional = true, HasTag = true, Tag = 4, HasDefaultValue = false)] public LoadData LoadData { get { return loadData_; } set { loadData_ = value; loadData_present = true; } } public void initWithDefaults() { } public IASN1PreparedElementData PreparedData { get { return preparedData; } } public bool isLoadDataPresent() { return loadData_present; } } [ASN1PreparedElement] [ASN1Sequence(Name = "continueDomainDef", IsSet = false)] public class ContinueDomainDefSequenceType : IASN1PreparedElement { private static IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(ContinueDomainDefSequenceType)); private Identifier domainName_; private LoadData loadData_; [ASN1Element(Name = "domainName", IsOptional = false, HasTag = true, Tag = 1, HasDefaultValue = false)] public Identifier DomainName { get { return domainName_; } set { domainName_ = value; } } [ASN1Element(Name = "loadData", IsOptional = false, HasTag = true, Tag = 3, HasDefaultValue = false)] public LoadData LoadData { get { return loadData_; } set { loadData_ = value; } } public void initWithDefaults() { } public IASN1PreparedElementData PreparedData { get { return preparedData; } } } [ASN1PreparedElement] [ASN1Sequence(Name = "piDefinition", IsSet = false)] public class PiDefinitionSequenceType : IASN1PreparedElement { private static IASN1PreparedElementData preparedData = CoderFactory.getInstance().newPreparedElementData(typeof(PiDefinitionSequenceType)); private ICollection<Identifier> listOfDomains_; private bool monitorType_; private bool monitorType_present; private ProgramInvocationState pIState_; private bool pIState_present; private Identifier piName_; private bool reusable_; [ASN1Element(Name = "piName", IsOptional = false, HasTag = true, Tag = 0, HasDefaultValue = false)] public Identifier PiName { get { return piName_; } set { piName_ = value; } } [ASN1SequenceOf(Name = "listOfDomains", IsSetOf = false)] [ASN1Element(Name = "listOfDomains", IsOptional = false, HasTag = true, Tag = 1, HasDefaultValue = false)] public ICollection<Identifier> ListOfDomains { get { return listOfDomains_; } set { listOfDomains_ = value; } } [ASN1Boolean(Name = "")] [ASN1Element(Name = "reusable", IsOptional = false, HasTag = true, Tag = 2, HasDefaultValue = true)] public bool Reusable { get { return reusable_; } set { reusable_ = value; } } [ASN1Boolean(Name = "")] [ASN1Element(Name = "monitorType", IsOptional = true, HasTag = true, Tag = 3, HasDefaultValue = false)] public bool MonitorType { get { return monitorType_; } set { monitorType_ = value; monitorType_present = true; } } [ASN1Element(Name = "pIState", IsOptional = true, HasTag = true, Tag = 4, HasDefaultValue = false)] public ProgramInvocationState PIState { get { return pIState_; } set { pIState_ = value; pIState_present = true; } } public void initWithDefaults() { bool 
param_Reusable = false; Reusable = param_Reusable; } public IASN1PreparedElementData PreparedData { get { return preparedData; } } public bool isMonitorTypePresent() { return monitorType_present; } public bool isPIStatePresent() { return pIState_present; } } } }
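// ControlElement above models an ASN.1 CHOICE: assigning any one alternative (through its
// property setter or selectXxx method) marks it selected and clears the other three selection
// flags. A minimal sketch of that behaviour, taking an already-built Identifier so that no
// assumptions are made about how Identifier itself is constructed.
using System.Diagnostics;
using GSF.MMS.Model;

static class ControlElementChoiceSketch
{
    internal static ControlElement SelectEndDomain(Identifier domainName)
    {
        var element = new ControlElement();
        element.EndDomainDef = domainName; // property setter calls selectEndDomainDef(...)

        Debug.Assert(element.isEndDomainDefSelected());
        Debug.Assert(!element.isBeginDomainDefSelected()
                     && !element.isContinueDomainDefSelected()
                     && !element.isPiDefinitionSelected());
        return element;
    }
}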
using Gedcomx.Model.Rt; using Gedcomx.Model.Util; using Gx.Common; using Gx.Records; using Gx.Types; // <auto-generated> // // // Generated by <a href="http://enunciate.codehaus.org">Enunciate</a>. // </auto-generated> using System; using System.Xml.Serialization; using Newtonsoft.Json; using System.Collections.Generic; namespace Gx.Conclusion { /// <remarks> /// A relationship between two or more persons. /// </remarks> /// <summary> /// A relationship between two or more persons. /// </summary> [Serializable] [XmlType(Namespace = "http://gedcomx.org/v1/", TypeName = "Relationship")] [XmlRoot(Namespace = "http://gedcomx.org/v1/", ElementName = "relationship")] public partial class Relationship : Gx.Conclusion.Subject { private string _type; private Gx.Common.ResourceReference _person1; private Gx.Common.ResourceReference _person2; private List<Gx.Conclusion.Fact> _facts; private List<Gx.Records.Field> _fields; /// <summary> /// The type of this relationship. /// </summary> [XmlAttribute(AttributeName = "type")] [JsonProperty("type")] public string Type { get { return this._type; } set { this._type = value; } } /// <summary> /// Convenience property for treating Type as an enum. See Gx.Types.RelationshipTypeQNameUtil for details on getter/setter functionality. /// </summary> [XmlIgnore] [JsonIgnore] public Gx.Types.RelationshipType KnownType { get { return XmlQNameEnumUtil.GetEnumValue<RelationshipType>(this._type); } set { this._type = XmlQNameEnumUtil.GetNameValue(value); } } /// <summary> /// A reference to a person in the relationship. The name &quot;person1&quot; is used only to distinguish it from /// the other person in this relationship and implies neither order nor role. When the relationship type /// implies direction, it goes from &quot;person1&quot; to &quot;person2&quot;. /// </summary> [XmlElement(ElementName = "person1", Namespace = "http://gedcomx.org/v1/")] [JsonProperty("person1")] public Gx.Common.ResourceReference Person1 { get { return this._person1; } set { this._person1 = value; } } /// <summary> /// A reference to a person in the relationship. The name &quot;person2&quot; is used only to distinguish it from /// the other person in this relationship and implies neither order nor role. When the relationship type /// implies direction, it goes from &quot;person1&quot; to &quot;person2&quot;. /// </summary> [XmlElement(ElementName = "person2", Namespace = "http://gedcomx.org/v1/")] [JsonProperty("person2")] public Gx.Common.ResourceReference Person2 { get { return this._person2; } set { this._person2 = value; } } /// <summary> /// The fact conclusions for the relationship. /// </summary> [XmlElement(ElementName = "fact", Namespace = "http://gedcomx.org/v1/")] [JsonProperty("facts")] public List<Gx.Conclusion.Fact> Facts { get { return this._facts; } set { this._facts = value; } } /// <summary> /// The references to the record fields being used as evidence. /// </summary> [XmlElement(ElementName = "field", Namespace = "http://gedcomx.org/v1/")] [JsonProperty("fields")] public List<Gx.Records.Field> Fields { get { return this._fields; } set { this._fields = value; } } /// <summary> /// Add a fact conclusion. /// </summary> /// <param name="fact">The fact conclusion to be added.</param> public void AddFact(Fact fact) { if (fact != null) { if (Facts == null) { Facts = new List<Fact>(); } Facts.Add(fact); } } /** * Accept a visitor. * * @param visitor The visitor. 
*/ public void Accept(IGedcomxModelVisitor visitor) { visitor.VisitRelationship(this); } /** * Build out this relationship with a type. * @param type The type. * @return this. */ public Relationship SetType(String type) { Type = type; return this; } /** * Build out this relationship with a type. * @param type The type. * @return this. */ public Relationship SetType(RelationshipType type) { KnownType = type; return this; } /** * Build out this relationship with a reference to person 1. * * @param person1 person 1. * @return this. */ public Relationship SetPerson1(ResourceReference person1) { Person1 = person1; return this; } /** * Build out this relationship with a reference to person 1. * * @param person1 person 1. * @return this. */ public Relationship SetPerson1(Person person1) { if (person1.Id == null) { throw new ArgumentException("Cannot reference person1: no id."); } Person1 = new ResourceReference("#" + person1.Id); return this; } /** * Build out this relationship with a reference to person 2. * * @param person2 person 2. * @return this. */ public Relationship SetPerson2(ResourceReference person2) { Person2 = person2; return this; } /** * Build out this relationship with a reference to person 2. * * @param person2 person 2. * @return this. */ public Relationship SetPerson2(Person person2) { if (person2.Id == null) { throw new ArgumentException("Cannot reference person2: no id."); } SetPerson2(new ResourceReference("#" + person2.Id)); return this; } /** * Build out this relationship with a fact. * @param fact The fact. * @return this */ public Relationship SetFact(Fact fact) { AddFact(fact); return this; } /** * Build out this relationship with a field. * * @param field The field. * @return this. */ public Relationship SetField(Field field) { AddField(field); return this; } /** * Add a reference to the record field values being used as evidence. * * @param field The field to be added. */ public void AddField(Field field) { if (field != null) { if (_fields == null) { _fields = new List<Field>(); } _fields.Add(field); } } } }
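// A small usage sketch for the fluent Set* builders declared above. The ResourceReference(string)
// constructor is the same one used by SetPerson1/SetPerson2 in this file; RelationshipType.Couple
// is assumed to be one of the generated enum members, and the "#..." ids are placeholders.
using Gx.Common;
using Gx.Conclusion;
using Gx.Types;

static class RelationshipBuilderSketch
{
    internal static Relationship BuildCouple()
    {
        return new Relationship()
            .SetType(RelationshipType.Couple)
            .SetPerson1(new ResourceReference("#person-1"))
            .SetPerson2(new ResourceReference("#person-2"));
    }
}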
using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using Microsoft.Extensions.Logging; using Orleans.Runtime.Scheduler; namespace Orleans.Runtime.GrainDirectory { internal class AdaptiveDirectoryCacheMaintainer<TValue> : AsynchAgent { private static readonly TimeSpan SLEEP_TIME_BETWEEN_REFRESHES = Debugger.IsAttached ? TimeSpan.FromMinutes(5) : TimeSpan.FromMinutes(1); // this should be something like minTTL/4 private readonly AdaptiveGrainDirectoryCache<TValue> cache; private readonly LocalGrainDirectory router; private readonly Func<List<ActivationAddress>, TValue> updateFunc; private readonly IInternalGrainFactory grainFactory; private long lastNumAccesses; // for stats private long lastNumHits; // for stats internal AdaptiveDirectoryCacheMaintainer( LocalGrainDirectory router, AdaptiveGrainDirectoryCache<TValue> cache, Func<List<ActivationAddress>, TValue> updateFunc, IInternalGrainFactory grainFactory, ExecutorService executorService, ILoggerFactory loggerFactory) :base(executorService, loggerFactory) { this.updateFunc = updateFunc; this.grainFactory = grainFactory; this.router = router; this.cache = cache; lastNumAccesses = 0; lastNumHits = 0; OnFault = FaultBehavior.RestartOnFault; } protected override void Run() { while (router.Running) { // Run through all cache entries and do the following: // 1. If the entry is not expired, skip it // 2. If the entry is expired and was not accessed in the last time interval -- throw it away // 3. If the entry is expired and was accessed in the last time interval, put into "fetch-batch-requests" list // At the end of the process, fetch batch requests for entries that need to be refreshed // Upon receiving refreshing answers, if the entry was not changed, double its expiration timer. // If it was changed, update the cache and reset the expiration timer. // this dictionary holds a map between a silo address and the list of grains that need to be refreshed var fetchInBatchList = new Dictionary<SiloAddress, List<GrainId>>(); // get the list of cached grains // for debug only int cnt1 = 0, cnt2 = 0, cnt3 = 0, cnt4 = 0; // run through all cache entries var enumerator = cache.GetStoredEntries(); while (enumerator.MoveNext()) { var pair = enumerator.Current; GrainId grain = pair.Key; var entry = pair.Value; SiloAddress owner = router.CalculateTargetSilo(grain); if (owner == null) // Null means there's no other silo and we're shutting down, so skip this entry { continue; } if (owner.Equals(router.MyAddress)) { // we found our owned entry in the cache -- it is not supposed to happen unless there were // changes in the membership Log.Warn(ErrorCode.Runtime_Error_100185, "Grain {0} owned by {1} was found in the cache of {1}", grain, owner, owner); cache.Remove(grain); cnt1++; // for debug } else { if (entry == null) { // 0. If the entry was deleted in parallel, presumably due to cleanup after silo death cache.Remove(grain); // for debug cnt3++; } else if (!entry.IsExpired()) { // 1. If the entry is not expired, skip it cnt2++; // for debug } else if (entry.NumAccesses == 0) { // 2. If the entry is expired and was not accessed in the last time interval -- throw it away cache.Remove(grain); // for debug cnt3++; } else { // 3. 
If the entry is expired and was accessed in the last time interval, put into "fetch-batch-requests" list if (!fetchInBatchList.ContainsKey(owner)) { fetchInBatchList[owner] = new List<GrainId>(); } fetchInBatchList[owner].Add(grain); // And reset the entry's access count for next time entry.NumAccesses = 0; cnt4++; // for debug } } } if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} self-owned (and removed) {1}, kept {2}, removed {3} and tries to refresh {4} grains", router.MyAddress, cnt1, cnt2, cnt3, cnt4); // send batch requests SendBatchCacheRefreshRequests(fetchInBatchList); ProduceStats(); // recheck every X seconds (Consider making it a configurable parameter) Thread.Sleep(SLEEP_TIME_BETWEEN_REFRESHES); } } private void SendBatchCacheRefreshRequests(Dictionary<SiloAddress, List<GrainId>> refreshRequests) { foreach (SiloAddress silo in refreshRequests.Keys) { List<Tuple<GrainId, int>> cachedGrainAndETagList = BuildGrainAndETagList(refreshRequests[silo]); SiloAddress capture = silo; router.CacheValidationsSent.Increment(); // Send all of the items in one large request var validator = this.grainFactory.GetSystemTarget<IRemoteGrainDirectory>(Constants.DirectoryCacheValidatorId, capture); router.Scheduler.QueueTask(async () => { var response = await validator.LookUpMany(cachedGrainAndETagList); ProcessCacheRefreshResponse(capture, response); }, router.CacheValidator.SchedulingContext).Ignore(); if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} is sending request to silo {1} with {2} entries", router.MyAddress, silo, cachedGrainAndETagList.Count); } } private void ProcessCacheRefreshResponse( SiloAddress silo, IReadOnlyCollection<Tuple<GrainId, int, List<ActivationAddress>>> refreshResponse) { if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} received ProcessCacheRefreshResponse. #Response entries {1}.", router.MyAddress, refreshResponse.Count); int cnt1 = 0, cnt2 = 0, cnt3 = 0; // pass through returned results and update the cache if needed foreach (Tuple<GrainId, int, List<ActivationAddress>> tuple in refreshResponse) { if (tuple.Item3 != null) { // the server returned an updated entry var updated = updateFunc(tuple.Item3); cache.AddOrUpdate(tuple.Item1, updated, tuple.Item2); cnt1++; } else if (tuple.Item2 == -1) { // The server indicates that it does not own the grain anymore. // It could be that by now, the cache has been already updated and contains an entry received from another server (i.e., current owner for the grain). // For simplicity, we do not care about this corner case and simply remove the cache entry. cache.Remove(tuple.Item1); cnt2++; } else { // The server returned only a (not -1) generation number, indicating that we hold the most // updated copy of the grain's activations list. 
// Validate that the generation number in the request and the response are equal // Contract.Assert(tuple.Item2 == refreshRequest.Find(o => o.Item1 == tuple.Item1).Item2); // refresh the entry in the cache cache.MarkAsFresh(tuple.Item1); cnt3++; } } if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Silo {0} processed refresh response from {1} with {2} updated, {3} removed, {4} unchanged grains", router.MyAddress, silo, cnt1, cnt2, cnt3); } /// <summary> /// Gets the list of grains (all owned by the same silo) and produces a new list /// of tuples, where each tuple holds the grain and its generation counter currently stored in the cache /// </summary> /// <param name="grains">List of grains owned by the same silo</param> /// <returns>List of grains in input along with their generation counters stored in the cache</returns> private List<Tuple<GrainId, int>> BuildGrainAndETagList(IEnumerable<GrainId> grains) { var grainAndETagList = new List<Tuple<GrainId, int>>(); foreach (GrainId grain in grains) { // NOTE: should this be done with TryGet? Won't Get invoke the LRU getter function? AdaptiveGrainDirectoryCache<TValue>.GrainDirectoryCacheEntry entry = cache.Get(grain); if (entry != null) { grainAndETagList.Add(new Tuple<GrainId, int>(grain, entry.ETag)); } else { // this may happen only if the LRU cache is full and decided to drop this grain // while we try to refresh it Log.Warn(ErrorCode.Runtime_Error_100199, "Grain {0} disappeared from the cache during maintenance", grain); } } return grainAndETagList; } private void ProduceStats() { // We do not want to synchronize the access on NumAccesses and NumHits in cache to avoid performance issues. // Thus we take the current reading of these fields and calculate the stats. We might miss an access or two, // but it should not matter. long curNumAccesses = cache.NumAccesses; long curNumHits = cache.NumHits; long numAccesses = curNumAccesses - lastNumAccesses; long numHits = curNumHits - lastNumHits; if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("#accesses: {0}, hit-ratio: {1}%", numAccesses, (numHits / Math.Max(numAccesses, 0.00001)) * 100); lastNumAccesses = curNumAccesses; lastNumHits = curNumHits; } } }
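// --- Standalone sketch (not Orleans code) of the refresh-batching idea described in the Run() loop above ---
// Expired-but-recently-accessed entries are grouped by their owning silo so that one bulk lookup
// per silo can be issued instead of one request per entry. The string/Guid stand-ins for
// SiloAddress/GrainId and the CacheEntrySketch type are illustrative assumptions.
public static class RefreshBatchingSketch
{
    public sealed class CacheEntrySketch
    {
        public System.Guid Grain;   // stand-in for GrainId
        public string OwnerSilo;    // stand-in for SiloAddress
        public bool Expired;
        public int NumAccesses;
    }

    public static System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<System.Guid>>
        GroupRefreshCandidatesByOwner(System.Collections.Generic.IEnumerable<CacheEntrySketch> entries)
    {
        var batches = new System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<System.Guid>>();
        foreach (var e in entries)
        {
            // Fresh entries are kept as-is; expired-and-idle entries would simply be evicted.
            if (!e.Expired || e.NumAccesses == 0) continue;

            if (!batches.TryGetValue(e.OwnerSilo, out var list))
            {
                batches[e.OwnerSilo] = list = new System.Collections.Generic.List<System.Guid>();
            }
            list.Add(e.Grain);
        }
        return batches;
    }
}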
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.IO; using System.Reflection; using System.Runtime.Serialization.Formatters.Binary; using System.Runtime.Serialization.Formatters.Tests; using System.Threading; using System.Threading.Tasks; using Xunit; namespace System.Tests { public static partial class LazyTests { [Fact] public static void Ctor() { var lazyString = new Lazy<string>(); VerifyLazy(lazyString, "", hasValue: false, isValueCreated: false); var lazyObject = new Lazy<int>(); VerifyLazy(lazyObject, 0, hasValue: true, isValueCreated: false); } [Theory] [InlineData(true)] [InlineData(false)] public static void Ctor_Bool(bool isThreadSafe) { var lazyString = new Lazy<string>(isThreadSafe); VerifyLazy(lazyString, "", hasValue: false, isValueCreated: false); } [Fact] public static void Ctor_ValueFactory() { var lazyString = new Lazy<string>(() => "foo"); VerifyLazy(lazyString, "foo", hasValue: true, isValueCreated: false); var lazyInt = new Lazy<int>(() => 1); VerifyLazy(lazyInt, 1, hasValue: true, isValueCreated: false); } [Fact] public static void Ctor_ValueFactory_NullValueFactory_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("valueFactory", () => new Lazy<object>(null)); // Value factory is null } [Fact] public static void Ctor_LazyThreadSafetyMode() { var lazyString = new Lazy<string>(LazyThreadSafetyMode.PublicationOnly); VerifyLazy(lazyString, "", hasValue: false, isValueCreated: false); } [Fact] public static void Ctor_LazyThreadSafetyMode_InvalidMode_ThrowsArgumentOutOfRangeException() { AssertExtensions.Throws<ArgumentOutOfRangeException>("mode", () => new Lazy<string>(LazyThreadSafetyMode.None - 1)); // Invalid thread safety mode AssertExtensions.Throws<ArgumentOutOfRangeException>("mode", () => new Lazy<string>(LazyThreadSafetyMode.ExecutionAndPublication + 1)); // Invalid thread safety mode } [Theory] [InlineData(true)] [InlineData(false)] public static void Ctor_ValueFactory_Bool(bool isThreadSafe) { var lazyString = new Lazy<string>(() => "foo", isThreadSafe); VerifyLazy(lazyString, "foo", hasValue: true, isValueCreated: false); } [Fact] public static void Ctor_ValueFactory_Bool_NullValueFactory_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("valueFactory", () => new Lazy<object>(null, false)); // Value factory is null } [Fact] public static void Ctor_ValueFactory_LazyThreadSafetyMode() { var lazyString = new Lazy<string>(() => "foo", LazyThreadSafetyMode.PublicationOnly); VerifyLazy(lazyString, "foo", hasValue: true, isValueCreated: false); var lazyInt = new Lazy<int>(() => 1, LazyThreadSafetyMode.PublicationOnly); VerifyLazy(lazyInt, 1, hasValue: true, isValueCreated: false); } [Fact] public static void Ctor_ValueFactory_LazyThreadSafetyMode_Invalid() { AssertExtensions.Throws<ArgumentNullException>("valueFactory", () => new Lazy<object>(null, LazyThreadSafetyMode.PublicationOnly)); // Value factory is null AssertExtensions.Throws<ArgumentOutOfRangeException>("mode", () => new Lazy<string>(() => "foo", LazyThreadSafetyMode.None - 1)); // Invalid thread safety mode AssertExtensions.Throws<ArgumentOutOfRangeException>("mode", () => new Lazy<string>(() => "foof", LazyThreadSafetyMode.ExecutionAndPublication + 1)); // Invalid thread safety mode } [Fact] public static void ToString_DoesntForceAllocation() { var
lazy = new Lazy<object>(() => 1); Assert.NotEqual("1", lazy.ToString()); Assert.False(lazy.IsValueCreated); object tmp = lazy.Value; Assert.Equal("1", lazy.ToString()); } private static void Value_Invalid_Impl<T>(ref Lazy<T> x, Lazy<T> lazy) { x = lazy; Assert.Throws<InvalidOperationException>(() => lazy.Value); } [Fact] public static void Value_Invalid() { Lazy<int> x = null; Func<int> f = () => x.Value; Value_Invalid_Impl(ref x, new Lazy<int>(f)); Value_Invalid_Impl(ref x, new Lazy<int>(f, true)); Value_Invalid_Impl(ref x, new Lazy<int>(f, false)); Value_Invalid_Impl(ref x, new Lazy<int>(f, LazyThreadSafetyMode.ExecutionAndPublication)); Value_Invalid_Impl(ref x, new Lazy<int>(f, LazyThreadSafetyMode.None)); // When used with LazyThreadSafetyMode.PublicationOnly this causes a stack overflow // Value_Invalid_Impl(ref x, new Lazy<int>(f, LazyThreadSafetyMode.PublicationOnly)); } public class InitiallyExceptionThrowingCtor { public static int counter = 0; public static int getValue() { if (++counter < 5) throw new Exception(); else return counter; } public int Value { get; } public InitiallyExceptionThrowingCtor() { Value = getValue(); } } public static IEnumerable<object[]> Ctor_ExceptionRecovery_MemberData() { yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(), 5 }; yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(true), 5 }; yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(false), 5 }; yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(LazyThreadSafetyMode.ExecutionAndPublication), 5 }; yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(LazyThreadSafetyMode.None), 5 }; yield return new object[] { new Lazy<InitiallyExceptionThrowingCtor>(LazyThreadSafetyMode.PublicationOnly), 5 }; } [Theory] [MemberData(nameof(Ctor_ExceptionRecovery_MemberData))] public static void Ctor_ExceptionRecovery(Lazy<InitiallyExceptionThrowingCtor> lazy, int expected) { InitiallyExceptionThrowingCtor.counter = 0; InitiallyExceptionThrowingCtor result = null; for (var i = 0; i < 10; ++i) { try { result = lazy.Value; } catch (Exception) { } } Assert.Equal(result.Value, expected); } private static void Value_ExceptionRecovery_IntImpl(Lazy<int> lazy, ref int counter, int expected) { counter = 0; int result = 0; for (var i = 0; i < 10; ++i) { try { result = lazy.Value; } catch (Exception) { } } Assert.Equal(result, expected); } private static void Value_ExceptionRecovery_StringImpl(Lazy<string> lazy, ref int counter, string expected) { counter = 0; var result = default(string); for (var i = 0; i < 10; ++i) { try { result = lazy.Value; } catch (Exception) { } } Assert.Equal(expected, result); } [Fact] public static void Value_ExceptionRecovery() { int counter = 0; // set in test function var fint = new Func<int> (() => { if (++counter < 5) throw new Exception(); else return counter; }); var fobj = new Func<string>(() => { if (++counter < 5) throw new Exception(); else return counter.ToString(); }); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint), ref counter, 0); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint, true), ref counter, 0); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint, false), ref counter, 0); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint, LazyThreadSafetyMode.ExecutionAndPublication), ref counter, 0); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint, LazyThreadSafetyMode.None), ref counter, 0); Value_ExceptionRecovery_IntImpl(new Lazy<int>(fint, LazyThreadSafetyMode.PublicationOnly), ref 
counter, 5); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj), ref counter, null); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj, true), ref counter, null); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj, false), ref counter, null); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj, LazyThreadSafetyMode.ExecutionAndPublication), ref counter, null); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj, LazyThreadSafetyMode.None), ref counter, null); Value_ExceptionRecovery_StringImpl(new Lazy<string>(fobj, LazyThreadSafetyMode.PublicationOnly), ref counter, 5.ToString()); } class MyException : Exception { public int Value { get; } public MyException(int value) { Value = value; } } public class ExceptionInCtor { public ExceptionInCtor() : this(99) { } public ExceptionInCtor(int value) { throw new MyException(value); } } public static IEnumerable<object[]> Value_Func_Exception_MemberData() { yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, true) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, false) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.None) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.PublicationOnly) }; } [Theory] [MemberData(nameof(Value_Func_Exception_MemberData))] public static void Value_Func_Exception(Lazy<int> lazy) { Assert.Throws<MyException>(() => lazy.Value); } public static IEnumerable<object[]> Value_FuncCtor_Exception_MemberData() { yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99)) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), true) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), false) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.None) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.PublicationOnly) }; } [Theory] [MemberData(nameof(Value_FuncCtor_Exception_MemberData))] public static void Value_FuncCtor_Exception(Lazy<ExceptionInCtor> lazy) { Assert.Throws<MyException>(() => lazy.Value); } public static IEnumerable<object[]> Value_TargetInvocationException_MemberData() { yield return new object[] { new Lazy<ExceptionInCtor>() }; yield return new object[] { new Lazy<ExceptionInCtor>(true) }; yield return new object[] { new Lazy<ExceptionInCtor>(false) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.None) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.PublicationOnly) }; } [Theory] [MemberData(nameof(Value_TargetInvocationException_MemberData))] public static void Value_TargetInvocationException(Lazy<ExceptionInCtor> lazy) { Assert.Throws<TargetInvocationException>(() => lazy.Value); } public static IEnumerable<object[]> Exceptions_Func_Idempotent_MemberData() { yield return new object[] { new Lazy<int>(() => { 
throw new MyException(99); }) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, true) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, false) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.None) }; } [Theory] [MemberData(nameof(Exceptions_Func_Idempotent_MemberData))] public static void Exceptions_Func_Idempotent(Lazy<int> x) { var e = Assert.ThrowsAny<Exception>(() => x.Value); Assert.Same(e, Assert.ThrowsAny<Exception>(() => x.Value)); } public static IEnumerable<object[]> Exceptions_Ctor_Idempotent_MemberData() { yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99)) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), true) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), false) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.None) }; } [Theory] [MemberData(nameof(Exceptions_Ctor_Idempotent_MemberData))] public static void Exceptions_Ctor_Idempotent(Lazy<ExceptionInCtor> x) { var e = Assert.ThrowsAny<Exception>(() => x.Value); Assert.Same(e, Assert.ThrowsAny<Exception>(() => x.Value)); } public static IEnumerable<object[]> Exceptions_Func_NotIdempotent_MemberData() { yield return new object[] { new Lazy<int>(() => { throw new MyException(99); }, LazyThreadSafetyMode.PublicationOnly) }; } public static IEnumerable<object[]> Exceptions_Ctor_NotIdempotent_MemberData() { yield return new object[] { new Lazy<ExceptionInCtor>() }; yield return new object[] { new Lazy<ExceptionInCtor>(true) }; yield return new object[] { new Lazy<ExceptionInCtor>(false) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.ExecutionAndPublication) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.None) }; yield return new object[] { new Lazy<ExceptionInCtor>(LazyThreadSafetyMode.PublicationOnly) }; yield return new object[] { new Lazy<ExceptionInCtor>(() => new ExceptionInCtor(99), LazyThreadSafetyMode.PublicationOnly) }; } [Theory] [MemberData(nameof(Exceptions_Func_NotIdempotent_MemberData))] public static void Exceptions_Func_NotIdempotent(Lazy<int> x) { var e = Assert.ThrowsAny<Exception>(() => x.Value); Assert.NotSame(e, Assert.ThrowsAny<Exception>(() => x.Value)); } [Theory] [MemberData(nameof(Exceptions_Ctor_NotIdempotent_MemberData))] public static void Exceptions_Ctor_NotIdempotent(Lazy<ExceptionInCtor> x) { var e = Assert.ThrowsAny<Exception>(() => x.Value); Assert.NotSame(e, Assert.ThrowsAny<Exception>(() => x.Value)); } [Fact] public static void Serialization_ValueType() { var stream = new MemoryStream(); var formatter = new BinaryFormatter(); formatter.Serialize(stream, new Lazy<int>(() => 42)); stream.Seek(0, SeekOrigin.Begin); var fortytwo = (Lazy<int>)formatter.Deserialize(stream); Assert.True(fortytwo.IsValueCreated); Assert.Equal(fortytwo.Value, 42); } [Fact] public static void Serialization_RefType() { var stream = new MemoryStream(); var formatter = new BinaryFormatter(); formatter.Serialize(stream, new Lazy<string>(() => "42")); stream.Seek(0, SeekOrigin.Begin); var x = 
BinaryFormatterHelpers.Clone(new object()); var fortytwo = (Lazy<string>)formatter.Deserialize(stream); Assert.True(fortytwo.IsValueCreated); Assert.Equal(fortytwo.Value, "42"); } [Theory] [InlineData(LazyThreadSafetyMode.ExecutionAndPublication)] [InlineData(LazyThreadSafetyMode.None)] public static void Value_ThrownException_DoesntCreateValue(LazyThreadSafetyMode mode) { var lazy = new Lazy<string>(() => { throw new DivideByZeroException(); }, mode); Exception exception1 = Assert.Throws<DivideByZeroException>(() => lazy.Value); Exception exception2 = Assert.Throws<DivideByZeroException>(() => lazy.Value); Assert.Same(exception1, exception2); Assert.False(lazy.IsValueCreated); } [Fact] public static void Value_ThrownException_DoesntCreateValue_PublicationOnly() { var lazy = new Lazy<string>(() => { throw new DivideByZeroException(); }, LazyThreadSafetyMode.PublicationOnly); Exception exception1 = Assert.Throws<DivideByZeroException>(() => lazy.Value); Exception exception2 = Assert.Throws<DivideByZeroException>(() => lazy.Value); Assert.NotSame(exception1, exception2); Assert.False(lazy.IsValueCreated); } [Fact] public static void EnsureInitalized_SimpleRefTypes() { var hdcTemplate = new HasDefaultCtor(); string strTemplate = "foo"; // Activator.CreateInstance (uninitialized). HasDefaultCtor a = null; Assert.NotNull(LazyInitializer.EnsureInitialized(ref a)); Assert.Same(a, LazyInitializer.EnsureInitialized(ref a)); Assert.NotNull(a); // Activator.CreateInstance (already initialized). HasDefaultCtor b = hdcTemplate; Assert.Equal(hdcTemplate, LazyInitializer.EnsureInitialized(ref b)); Assert.Same(b, LazyInitializer.EnsureInitialized(ref b)); Assert.Equal(hdcTemplate, b); // Func based initialization (uninitialized). string c = null; Assert.Equal(strTemplate, LazyInitializer.EnsureInitialized(ref c, () => strTemplate)); Assert.Same(c, LazyInitializer.EnsureInitialized(ref c)); Assert.Equal(strTemplate, c); // Func based initialization (already initialized). string d = strTemplate; Assert.Equal(strTemplate, LazyInitializer.EnsureInitialized(ref d, () => strTemplate + "bar")); Assert.Same(d, LazyInitializer.EnsureInitialized(ref d)); Assert.Equal(strTemplate, d); } [Fact] public static void EnsureInitalized_SimpleRefTypes_Invalid() { // Func based initialization (nulls not permitted). string e = null; Assert.Throws<InvalidOperationException>(() => LazyInitializer.EnsureInitialized(ref e, () => null)); // Activator.CreateInstance (for a type without a default ctor). NoDefaultCtor ndc = null; Assert.Throws<MissingMemberException>(() => LazyInitializer.EnsureInitialized(ref ndc)); } [Fact] public static void EnsureInitialized_ComplexRefTypes() { string strTemplate = "foo"; var hdcTemplate = new HasDefaultCtor(); // Activator.CreateInstance (uninitialized). HasDefaultCtor a = null; bool aInit = false; object aLock = null; Assert.NotNull(LazyInitializer.EnsureInitialized(ref a, ref aInit, ref aLock)); Assert.NotNull(a); Assert.True(aInit); Assert.NotNull(aLock); // Activator.CreateInstance (already initialized). HasDefaultCtor b = hdcTemplate; bool bInit = true; object bLock = null; Assert.Equal(hdcTemplate, LazyInitializer.EnsureInitialized(ref b, ref bInit, ref bLock)); Assert.Equal(hdcTemplate, b); Assert.True(bInit); Assert.Null(bLock); // Func based initialization (uninitialized). 
string c = null; bool cInit = false; object cLock = null; Assert.Equal(strTemplate, LazyInitializer.EnsureInitialized(ref c, ref cInit, ref cLock, () => strTemplate)); Assert.Equal(strTemplate, c); Assert.True(cInit); Assert.NotNull(cLock); // Func based initialization (already initialized). string d = strTemplate; bool dInit = true; object dLock = null; Assert.Equal(strTemplate, LazyInitializer.EnsureInitialized(ref d, ref dInit, ref dLock, () => strTemplate + "bar")); Assert.Equal(strTemplate, d); Assert.True(dInit); Assert.Null(dLock); // Func based initialization (nulls *ARE* permitted). string e = null; bool einit = false; object elock = null; int initCount = 0; Assert.Null(LazyInitializer.EnsureInitialized(ref e, ref einit, ref elock, () => { initCount++; return null; })); Assert.Null(e); Assert.Equal(1, initCount); Assert.True(einit); Assert.NotNull(elock); Assert.Null(LazyInitializer.EnsureInitialized(ref e, ref einit, ref elock, () => { initCount++; return null; })); } [Fact] public static void EnsureInitalized_ComplexRefTypes_Invalid() { // Activator.CreateInstance (for a type without a default ctor). NoDefaultCtor ndc = null; bool ndcInit = false; object ndcLock = null; Assert.Throws<MissingMemberException>(() => LazyInitializer.EnsureInitialized(ref ndc, ref ndcInit, ref ndcLock)); } [Fact] public static void LazyInitializerComplexValueTypes() { var empty = new LIX(); var template = new LIX(33); // Activator.CreateInstance (uninitialized). LIX a = default(LIX); bool aInit = false; object aLock = null; LIX ensuredValA = LazyInitializer.EnsureInitialized(ref a, ref aInit, ref aLock); Assert.Equal(empty, ensuredValA); Assert.Equal(empty, a); // Activator.CreateInstance (already initialized). LIX b = template; bool bInit = true; object bLock = null; LIX ensuredValB = LazyInitializer.EnsureInitialized(ref b, ref bInit, ref bLock); Assert.Equal(template, ensuredValB); Assert.Equal(template, b); // Func based initialization (uninitialized). LIX c = default(LIX); bool cInit = false; object cLock = null; LIX ensuredValC = LazyInitializer.EnsureInitialized(ref c, ref cInit, ref cLock, () => template); Assert.Equal(template, c); Assert.Equal(template, ensuredValC); // Func based initialization (already initialized). LIX d = template; bool dInit = true; object dLock = null; LIX template2 = new LIX(template.f * 2); LIX ensuredValD = LazyInitializer.EnsureInitialized(ref d, ref dInit, ref dLock, () => template2); Assert.Equal(template, ensuredValD); Assert.Equal(template, d); } private static void VerifyLazy<T>(Lazy<T> lazy, T expectedValue, bool hasValue, bool isValueCreated) { Assert.Equal(isValueCreated, lazy.IsValueCreated); if (hasValue) { Assert.Equal(expectedValue, lazy.Value); Assert.True(lazy.IsValueCreated); } else { Assert.Throws<MissingMemberException>(() => lazy.Value); // Value could not be created Assert.False(lazy.IsValueCreated); } } private class HasDefaultCtor { } private class NoDefaultCtor { public NoDefaultCtor(int x) { } } private struct LIX { public int f; public LIX(int f) { this.f = f; } public override bool Equals(object other) => other is LIX && ((LIX)other).f == f; public override int GetHashCode() => f.GetHashCode(); public override string ToString() => "LIX<" + f + ">"; } } }
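// --- Minimal sketch (not part of the test suite above) of the exception-caching behavior those tests verify ---
// ExecutionAndPublication (and None) run the factory once and replay the cached exception on every
// access; PublicationOnly never caches the exception and re-runs the factory each time.
using System;
using System.Threading;

public static class LazyExceptionSketch
{
    public static void Demo()
    {
        int attempts = 0;
        Func<int> failing = () => { attempts++; throw new InvalidOperationException("boom"); };

        var cached = new Lazy<int>(failing, LazyThreadSafetyMode.ExecutionAndPublication);
        var retried = new Lazy<int>(failing, LazyThreadSafetyMode.PublicationOnly);

        for (int i = 0; i < 3; i++)
        {
            try { _ = cached.Value; } catch (InvalidOperationException) { }
            try { _ = retried.Value; } catch (InvalidOperationException) { }
        }

        // 'cached' invoked the factory once and rethrows the same exception; 'retried' invoked it
        // on every access, so attempts is 1 + 3 = 4 here.
        Console.WriteLine(attempts); // 4
    }
}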
#if !UNITY_WINRT || UNITY_EDITOR || (UNITY_WP8 && !UNITY_WP_8_1) #region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections.Generic; using System.IO; using System.Text; using System.Linq; using System.Globalization; #if(UNITY_IOS || UNITY_IPHONE) using Newtonsoft.Json.Aot; #endif namespace Newtonsoft.Json.Utilities { internal static class StringUtils { public const string CarriageReturnLineFeed = "\r\n"; public const string Empty = ""; public const char CarriageReturn = '\r'; public const char LineFeed = '\n'; public const char Tab = '\t'; public static string FormatWith(this string format, IFormatProvider provider, params object[] args) { ValidationUtils.ArgumentNotNull(format, "format"); return string.Format(provider, format, args); } /// <summary> /// Determines whether the string contains white space. /// </summary> /// <param name="s">The string to test for white space.</param> /// <returns> /// <c>true</c> if the string contains white space; otherwise, <c>false</c>. /// </returns> public static bool ContainsWhiteSpace(string s) { if (s == null) throw new ArgumentNullException("s"); for (int i = 0; i < s.Length; i++) { if (char.IsWhiteSpace(s[i])) return true; } return false; } /// <summary> /// Determines whether the string is all white space. Empty string will return false. /// </summary> /// <param name="s">The string to test whether it is all white space.</param> /// <returns> /// <c>true</c> if the string is all white space; otherwise, <c>false</c>. /// </returns> public static bool IsWhiteSpace(string s) { if (s == null) throw new ArgumentNullException("s"); if (s.Length == 0) return false; for (int i = 0; i < s.Length; i++) { if (!char.IsWhiteSpace(s[i])) return false; } return true; } /// <summary> /// Ensures the target string ends with the specified string. 
/// </summary> /// <param name="target">The target.</param> /// <param name="value">The value.</param> /// <returns>The target string with the value string at the end.</returns> public static string EnsureEndsWith(string target, string value) { if (target == null) throw new ArgumentNullException("target"); if (value == null) throw new ArgumentNullException("value"); if (target.Length >= value.Length) { if (string.Compare(target, target.Length - value.Length, value, 0, value.Length, StringComparison.OrdinalIgnoreCase) == 0) return target; string trimmedString = target.TrimEnd(null); if (string.Compare(trimmedString, trimmedString.Length - value.Length, value, 0, value.Length, StringComparison.OrdinalIgnoreCase) == 0) return target; } return target + value; } public static bool IsNullOrEmptyOrWhiteSpace(string s) { if (string.IsNullOrEmpty(s)) return true; else if (IsWhiteSpace(s)) return true; else return false; } /// <summary> /// Perform an action if the string is not null or empty. /// </summary> /// <param name="value">The value.</param> /// <param name="action">The action to perform.</param> public static void IfNotNullOrEmpty(string value, Action<string> action) { IfNotNullOrEmpty(value, action, null); } private static void IfNotNullOrEmpty(string value, Action<string> trueAction, Action<string> falseAction) { if (!string.IsNullOrEmpty(value)) { if (trueAction != null) trueAction(value); } else { if (falseAction != null) falseAction(value); } } /// <summary> /// Indents the specified string. /// </summary> /// <param name="s">The string to indent.</param> /// <param name="indentation">The number of characters to indent by.</param> /// <returns></returns> public static string Indent(string s, int indentation) { return Indent(s, indentation, ' '); } /// <summary> /// Indents the specified string. /// </summary> /// <param name="s">The string to indent.</param> /// <param name="indentation">The number of characters to indent by.</param> /// <param name="indentChar">The indent character.</param> /// <returns></returns> public static string Indent(string s, int indentation, char indentChar) { if (s == null) throw new ArgumentNullException("s"); if (indentation <= 0) throw new ArgumentException("Must be greater than zero.", "indentation"); StringReader sr = new StringReader(s); StringWriter sw = new StringWriter(CultureInfo.InvariantCulture); ActionTextReaderLine(sr, sw, delegate(TextWriter tw, string line) { tw.Write(new string(indentChar, indentation)); tw.Write(line); }); return sw.ToString(); } private delegate void ActionLine(TextWriter textWriter, string line); private static void ActionTextReaderLine(TextReader textReader, TextWriter textWriter, ActionLine lineAction) { string line; bool firstLine = true; while ((line = textReader.ReadLine()) != null) { if (!firstLine) textWriter.WriteLine(); else firstLine = false; lineAction(textWriter, line); } } /// <summary> /// Numbers the lines. /// </summary> /// <param name="s">The string to number.</param> /// <returns></returns> public static string NumberLines(string s) { if (s == null) throw new ArgumentNullException("s"); StringReader sr = new StringReader(s); StringWriter sw = new StringWriter(CultureInfo.InvariantCulture); int lineNumber = 1; ActionTextReaderLine(sr, sw, delegate(TextWriter tw, string line) { tw.Write(lineNumber.ToString(CultureInfo.InvariantCulture).PadLeft(4)); tw.Write(". "); tw.Write(line); lineNumber++; }); return sw.ToString(); } /// <summary> /// Nulls an empty string. 
/// </summary> /// <param name="s">The string.</param> /// <returns>Null if the string was null, otherwise the string unchanged.</returns> public static string NullEmptyString(string s) { return (string.IsNullOrEmpty(s)) ? null : s; } public static string ReplaceNewLines(string s, string replacement) { StringReader sr = new StringReader(s); StringBuilder sb = new StringBuilder(); bool first = true; string line; while ((line = sr.ReadLine()) != null) { if (first) first = false; else sb.Append(replacement); sb.Append(line); } return sb.ToString(); } public static string Truncate(string s, int maximumLength) { return Truncate(s, maximumLength, "..."); } public static string Truncate(string s, int maximumLength, string suffix) { if (suffix == null) throw new ArgumentNullException("suffix"); if (maximumLength <= 0) throw new ArgumentException("Maximum length must be greater than zero.", "maximumLength"); int subStringLength = maximumLength - suffix.Length; if (subStringLength <= 0) throw new ArgumentException("Length of suffix string is greater or equal to maximumLength"); if (s != null && s.Length > maximumLength) { string truncatedString = s.Substring(0, subStringLength); // incase the last character is a space truncatedString = truncatedString.Trim(); truncatedString += suffix; return truncatedString; } else { return s; } } public static StringWriter CreateStringWriter(int capacity) { StringBuilder sb = new StringBuilder(capacity); StringWriter sw = new StringWriter(sb, CultureInfo.InvariantCulture); return sw; } public static int? GetLength(string value) { if (value == null) return null; else return value.Length; } public static string ToCharAsUnicode(char c) { char h1 = MathUtils.IntToHex((c >> 12) & '\x000f'); char h2 = MathUtils.IntToHex((c >> 8) & '\x000f'); char h3 = MathUtils.IntToHex((c >> 4) & '\x000f'); char h4 = MathUtils.IntToHex(c & '\x000f'); return new string(new[] { '\\', 'u', h1, h2, h3, h4 }); } public static void WriteCharAsUnicode(TextWriter writer, char c) { ValidationUtils.ArgumentNotNull(writer, "writer"); char h1 = MathUtils.IntToHex((c >> 12) & '\x000f'); char h2 = MathUtils.IntToHex((c >> 8) & '\x000f'); char h3 = MathUtils.IntToHex((c >> 4) & '\x000f'); char h4 = MathUtils.IntToHex(c & '\x000f'); writer.Write('\\'); writer.Write('u'); writer.Write(h1); writer.Write(h2); writer.Write(h3); writer.Write(h4); } public static TSource ForgivingCaseSensitiveFind<TSource>(this IEnumerable<TSource> source, Func<TSource, string> valueSelector, string testValue) { if (source == null) throw new ArgumentNullException("source"); if (valueSelector == null) throw new ArgumentNullException("valueSelector"); #if !(UNITY_IPHONE || UNITY_IOS) || (UNITY_IOS && !(UNITY_3_5 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3)) var caseInsensitiveResults = source.Where(s => string.Compare(valueSelector(s), testValue, StringComparison.OrdinalIgnoreCase) == 0).ToArray(); var resultCount = caseInsensitiveResults.Length; #else var caseInsensitiveResults = new List<TSource>(); source.ForEach(itm => { if (string.Compare(valueSelector(itm), testValue, StringComparison.OrdinalIgnoreCase) == 0) caseInsensitiveResults.Add(itm); }); var resultCount = caseInsensitiveResults.Count; #endif if (resultCount <= 1) { return resultCount == 1 ? caseInsensitiveResults[0] : default(TSource); } else { //multiple results returned. 
now filter using case sensitivity #if !(UNITY_IPHONE || UNITY_IOS) || (UNITY_IOS && !(UNITY_3_5 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3)) var caseSensitiveResults = source.Where(s => string.Compare(valueSelector(s), testValue, StringComparison.Ordinal) == 0); return caseSensitiveResults.SingleOrDefault(); #else var caseSensitiveResults = new List<TSource>(); source.ForEach(itm => { if (string.Compare(valueSelector(itm), testValue, StringComparison.Ordinal) == 0) caseSensitiveResults.Add(itm); }); return caseSensitiveResults.Count > 0 ? caseSensitiveResults[0] : default(TSource); #endif } } public static string ToCamelCase(string s) { if (string.IsNullOrEmpty(s)) return s; if (!char.IsUpper(s[0])) return s; string camelCase = char.ToLower(s[0], CultureInfo.InvariantCulture).ToString(CultureInfo.InvariantCulture); if (s.Length > 1) camelCase += s.Substring(1); return camelCase; } } } #endif
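// --- Illustrative-only sketch of how a few of the StringUtils helpers above behave ---
// StringUtils is internal to Newtonsoft.Json, so this assumes code compiled into the same assembly
// (or equivalent copies of the helpers); the inputs are arbitrary examples.
public static class StringUtilsSketch
{
    public static void Demo()
    {
        // ToCamelCase lower-cases only the first character: "FirstName" -> "firstName".
        string camel = Newtonsoft.Json.Utilities.StringUtils.ToCamelCase("FirstName");

        // Truncate keeps maximumLength characters *including* the default "..." suffix:
        // "Hello world, hello" with maximumLength 10 -> "Hello w..." (10 characters).
        string cut = Newtonsoft.Json.Utilities.StringUtils.Truncate("Hello world, hello", 10);

        // Indent prefixes every line of the input with the indent character.
        string indented = Newtonsoft.Json.Utilities.StringUtils.Indent("a\nb", 4);

        System.Console.WriteLine(camel + " | " + cut + " | " + indented);
    }
}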
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeGeneration; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Editing; using Microsoft.CodeAnalysis.Editor.Commands; using Microsoft.CodeAnalysis.Editor.Shared.Extensions; using Microsoft.CodeAnalysis.Editor.Shared.Options; using Microsoft.CodeAnalysis.Formatting; using Microsoft.CodeAnalysis.Internal.Log; using Microsoft.CodeAnalysis.LanguageServices; using Microsoft.CodeAnalysis.Shared.Extensions; using Microsoft.CodeAnalysis.Simplification; using Microsoft.CodeAnalysis.Text; using Microsoft.CodeAnalysis.Text.Shared.Extensions; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Editor; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Editor.CSharp.EventHookup { internal partial class EventHookupCommandHandler : ICommandHandler<TabKeyCommandArgs> { public void ExecuteCommand(TabKeyCommandArgs args, Action nextHandler) { AssertIsForeground(); if (!args.SubjectBuffer.GetFeatureOnOffOption(InternalFeatureOnOffOptions.EventHookup)) { nextHandler(); return; } if (EventHookupSessionManager.CurrentSession == null) { nextHandler(); return; } // Handling tab is currently uncancellable. HandleTabWorker(args.TextView, args.SubjectBuffer, nextHandler, CancellationToken.None); } public CommandState GetCommandState(TabKeyCommandArgs args, Func<CommandState> nextHandler) { AssertIsForeground(); if (EventHookupSessionManager.CurrentSession != null) { return CommandState.Available; } else { return nextHandler(); } } private void HandleTabWorker(ITextView textView, ITextBuffer subjectBuffer, Action nextHandler, CancellationToken cancellationToken) { AssertIsForeground(); // For test purposes only! if (EventHookupSessionManager.CurrentSession.TESTSessionHookupMutex != null) { try { EventHookupSessionManager.CurrentSession.TESTSessionHookupMutex.ReleaseMutex(); } catch (ApplicationException) { } } // Blocking wait (if necessary) to determine whether to consume the tab and // generate the event handler. EventHookupSessionManager.CurrentSession.GetEventNameTask.Wait(cancellationToken); string eventHandlerMethodName = null; if (EventHookupSessionManager.CurrentSession.GetEventNameTask.Status == TaskStatus.RanToCompletion) { eventHandlerMethodName = EventHookupSessionManager.CurrentSession.GetEventNameTask.WaitAndGetResult(cancellationToken); } if (eventHandlerMethodName == null || EventHookupSessionManager.CurrentSession.TextView != textView) { nextHandler(); EventHookupSessionManager.CancelAndDismissExistingSessions(); return; } // If QuickInfoSession is null, then Tab was pressed before the background task // finished (that is, the Wait call above actually needed to wait). Since we know an // event hookup was found, we should set everything up now because the background task // will not have a chance to set things up until after this Tab has been handled, and // by then it's too late. When the background task alerts that it found an event hookup // nothing will change because QuickInfoSession will already be set. EventHookupSessionManager.EventHookupFoundInSession(EventHookupSessionManager.CurrentSession); // This tab means we should generate the event handler method. 
Begin the code // generation process. GenerateAndAddEventHandler(textView, subjectBuffer, eventHandlerMethodName, nextHandler, cancellationToken); } private void GenerateAndAddEventHandler(ITextView textView, ITextBuffer subjectBuffer, string eventHandlerMethodName, Action nextHandler, CancellationToken cancellationToken) { AssertIsForeground(); using (Logger.LogBlock(FunctionId.EventHookup_Generate_Handler, cancellationToken)) { EventHookupSessionManager.CancelAndDismissExistingSessions(); var workspace = textView.TextSnapshot.TextBuffer.GetWorkspace(); if (workspace == null) { nextHandler(); EventHookupSessionManager.CancelAndDismissExistingSessions(); return; } Document document = textView.TextSnapshot.GetOpenDocumentInCurrentContextWithChanges(); if (document == null) { Contract.Fail("Event Hookup could not find the document for the IBufferView."); } var position = textView.GetCaretPoint(subjectBuffer).Value.Position; var solutionWithEventHandler = CreateSolutionWithEventHandler( document, eventHandlerMethodName, position, out var plusEqualTokenEndPosition, cancellationToken); if (solutionWithEventHandler == null) { Contract.Fail("Event Hookup could not create solution with event handler."); } // The new solution is created, so start user observable changes if (!workspace.TryApplyChanges(solutionWithEventHandler)) { Contract.Fail("Event Hookup could not update the solution."); } // The += token will not move during this process, so it is safe to use that // position as a location from which to find the identifier we're renaming. BeginInlineRename(workspace, textView, subjectBuffer, plusEqualTokenEndPosition, cancellationToken); } } private Solution CreateSolutionWithEventHandler( Document document, string eventHandlerMethodName, int position, out int plusEqualTokenEndPosition, CancellationToken cancellationToken) { AssertIsForeground(); // Mark the += token with an annotation so we can find it after formatting var plusEqualsTokenAnnotation = new SyntaxAnnotation(); var documentWithNameAndAnnotationsAdded = AddMethodNameAndAnnotationsToSolution(document, eventHandlerMethodName, position, plusEqualsTokenAnnotation, cancellationToken); var semanticDocument = SemanticDocument.CreateAsync(documentWithNameAndAnnotationsAdded, cancellationToken).WaitAndGetResult(cancellationToken); var updatedRoot = AddGeneratedHandlerMethodToSolution(semanticDocument, eventHandlerMethodName, plusEqualsTokenAnnotation, cancellationToken); if (updatedRoot == null) { plusEqualTokenEndPosition = 0; return null; } var simplifiedDocument = Simplifier.ReduceAsync(documentWithNameAndAnnotationsAdded.WithSyntaxRoot(updatedRoot), Simplifier.Annotation, cancellationToken: cancellationToken).WaitAndGetResult(cancellationToken); var formattedDocument = Formatter.FormatAsync(simplifiedDocument, Formatter.Annotation, cancellationToken: cancellationToken).WaitAndGetResult(cancellationToken); var newRoot = formattedDocument.GetSyntaxRootSynchronously(cancellationToken); plusEqualTokenEndPosition = newRoot.GetAnnotatedNodesAndTokens(plusEqualsTokenAnnotation) .Single().Span.End; return document.Project.Solution.WithDocumentText( formattedDocument.Id, formattedDocument.GetTextAsync(cancellationToken).WaitAndGetResult(cancellationToken)); } private Document AddMethodNameAndAnnotationsToSolution( Document document, string eventHandlerMethodName, int position, SyntaxAnnotation plusEqualsTokenAnnotation, CancellationToken cancellationToken) { // First find the event hookup to determine if we are in a static context. 
var root = document.GetSyntaxRootSynchronously(cancellationToken); var plusEqualsToken = root.FindTokenOnLeftOfPosition(position); var eventHookupExpression = plusEqualsToken.GetAncestor<AssignmentExpressionSyntax>(); var textToInsert = eventHandlerMethodName + ";"; if (!eventHookupExpression.IsInStaticContext()) { // This will be simplified later if it's not needed. textToInsert = "this." + textToInsert; } // Next, perform a textual insertion of the event handler method name. var textChange = new TextChange(new TextSpan(position, 0), textToInsert); var newText = document.GetTextAsync(cancellationToken).WaitAndGetResult(cancellationToken).WithChanges(textChange); var documentWithNameAdded = document.WithText(newText); // Now find the event hookup again to add the appropriate annotations. root = documentWithNameAdded.GetSyntaxRootSynchronously(cancellationToken); plusEqualsToken = root.FindTokenOnLeftOfPosition(position); eventHookupExpression = plusEqualsToken.GetAncestor<AssignmentExpressionSyntax>(); var updatedEventHookupExpression = eventHookupExpression .ReplaceToken(plusEqualsToken, plusEqualsToken.WithAdditionalAnnotations(plusEqualsTokenAnnotation)) .WithRight(eventHookupExpression.Right.WithAdditionalAnnotations(Simplifier.Annotation)) .WithAdditionalAnnotations(Formatter.Annotation); var rootWithUpdatedEventHookupExpression = root.ReplaceNode(eventHookupExpression, updatedEventHookupExpression); return documentWithNameAdded.WithSyntaxRoot(rootWithUpdatedEventHookupExpression); } private SyntaxNode AddGeneratedHandlerMethodToSolution( SemanticDocument document, string eventHandlerMethodName, SyntaxAnnotation plusEqualsTokenAnnotation, CancellationToken cancellationToken) { var root = document.Root as SyntaxNode; var eventHookupExpression = root.GetAnnotatedNodesAndTokens(plusEqualsTokenAnnotation).Single().AsToken().GetAncestor<AssignmentExpressionSyntax>(); var generatedMethodSymbol = GetMethodSymbol(document, eventHandlerMethodName, eventHookupExpression, cancellationToken); if (generatedMethodSymbol == null) { return null; } var typeDecl = eventHookupExpression.GetAncestor<TypeDeclarationSyntax>(); var typeDeclWithMethodAdded = CodeGenerator.AddMethodDeclaration(typeDecl, generatedMethodSymbol, document.Project.Solution.Workspace, new CodeGenerationOptions(afterThisLocation: eventHookupExpression.GetLocation())); return root.ReplaceNode(typeDecl, typeDeclWithMethodAdded); } private IMethodSymbol GetMethodSymbol( SemanticDocument document, string eventHandlerMethodName, AssignmentExpressionSyntax eventHookupExpression, CancellationToken cancellationToken) { var semanticModel = document.SemanticModel as SemanticModel; var symbolInfo = semanticModel.GetSymbolInfo(eventHookupExpression.Left, cancellationToken); var symbol = symbolInfo.Symbol; if (symbol == null || symbol.Kind != SymbolKind.Event) { return null; } var typeInference = document.Project.LanguageServices.GetService<ITypeInferenceService>(); var delegateType = typeInference.InferDelegateType(semanticModel, eventHookupExpression.Right, cancellationToken); if (delegateType == null || delegateType.DelegateInvokeMethod == null) { return null; } var syntaxFactory = document.Project.LanguageServices.GetService<SyntaxGenerator>(); return CodeGenerationSymbolFactory.CreateMethodSymbol( attributes: default(ImmutableArray<AttributeData>), accessibility: Accessibility.Private, modifiers: new DeclarationModifiers(isStatic: eventHookupExpression.IsInStaticContext()), returnType: delegateType.DelegateInvokeMethod.ReturnType, 
returnsByRef: delegateType.DelegateInvokeMethod.ReturnsByRef, explicitInterfaceSymbol: null, name: eventHandlerMethodName, typeParameters: default(ImmutableArray<ITypeParameterSymbol>), parameters: delegateType.DelegateInvokeMethod.Parameters, statements: ImmutableArray.Create( CodeGenerationHelpers.GenerateThrowStatement(syntaxFactory, document, "System.NotImplementedException", cancellationToken))); } private void BeginInlineRename(Workspace workspace, ITextView textView, ITextBuffer subjectBuffer, int plusEqualTokenEndPosition, CancellationToken cancellationToken) { AssertIsForeground(); if (_inlineRenameService.ActiveSession == null) { var document = textView.TextSnapshot.GetOpenDocumentInCurrentContextWithChanges(); if (document != null) { // In the middle of a user action, cannot cancel. var root = document.GetSyntaxRootSynchronously(cancellationToken); var token = root.FindTokenOnRightOfPosition(plusEqualTokenEndPosition); var editSpan = token.Span; var memberAccessExpression = token.GetAncestor<MemberAccessExpressionSyntax>(); if (memberAccessExpression != null) { // the event hookup might look like `MyEvent += this.GeneratedHandlerName;` editSpan = memberAccessExpression.Name.Span; } _inlineRenameService.StartInlineSession(document, editSpan, cancellationToken); textView.SetSelection(editSpan.ToSnapshotSpan(textView.TextSnapshot)); } } } } }
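// --- Standalone sketch (assumes the Microsoft.CodeAnalysis.CSharp package) of the annotation technique used above ---
// The handler tags the += token with a SyntaxAnnotation before rewriting so it can be located again
// after the tree has been transformed. This is illustrative, not the handler's own code; the
// transformation here is just NormalizeWhitespace rather than simplification/formatting.
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;

public static class AnnotationTrackingSketch
{
    public static int FindPlusEqualsEndAfterRewrite()
    {
        var root = CSharpSyntaxTree.ParseText("class C { void M() { E += Handler; } }").GetRoot();
        var annotation = new SyntaxAnnotation();

        // Annotate the += token, then transform the tree.
        var plusEquals = root.DescendantTokens().First(t => t.IsKind(SyntaxKind.PlusEqualsToken));
        var rewritten = root.ReplaceToken(plusEquals, plusEquals.WithAdditionalAnnotations(annotation))
                            .NormalizeWhitespace();

        // The annotation survives the rewrite, so the token can be found at its new position.
        return rewritten.GetAnnotatedTokens(annotation).Single().Span.End;
    }
}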
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using AutoMapper; using Microsoft.Azure.Commands.Compute.Automation.Models; using Microsoft.Azure.Management.Compute; using Microsoft.Azure.Management.Compute.Models; using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Management.Automation; namespace Microsoft.Azure.Commands.Compute.Automation { public partial class InvokeAzureComputeMethodCmdlet : ComputeAutomationBaseCmdlet { protected object CreateImageGetDynamicParameters() { dynamicParameters = new RuntimeDefinedParameterDictionary(); var pResourceGroupName = new RuntimeDefinedParameter(); pResourceGroupName.Name = "ResourceGroupName"; pResourceGroupName.ParameterType = typeof(string); pResourceGroupName.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 1, Mandatory = true }); pResourceGroupName.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("ResourceGroupName", pResourceGroupName); var pImageName = new RuntimeDefinedParameter(); pImageName.Name = "ImageName"; pImageName.ParameterType = typeof(string); pImageName.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 2, Mandatory = true }); pImageName.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("ImageName", pImageName); var pExpand = new RuntimeDefinedParameter(); pExpand.Name = "Expand"; pExpand.ParameterType = typeof(string); pExpand.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 3, Mandatory = false }); pExpand.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("Expand", pExpand); var pArgumentList = new RuntimeDefinedParameter(); pArgumentList.Name = "ArgumentList"; pArgumentList.ParameterType = typeof(object[]); pArgumentList.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByStaticParameters", Position = 4, Mandatory = true }); pArgumentList.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("ArgumentList", pArgumentList); return dynamicParameters; } protected void ExecuteImageGetMethod(object[] invokeMethodInputParameters) { string resourceGroupName = (string)ParseParameter(invokeMethodInputParameters[0]); string imageName = (string)ParseParameter(invokeMethodInputParameters[1]); string expand = (string)ParseParameter(invokeMethodInputParameters[2]); if (!string.IsNullOrEmpty(resourceGroupName) && !string.IsNullOrEmpty(imageName)) { var result = ImagesClient.Get(resourceGroupName, imageName, expand); var psObject = new PSImage(); Mapper.Map<Image, PSImage>(result, psObject); WriteObject(psObject); } else if (!string.IsNullOrEmpty(resourceGroupName)) { var result = ImagesClient.ListByResourceGroup(resourceGroupName); var resultList = 
result.ToList(); var nextPageLink = result.NextPageLink; while (!string.IsNullOrEmpty(nextPageLink)) { var pageResult = ImagesClient.ListByResourceGroupNext(nextPageLink); foreach (var pageItem in pageResult) { resultList.Add(pageItem); } nextPageLink = pageResult.NextPageLink; } var psObject = new List<PSImageList>(); foreach (var r in resultList) { psObject.Add(Mapper.Map<Image, PSImageList>(r)); } WriteObject(psObject, true); } else { var result = ImagesClient.List(); var resultList = result.ToList(); var nextPageLink = result.NextPageLink; while (!string.IsNullOrEmpty(nextPageLink)) { var pageResult = ImagesClient.ListNext(nextPageLink); foreach (var pageItem in pageResult) { resultList.Add(pageItem); } nextPageLink = pageResult.NextPageLink; } var psObject = new List<PSImageList>(); foreach (var r in resultList) { psObject.Add(Mapper.Map<Image, PSImageList>(r)); } WriteObject(psObject, true); } } } public partial class NewAzureComputeArgumentListCmdlet : ComputeAutomationBaseCmdlet { protected PSArgument[] CreateImageGetParameters() { string resourceGroupName = string.Empty; string imageName = string.Empty; string expand = string.Empty; return ConvertFromObjectsToArguments( new string[] { "ResourceGroupName", "ImageName", "Expand" }, new object[] { resourceGroupName, imageName, expand }); } } [Cmdlet(VerbsCommon.Get, "AzureRmImage", DefaultParameterSetName = "InvokeByDynamicParameters")] public partial class GetAzureRmImage : InvokeAzureComputeMethodCmdlet { public override string MethodName { get; set; } protected override void ProcessRecord() { this.MethodName = "ImageGet"; base.ProcessRecord(); } public override object GetDynamicParameters() { dynamicParameters = new RuntimeDefinedParameterDictionary(); var pResourceGroupName = new RuntimeDefinedParameter(); pResourceGroupName.Name = "ResourceGroupName"; pResourceGroupName.ParameterType = typeof(string); pResourceGroupName.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 1, Mandatory = false, ValueFromPipelineByPropertyName = true, ValueFromPipeline = false }); pResourceGroupName.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("ResourceGroupName", pResourceGroupName); var pImageName = new RuntimeDefinedParameter(); pImageName.Name = "ImageName"; pImageName.ParameterType = typeof(string); pImageName.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 2, Mandatory = false, ValueFromPipelineByPropertyName = true, ValueFromPipeline = false }); pImageName.Attributes.Add(new AliasAttribute("Name")); pImageName.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("ImageName", pImageName); var pExpand = new RuntimeDefinedParameter(); pExpand.Name = "Expand"; pExpand.ParameterType = typeof(string); pExpand.Attributes.Add(new ParameterAttribute { ParameterSetName = "InvokeByDynamicParameters", Position = 3, Mandatory = false, ValueFromPipelineByPropertyName = true, ValueFromPipeline = false }); pExpand.Attributes.Add(new AllowNullAttribute()); dynamicParameters.Add("Expand", pExpand); return dynamicParameters; } } }
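// --- Generic sketch (not generated code) of the paging pattern used by ExecuteImageGetMethod above ---
// Keep following NextPageLink until the service stops returning one. IPagedResultSketch stands in
// for the Azure SDK's paged result type; the delegate parameters are illustrative assumptions.
using System;
using System.Collections.Generic;

public interface IPagedResultSketch<T> : IEnumerable<T>
{
    string NextPageLink { get; }
}

public static class PagingSketch
{
    public static List<T> ReadAllPages<T>(
        Func<IPagedResultSketch<T>> firstPage,
        Func<string, IPagedResultSketch<T>> nextPage)
    {
        var all = new List<T>();
        var page = firstPage();
        all.AddRange(page);
        while (!string.IsNullOrEmpty(page.NextPageLink))
        {
            page = nextPage(page.NextPageLink);
            all.AddRange(page);
        }
        return all;
    }
}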
using Microsoft.TeamFoundation.DistributedTask.WebApi; using Microsoft.VisualStudio.Services.Agent.Worker; using Moq; using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Xunit; using Microsoft.VisualStudio.Services.WebApi; using Pipelines = Microsoft.TeamFoundation.DistributedTask.Pipelines; namespace Microsoft.VisualStudio.Services.Agent.Tests.Worker { public sealed class WorkerL0 { private Mock<IProcessChannel> _processChannel; private Mock<IJobRunner> _jobRunner; private Mock<IVstsAgentWebProxy> _proxy; private Mock<IAgentCertificateManager> _cert; public WorkerL0() { _processChannel = new Mock<IProcessChannel>(); _jobRunner = new Mock<IJobRunner>(); _proxy = new Mock<IVstsAgentWebProxy>(); _cert = new Mock<IAgentCertificateManager>(); } private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string jobName) { TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference() { PlanId = Guid.NewGuid() }; TimelineReference timeline = null; Dictionary<string, VariableValue> variables = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase); variables[Constants.Variables.System.Culture] = "en-US"; Pipelines.JobResources resources = new Pipelines.JobResources(); var serviceEndpoint = new ServiceEndpoint(); serviceEndpoint.Authorization = new EndpointAuthorization(); serviceEndpoint.Authorization.Parameters.Add("nullValue", null); resources.Endpoints.Add(serviceEndpoint); List<Pipelines.JobStep> tasks = new List<Pipelines.JobStep>(); tasks.Add(new Pipelines.TaskStep() { Id = Guid.NewGuid(), Reference = new Pipelines.TaskStepDefinitionReference() { Id = Guid.NewGuid(), Name = "TestTask", Version = "1.0.0" } }); Guid JobId = Guid.NewGuid(); var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, "ubuntu", variables, new List<MaskHint>(), resources, null, tasks); return jobRequest; } private JobCancelMessage CreateJobCancelMessage(Guid jobId) { return new JobCancelMessage(jobId, TimeSpan.FromSeconds(0)); } [Fact] [Trait("Level", "L0")] [Trait("Category", "Worker")] public async void DispatchRunNewJob() { //Arrange using (var hc = new TestHostContext(this)) using (var tokenSource = new CancellationTokenSource()) { var worker = new Microsoft.VisualStudio.Services.Agent.Worker.Worker(); hc.EnqueueInstance<IProcessChannel>(_processChannel.Object); hc.EnqueueInstance<IJobRunner>(_jobRunner.Object); hc.SetSingleton<IVstsAgentWebProxy>(_proxy.Object); hc.SetSingleton<IAgentCertificateManager>(_cert.Object); worker.Initialize(hc); var jobMessage = CreateJobRequestMessage("job1"); var arWorkerMessages = new WorkerMessage[] { new WorkerMessage { Body = JsonUtility.ToString(jobMessage), MessageType = MessageType.NewJobRequest } }; var workerMessages = new Queue<WorkerMessage>(arWorkerMessages); _processChannel .Setup(x => x.ReceiveAsync(It.IsAny<CancellationToken>())) .Returns(async () => { // Return the job message. 
if (workerMessages.Count > 0) { return workerMessages.Dequeue(); } // Wait for the text to run await Task.Delay(-1, tokenSource.Token); return default(WorkerMessage); }); _jobRunner.Setup(x => x.RunAsync(It.IsAny<Pipelines.AgentJobRequestMessage>(), It.IsAny<CancellationToken>())) .Returns(Task.FromResult<TaskResult>(TaskResult.Succeeded)); //Act await worker.RunAsync(pipeIn: "1", pipeOut: "2"); //Assert _processChannel.Verify(x => x.StartClient("1", "2"), Times.Once()); _jobRunner.Verify(x => x.RunAsync( It.Is<Pipelines.AgentJobRequestMessage>(y => IsMessageIdentical(y, jobMessage)), It.IsAny<CancellationToken>())); tokenSource.Cancel(); } } [Fact] [Trait("Level", "L0")] [Trait("Category", "Worker")] public async void DispatchCancellation() { //Arrange using (var hc = new TestHostContext(this)) { var worker = new Microsoft.VisualStudio.Services.Agent.Worker.Worker(); hc.EnqueueInstance<IProcessChannel>(_processChannel.Object); hc.EnqueueInstance<IJobRunner>(_jobRunner.Object); hc.SetSingleton<IVstsAgentWebProxy>(_proxy.Object); hc.SetSingleton<IAgentCertificateManager>(_cert.Object); worker.Initialize(hc); var jobMessage = CreateJobRequestMessage("job1"); var cancelMessage = CreateJobCancelMessage(jobMessage.JobId); var arWorkerMessages = new WorkerMessage[] { new WorkerMessage { Body = JsonUtility.ToString(jobMessage), MessageType = MessageType.NewJobRequest }, new WorkerMessage { Body = JsonUtility.ToString(cancelMessage), MessageType = MessageType.CancelRequest } }; var workerMessages = new Queue<WorkerMessage>(arWorkerMessages); _processChannel.Setup(x => x.ReceiveAsync(It.IsAny<CancellationToken>())) .Returns(() => Task.FromResult(workerMessages.Dequeue())); _jobRunner.Setup(x => x.RunAsync(It.IsAny<Pipelines.AgentJobRequestMessage>(), It.IsAny<CancellationToken>())) .Returns( async (Pipelines.AgentJobRequestMessage jm, CancellationToken ct) => { await Task.Delay(-1, ct); return TaskResult.Canceled; }); //Act await Assert.ThrowsAsync<TaskCanceledException>( async () => await worker.RunAsync("1", "2")); //Assert _processChannel.Verify(x => x.StartClient("1", "2"), Times.Once()); _jobRunner.Verify(x => x.RunAsync( It.Is<Pipelines.AgentJobRequestMessage>(y => IsMessageIdentical(y, jobMessage)), It.IsAny<CancellationToken>())); } } private bool IsMessageIdentical(Pipelines.AgentJobRequestMessage source, Pipelines.AgentJobRequestMessage target) { if (source == null && target == null) { return true; } if (source != null && target == null) { return false; } if (source == null && target != null) { return false; } if (source.JobContainer != target.JobContainer) { return false; } if (source.JobDisplayName != target.JobDisplayName) { return false; } if (source.JobId != target.JobId) { return false; } if (source.JobName != target.JobName) { return false; } if (source.MaskHints.Count != target.MaskHints.Count) { return false; } if (source.MessageType != target.MessageType) { return false; } if (source.Plan.PlanId != target.Plan.PlanId) { return false; } if (source.RequestId != target.RequestId) { return false; } if (source.Resources.Endpoints.Count != target.Resources.Endpoints.Count) { return false; } if (source.Steps.Count != target.Steps.Count) { return false; } if (source.Variables.Count != target.Variables.Count) { return false; } return true; } } }
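// Illustrative sketch (not part of WorkerL0): the DispatchRunNewJob test above mocks a channel
// whose ReceiveAsync first drains a queue of scripted messages and then parks until the test is
// cancelled. The helper below shows that Moq setup shape in isolation. IMessageChannel and
// FakeMessage are hypothetical stand-ins for IProcessChannel/WorkerMessage; only the Setup/Returns
// pattern mirrors the test.
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Moq;

public sealed class FakeMessage
{
    public string Body { get; set; }
}

public interface IMessageChannel
{
    Task<FakeMessage> ReceiveAsync(CancellationToken token);
}

public static class ChannelMockSketch
{
    public static Mock<IMessageChannel> Create(IEnumerable<FakeMessage> scriptedMessages, CancellationToken token)
    {
        var queue = new Queue<FakeMessage>(scriptedMessages);
        var mock = new Mock<IMessageChannel>();
        mock.Setup(x => x.ReceiveAsync(It.IsAny<CancellationToken>()))
            .Returns(async () =>
            {
                // Hand out the scripted messages first.
                if (queue.Count > 0)
                {
                    return queue.Dequeue();
                }

                // Then block until the test tears down, as the worker test does.
                await Task.Delay(Timeout.Infinite, token);
                return default(FakeMessage);
            });
        return mock;
    }
}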
/* * Qa full api * * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: all * * Generated by: https://github.com/swagger-api/swagger-codegen.git * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using System.ComponentModel.DataAnnotations; namespace HostMe.Sdk.Model { /// <summary> /// WaitingsStatReportItem /// </summary> [DataContract] public partial class WaitingsStatReportItem : IEquatable<WaitingsStatReportItem>, IValidatableObject { /// <summary> /// Initializes a new instance of the <see cref="WaitingsStatReportItem" /> class. /// </summary> /// <param name="Dimensions">Dimensions.</param> /// <param name="TotalPartyCount">TotalPartyCount.</param> /// <param name="TotalSeatedCount">TotalSeatedCount.</param> /// <param name="TotalCanceledCount">TotalCanceledCount.</param> /// <param name="MinTimeToCall">MinTimeToCall.</param> /// <param name="AvgTimeToCall">AvgTimeToCall.</param> /// <param name="MaxTimeToCall">MaxTimeToCall.</param> /// <param name="MinTimeToSeat">MinTimeToSeat.</param> /// <param name="AvgTimeToSeat">AvgTimeToSeat.</param> /// <param name="MaxTimeToSeat">MaxTimeToSeat.</param> /// <param name="MinTimeToCancel">MinTimeToCancel.</param> /// <param name="AvgTimeToCancel">AvgTimeToCancel.</param> /// <param name="MaxTimeToCancel">MaxTimeToCancel.</param> /// <param name="MaxLine">MaxLine.</param> /// <param name="AvgLine">AvgLine.</param> public WaitingsStatReportItem(WaitingsStatDimensions Dimensions = null, double? TotalPartyCount = null, double? TotalSeatedCount = null, double? TotalCanceledCount = null, double? MinTimeToCall = null, double? AvgTimeToCall = null, double? MaxTimeToCall = null, double? MinTimeToSeat = null, double? AvgTimeToSeat = null, double? MaxTimeToSeat = null, double? MinTimeToCancel = null, double? AvgTimeToCancel = null, double? MaxTimeToCancel = null, int? MaxLine = null, double? 
AvgLine = null) { this.Dimensions = Dimensions; this.TotalPartyCount = TotalPartyCount; this.TotalSeatedCount = TotalSeatedCount; this.TotalCanceledCount = TotalCanceledCount; this.MinTimeToCall = MinTimeToCall; this.AvgTimeToCall = AvgTimeToCall; this.MaxTimeToCall = MaxTimeToCall; this.MinTimeToSeat = MinTimeToSeat; this.AvgTimeToSeat = AvgTimeToSeat; this.MaxTimeToSeat = MaxTimeToSeat; this.MinTimeToCancel = MinTimeToCancel; this.AvgTimeToCancel = AvgTimeToCancel; this.MaxTimeToCancel = MaxTimeToCancel; this.MaxLine = MaxLine; this.AvgLine = AvgLine; } /// <summary> /// Gets or Sets Dimensions /// </summary> [DataMember(Name="dimensions", EmitDefaultValue=true)] public WaitingsStatDimensions Dimensions { get; set; } /// <summary> /// Gets or Sets TotalPartyCount /// </summary> [DataMember(Name="totalPartyCount", EmitDefaultValue=true)] public double? TotalPartyCount { get; set; } /// <summary> /// Gets or Sets TotalSeatedCount /// </summary> [DataMember(Name="totalSeatedCount", EmitDefaultValue=true)] public double? TotalSeatedCount { get; set; } /// <summary> /// Gets or Sets TotalCanceledCount /// </summary> [DataMember(Name="totalCanceledCount", EmitDefaultValue=true)] public double? TotalCanceledCount { get; set; } /// <summary> /// Gets or Sets MinTimeToCall /// </summary> [DataMember(Name="minTimeToCall", EmitDefaultValue=true)] public double? MinTimeToCall { get; set; } /// <summary> /// Gets or Sets AvgTimeToCall /// </summary> [DataMember(Name="avgTimeToCall", EmitDefaultValue=true)] public double? AvgTimeToCall { get; set; } /// <summary> /// Gets or Sets MaxTimeToCall /// </summary> [DataMember(Name="maxTimeToCall", EmitDefaultValue=true)] public double? MaxTimeToCall { get; set; } /// <summary> /// Gets or Sets MinTimeToSeat /// </summary> [DataMember(Name="minTimeToSeat", EmitDefaultValue=true)] public double? MinTimeToSeat { get; set; } /// <summary> /// Gets or Sets AvgTimeToSeat /// </summary> [DataMember(Name="avgTimeToSeat", EmitDefaultValue=true)] public double? AvgTimeToSeat { get; set; } /// <summary> /// Gets or Sets MaxTimeToSeat /// </summary> [DataMember(Name="maxTimeToSeat", EmitDefaultValue=true)] public double? MaxTimeToSeat { get; set; } /// <summary> /// Gets or Sets MinTimeToCancel /// </summary> [DataMember(Name="minTimeToCancel", EmitDefaultValue=true)] public double? MinTimeToCancel { get; set; } /// <summary> /// Gets or Sets AvgTimeToCancel /// </summary> [DataMember(Name="avgTimeToCancel", EmitDefaultValue=true)] public double? AvgTimeToCancel { get; set; } /// <summary> /// Gets or Sets MaxTimeToCancel /// </summary> [DataMember(Name="maxTimeToCancel", EmitDefaultValue=true)] public double? MaxTimeToCancel { get; set; } /// <summary> /// Gets or Sets MaxLine /// </summary> [DataMember(Name="maxLine", EmitDefaultValue=true)] public int? MaxLine { get; set; } /// <summary> /// Gets or Sets AvgLine /// </summary> [DataMember(Name="avgLine", EmitDefaultValue=true)] public double? 
AvgLine { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class WaitingsStatReportItem {\n"); sb.Append(" Dimensions: ").Append(Dimensions).Append("\n"); sb.Append(" TotalPartyCount: ").Append(TotalPartyCount).Append("\n"); sb.Append(" TotalSeatedCount: ").Append(TotalSeatedCount).Append("\n"); sb.Append(" TotalCanceledCount: ").Append(TotalCanceledCount).Append("\n"); sb.Append(" MinTimeToCall: ").Append(MinTimeToCall).Append("\n"); sb.Append(" AvgTimeToCall: ").Append(AvgTimeToCall).Append("\n"); sb.Append(" MaxTimeToCall: ").Append(MaxTimeToCall).Append("\n"); sb.Append(" MinTimeToSeat: ").Append(MinTimeToSeat).Append("\n"); sb.Append(" AvgTimeToSeat: ").Append(AvgTimeToSeat).Append("\n"); sb.Append(" MaxTimeToSeat: ").Append(MaxTimeToSeat).Append("\n"); sb.Append(" MinTimeToCancel: ").Append(MinTimeToCancel).Append("\n"); sb.Append(" AvgTimeToCancel: ").Append(AvgTimeToCancel).Append("\n"); sb.Append(" MaxTimeToCancel: ").Append(MaxTimeToCancel).Append("\n"); sb.Append(" MaxLine: ").Append(MaxLine).Append("\n"); sb.Append(" AvgLine: ").Append(AvgLine).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="obj">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object obj) { // credit: http://stackoverflow.com/a/10454552/677735 return this.Equals(obj as WaitingsStatReportItem); } /// <summary> /// Returns true if WaitingsStatReportItem instances are equal /// </summary> /// <param name="other">Instance of WaitingsStatReportItem to be compared</param> /// <returns>Boolean</returns> public bool Equals(WaitingsStatReportItem other) { // credit: http://stackoverflow.com/a/10454552/677735 if (other == null) return false; return ( this.Dimensions == other.Dimensions || this.Dimensions != null && this.Dimensions.Equals(other.Dimensions) ) && ( this.TotalPartyCount == other.TotalPartyCount || this.TotalPartyCount != null && this.TotalPartyCount.Equals(other.TotalPartyCount) ) && ( this.TotalSeatedCount == other.TotalSeatedCount || this.TotalSeatedCount != null && this.TotalSeatedCount.Equals(other.TotalSeatedCount) ) && ( this.TotalCanceledCount == other.TotalCanceledCount || this.TotalCanceledCount != null && this.TotalCanceledCount.Equals(other.TotalCanceledCount) ) && ( this.MinTimeToCall == other.MinTimeToCall || this.MinTimeToCall != null && this.MinTimeToCall.Equals(other.MinTimeToCall) ) && ( this.AvgTimeToCall == other.AvgTimeToCall || this.AvgTimeToCall != null && this.AvgTimeToCall.Equals(other.AvgTimeToCall) ) && ( this.MaxTimeToCall == other.MaxTimeToCall || this.MaxTimeToCall != null && this.MaxTimeToCall.Equals(other.MaxTimeToCall) ) && ( this.MinTimeToSeat == other.MinTimeToSeat || this.MinTimeToSeat != null && this.MinTimeToSeat.Equals(other.MinTimeToSeat) ) && ( this.AvgTimeToSeat == other.AvgTimeToSeat || this.AvgTimeToSeat != null && this.AvgTimeToSeat.Equals(other.AvgTimeToSeat) ) && ( this.MaxTimeToSeat == other.MaxTimeToSeat || this.MaxTimeToSeat != null && this.MaxTimeToSeat.Equals(other.MaxTimeToSeat) ) && ( this.MinTimeToCancel == 
other.MinTimeToCancel || this.MinTimeToCancel != null && this.MinTimeToCancel.Equals(other.MinTimeToCancel) ) && ( this.AvgTimeToCancel == other.AvgTimeToCancel || this.AvgTimeToCancel != null && this.AvgTimeToCancel.Equals(other.AvgTimeToCancel) ) && ( this.MaxTimeToCancel == other.MaxTimeToCancel || this.MaxTimeToCancel != null && this.MaxTimeToCancel.Equals(other.MaxTimeToCancel) ) && ( this.MaxLine == other.MaxLine || this.MaxLine != null && this.MaxLine.Equals(other.MaxLine) ) && ( this.AvgLine == other.AvgLine || this.AvgLine != null && this.AvgLine.Equals(other.AvgLine) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { // credit: http://stackoverflow.com/a/263416/677735 unchecked // Overflow is fine, just wrap { int hash = 41; // Suitable nullity checks etc, of course :) if (this.Dimensions != null) hash = hash * 59 + this.Dimensions.GetHashCode(); if (this.TotalPartyCount != null) hash = hash * 59 + this.TotalPartyCount.GetHashCode(); if (this.TotalSeatedCount != null) hash = hash * 59 + this.TotalSeatedCount.GetHashCode(); if (this.TotalCanceledCount != null) hash = hash * 59 + this.TotalCanceledCount.GetHashCode(); if (this.MinTimeToCall != null) hash = hash * 59 + this.MinTimeToCall.GetHashCode(); if (this.AvgTimeToCall != null) hash = hash * 59 + this.AvgTimeToCall.GetHashCode(); if (this.MaxTimeToCall != null) hash = hash * 59 + this.MaxTimeToCall.GetHashCode(); if (this.MinTimeToSeat != null) hash = hash * 59 + this.MinTimeToSeat.GetHashCode(); if (this.AvgTimeToSeat != null) hash = hash * 59 + this.AvgTimeToSeat.GetHashCode(); if (this.MaxTimeToSeat != null) hash = hash * 59 + this.MaxTimeToSeat.GetHashCode(); if (this.MinTimeToCancel != null) hash = hash * 59 + this.MinTimeToCancel.GetHashCode(); if (this.AvgTimeToCancel != null) hash = hash * 59 + this.AvgTimeToCancel.GetHashCode(); if (this.MaxTimeToCancel != null) hash = hash * 59 + this.MaxTimeToCancel.GetHashCode(); if (this.MaxLine != null) hash = hash * 59 + this.MaxLine.GetHashCode(); if (this.AvgLine != null) hash = hash * 59 + this.AvgLine.GetHashCode(); return hash; } } public IEnumerable<ValidationResult> Validate(ValidationContext validationContext) { yield break; } } }
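// Usage sketch (not generated by Swagger Codegen): the generated WaitingsStatReportItem above
// implements value-style Equals/GetHashCode and a ToJson helper, so a serialize/deserialize
// round trip yields an instance that compares equal. Assumes the HostMe.Sdk.Model assembly and
// Newtonsoft.Json are referenced; property values are arbitrary.
using System;
using Newtonsoft.Json;
using HostMe.Sdk.Model;

public static class WaitingsStatReportItemSketch
{
    public static void Main()
    {
        var original = new WaitingsStatReportItem(
            TotalPartyCount: 42,
            AvgTimeToSeat: 12.5,
            MaxLine: 7);

        // Serialize with the helper the generated class exposes...
        string json = original.ToJson();

        // ...and deserialize back into a new instance.
        var roundTripped = JsonConvert.DeserializeObject<WaitingsStatReportItem>(json);

        // Equals compares property values, so the copy is "equal" even though it is a
        // different object reference; equal values also hash identically.
        Console.WriteLine(original.Equals(roundTripped));          // expected: True
        Console.WriteLine(ReferenceEquals(original, roundTripped)); // expected: False
    }
}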
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Globalization; using System.Linq; using Roslyn.Test.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.UnitTests { public class CommonCommandLineParserTests : TestBase { private const int EN_US = 1033; private void VerifyCommandLineSplitter(string commandLine, string[] expected) { string[] actual = CommandLineSplitter.SplitCommandLine(commandLine); Assert.Equal(expected.Length, actual.Length); for (int i = 0; i < actual.Length; ++i) { Assert.Equal(expected[i], actual[i]); } } private RuleSet ParseRuleSet(string source, params string[] otherSources) { var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); for (int i = 1; i <= otherSources.Length; i++) { var newFile = dir.CreateFile("file" + i + ".ruleset"); newFile.WriteAllText(otherSources[i - 1]); } if (otherSources.Length != 0) { return RuleSet.LoadEffectiveRuleSetFromFile(file.Path); } return RuleSetProcessor.LoadFromFile(file.Path); } private void VerifyRuleSetError(string source, string message, bool locSpecific = true, string locMessage = "", params string[] otherSources) { try { ParseRuleSet(source, otherSources); } catch (Exception e) { if (CultureInfo.CurrentCulture.LCID == EN_US || CultureInfo.CurrentUICulture.LCID == EN_US || CultureInfo.CurrentCulture == CultureInfo.InvariantCulture || CultureInfo.CurrentUICulture == CultureInfo.InvariantCulture) { Assert.Equal(message, e.Message); } else if (locSpecific) { if (locMessage != "") Assert.Contains(locMessage, e.Message); else Assert.Equal(message, e.Message); } return; } Assert.True(false, "Didn't return an error"); } [Fact] public void TestCommandLineSplitter() { VerifyCommandLineSplitter("", new string[0]); VerifyCommandLineSplitter(" \t ", new string[0]); VerifyCommandLineSplitter(" abc\tdef baz quuz ", new string[] {"abc", "def", "baz", "quuz"}); VerifyCommandLineSplitter(@" ""abc def"" fi""ddle dee de""e ""hi there ""dude he""llo there"" ", new string[] { @"abc def", @"fi""ddle dee de""e", @"""hi there ""dude", @"he""llo there""" }); VerifyCommandLineSplitter(@" ""abc def \"" baz quuz"" ""\""straw berry"" fi\""zz \""buzz fizzbuzz", new string[] { @"abc def "" baz quuz", @"""straw berry", @"fi""zz", @"""buzz", @"fizzbuzz"}); VerifyCommandLineSplitter(@" \\""abc def"" \\\""abc def"" ", new string[] { @"\""abc def""", @"\""abc", @"def""" }); VerifyCommandLineSplitter(@" \\\\""abc def"" \\\\\""abc def"" ", new string[] { @"\\""abc def""", @"\\""abc", @"def""" }); } [Fact] public void TestRuleSetParsingDuplicateRule() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1012"" Action=""Warning"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> </RuleSet>"; string paranment = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "There is a duplicate key sequence 'CA1012' for the 'UniqueRuleName' key or unique identity constraint."); string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "There 
is a duplicate key sequence 'CA1012' for the 'UniqueRuleName' key or unique identity constraint."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingDuplicateRule2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet>"; VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetHasDuplicateRules, "CA1012", "Error", "Warn"), locSpecific: false); } [Fact] public void TestRuleSetParsingDuplicateRule3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet>"; var ruleSet = ParseRuleSet(source); Assert.Equal(expected: ReportDiagnostic.Error, actual: ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetParsingDuplicateRuleSet() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> <RuleSet Name=""Ruleset2"" Description=""Test""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; VerifyRuleSetError(source, "There are multiple root elements. 
Line 8, position 2.", false); } [Fact] public void TestRuleSetParsingIncludeAll1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Equal(ReportDiagnostic.Warn, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetParsingIncludeAll2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetParsingWithIncludeOfSameFile() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""a.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, new string[] { "" }); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(1, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingWithMutualIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""a.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(2, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingWithSiblingIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Warning"" /> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <Include 
Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Equal(3, RuleSet.GetEffectiveIncludesFromFile(ruleSet.FilePath).Count()); } [Fact] public void TestRuleSetParsingIncludeAll3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The 'Action' attribute is invalid - The value 'Default' is invalid according to its datatype 'TIncludeAllAction' - The Enumeration constraint failed."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Id' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Action' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'AnalyzerId' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute4() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0""> <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required 
attribute 'RuleNamespace' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute5() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'ToolsVersion' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRulesMissingAttribute6() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The required attribute 'Name' is missing."), locMessage: locMessage); } [Fact] public void TestRuleSetParsingRules() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> <Rule Id=""CA1015"" Action=""Info"" /> <Rule Id=""CA1016"" Action=""Hidden"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1012"], ReportDiagnostic.Error); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1013"], ReportDiagnostic.Warn); Assert.Contains("CA1014", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1014"], ReportDiagnostic.Suppress); Assert.Contains("CA1015", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1015"], ReportDiagnostic.Info); Assert.Contains("CA1016", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ruleSet.SpecificDiagnosticOptions["CA1016"], ReportDiagnostic.Hidden); } [Fact] public void TestRuleSetParsingRules2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Default"" /> <Rule Id=""CA1013"" Action=""Warning"" /> <Rule Id=""CA1014"" Action=""None"" /> </Rules> </RuleSet> "; string locMessage = string.Format(CodeAnalysisResources.RuleSetSchemaViolation, ""); VerifyRuleSetError(source, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "The 'Action' attribute is invalid - The value 'Default' is invalid according to its datatype 'TRuleAction' - The Enumeration constraint failed."), locMessage: locMessage); } [Fact] public void TestRuleSetInclude() { string source = @"<?xml version=""1.0"" 
encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""foo.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source); Assert.True(ruleSet.Includes.Count() == 1); Assert.Equal(ruleSet.Includes.First().Action, ReportDiagnostic.Default); Assert.Equal(ruleSet.Includes.First().IncludePath, "foo.ruleset"); } [WorkItem(156)] [Fact(Skip = "156")] public void TestRuleSetInclude1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""foo.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; VerifyRuleSetError(source, string.Format(CodeAnalysisResources.InvalidRuleSetInclude, "foo.ruleset", string.Format(CodeAnalysisResources.FailedToResolveRuleSetName, "foo.ruleset")), otherSources: new string[] {""}); } [Fact] public void TestRuleSetInclude2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Hidden"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Hidden, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict1() { string source = @"<?xml 
version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Info"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Info, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeGlobalStrict3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Error, 
ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeRecursiveIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1014"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1013"]); Assert.Contains("CA1014", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1014"]); } [Fact] public void TestRuleSetIncludeSpecificStrict1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); // CA1012's value in source wins. 
Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetIncludeSpecificStrict2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Error"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); // CA1012's value in source still wins. Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); } [Fact] public void TestRuleSetIncludeSpecificStrict3() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Default"" /> <Include Path=""file2.ruleset"" Action=""Default"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Error"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); // CA1013's value in source2 wins. 
Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeEffectiveAction() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""None"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.DoesNotContain("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); } [Fact] public void TestRuleSetIncludeEffectiveAction1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); Assert.Equal(ReportDiagnostic.Default, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionGlobal1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Error, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionGlobal2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" 
Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <IncludeAll Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Equal(ReportDiagnostic.Warn, ruleSet.GeneralDiagnosticOption); } [Fact] public void TestRuleSetIncludeEffectiveActionSpecific1() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""None"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestRuleSetIncludeEffectiveActionSpecific2() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Warning"" /> </Rules> </RuleSet> "; var ruleSet = ParseRuleSet(source, source1); Assert.Contains("CA1012", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1012"]); Assert.Contains("CA1013", ruleSet.SpecificDiagnosticOptions.Keys); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA1013"]); } [Fact] public void TestAllCombinations() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; string source2 = @"<?xml 
version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set3"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> <Rule Id=""CA2119"" Action=""None"" /> <Rule Id=""CA2104"" Action=""Error"" /> <Rule Id=""CA2105"" Action=""Warning"" /> </Rules> </RuleSet>"; var ruleSet = ParseRuleSet(source, source1, source2); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1000"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA1001"]); Assert.Equal(ReportDiagnostic.Error, ruleSet.SpecificDiagnosticOptions["CA2100"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA2104"]); Assert.Equal(ReportDiagnostic.Warn, ruleSet.SpecificDiagnosticOptions["CA2105"]); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA2111"]); Assert.Equal(ReportDiagnostic.Suppress, ruleSet.SpecificDiagnosticOptions["CA2119"]); } [Fact] public void TestRuleSetIncludeError() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; string source1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset2"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1013"" Action=""Default"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); var newFile = dir.CreateFile("file1.ruleset"); newFile.WriteAllText(source1); try { RuleSet.LoadEffectiveRuleSetFromFile(file.Path); Assert.True(false, "Didn't throw an exception"); } catch (InvalidRuleSetException e) { Assert.Contains(string.Format(CodeAnalysisResources.InvalidRuleSetInclude, newFile.Path, string.Format(CodeAnalysisResources.RuleSetSchemaViolation, "")), e.Message); } } [Fact] public void GetEffectiveIncludes_NoIncludes() { string source = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""Ruleset1"" Description=""Test"" ToolsVersion=""12.0"" > <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1012"" Action=""Warning"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(source); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 1, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); } [Fact] public void GetEffectiveIncludes_OneLevel() { string ruleSetSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string includeSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" 
ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(ruleSetSource); var include = dir.CreateFile("file1.ruleset"); include.WriteAllText(includeSource); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 2, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); Assert.Equal(expected: include.Path, actual: includePaths[1]); } [Fact] public void GetEffectiveIncludes_TwoLevels() { string ruleSetSource = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set1"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file1.ruleset"" Action=""Error"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA1000"" Action=""Warning"" /> <Rule Id=""CA1001"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""None"" /> </Rules> </RuleSet> "; string includeSource1 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set2"" Description=""Test"" ToolsVersion=""12.0""> <Include Path=""file2.ruleset"" Action=""Warning"" /> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> </Rules> </RuleSet> "; string includeSource2 = @"<?xml version=""1.0"" encoding=""utf-8""?> <RuleSet Name=""New Rule Set3"" Description=""Test"" ToolsVersion=""12.0""> <Rules AnalyzerId=""Microsoft.Analyzers.ManagedCodeAnalysis"" RuleNamespace=""Microsoft.Rules.Managed""> <Rule Id=""CA2100"" Action=""Warning"" /> <Rule Id=""CA2111"" Action=""Warning"" /> <Rule Id=""CA2119"" Action=""None"" /> <Rule Id=""CA2104"" Action=""Error"" /> <Rule Id=""CA2105"" Action=""Warning"" /> </Rules> </RuleSet>"; var dir = Temp.CreateDirectory(); var file = dir.CreateFile("a.ruleset"); file.WriteAllText(ruleSetSource); var include1 = dir.CreateFile("file1.ruleset"); include1.WriteAllText(includeSource1); var include2 = dir.CreateFile("file2.ruleset"); include2.WriteAllText(includeSource2); var includePaths = RuleSet.GetEffectiveIncludesFromFile(file.Path); Assert.Equal(expected: 3, actual: includePaths.Length); Assert.Equal(expected: file.Path, actual: includePaths[0]); Assert.Equal(expected: include1.Path, actual: includePaths[1]); Assert.Equal(expected: include2.Path, actual: includePaths[2]); } } }
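// Illustrative sketch (added; not part of the original test sources): consuming the RuleSet APIs the
// tests above exercise. The path, rule id and class name here are hypothetical.
using Microsoft.CodeAnalysis;

internal static class RuleSetUsageSketch
{
    internal static void Describe(string rulesetPath)
    {
        // Includes are resolved and merged exactly as the effective-action tests above demonstrate.
        RuleSet effective = RuleSet.LoadEffectiveRuleSetFromFile(rulesetPath);
        ReportDiagnostic general = effective.GeneralDiagnosticOption;

        // Assumes the rule set (or one of its includes) configures CA1012.
        ReportDiagnostic ca1012 = effective.SpecificDiagnosticOptions["CA1012"];

        // The effective include chain starts with the root file itself, followed by the files it includes.
        var includeChain = RuleSet.GetEffectiveIncludesFromFile(rulesetPath);

        System.Console.WriteLine($"General: {general}, CA1012: {ca1012}, Files: {includeChain.Length}");
    }
}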
/* ==================================================================== Licensed To the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file To You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed To in writing, software distributed under the License is distributed on an "AS Is" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ /* ================================================================ * About NPOI * Author: Tony Qu * Author's email: tonyqus (at) gmail.com * Author's Blog: tonyqus.wordpress.com.cn (wp.tonyqus.cn) * HomePage: http://www.codeplex.com/npoi * Contributors: * * ==============================================================*/ namespace NPOI.HPSF { using System; using System.Collections; using NPOI.HPSF.Wellknown; using NPOI.Util; /// <summary> /// Convenience class representing a DocumentSummary Information stream in a /// Microsoft Office document. /// @author Rainer Klute /// [email protected] /// @author Drew Varner (Drew.Varner cloSeto sc.edu) /// @author [email protected] /// @since 2002-02-09 /// </summary> [Serializable] public class DocumentSummaryInformation : SpecialPropertySet { /** * The document name a document summary information stream * usually has in a POIFS filesystem. */ public const string DEFAULT_STREAM_NAME = "\x0005DocumentSummaryInformation"; public override PropertyIDMap PropertySetIDMap { get { return PropertyIDMap.DocumentSummaryInformationProperties; } } /// <summary> /// Initializes a new instance of the <see cref="DocumentSummaryInformation"/> class. /// </summary> /// <param name="ps">A property Set which should be Created from a /// document summary information stream.</param> public DocumentSummaryInformation(PropertySet ps): base(ps) { if (!IsDocumentSummaryInformation) throw new UnexpectedPropertySetTypeException ("Not a " + GetType().Name); } /// <summary> /// Gets or sets the category. /// </summary> /// <value>The category value</value> public String Category { get { return GetPropertyStringValue(PropertyIDMap.PID_CATEGORY); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_CATEGORY, value); } } /// <summary> /// Removes the category. /// </summary> public void RemoveCategory() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_CATEGORY); } /// <summary> /// Gets or sets the presentation format (or null). /// </summary> /// <value>The presentation format value</value> public String PresentationFormat { get { return GetPropertyStringValue(PropertyIDMap.PID_PRESFORMAT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_PRESFORMAT, value); } } /// <summary> /// Removes the presentation format. /// </summary> public void RemovePresentationFormat() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_PRESFORMAT); } /// <summary> /// Gets or sets the byte count or 0 if the {@link /// DocumentSummaryInformation} does not contain a byte count. 
/// </summary> /// <value>The byteCount value</value> public int ByteCount { get { return GetPropertyIntValue(PropertyIDMap.PID_BYTECOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_BYTECOUNT, value); } } /// <summary> /// Removes the byte count. /// </summary> public void RemoveByteCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_BYTECOUNT); } /// <summary> /// Gets or sets the line count or 0 if the {@link /// DocumentSummaryInformation} does not contain a line count. /// </summary> /// <value>The line count value.</value> public int LineCount { get { return GetPropertyIntValue(PropertyIDMap.PID_LINECOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_LINECOUNT, value); } } /// <summary> /// Removes the line count. /// </summary> public void RemoveLineCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_LINECOUNT); } /// <summary> /// Gets or sets the par count or 0 if the {@link /// DocumentSummaryInformation} does not contain a par count. /// </summary> /// <value>The par count value</value> public int ParCount { get { return GetPropertyIntValue(PropertyIDMap.PID_PARCOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_PARCOUNT, value); } } /// <summary> /// Removes the par count. /// </summary> public void RemoveParCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_PARCOUNT); } /// <summary> /// Gets or sets the slide count or 0 if the {@link /// DocumentSummaryInformation} does not contain a slide count. /// </summary> /// <value>The slide count value</value> public int SlideCount { get { return GetPropertyIntValue(PropertyIDMap.PID_SLIDECOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_SLIDECOUNT, value); } } /// <summary> /// Removes the slide count. /// </summary> public void RemoveSlideCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_SLIDECOUNT); } /// <summary> /// Gets or sets the note count or 0 if the {@link /// DocumentSummaryInformation} does not contain a note count /// </summary> /// <value>The note count value</value> public int NoteCount { get { return GetPropertyIntValue(PropertyIDMap.PID_NOTECOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_NOTECOUNT, value); } } /// <summary> /// Removes the note count. /// </summary> public void RemoveNoteCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_NOTECOUNT); } /// <summary> /// Gets or sets the hidden count or 0 if the {@link /// DocumentSummaryInformation} does not contain a hidden /// count. /// </summary> /// <value>The hidden count value.</value> public int HiddenCount { get { return GetPropertyIntValue(PropertyIDMap.PID_HIDDENCOUNT); } set { MutableSection s = (MutableSection)Sections[0]; s.SetProperty(PropertyIDMap.PID_HIDDENCOUNT, value); } } /// <summary> /// Removes the hidden count. /// </summary> public void RemoveHiddenCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_HIDDENCOUNT); } /// <summary> /// Returns the mmclip count or 0 if the {@link /// DocumentSummaryInformation} does not contain a mmclip /// count. 
/// </summary> /// <value>The mmclip count value.</value> public int MMClipCount { get { return GetPropertyIntValue(PropertyIDMap.PID_MMCLIPCOUNT); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_MMCLIPCOUNT, value); } } /// <summary> /// Removes the MMClip count. /// </summary> public void RemoveMMClipCount() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_MMCLIPCOUNT); } /// <summary> /// Gets or sets a value indicating whether this <see cref="DocumentSummaryInformation"/> is scale. /// </summary> /// <value><c>true</c> if cropping is desired; otherwise, <c>false</c>.</value> public bool Scale { get { return GetPropertyBooleanValue(PropertyIDMap.PID_SCALE); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_SCALE, value); } } /// <summary> /// Removes the scale. /// </summary> public void RemoveScale() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_SCALE); } /// <summary> /// Gets or sets the heading pair (or null) /// </summary> /// <value>The heading pair value.</value> public byte[] HeadingPair { get { return (byte[])GetProperty(PropertyIDMap.PID_HEADINGPAIR); } set { throw new NotImplementedException("Writing byte arrays "); } } /// <summary> /// Removes the heading pair. /// </summary> public void RemoveHeadingPair() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_HEADINGPAIR); } /// <summary> /// Gets or sets the doc parts. /// </summary> /// <value>The doc parts value</value> public byte[] Docparts { get { return (byte[])GetProperty(PropertyIDMap.PID_DOCPARTS); } set { throw new NotImplementedException("Writing byte arrays"); } } /// <summary> /// Removes the doc parts. /// </summary> public void RemoveDocparts() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_DOCPARTS); } /// <summary> /// Gets or sets the manager (or <c>null</c>). /// </summary> /// <value>The manager value</value> public String Manager { get { return GetPropertyStringValue(PropertyIDMap.PID_MANAGER); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_MANAGER, value); } } /// <summary> /// Removes the manager. /// </summary> public void RemoveManager() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_MANAGER); } /// <summary> /// Gets or sets the company (or <c>null</c>). /// </summary> /// <value>The company value</value> public String Company { get { return GetPropertyStringValue(PropertyIDMap.PID_COMPANY); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_COMPANY, value); } } /// <summary> /// Removes the company. /// </summary> public void RemoveCompany() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_COMPANY); } /// <summary> /// Gets or sets a value indicating whether [links dirty]. /// </summary> /// <value><c>true</c> if the custom links are dirty.; otherwise, <c>false</c>.</value> public bool LinksDirty { get { return GetPropertyBooleanValue(PropertyIDMap.PID_LINKSDIRTY); } set { MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_LINKSDIRTY, value); } } /// <summary> /// Removes the links dirty. 
/// </summary> public void RemoveLinksDirty() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_LINKSDIRTY); } /** * <p>Returns the character count including whitespace, or 0 if the * {@link DocumentSummaryInformation} does not contain this char count.</p> * This is the whitespace-including version of {@link SummaryInformation#getCharCount()} * * @return The character count or <code>null</code> */ public int CharCountWithSpaces { get { return GetPropertyIntValue(PropertyIDMap.PID_CCHWITHSPACES); } set { /* * Sets the character count including whitespace */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_CCHWITHSPACES, value); } } /** * Removes the character count */ public void RemoveCharCountWithSpaces() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_CCHWITHSPACES); } /** * <p>Get if the User Defined Property Set has been updated outside of the * Application.</p> * <p>If it has (true), the hyperlinks should be updated on document load.</p> */ public bool HyperlinksChanged { get { return GetPropertyBooleanValue(PropertyIDMap.PID_HYPERLINKSCHANGED); } set { /* * Set the flag for if the User Defined Property Set has been updated outside * of the Application. */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_HYPERLINKSCHANGED, value); } } /** * Removes the flag for if the User Defined Property Set has been updated * outside of the Application. */ public void RemoveHyperlinksChanged() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_HYPERLINKSCHANGED); } /** * <p>Gets the version of the Application which wrote the * Property set, stored with the two high order bytes having the major * version number, and the two low order bytes the minor version number.</p> * <p>This will be 0 if no version is set.</p> */ public int ApplicationVersion { get { return GetPropertyIntValue(PropertyIDMap.PID_VERSION); } set { /* * Sets the Application version, which must be a 4 byte int with * the two high order bytes having the major version number, and the * two low order bytes the minor version number. 
*/ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_VERSION, value); } } /** * Removes the Application Version */ public void RemoveApplicationVersion() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_VERSION); } /** * <p>Returns the VBA digital signature for the VBA project * embedded in the document (or <code>null</code>).</p> */ public byte[] VBADigitalSignature { get { Object value = GetProperty(PropertyIDMap.PID_DIGSIG); if (value != null && value is byte[]) { return (byte[])value; } return null; } set { /* * <p>Sets the VBA digital signature for the VBA project * embedded in the document.</p> */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_DIGSIG, value); } } /** * Removes the VBA Digital Signature */ public void RemoveVBADigitalSignature() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_DIGSIG); } /** * <p>Gets the content type of the file (or <code>null</code>).</p> */ public String ContentType { get { return GetPropertyStringValue(PropertyIDMap.PID_CONTENTTYPE); } set { /* * Sets the content type of the file */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_CONTENTTYPE, value); } } /** * Removes the content type of the file */ public void RemoveContentType() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_CONTENTTYPE); } /** * <p>Gets the content status of the file (or <code>null</code>).</p> */ public String ContentStatus { get { return GetPropertyStringValue(PropertyIDMap.PID_CONTENTSTATUS); } set { /* * Sets the content type of the file */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_CONTENTSTATUS, value); } } /** * Removes the content status of the file */ public void RemoveContentStatus() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_CONTENTSTATUS); } /** * <p>Gets the document language, which is normally unset and empty * (or <code>null</code>).</p> */ public String Language { get { return GetPropertyStringValue(PropertyIDMap.PID_LANGUAGE); } set { /* * Set the document language */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_LANGUAGE, value); } } /** * Removes the document language */ public void RemoveLanguage() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_LANGUAGE); } /** * <p>Gets the document version as a string, which is normally unset and empty * (or <code>null</code>).</p> */ public String DocumentVersion { get { return GetPropertyStringValue(PropertyIDMap.PID_DOCVERSION); } set { /* * Sets the document version string */ MutableSection s = (MutableSection)FirstSection; s.SetProperty(PropertyIDMap.PID_DOCVERSION, value); } } /** * Removes the document version string */ public void RemoveDocumentVersion() { MutableSection s = (MutableSection)FirstSection; s.RemoveProperty(PropertyIDMap.PID_DOCVERSION); } /// <summary> /// Gets or sets the custom properties. 
/// </summary> /// <value>The custom properties.</value> public CustomProperties CustomProperties { get { CustomProperties cps = null; if (SectionCount >= 2) { cps = new CustomProperties(); Section section = (Section)Sections[1]; IDictionary dictionary = section.Dictionary; Property[] properties = section.Properties; int propertyCount = 0; for (int i = 0; i < properties.Length; i++) { Property p = properties[i]; long id = p.ID; if (id != 0 && id != 1) { propertyCount++; CustomProperty cp = new CustomProperty(p, (string)dictionary[id]); cps.Put(cp.Name, cp); } } if (cps.Count != propertyCount) cps.IsPure=false; } return cps; } set { EnsureSection2(); MutableSection section = (MutableSection)Sections[1]; IDictionary dictionary = value.Dictionary; section.Clear(); /* Set the codepage. If both custom properties and section have a * codepage, the codepage from the custom properties wins, else take the * one that is defined. If none is defined, take Unicode. */ int cpCodepage = value.Codepage; if (cpCodepage < 0) cpCodepage = section.Codepage; if (cpCodepage < 0) cpCodepage = CodePageUtil.CP_UNICODE; value.Codepage=cpCodepage; section.Codepage=cpCodepage; //add codepage propertyset section.Dictionary=dictionary; //generate dictionary propertyset //generate MutableSections for (IEnumerator i = value.Values.GetEnumerator(); i.MoveNext(); ) { Property p = (Property)i.Current; section.SetProperty(p); } } } /// <summary> /// Creates section 2 if it is not alReady present. /// </summary> private void EnsureSection2() { if (SectionCount < 2) { MutableSection s2 = new MutableSection(); s2.SetFormatID(SectionIDMap.DOCUMENT_SUMMARY_INFORMATION_ID2); AddSection(s2); } } /// <summary> /// Removes the custom properties. /// </summary> public void RemoveCustomProperties() { if (SectionCount >= 2) Sections.RemoveAt(1); else throw new HPSFRuntimeException("Illegal internal format of Document SummaryInformation stream: second section is missing."); } } }
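// Illustrative sketch (added; not part of the NPOI source above): writing a few of the
// DocumentSummaryInformation properties defined in the class. How the "dsi" instance is obtained
// (e.g. from the POIFS stream named DEFAULT_STREAM_NAME) is outside this snippet; the namespace
// and values below are hypothetical.
namespace NPOI.HPSF.Examples
{
    internal static class DocumentSummaryInformationSketch
    {
        internal static void FillCompanyDetails(DocumentSummaryInformation dsi)
        {
            // Simple string-valued properties map directly to PID_* entries in the first section.
            dsi.Company = "Example Corp";   // hypothetical value
            dsi.Manager = "Jane Doe";       // hypothetical value
            dsi.Category = "Examples";      // hypothetical value

            // Removing a property deletes it from the underlying section.
            dsi.RemovePresentationFormat();
        }
    }
}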
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using DotJEM.Json.Storage.Adapter.Materialize;
using DotJEM.Json.Storage.Adapter.Materialize.ChanceLog;
using DotJEM.Json.Storage.Adapter.Materialize.ChanceLog.ChangeObjects;
using DotJEM.Json.Storage.Adapter.Materialize.Log;
using DotJEM.Json.Storage.Configuration;
using DotJEM.Json.Storage.Queries;
using Newtonsoft.Json.Linq;

namespace DotJEM.Json.Storage.Adapter
{
    public interface IStorageAreaLog
    {
        /// <summary>
        /// Gets the latest generation returned by this changelog.
        /// </summary>
        long CurrentGeneration { get; }

        /// <summary>
        /// Gets the latest generation stored in the database.
        /// </summary>
        long LatestGeneration { get; }

        /// <summary>
        /// Gets the next batch of changes.
        /// </summary>
        /// <remarks>
        /// Use this method to continuously poll for changed documents in the storage area while letting the <see cref="IStorageAreaLog"/> track which generation was returned last.
        /// </remarks>
        /// <param name="includeDeletes">If <code>true</code>, returns all types of changes; if <code>false</code>, deletes are skipped.</param>
        /// <param name="count">The maximum number of changes to return.</param>
        /// <returns>A <see cref="IStorageChangeCollection"/> with changes since <see cref="CurrentGeneration"/></returns>
        IStorageChangeCollection Get(bool includeDeletes = true, int count = 5000);

        /// <summary>
        /// Gets a batch of changes from the provided <see cref="generation"/>.
        /// </summary>
        /// <remarks>
        /// Use this method to continuously poll for changed documents in the storage area while taking over tracking of the last returned generation.
        /// <strong>Note:</strong> If <see cref="count"/> is less than <code>1</code>, then this method will just reset <see cref="CurrentGeneration"/> to
        /// the <see cref="generation"/> provided unless the <see cref="generation"/> provided is greater than <see cref="LatestGeneration"/>, in which case
        /// <see cref="CurrentGeneration"/> is set to <see cref="LatestGeneration"/>.
/// </remarks> /// <param name="generation">The generation to start from.</param> /// <param name="includeDeletes">If <code>true</code>, returns all types of changes; If <code>false</code>, it skips deletes.</param> /// <param name="count">The maximum number of changes to return.</param> /// <returns></returns> IStorageChangeCollection Get(long generation, bool includeDeletes = true, int count = 5000); } public class SqlServerStorageAreaLog : IStorageAreaLog { private bool initialized; private readonly SqlServerStorageArea area; private readonly SqlServerStorageContext context; private readonly object padlock = new object(); public long CurrentGeneration { get; private set; } = -1; public long LatestGeneration { get { if (!TableExists) return -1; using (SqlConnection connection = context.Connection()) { connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandTimeout = context.SqlServerConfiguration.ReadCommandTimeout; command.CommandText = area.Commands["SelectMaxGeneration"]; object maxGeneration = command.ExecuteScalar(); if (maxGeneration == null || maxGeneration is DBNull) return -1; return (long)maxGeneration; } } } } public SqlServerStorageAreaLog(SqlServerStorageArea area, SqlServerStorageContext context) { this.area = area; this.context = context; this.indexes = new Lazy<Dictionary<string, IndexDefinition>>(() => LoadIndexes().ToDictionary(def => def.Name)); } public IStorageChangeCollection Insert(Guid id, JObject original, JObject changed, ChangeType action, SqlConnection connection, SqlTransaction transaction) { EnsureTable(); using (SqlCommand command = new SqlCommand { Connection = connection, Transaction = transaction }) { command.CommandText = area.Commands["InsertChange"]; command.Parameters.Add(new SqlParameter(StorageField.Fid.ToString(), SqlDbType.UniqueIdentifier)).Value = id; command.Parameters.Add(new SqlParameter(LogField.Action.ToString(), SqlDbType.VarChar)).Value = action.ToString(); command.Parameters.Add(new SqlParameter(StorageField.Data.ToString(), SqlDbType.VarBinary)).Value = context.Serializer.Serialize(Diff(original, changed)); using (SqlDataReader reader = command.ExecuteReader()) { reader.Read(); long token = reader.GetInt64(reader.GetOrdinal(StorageField.Id.ToString())); reader.Close(); string reference = (string)(changed ?? original)[context.Configuration.Fields[JsonField.Reference]]; string contentType = (string)(changed ?? original)[context.Configuration.Fields[JsonField.ContentType]]; int version = (int)(changed ?? original)[context.Configuration.Fields[JsonField.Version]]; DateTime created = (DateTime)(changed ?? original)[context.Configuration.Fields[JsonField.Created]]; DateTime updated = (DateTime)(changed ?? 
original)[context.Configuration.Fields[JsonField.Updated]]; ChangeLogRow row = action switch { ChangeType.Create => new CreateOnChangeLogRow(context, area.Name, token, id, contentType, Base36.Decode(reference), version, created, updated, changed), ChangeType.Update => new UpdateOnChangeLogRow(context, area.Name, token, id, contentType, Base36.Decode(reference), version, created, updated, changed), ChangeType.Delete => new DeleteOnChangeLogRow(context, area.Name, token, id, contentType, Base36.Decode(reference), version, created, updated, original), _ => throw new ArgumentOutOfRangeException() }; return new StorageChangeCollection(area.Name, token, new List<IChangeLogRow> { row }); } } } private StorageChangeCollection RunDataReader(long startGeneration, SqlDataReader reader) { int tokenColumn = reader.GetOrdinal("Token"); int actionColumn = reader.GetOrdinal("Action"); int idColumn = reader.GetOrdinal(StorageField.Id.ToString()); int fidColumn = reader.GetOrdinal(StorageField.Fid.ToString()); int dataColumn = reader.GetOrdinal(StorageField.Data.ToString()); int refColumn = reader.GetOrdinal(StorageField.Reference.ToString()); int versionColumn = reader.GetOrdinal(StorageField.Version.ToString()); int contentTypeColumn = reader.GetOrdinal(StorageField.ContentType.ToString()); int createdColumn = reader.GetOrdinal(StorageField.Created.ToString()); int updatedColumn = reader.GetOrdinal(StorageField.Updated.ToString()); List<IChangeLogRow> changes = new List<IChangeLogRow>(); while (reader.Read()) { long token = reader.GetInt64(tokenColumn); Enum.TryParse(reader.GetString(actionColumn), out ChangeType changeType); try { ChangeLogRow row = changeType switch { ChangeType.Create => new CreateChangeLogRow(context, area.Name, token, reader.GetGuid(idColumn), reader.GetString(contentTypeColumn), reader.GetInt64(refColumn), reader.GetInt32(versionColumn), reader.GetDateTime(createdColumn), reader.GetDateTime(updatedColumn), reader.GetSqlBinary(dataColumn).Value), ChangeType.Update => new UpdateChangeLogRow(context, area.Name, token, reader.GetGuid(idColumn), reader.GetString(contentTypeColumn), reader.GetInt64(refColumn), reader.GetInt32(versionColumn), reader.GetDateTime(createdColumn), reader.GetDateTime(updatedColumn), reader.GetSqlBinary(dataColumn).Value), ChangeType.Delete => new DeleteChangeLogRow(context, area.Name, token, reader.GetGuid(fidColumn)), _ => throw new ArgumentOutOfRangeException() }; changes.Add(row); } catch (Exception exception) { changes.Add(new FaultyChangeLogRow(context, area.Name, token, reader.GetGuid(fidColumn), changeType, exception)); } } if (changes.Any()) { return new StorageChangeCollection(area.Name, changes.Last().Generation, changes); } return new StorageChangeCollection(area.Name, startGeneration, changes); } private JObject Diff(JObject original, JObject changed) { JObject either = original ?? changed; JObject change = new JObject(); change[context.Configuration.Fields[JsonField.ContentType]] = either[context.Configuration.Fields[JsonField.ContentType]]; //TODO: Implemnt simple diff (record changed properties) // - Could also use this for change details... 
return change; } public IStorageChangeCollection Get(bool includeDeletes = true, int count = 5000) { if (!TableExists) return new StorageChangeCollection(area.Name, -1, new List<IChangeLogRow>()); return GetFromGeneration(CurrentGeneration, includeDeletes, count); } public IStorageChangeCollection Get(long generation, bool includeDeletes = true, int count = 5000) { //Note: If the requested token is greater than the current generation, we fetch the latest generation. // This ensures that the generation actually exists so that we don't skip future generation. if (generation > CurrentGeneration) generation = Math.Min(generation, LatestGeneration); return GetFromGeneration(generation, includeDeletes, count); } private IStorageChangeCollection GetFromGeneration(long generation, bool includeDeletes, int count) { if (!TableExists) return new StorageChangeCollection(area.Name, -1, new List<IChangeLogRow>()); if (count < 1) { //Note: If count is 0 or less, we don't load any changes, but only resets the generation. CurrentGeneration = generation; return new StorageChangeCollection(area.Name, CurrentGeneration, new List<IChangeLogRow>()); } using (SqlConnection connection = context.Connection()) { connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandTimeout = context.SqlServerConfiguration.ReadCommandTimeout; command.CommandText = includeDeletes ? area.Commands["SelectChangesWithDeletes"] : area.Commands["SelectChangesNoDeletes"]; command.Parameters.Add(new SqlParameter("token", SqlDbType.BigInt)).Value = generation; command.Parameters.Add(new SqlParameter("count", SqlDbType.Int)).Value = count; using (SqlDataReader reader = command.ExecuteReader()) { StorageChangeCollection changes = RunDataReader(generation, reader); CurrentGeneration = changes.Generation; return changes; } } } } private void EnsureTable() { if (initialized) return; if (!TableExists) CreateTable(); EnsureIndexes(); initialized = true; } private void EnsureIndexes() { EnsureIndex($"{area.Name}.changelog.id_fid_index", "ChangeLogIdFidIndex"); EnsureIndex($"{area.Name}.changelog.id_fid_action_index", "ChangeLogIdFidActionIndex"); EnsureIndex($"{area.Name}.changelog.fid_id_index", "ChangeLogFidIdIndex"); } private void EnsureIndex(string name, string commandName) { if (Indexes.ContainsKey(name)) return; lock (padlock) { if (Indexes.ContainsKey(name)) return; CreateIndex(commandName); } } private void CreateIndex(string commandName) { using (SqlConnection connection = context.Connection()) { connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandText = area.Commands[commandName]; command.ExecuteNonQuery(); } } } private Dictionary<string, IndexDefinition> Indexes => indexes.Value; //TODO: Lazy evaliated result set: The lazy definition + load method could probably be mixed into a single construct making this easier in the future to manage other such constructs. 
private readonly Lazy<Dictionary<string, IndexDefinition>> indexes; private IEnumerable<IndexDefinition> LoadIndexes() { using (SqlConnection connection = context.Connection()) { connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandText = area.Commands["ChangeLogIndexes"]; using (SqlDataReader reader = command.ExecuteReader()) { int nameColumn = reader.GetOrdinal("name"); while (reader.Read()) { string name = reader.GetString(nameColumn); yield return new IndexDefinition(name); } } } } } //TODO: Lazy evaliated result set end: private bool TableExists { get { using (SqlConnection connection = context.Connection()) { connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandText = area.Commands["LogTableExists"]; object result = command.ExecuteScalar(); return 1 == Convert.ToInt32(result); } } } } private void CreateTable() { using (SqlConnection connection = context.Connection()) { lock (padlock) { if (TableExists) return; connection.Open(); using (SqlCommand command = new SqlCommand { Connection = connection }) { command.CommandText = area.Commands["CreateLogTable"]; command.ExecuteNonQuery(); } } } } private class IndexDefinition { public string Name { get; } public IndexDefinition(string name) { Name = name; } } } }
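// Illustrative sketch (added; not part of the original source): polling a change log through the
// IStorageAreaLog interface documented above. The namespace, method shape and batch size are hypothetical.
namespace DotJEM.Json.Storage.Adapter.Examples
{
    internal static class StorageAreaLogPollingSketch
    {
        internal static void DrainOnce(IStorageAreaLog log)
        {
            // Each call returns changes committed after the generation the log last handed out;
            // the implementation above advances CurrentGeneration to the generation of the returned batch.
            var changes = log.Get(includeDeletes: true, count: 1000);

            // Process the batch here; an empty batch means nothing newer than CurrentGeneration exists yet.
        }
    }
}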
using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Threading.Tasks; using System.Transactions; using Abp.Auditing; using Abp.Dependency; using Abp.Domain.Entities; using Abp.Domain.Entities.Auditing; using Abp.Domain.Uow; using Abp.Events.Bus.Entities; using Abp.Extensions; using Abp.Json; using Abp.Runtime.Session; using Abp.Timing; using Castle.Core.Logging; using JetBrains.Annotations; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.ChangeTracking; namespace Abp.EntityHistory { public class EntityHistoryHelper : IEntityHistoryHelper, ITransientDependency { public ILogger Logger { get; set; } public IAbpSession AbpSession { get; set; } public IClientInfoProvider ClientInfoProvider { get; set; } public IEntityChangeSetReasonProvider EntityChangeSetReasonProvider { get; set; } public IEntityHistoryStore EntityHistoryStore { get; set; } private readonly IEntityHistoryConfiguration _configuration; private readonly IUnitOfWorkManager _unitOfWorkManager; private bool IsEntityHistoryEnabled { get { if (!_configuration.IsEnabled) { return false; } if (!_configuration.IsEnabledForAnonymousUsers && (AbpSession?.UserId == null)) { return false; } return true; } } public EntityHistoryHelper( IEntityHistoryConfiguration configuration, IUnitOfWorkManager unitOfWorkManager) { _configuration = configuration; _unitOfWorkManager = unitOfWorkManager; AbpSession = NullAbpSession.Instance; Logger = NullLogger.Instance; ClientInfoProvider = NullClientInfoProvider.Instance; EntityChangeSetReasonProvider = NullEntityChangeSetReasonProvider.Instance; EntityHistoryStore = NullEntityHistoryStore.Instance; } public virtual EntityChangeSet CreateEntityChangeSet(ICollection<EntityEntry> entityEntries) { var changeSet = new EntityChangeSet { Reason = EntityChangeSetReasonProvider.Reason.TruncateWithPostfix(EntityChangeSet.MaxReasonLength), // Fill "who did this change" BrowserInfo = ClientInfoProvider.BrowserInfo.TruncateWithPostfix(EntityChangeSet.MaxBrowserInfoLength), ClientIpAddress = ClientInfoProvider.ClientIpAddress.TruncateWithPostfix(EntityChangeSet.MaxClientIpAddressLength), ClientName = ClientInfoProvider.ComputerName.TruncateWithPostfix(EntityChangeSet.MaxClientNameLength), ImpersonatorTenantId = AbpSession.ImpersonatorTenantId, ImpersonatorUserId = AbpSession.ImpersonatorUserId, TenantId = AbpSession.TenantId, UserId = AbpSession.UserId }; if (!IsEntityHistoryEnabled) { return changeSet; } foreach (var entry in entityEntries) { var shouldSaveEntityHistory = ShouldSaveEntityHistory(entry); if (!shouldSaveEntityHistory && !HasAuditedProperties(entry)) { continue; } var entityChange = CreateEntityChange(entry, shouldSaveEntityHistory); if (entityChange == null) { continue; } changeSet.EntityChanges.Add(entityChange); } return changeSet; } public virtual async Task SaveAsync(EntityChangeSet changeSet) { if (!IsEntityHistoryEnabled) { return; } if (changeSet.EntityChanges.Count == 0) { return; } UpdateChangeSet(changeSet); using (var uow = _unitOfWorkManager.Begin(TransactionScopeOption.Suppress)) { await EntityHistoryStore.SaveAsync(changeSet); await uow.CompleteAsync(); } } [CanBeNull] private EntityChange CreateEntityChange(EntityEntry entityEntry, bool shouldSaveEntityHistory) { EntityChangeType changeType; switch (entityEntry.State) { case EntityState.Added: changeType = EntityChangeType.Created; break; case EntityState.Deleted: changeType = EntityChangeType.Deleted; break; case EntityState.Modified: changeType = 
IsDeleted(entityEntry) ? EntityChangeType.Deleted : EntityChangeType.Updated; break; case EntityState.Detached: case EntityState.Unchanged: default: Logger.Error("Unexpected EntityState!"); return null; } var entityId = GetEntityId(entityEntry); if (entityId == null && changeType != EntityChangeType.Created) { Logger.Error("Unexpected null value for entityId!"); return null; } var entityType = entityEntry.Entity.GetType(); var entityChange = new EntityChange { ChangeType = changeType, EntityEntry = entityEntry, // [NotMapped] EntityId = entityId, EntityTypeFullName = entityType.FullName, PropertyChanges = GetPropertyChanges(entityEntry, shouldSaveEntityHistory), TenantId = AbpSession.TenantId }; if (!shouldSaveEntityHistory && entityChange.PropertyChanges.Count == 0) { return null; } return entityChange; } private DateTime GetChangeTime(EntityChange entityChange) { var entity = entityChange.EntityEntry.As<EntityEntry>().Entity; switch (entityChange.ChangeType) { case EntityChangeType.Created: return (entity as IHasCreationTime)?.CreationTime ?? Clock.Now; case EntityChangeType.Deleted: return (entity as IHasDeletionTime)?.DeletionTime ?? Clock.Now; case EntityChangeType.Updated: return (entity as IHasModificationTime)?.LastModificationTime ?? Clock.Now; default: Logger.Error("Unexpected EntityState!"); return Clock.Now; } } private string GetEntityId(EntityEntry entry) { var primaryKeys = entry.Properties.Where(p => p.Metadata.IsPrimaryKey()); return primaryKeys.First().CurrentValue?.ToJsonString(); } /// <summary> /// Gets the property changes for this entry. /// </summary> private ICollection<EntityPropertyChange> GetPropertyChanges(EntityEntry entityEntry, bool shouldSaveEntityHistory) { var propertyChanges = new List<EntityPropertyChange>(); var properties = entityEntry.Metadata.GetProperties(); var isCreated = IsCreated(entityEntry); var isDeleted = IsDeleted(entityEntry); foreach (var property in properties) { var propertyEntry = entityEntry.Property(property.Name); if (ShouldSavePropertyHistory(propertyEntry, shouldSaveEntityHistory, isCreated || isDeleted)) { propertyChanges.Add(new EntityPropertyChange { NewValue = isDeleted ? null : propertyEntry.CurrentValue.ToJsonString().TruncateWithPostfix(EntityPropertyChange.MaxValueLength), OriginalValue = isCreated ? null : propertyEntry.OriginalValue.ToJsonString().TruncateWithPostfix(EntityPropertyChange.MaxValueLength), PropertyName = property.Name, PropertyTypeFullName = property.ClrType.FullName, TenantId = AbpSession.TenantId }); } } return propertyChanges; } private bool HasAuditedProperties(EntityEntry entityEntry) { var properties = entityEntry.Metadata.GetProperties(); return properties.Any(p => p.PropertyInfo?.IsDefined(typeof(AuditedAttribute)) ?? 
false); } private bool IsCreated(EntityEntry entityEntry) { return entityEntry.State == EntityState.Added; } private bool IsDeleted(EntityEntry entityEntry) { if (entityEntry.State == EntityState.Deleted) { return true; } var entity = entityEntry.Entity; return entity is ISoftDelete && entity.As<ISoftDelete>().IsDeleted; } private bool ShouldSaveEntityHistory(EntityEntry entityEntry) { if (entityEntry.State == EntityState.Detached || entityEntry.State == EntityState.Unchanged) { return false; } if (_configuration.IgnoredTypes.Any(t => t.IsInstanceOfType(entityEntry.Entity))) { return false; } var entityType = entityEntry.Entity.GetType(); var isOwnedEntity = entityEntry.Metadata.IsOwned(); if (!EntityHelper.IsEntity(entityType) && !isOwnedEntity) { return false; } var shouldSaveEntityHistoryForType = ShouldSaveEntityHistoryForType(entityType); if (shouldSaveEntityHistoryForType.HasValue) { return shouldSaveEntityHistoryForType.Value; } if (isOwnedEntity) { // Check if should save entity history for property that points to this owned entity var foreignKey = entityEntry.Metadata.GetForeignKeys().First(); var propertyInfo = foreignKey.PrincipalToDependent.PropertyInfo; var ownerType = foreignKey.PrincipalEntityType.ClrType; var shouldSaveEntityHistoryForOwnerType = ShouldSaveEntityHistoryForType(ownerType); var shouldSaveEntityHistoryForProperty = ShouldSavePropertyHistoryForInfo(propertyInfo, shouldSaveEntityHistoryForOwnerType ?? false); if (shouldSaveEntityHistoryForProperty.HasValue) { return shouldSaveEntityHistoryForProperty.Value; } // Check if should save entity history for owner type if (shouldSaveEntityHistoryForOwnerType.HasValue) { return shouldSaveEntityHistoryForOwnerType.Value; } } return false; } private bool? ShouldSaveEntityHistoryForType(Type entityType) { if (!entityType.IsPublic) { return false; } if (entityType.GetTypeInfo().IsDefined(typeof(DisableAuditingAttribute), true)) { return false; } if (entityType.GetTypeInfo().IsDefined(typeof(AuditedAttribute), true)) { return true; } if (_configuration.Selectors.Any(selector => selector.Predicate(entityType))) { return true; } return null; } private bool ShouldSavePropertyHistory(PropertyEntry propertyEntry, bool shouldSaveEntityHistory, bool defaultValue) { if (propertyEntry.Metadata.IsPrimaryKey()) { return false; } var propertyInfo = propertyEntry.Metadata.PropertyInfo; var shouldSavePropertyHistoryForInfo = ShouldSavePropertyHistoryForInfo(propertyInfo, shouldSaveEntityHistory); if (shouldSavePropertyHistoryForInfo.HasValue) { return shouldSavePropertyHistoryForInfo.Value; } var isModified = !(propertyEntry.OriginalValue?.Equals(propertyEntry.CurrentValue) ?? propertyEntry.CurrentValue == null); if (isModified) { return true; } return defaultValue; } private bool? ShouldSavePropertyHistoryForInfo(PropertyInfo propertyInfo, bool shouldSaveEntityHistory) { if (propertyInfo != null && propertyInfo.IsDefined(typeof(DisableAuditingAttribute), true)) { return false; } if (!shouldSaveEntityHistory) { // Should not save property history if property is not audited if (propertyInfo == null || !propertyInfo.IsDefined(typeof(AuditedAttribute), true)) { return false; } } return null; } /// <summary> /// Updates change time, entity id and foreign keys after SaveChanges is called. 
/// </summary> private void UpdateChangeSet(EntityChangeSet changeSet) { foreach (var entityChange in changeSet.EntityChanges) { /* Update change time */ entityChange.ChangeTime = GetChangeTime(entityChange); /* Update entity id */ var entityEntry = entityChange.EntityEntry.As<EntityEntry>(); entityChange.EntityId = GetEntityId(entityEntry); /* Update foreign keys */ var foreignKeys = entityEntry.Metadata.GetForeignKeys(); foreach (var foreignKey in foreignKeys) { foreach (var property in foreignKey.Properties) { var propertyEntry = entityEntry.Property(property.Name); var propertyChange = entityChange.PropertyChanges.FirstOrDefault(pc => pc.PropertyName == property.Name); if (propertyChange == null) { if (!(propertyEntry.OriginalValue?.Equals(propertyEntry.CurrentValue) ?? propertyEntry.CurrentValue == null)) { // Add foreign key entityChange.PropertyChanges.Add(new EntityPropertyChange { NewValue = propertyEntry.CurrentValue.ToJsonString(), OriginalValue = propertyEntry.OriginalValue.ToJsonString(), PropertyName = property.Name, PropertyTypeFullName = property.ClrType.FullName }); } continue; } if (propertyChange.OriginalValue == propertyChange.NewValue) { var newValue = propertyEntry.CurrentValue.ToJsonString(); if (newValue == propertyChange.NewValue) { // No change entityChange.PropertyChanges.Remove(propertyChange); } else { // Update foreign key propertyChange.NewValue = newValue.TruncateWithPostfix(EntityPropertyChange.MaxValueLength); } } } } } } } }
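// Illustrative sketch (added; not part of the ABP source above): one plausible way a DbContext could
// drive the helper around SaveChangesAsync. The context class, property and call order shown here are
// assumptions for illustration, not the library's actual integration code.
namespace Abp.EntityHistory.Examples
{
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.EntityFrameworkCore;

    internal abstract class EntityHistoryEnabledDbContext : DbContext
    {
        protected IEntityHistoryHelper EntityHistoryHelper { get; set; }

        public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
        {
            // Snapshot tracked entries before EF Core flushes them.
            var changeSet = EntityHistoryHelper.CreateEntityChangeSet(ChangeTracker.Entries().ToList());

            var result = await base.SaveChangesAsync(cancellationToken);

            // Persist history afterwards so generated keys and change times can be resolved (see UpdateChangeSet).
            await EntityHistoryHelper.SaveAsync(changeSet);
            return result;
        }
    }
}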
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void SubtractSByte() { var test = new SimpleBinaryOpTest__SubtractSByte(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local works test.RunLclFldScenario(); // Validates passing an instance member works test.RunFldScenario(); } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__SubtractSByte { private const int VectorSize = 16; private const int Op1ElementCount = VectorSize / sizeof(SByte); private const int Op2ElementCount = VectorSize / sizeof(SByte); private const int RetElementCount = VectorSize / sizeof(SByte); private static SByte[] _data1 = new SByte[Op1ElementCount]; private static SByte[] _data2 = new SByte[Op2ElementCount]; private static Vector128<SByte> _clsVar1; private static Vector128<SByte> _clsVar2; private Vector128<SByte> _fld1; private Vector128<SByte> _fld2; private SimpleBinaryOpTest__DataTable<SByte, SByte, SByte> _dataTable; static SimpleBinaryOpTest__SubtractSByte() { var random = new Random(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize); } public SimpleBinaryOpTest__SubtractSByte() { Succeeded = true; var 
random = new Random(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); } _dataTable = new SimpleBinaryOpTest__DataTable<SByte, SByte, SByte>(_data1, _data2, new SByte[RetElementCount], VectorSize); } public bool IsSupported => Sse2.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { var result = Sse2.Subtract( Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { var result = Sse2.Subtract( Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { var result = Sse2.Subtract( Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { var result = typeof(Sse2).GetMethod(nameof(Sse2.Subtract), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) }) .Invoke(null, new object[] { Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { var result = typeof(Sse2).GetMethod(nameof(Sse2.Subtract), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) }) .Invoke(null, new object[] { Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { var result = typeof(Sse2).GetMethod(nameof(Sse2.Subtract), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) }) .Invoke(null, new object[] { Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { var result = Sse2.Subtract( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void 
RunLclVarScenario_UnsafeRead() { var left = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr); var right = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr); var result = Sse2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { var left = Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)); var right = Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr)); var result = Sse2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { var left = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)); var right = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr)); var result = Sse2.Subtract(left, right); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(left, right, _dataTable.outArrayPtr); } public void RunLclFldScenario() { var test = new SimpleBinaryOpTest__SubtractSByte(); var result = Sse2.Subtract(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunFldScenario() { var result = Sse2.Subtract(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunUnsupportedScenario() { Succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { Succeeded = true; } } private void ValidateResult(Vector128<SByte> left, Vector128<SByte> right, void* result, [CallerMemberName] string method = "") { SByte[] inArray1 = new SByte[Op1ElementCount]; SByte[] inArray2 = new SByte[Op2ElementCount]; SByte[] outArray = new SByte[RetElementCount]; Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left); Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right); Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "") { SByte[] inArray1 = new SByte[Op1ElementCount]; SByte[] inArray2 = new SByte[Op2ElementCount]; SByte[] outArray = new SByte[RetElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize); Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(SByte[] left, SByte[] right, SByte[] result, [CallerMemberName] string method = "") { if ((sbyte)(left[0] - right[0]) != result[0]) { Succeeded = false; } else { for (var i = 1; i < RetElementCount; i++) { if ((sbyte)(left[i] - right[i]) != result[i]) { Succeeded = false; break; } } } if (!Succeeded) { Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.Subtract)}<SByte>(Vector128<SByte>, Vector128<SByte>): {method} failed:"); Console.WriteLine($" left: ({string.Join(", ", left)})"); Console.WriteLine($" right: ({string.Join(", ", right)})"); Console.WriteLine($" result: ({string.Join(", ", result)})"); Console.WriteLine(); } } } }
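// Illustrative helper (added; not part of the generated test): the scalar model that ValidateResult
// checks above is plain wrap-around subtraction per 8-bit lane; Sse2.Subtract applies the same
// operation to all 16 lanes at once. Class and member names are hypothetical.
namespace JIT.HardwareIntrinsics.X86
{
    using System.Runtime.Intrinsics;
    using System.Runtime.Intrinsics.X86;

    internal static class SubtractSByteScalarModel
    {
        // Overflow wraps; no saturation is involved in this intrinsic.
        internal static sbyte Expected(sbyte left, sbyte right) => (sbyte)(left - right);

        internal static Vector128<sbyte> Actual(Vector128<sbyte> left, Vector128<sbyte> right) => Sse2.Subtract(left, right);
    }
}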
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*
** This program was translated to C# and adapted for xunit-performance.
** New variants of several tests were added to compare class versus
** struct and to compare jagged arrays vs multi-dimensional arrays.
*/
/*
** BYTEmark (tm)
** BYTE Magazine's Native Mode benchmarks
** Rick Grehan, BYTE Magazine
**
** Create:
** Revision: 3/95
**
** DISCLAIMER
** The source, executable, and documentation files that comprise
** the BYTEmark benchmarks are made available on an "as is" basis.
** This means that we at BYTE Magazine have made every reasonable
** effort to verify that there are no errors in the source and
** executable code. We cannot, however, guarantee that the programs
** are error-free. Consequently, McGraw-Hill and BYTE Magazine make
** no claims in regard to the fitness of the source code, executable
** code, and documentation of the BYTEmark.
**
** Furthermore, BYTE Magazine, McGraw-Hill, and all employees
** of McGraw-Hill cannot be held responsible for any damages resulting
** from the use of this code or the results obtained from using
** this code.
*/
/********************************
** BACK PROPAGATION NEURAL NET **
*********************************
** This code is a modified version of the code
** that was submitted to BYTE Magazine by
** Maureen Caudill. It accompanied an article
** that I CANNOT NOW RECALL.
** The author's original heading/comment was
** as follows:
**
** Backpropagation Network
** Written by Maureen Caudill
** in Think C 4.0 on a Macintosh
**
** (c) Maureen Caudill 1988-1991
** This network will accept 5x7 input patterns
** and produce 8 bit output patterns.
** The source code may be copied or modified without restriction,
** but no fee may be charged for its use.
**
** ++++++++++++++
** I have modified the code so that it will work
** on systems other than a Macintosh -- RG
*/
/***********
** DoNNet **
************
** Perform the neural net benchmark.
** Note that this benchmark is one of the few that
** requires an input file. That file is "NNET.DAT" and
** should be on the local directory (from which the
** benchmark program is launched).
*/
using System;
using System.IO;

public class Neural : NNetStruct
{
    public override string Name()
    {
        return "NEURAL NET(rectangle)";
    }

    /*
    ** DEFINES
    */
    public static int T = 1;           /* TRUE */
    public static int F = 0;           /* FALSE */
    public static int ERR = -1;
    public static int MAXPATS = 10;    /* max number of patterns in data file */
    public static int IN_X_SIZE = 5;   /* number of neurodes/row of input layer */
    public static int IN_Y_SIZE = 7;   /* number of neurodes/col of input layer */
    public static int IN_SIZE = 35;    /* equals IN_X_SIZE*IN_Y_SIZE */
    public static int MID_SIZE = 8;    /* number of neurodes in middle layer */
    public static int OUT_SIZE = 8;    /* number of neurodes in output layer */
    public static double MARGIN = 0.1; /* how near to 1,0 do we have to come to stop? */
    public static double BETA = 0.09;  /* beta learning constant */
    public static double ALPHA = 0.09; /* momentum term constant */
    public static double STOP = 0.1;   /* when worst_error less than STOP, training is done */

    /*
    ** MAXNNETLOOPS
    **
    ** This constant sets the max number of loops through the neural
    ** net that the system will attempt before giving up. This
    ** is not a critical constant. You can alter it if your system
    ** has sufficient horsepower.
*/ public static int MAXNNETLOOPS = 50000; /* ** GLOBALS */ public static double[,] mid_wts = new double[MID_SIZE, IN_SIZE]; /* middle layer weights */ public static double[,] out_wts = new double[OUT_SIZE, MID_SIZE]; public static double[] mid_out = new double[MID_SIZE]; public static double[] out_out = new double[OUT_SIZE]; public static double[] mid_error = new double[MID_SIZE]; public static double[] out_error = new double[OUT_SIZE]; public static double[,] mid_wt_change = new double[MID_SIZE, IN_SIZE]; public static double[,] out_wt_change = new double[OUT_SIZE, MID_SIZE]; public static double[,] in_pats = new double[MAXPATS, IN_SIZE]; public static double[,] out_pats = new double[MAXPATS, OUT_SIZE]; public static double[] tot_out_error = new double[MAXPATS]; public static double[,] out_wt_cum_change = new double[OUT_SIZE, MID_SIZE]; public static double[,] mid_wt_cum_change = new double[MID_SIZE, IN_SIZE]; public static double worst_error = 0.0; /* worst error each pass through the data */ public static double average_error = 0.0; /* average error each pass through the data */ public static double[] avg_out_error = new double[MAXPATS]; public static int iteration_count = 0; /* number of passes thru network so far */ public static int numpats = 0; /* number of patterns in data file */ public static int numpasses = 0; /* number of training passes through data file */ public static int learned = 0; /* flag--if TRUE, network has learned all patterns */ /* ** The Neural Net test requires an input data file. ** The name is specified here. */ public static string inpath = "NNET.DAT"; public override double Run() { return DoNNET(this); } /********************* ** read_data_file() ** ********************** ** Read in the input data file and store the patterns in ** in_pats and out_pats. ** The format for the data file is as follows: ** ** line# data expected ** ----- ------------------------------ ** 1 In-X-size,in-y-size,out-size ** 2 number of patterns in file ** 3 1st X row of 1st input pattern ** 4.. following rows of 1st input pattern pattern ** in-x+2 y-out pattern ** 1st X row of 2nd pattern ** etc. ** ** Each row of data is separated by commas or spaces. ** The data is expected to be ascii text corresponding to ** either a +1 or a 0. ** ** Sample input for a 1-pattern file (The comments to the ** right may NOT be in the file unless more sophisticated ** parsing of the input is done.): ** ** 5,7,8 input is 5x7 grid, output is 8 bits ** 1 one pattern in file ** 0,1,1,1,0 beginning of pattern for "O" ** 1,0,0,0,1 ** 1,0,0,0,1 ** 1,0,0,0,1 ** 1,0,0,0,1 ** 1,0,0,0,0 ** 0,1,1,1,0 ** 0,1,0,0,1,1,1,1 ASCII code for "O" -- 0100 1111 ** ** Clearly, this simple scheme can be expanded or enhanced ** any way you like. ** ** Returns -1 if any file error occurred, otherwise 0. 
**/ private void read_data_file() { int xinsize = 0, yinsize = 0, youtsize = 0; int patt = 0, element = 0, i = 0, row = 0; int vals_read = 0; int val1 = 0, val2 = 0, val3 = 0, val4 = 0, val5 = 0, val6 = 0, val7 = 0, val8 = 0; Object[] results = new Object[8]; string input = NeuralData.Input; StringReader infile = new StringReader(input); vals_read = Utility.fscanf(infile, "%d %d %d", results); xinsize = (int)results[0]; yinsize = (int)results[1]; youtsize = (int)results[2]; if (vals_read != 3) { throw new Exception("NNET: error reading input"); } vals_read = Utility.fscanf(infile, "%d", results); numpats = (int)results[0]; if (vals_read != 1) { throw new Exception("NNET: error reading input"); } if (numpats > MAXPATS) numpats = MAXPATS; for (patt = 0; patt < numpats; patt++) { element = 0; for (row = 0; row < yinsize; row++) { vals_read = Utility.fscanf(infile, "%d %d %d %d %d", results); val1 = (int)results[0]; val2 = (int)results[1]; val3 = (int)results[2]; val4 = (int)results[3]; val5 = (int)results[4]; if (vals_read != 5) { throw new Exception("NNET: error reading input"); } element = row * xinsize; in_pats[patt, element] = (double)val1; element++; in_pats[patt, element] = (double)val2; element++; in_pats[patt, element] = (double)val3; element++; in_pats[patt, element] = (double)val4; element++; in_pats[patt, element] = (double)val5; element++; } for (i = 0; i < IN_SIZE; i++) { if (in_pats[patt, i] >= 0.9) in_pats[patt, i] = 0.9; if (in_pats[patt, i] <= 0.1) in_pats[patt, i] = 0.1; } element = 0; vals_read = Utility.fscanf(infile, "%d %d %d %d %d %d %d %d", results); val1 = (int)results[0]; val2 = (int)results[1]; val3 = (int)results[2]; val4 = (int)results[3]; val5 = (int)results[4]; val6 = (int)results[5]; val7 = (int)results[6]; val8 = (int)results[7]; out_pats[patt, element] = (double)val1; element++; out_pats[patt, element] = (double)val2; element++; out_pats[patt, element] = (double)val3; element++; out_pats[patt, element] = (double)val4; element++; out_pats[patt, element] = (double)val5; element++; out_pats[patt, element] = (double)val6; element++; out_pats[patt, element] = (double)val7; element++; out_pats[patt, element] = (double)val8; element++; } } private double DoNNET(NNetStruct locnnetstruct) { // string errorcontext = "CPU:NNET"; // int systemerror = 0; long accumtime = 0; double iterations = 0.0; /* ** Init random number generator. ** NOTE: It is important that the random number generator ** be re-initialized for every pass through this test. ** The NNET algorithm uses the random number generator ** to initialize the net. Results are sensitive to ** the initial neural net state. */ ByteMark.randnum(3); /* ** Read in the input and output patterns. We'll do this ** only once here at the beginning. These values don't ** change once loaded. */ read_data_file(); /* ** See if we need to perform self adjustment loop. */ if (locnnetstruct.adjust == 0) { /* ** Do self-adjustment. This involves initializing the ** # of loops and increasing the loop count until we ** get a number of loops that we can use. */ for (locnnetstruct.loops = 1; locnnetstruct.loops < MAXNNETLOOPS; locnnetstruct.loops++) { ByteMark.randnum(3); if (DoNNetIteration(locnnetstruct.loops) > global.min_ticks) break; } } /* ** All's well if we get here. Do the test. 
*/ accumtime = 0L; iterations = (double)0.0; do { ByteMark.randnum(3); /* Gotta do this for Neural Net */ accumtime += DoNNetIteration(locnnetstruct.loops); iterations += (double)locnnetstruct.loops; } while (ByteMark.TicksToSecs(accumtime) < locnnetstruct.request_secs); /* ** Clean up, calculate results, and go home. Be sure to ** show that we don't have to rerun adjustment code. */ locnnetstruct.iterspersec = iterations / ByteMark.TicksToFracSecs(accumtime); if (locnnetstruct.adjust == 0) locnnetstruct.adjust = 1; return locnnetstruct.iterspersec; } /******************** ** DoNNetIteration ** ********************* ** Do a single iteration of the neural net benchmark. ** By iteration, we mean a "learning" pass. */ public static long DoNNetIteration(long nloops) { long elapsed; /* Elapsed time */ int patt; /* ** Run nloops learning cycles. Notice that, counted with ** the learning cycle is the weight randomization and ** zeroing of changes. This should reduce clock jitter, ** since we don't have to stop and start the clock for ** each iteration. */ elapsed = ByteMark.StartStopwatch(); while (nloops-- != 0) { randomize_wts(); zero_changes(); iteration_count = 1; learned = F; numpasses = 0; while (learned == F) { for (patt = 0; patt < numpats; patt++) { worst_error = 0.0; /* reset this every pass through data */ move_wt_changes(); /* move last pass's wt changes to momentum array */ do_forward_pass(patt); do_back_pass(patt); iteration_count++; } numpasses++; learned = check_out_error(); } } return (ByteMark.StopStopwatch(elapsed)); } /************************* ** do_mid_forward(patt) ** ************************** ** Process the middle layer's forward pass ** The activation of middle layer's neurode is the weighted ** sum of the inputs from the input pattern, with sigmoid ** function applied to the inputs. **/ public static void do_mid_forward(int patt) { double sum; int neurode, i; for (neurode = 0; neurode < MID_SIZE; neurode++) { sum = 0.0; for (i = 0; i < IN_SIZE; i++) { /* compute weighted sum of input signals */ sum += mid_wts[neurode, i] * in_pats[patt, i]; } /* ** apply sigmoid function f(x) = 1/(1+exp(-x)) to weighted sum */ sum = 1.0 / (1.0 + Math.Exp(-sum)); mid_out[neurode] = sum; } return; } /********************* ** do_out_forward() ** ********************** ** process the forward pass through the output layer ** The activation of the output layer is the weighted sum of ** the inputs (outputs from middle layer), modified by the ** sigmoid function. **/ public static void do_out_forward() { double sum; int neurode, i; for (neurode = 0; neurode < OUT_SIZE; neurode++) { sum = 0.0; for (i = 0; i < MID_SIZE; i++) { /* ** compute weighted sum of input signals ** from middle layer */ sum += out_wts[neurode, i] * mid_out[i]; } /* ** Apply f(x) = 1/(1+Math.Exp(-x)) to weighted input */ sum = 1.0 / (1.0 + Math.Exp(-sum)); out_out[neurode] = sum; } return; } /************************* ** display_output(patt) ** ************************** ** Display the actual output vs. the desired output of the ** network. ** Once the training is complete, and the "learned" flag set ** to TRUE, then display_output sends its output to both ** the screen and to a text output file. ** ** NOTE: This routine has been disabled in the benchmark ** version. 
-- RG **/ /* public static void display_output(int patt) { int i; fprintf(outfile,"\n Iteration # %d",iteration_count); fprintf(outfile,"\n Desired Output: "); for (i=0; i<OUT_SIZE; i++) { fprintf(outfile,"%6.3f ",out_pats[patt][i]); } fprintf(outfile,"\n Actual Output: "); for (i=0; i<OUT_SIZE; i++) { fprintf(outfile,"%6.3f ",out_out[i]); } fprintf(outfile,"\n"); return; } */ /********************** ** do_forward_pass() ** *********************** ** control function for the forward pass through the network ** NOTE: I have disabled the call to display_output() in ** the benchmark version -- RG. **/ public static void do_forward_pass(int patt) { do_mid_forward(patt); /* process forward pass, middle layer */ do_out_forward(); /* process forward pass, output layer */ /* display_output(patt); ** display results of forward pass */ return; } /*********************** ** do_out_error(patt) ** ************************ ** Compute the error for the output layer neurodes. ** This is simply Desired - Actual. **/ public static void do_out_error(int patt) { int neurode; double error, tot_error, sum; tot_error = 0.0; sum = 0.0; for (neurode = 0; neurode < OUT_SIZE; neurode++) { out_error[neurode] = out_pats[patt, neurode] - out_out[neurode]; /* ** while we're here, also compute magnitude ** of total error and worst error in this pass. ** We use these to decide if we are done yet. */ error = out_error[neurode]; if (error < 0.0) { sum += -error; if (-error > tot_error) tot_error = -error; /* worst error this pattern */ } else { sum += error; if (error > tot_error) tot_error = error; /* worst error this pattern */ } } avg_out_error[patt] = sum / OUT_SIZE; tot_out_error[patt] = tot_error; return; } /*********************** ** worst_pass_error() ** ************************ ** Find the worst and average error in the pass and save it **/ public static void worst_pass_error() { double error, sum; int i; error = 0.0; sum = 0.0; for (i = 0; i < numpats; i++) { if (tot_out_error[i] > error) error = tot_out_error[i]; sum += avg_out_error[i]; } worst_error = error; average_error = sum / numpats; return; } /******************* ** do_mid_error() ** ******************** ** Compute the error for the middle layer neurodes ** This is based on the output errors computed above. ** Note that the derivative of the sigmoid f(x) is ** f'(x) = f(x)(1 - f(x)) ** Recall that f(x) is merely the output of the middle ** layer neurode on the forward pass. **/ public static void do_mid_error() { double sum; int neurode, i; for (neurode = 0; neurode < MID_SIZE; neurode++) { sum = 0.0; for (i = 0; i < OUT_SIZE; i++) sum += out_wts[i, neurode] * out_error[i]; /* ** apply the derivative of the sigmoid here ** Because of the choice of sigmoid f(I), the derivative ** of the sigmoid is f'(I) = f(I)(1 - f(I)) */ mid_error[neurode] = mid_out[neurode] * (1 - mid_out[neurode]) * sum; } return; } /********************* ** adjust_out_wts() ** ********************** ** Adjust the weights of the output layer. The error for ** the output layer has been previously propagated back to ** the middle layer. ** Use the Delta Rule with momentum term to adjust the weights. 
**/ public static void adjust_out_wts() { int weight, neurode; double learn, delta, alph; learn = BETA; alph = ALPHA; for (neurode = 0; neurode < OUT_SIZE; neurode++) { for (weight = 0; weight < MID_SIZE; weight++) { /* standard delta rule */ delta = learn * out_error[neurode] * mid_out[weight]; /* now the momentum term */ delta += alph * out_wt_change[neurode, weight]; out_wts[neurode, weight] += delta; /* keep track of this pass's cum wt changes for next pass's momentum */ out_wt_cum_change[neurode, weight] += delta; } } return; } /************************* ** adjust_mid_wts(patt) ** ************************** ** Adjust the middle layer weights using the previously computed ** errors. ** We use the Generalized Delta Rule with momentum term **/ public static void adjust_mid_wts(int patt) { int weight, neurode; double learn, alph, delta; learn = BETA; alph = ALPHA; for (neurode = 0; neurode < MID_SIZE; neurode++) { for (weight = 0; weight < IN_SIZE; weight++) { /* first the basic delta rule */ delta = learn * mid_error[neurode] * in_pats[patt, weight]; /* with the momentum term */ delta += alph * mid_wt_change[neurode, weight]; mid_wts[neurode, weight] += delta; /* keep track of this pass's cum wt changes for next pass's momentum */ mid_wt_cum_change[neurode, weight] += delta; } } return; } /******************* ** do_back_pass() ** ******************** ** Process the backward propagation of error through network. **/ public static void do_back_pass(int patt) { do_out_error(patt); do_mid_error(); adjust_out_wts(); adjust_mid_wts(patt); return; } /********************** ** move_wt_changes() ** *********************** ** Move the weight changes accumulated last pass into the wt-change ** array for use by the momentum term in this pass. Also zero out ** the accumulating arrays after the move. **/ public static void move_wt_changes() { int i, j; for (i = 0; i < MID_SIZE; i++) for (j = 0; j < IN_SIZE; j++) { mid_wt_change[i, j] = mid_wt_cum_change[i, j]; /* ** Zero it out for next pass accumulation. */ mid_wt_cum_change[i, j] = 0.0; } for (i = 0; i < OUT_SIZE; i++) for (j = 0; j < MID_SIZE; j++) { out_wt_change[i, j] = out_wt_cum_change[i, j]; out_wt_cum_change[i, j] = 0.0; } return; } /********************** ** check_out_error() ** *********************** ** Check to see if the error in the output layer is below ** MARGIN*OUT_SIZE for all output patterns. If so, then ** assume the network has learned acceptably well. This ** is simply an arbitrary measure of how well the network ** has learned -- many other standards are possible. 
**/ public static int check_out_error() { int result, i, error; result = T; error = F; worst_pass_error(); /* identify the worst error in this pass */ /* #if DEBUG Console.WriteLine("\n Iteration # {0}",iteration_count); #endif */ for (i = 0; i < numpats; i++) { /* printf("\n Error pattern %d: Worst: %8.3f; Average: %8.3f", i+1,tot_out_error[i], avg_out_error[i]); fprintf(outfile, "\n Error pattern %d: Worst: %8.3f; Average: %8.3f", i+1,tot_out_error[i]); */ if (worst_error >= STOP) result = F; if (tot_out_error[i] >= 16.0) error = T; } if (error == T) result = ERR; #if DEBUG /* printf("\n Error this pass thru data: Worst: %8.3f; Average: %8.3f", worst_error,average_error); */ /* fprintf(outfile, "\n Error this pass thru data: Worst: %8.3f; Average: %8.3f", worst_error, average_error); */ #endif return (result); } /******************* ** zero_changes() ** ******************** ** Zero out all the wt change arrays **/ public static void zero_changes() { int i, j; for (i = 0; i < MID_SIZE; i++) { for (j = 0; j < IN_SIZE; j++) { mid_wt_change[i, j] = 0.0; mid_wt_cum_change[i, j] = 0.0; } } for (i = 0; i < OUT_SIZE; i++) { for (j = 0; j < MID_SIZE; j++) { out_wt_change[i, j] = 0.0; out_wt_cum_change[i, j] = 0.0; } } return; } /******************** ** randomize_wts() ** ********************* ** Intialize the weights in the middle and output layers to ** random values between -0.25..+0.25 ** Function rand() returns a value between 0 and 32767. ** ** NOTE: Had to make alterations to how the random numbers were ** created. -- RG. **/ public static void randomize_wts() { int neurode, i; double value; /* ** Following not used int benchmark version -- RG ** ** printf("\n Please enter a random number seed (1..32767): "); ** scanf("%d", &i); ** srand(i); */ for (neurode = 0; neurode < MID_SIZE; neurode++) { for (i = 0; i < IN_SIZE; i++) { value = (double)ByteMark.abs_randwc(100000); value = value / (double)100000.0 - (double)0.5; mid_wts[neurode, i] = value / 2; } } for (neurode = 0; neurode < OUT_SIZE; neurode++) { for (i = 0; i < MID_SIZE; i++) { value = (double)ByteMark.abs_randwc(100000); value = value / (double)10000.0 - (double)0.5; out_wts[neurode, i] = value / 2; } } return; } /********************** ** display_mid_wts() ** *********************** ** Display the weights on the middle layer neurodes ** NOTE: This routine is not used in the benchmark ** test -- RG **/ /* static void display_mid_wts() { int neurode, weight, row, col; fprintf(outfile,"\n Weights of Middle Layer neurodes:"); for (neurode=0; neurode<MID_SIZE; neurode++) { fprintf(outfile,"\n Mid Neurode # %d",neurode); for (row=0; row<IN_Y_SIZE; row++) { fprintf(outfile,"\n "); for (col=0; col<IN_X_SIZE; col++) { weight = IN_X_SIZE * row + col; fprintf(outfile," %8.3f ", mid_wts[neurode,weight]); } } } return; } */ /********************** ** display_out_wts() ** *********************** ** Display the weights on the output layer neurodes ** NOTE: This code is not used in the benchmark ** test -- RG */ /* void display_out_wts() { int neurode, weight; fprintf(outfile,"\n Weights of Output Layer neurodes:"); for (neurode=0; neurode<OUT_SIZE; neurode++) { fprintf(outfile,"\n Out Neurode # %d \n",neurode); for (weight=0; weight<MID_SIZE; weight++) { fprintf(outfile," %8.3f ", out_wts[neurode,weight]); } } return; } */ }
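// --------------------------------------------------------------------------------
// Illustrative sketch (not part of the benchmark above): the weight updates in
// adjust_out_wts()/adjust_mid_wts() are the delta rule with a momentum term,
//     delta = BETA * error * input + ALPHA * previous_delta
// applied to neurodes whose activation is the logistic sigmoid f(x) = 1/(1+e^-x).
// The tiny helper below restates that arithmetic for a single weight, reusing the
// same BETA/ALPHA constants; all names here are hypothetical.
using System;

internal static class DeltaRuleSketch
{
    private const double Beta = 0.09;   // learning rate (BETA above)
    private const double Alpha = 0.09;  // momentum constant (ALPHA above)

    private static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    // Returns the new weight; 'previousDelta' carries last pass's change (momentum term).
    private static double UpdateWeight(double weight, double error, double input, ref double previousDelta)
    {
        double delta = Beta * error * input + Alpha * previousDelta;
        previousDelta = delta;
        return weight + delta;
    }

    private static void Main()
    {
        double w = 0.1, prev = 0.0;
        // One forward value and its error against a target output of 1.0.
        double activation = Sigmoid(0.3);
        double error = 1.0 - activation;
        w = UpdateWeight(w, error, 0.9, ref prev);   // input signal 0.9
        Console.WriteLine($"activation={activation:F4} error={error:F4} new weight={w:F4}");
    }
}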
// SPDX-License-Identifier: MIT // Copyright [email protected] // Copyright iced contributors #if ENCODER && BLOCK_ENCODER && CODE_ASSEMBLER using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; namespace Iced.Intel { /// <summary> /// High-Level Assembler. /// </summary> public partial class Assembler { readonly InstructionList _instructions; ulong _currentLabelId; Label _label; Label _currentAnonLabel; Label _nextAnonLabel; bool _definedAnonLabel; PrefixFlags _nextPrefixFlags; /// <summary> /// Creates a new instance of this assembler /// </summary> /// <param name="bitness">The assembler instruction set bitness, either 16, 32 or 64 bit.</param> public Assembler(int bitness) { switch (bitness) { case 16: case 32: case 64: break; default: throw new ArgumentOutOfRangeException(nameof(bitness)); } Bitness = bitness; _instructions = new InstructionList(); _label = default; _currentAnonLabel = default; _nextAnonLabel = default; _definedAnonLabel = false; PreferVex = true; PreferBranchShort = true; } /// <summary> /// Gets the bitness defined for this assembler. /// </summary> public int Bitness { get; } /// <summary> /// <c>true</c> to prefer VEX encoding over EVEX. This is the default. /// </summary> public bool PreferVex { get; set; } /// <summary> /// <c>true</c> to prefer short branch encoding. This is the default. /// </summary> public bool PreferBranchShort { get; set; } /// <summary> /// Gets the instructions. /// </summary> public IReadOnlyList<Instruction> Instructions => _instructions; /// <summary> /// Reset the current set of instructions and labels added to this instance. /// </summary> public void Reset() { _instructions.Clear(); _currentLabelId = 0; _label = default; _currentAnonLabel = default; _nextAnonLabel = default; _definedAnonLabel = false; _nextPrefixFlags = PrefixFlags.None; } /// <summary> /// Creates a label. /// </summary> /// <param name="name">Optional name of the label.</param> /// <returns></returns> public Label CreateLabel(string? name = null) { _currentLabelId++; var label = new Label(name, _currentLabelId); return label; } /// <summary> /// Gets the current label used by this instance. /// </summary> public Label CurrentLabel => _label; /// <summary> /// Use the specified label. /// </summary> /// <param name="label">Label to use</param> /// <exception cref="ArgumentException"></exception> public void Label(ref Label label) { if (label.IsEmpty) throw new ArgumentException($"Invalid label. Must be created via {nameof(CreateLabel)}", nameof(label)); if (label.InstructionIndex >= 0) throw new ArgumentException($"Cannot reuse label. The specified label is already associated with an instruction at index {label.InstructionIndex}.", nameof(label)); if (!_label.IsEmpty) throw new ArgumentException("At most one label per instruction is allowed"); label.InstructionIndex = _instructions.Count; _label = label; } /// <summary> /// Creates an anonymous label that can be referenced by using <see cref="B"/> (backward anonymous label) /// and <see cref="F"/> (forward anonymous label). 
/// </summary> public void AnonymousLabel() { if (_definedAnonLabel) throw new InvalidOperationException("At most one anonymous label per instruction is allowed"); if (_nextAnonLabel.IsEmpty) _currentAnonLabel = CreateLabel(); else _currentAnonLabel = _nextAnonLabel; _nextAnonLabel = default; _definedAnonLabel = true; } /// <summary> /// References the previous anonymous label created by <see cref="AnonymousLabel"/> /// </summary> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Label @B { get { if (_currentAnonLabel.IsEmpty) throw new InvalidOperationException("No anonymous label has been created yet"); return _currentAnonLabel; } } /// <summary> /// References the next anonymous label created by a future call to <see cref="AnonymousLabel"/> /// </summary> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Label @F { get { if (_nextAnonLabel.IsEmpty) _nextAnonLabel = CreateLabel(); return _nextAnonLabel; } } /// <summary> /// Add an instruction directly to the flow of instructions. /// </summary> /// <param name="instruction"></param> public void AddInstruction(Instruction instruction) => AddInstruction(ref instruction); /// <summary> /// Add an instruction directly to the flow of instructions. /// </summary> /// <param name="instruction"></param> public void AddInstruction(ref Instruction instruction) { if (!_label.IsEmpty && _definedAnonLabel) throw new InvalidOperationException("You can't create both an anonymous label and a normal label"); if (!_label.IsEmpty) instruction.IP = _label.Id; else if (_definedAnonLabel) instruction.IP = _currentAnonLabel.Id; // Setup prefixes if (_nextPrefixFlags != PrefixFlags.None) { if ((_nextPrefixFlags & PrefixFlags.Lock) != 0) { instruction.HasLockPrefix = true; } if ((_nextPrefixFlags & PrefixFlags.Xacquire) != 0) { instruction.HasXacquirePrefix = true; } if ((_nextPrefixFlags & PrefixFlags.Xrelease) != 0) { instruction.HasXreleasePrefix = true; } if ((_nextPrefixFlags & PrefixFlags.Rep) != 0) { instruction.HasRepPrefix = true; } else if ((_nextPrefixFlags & PrefixFlags.Repe) != 0) { instruction.HasRepePrefix = true; } else if ((_nextPrefixFlags & PrefixFlags.Repne) != 0) { instruction.HasRepnePrefix = true; } if ((_nextPrefixFlags & PrefixFlags.Bnd) != 0) { instruction.HasRepnePrefix = true; } if ((_nextPrefixFlags & PrefixFlags.Notrack) != 0) { instruction.SegmentPrefix = Register.DS; } } _instructions.Add(instruction); _label = default; _definedAnonLabel = false; _nextPrefixFlags = PrefixFlags.None; } /// <summary> /// Add an instruction directly to the flow of instructions. 
/// </summary> /// <param name="instruction"></param> /// <param name="flags">Operand flags passed.</param> void AddInstruction(Instruction instruction, AssemblerOperandFlags flags = AssemblerOperandFlags.None) { if (flags != AssemblerOperandFlags.None) { if ((flags & AssemblerOperandFlags.Broadcast) != 0) { instruction.IsBroadcast = true; } if ((flags & AssemblerOperandFlags.Zeroing) != 0) { instruction.ZeroingMasking = true; } if ((flags & AssemblerOperandFlags.RegisterMask) != 0) { // register mask is shift by 2 (starts at index 1 for K1) instruction.OpMask = (Register)((int)Register.K0 + (((int)(flags & AssemblerOperandFlags.RegisterMask)) >> 6)); } if ((flags & AssemblerOperandFlags.SuppressAllExceptions) != 0) { instruction.SuppressAllExceptions = true; } if ((flags & AssemblerOperandFlags.RoundControlMask) != 0) { instruction.RoundingControl = (RoundingControl)((((int)(flags & AssemblerOperandFlags.RoundControlMask)) >> 3)); } } AddInstruction(ref instruction); } /// <summary> /// Add lock prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler @lock { get { _nextPrefixFlags |= PrefixFlags.Lock; return this; } } /// <summary> /// Add xacquire prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler xacquire { get { _nextPrefixFlags |= PrefixFlags.Xacquire; return this; } } /// <summary> /// Add xrelease prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler xrelease { get { _nextPrefixFlags |= PrefixFlags.Xrelease; return this; } } /// <summary> /// Add rep prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler rep { get { _nextPrefixFlags |= PrefixFlags.Rep; return this; } } /// <summary> /// Add repe prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler repe { get { _nextPrefixFlags |= PrefixFlags.Repe; return this; } } /// <summary> /// Add repne prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler repne { get { _nextPrefixFlags |= PrefixFlags.Repne; return this; } } /// <summary> /// Add bnd prefix before the next instruction. /// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler bnd { get { _nextPrefixFlags |= PrefixFlags.Bnd; return this; } } /// <summary> /// Add notrack prefix before the next instruction. 
/// </summary> /// <returns></returns> [DebuggerBrowsable(DebuggerBrowsableState.Never)] public Assembler notrack { get { _nextPrefixFlags |= PrefixFlags.Notrack; return this; } } /// <summary> /// Adds data /// </summary> /// <param name="array">Data</param> public void db(byte[] array) { if (array is null) ThrowHelper.ThrowArgumentNullException_array(); db(array, 0, array.Length); } /// <summary> /// Adds data /// </summary> /// <param name="array">Data</param> /// <param name="index">Start index</param> /// <param name="length">Length in bytes</param> public void db(byte[] array, int index, int length) { if (array is null) ThrowHelper.ThrowArgumentNullException_array(); if (index < 0) ThrowHelper.ThrowArgumentOutOfRangeException_index(); if (length < 0 || (uint)(index + length) > (uint)array.Length) ThrowHelper.ThrowArgumentOutOfRangeException_length(); const int maxLength = 16; int cycles = Math.DivRem(length, maxLength, out int rest); int currentPosition = index; for (int i = 0; i < cycles; i++) { AddInstruction(Instruction.CreateDeclareByte(array, currentPosition, maxLength)); currentPosition += maxLength; } if (rest > 0) AddInstruction(Instruction.CreateDeclareByte(array, currentPosition, rest)); } #if HAS_SPAN /// <summary> /// Adds data /// </summary> /// <param name="data">Data</param> public void db(ReadOnlySpan<byte> data) { const int maxLength = 16; int cycles = Math.DivRem(data.Length, maxLength, out int rest); int currentPosition = 0; for (int i = 0; i < cycles; i++) { AddInstruction(Instruction.CreateDeclareByte(data.Slice(currentPosition, maxLength))); currentPosition += maxLength; } if (rest > 0) AddInstruction(Instruction.CreateDeclareByte(data.Slice(currentPosition, rest))); } #endif /// <summary>call selector:offset instruction.</summary> public void call(ushort selector, uint offset) => AddInstruction(Instruction.CreateBranch(Bitness >= 32 ? Code.Call_ptr1632 : Code.Call_ptr1616, selector, offset)); /// <summary>jmp selector:offset instruction.</summary> public void jmp(ushort selector, uint offset) => AddInstruction(Instruction.CreateBranch(Bitness >= 32 ? 
Code.Jmp_ptr1632 : Code.Jmp_ptr1616, selector, offset)); /// <summary>xlatb instruction.</summary> public void xlatb() { if (Bitness == 64) AddInstruction(Instruction.Create(Code.Xlat_m8, new MemoryOperand(Register.RBX, Register.AL, 1))); else if (Bitness == 32) AddInstruction(Instruction.Create(Code.Xlat_m8, new MemoryOperand(Register.EBX, Register.AL, 1))); else AddInstruction(Instruction.Create(Code.Xlat_m8, new MemoryOperand(Register.BX, Register.AL, 1))); } /// <summary> /// Generates multibyte NOP instructions /// </summary> /// <param name="amount">Number of bytes</param> public void nop(int amount) { if (amount < 0) throw new ArgumentOutOfRangeException(nameof(amount)); if (amount == 0) return; const int maxMultibyteNopInstructionLength = 9; int cycles = Math.DivRem(amount, maxMultibyteNopInstructionLength, out int rest); for (int i = 0; i < cycles; i++) { AppendNop(maxMultibyteNopInstructionLength); } if (rest > 0) AppendNop(rest); void AppendNop(int amount) { switch (amount) { case 1: db(0x90); //NOP break; case 2: db(0x66, 0x90); //66 NOP break; case 3: db(0x0F, 0x1F, 0x00); //NOP dword ptr [eax] or NOP word ptr [bx+si] break; case 4: db(0x0F, 0x1F, 0x40, 0x00); //NOP dword ptr [eax + 00] or NOP word ptr [bx+si] break; case 5: if (Bitness >= 32) db(0x0F, 0x1F, 0x44, 0x00, 0x00); //NOP dword ptr [eax + eax*1 + 00] else db(0x0F, 0x1F, 0x80, 0x00, 0x00); //NOP word ptr[bx + si] break; case 6: if (Bitness >= 32) db(0x66, 0x0F, 0x1F, 0x44, 0x00, 0x00); //66 NOP dword ptr [eax + eax*1 + 00] else db(0x66, 0x0F, 0x1F, 0x80, 0x00, 0x00); //NOP dword ptr [bx+si] break; case 7: if (Bitness >= 32) db(0x0F, 0x1F, 0x80, 0x00, 0x00, 0x00, 0x00); //NOP dword ptr [eax + 00000000] else db(0x67, 0x66, 0x0F, 0x1F, 0x44, 0x00, 0x00); //NOP dword ptr [eax+eax] break; case 8: if (Bitness >= 32) db(0x0F, 0x1F, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00); //NOP dword ptr [eax + eax*1 + 00000000] else db(0x67, 0x0F, 0x1F, 0x80, 0x00, 0x00, 0x00, 0x00); //NOP word ptr [eax] break; case 9: if (Bitness >= 32) db(0x66, 0x0F, 0x1F, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00); //66 NOP dword ptr [eax + eax*1 + 00000000] else db(0x67, 0x0F, 0x1F, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00); //NOP word ptr [eax+eax] break; } } } /// <summary> /// Assembles the instructions of this assembler with the specified options. /// </summary> /// <param name="writer">The code writer.</param> /// <param name="rip">Base address.</param> /// <param name="options">Encoding options.</param> /// <returns></returns> /// <exception cref="InvalidOperationException"></exception> public AssemblerResult Assemble(CodeWriter writer, ulong rip, BlockEncoderOptions options = BlockEncoderOptions.None) { if (!TryAssemble(writer, rip, out var errorMessage, out var assemblerResult, options)) { throw new InvalidOperationException(errorMessage); } return assemblerResult; } /// <summary> /// Tries to assemble the instructions of this assembler with the specified options. /// </summary> /// <param name="writer">The code writer.</param> /// <param name="rip">Base address.</param> /// <param name="errorMessage">Error messages.</param> /// <param name="assemblerResult">The assembler result if successful.</param> /// <param name="options">Encoding options.</param> /// <returns><c>true</c> if the encoding was successful; <c>false</c> otherwise.</returns> public bool TryAssemble(CodeWriter writer, ulong rip, [NotNullWhen(false)] out string? 
errorMessage, out AssemblerResult assemblerResult, BlockEncoderOptions options = BlockEncoderOptions.None) { if (writer is null) ThrowHelper.ThrowArgumentNullException_writer(); assemblerResult = default; // Protect against using a prefix without actually using it if (_nextPrefixFlags != PrefixFlags.None) { errorMessage = $"Unused prefixes {_nextPrefixFlags}. You must emit an instruction after using an instruction prefix."; return false; } // Protect against a label emitted without being attached to an instruction if (!_label.IsEmpty) { errorMessage = $"Unused label {_label}. You must emit an instruction after emitting a label."; return false; } if (_definedAnonLabel) { errorMessage = "Unused anonymous label. You must emit an instruction after emitting a label."; return false; } if (!_nextAnonLabel.IsEmpty) { errorMessage = "Found an @F anonymous label reference but there was no call to " + nameof(AnonymousLabel); return false; } var blocks = new[] { new InstructionBlock(writer, _instructions, rip) }; if (BlockEncoder.TryEncode(Bitness, blocks, out errorMessage, out var blockResults, options)) { assemblerResult = new AssemblerResult(blockResults); return true; } else { assemblerResult = new AssemblerResult(Array2.Empty<BlockEncoderResult>()); return false; } } /// <summary> /// Internal method used to throw an InvalidOperationException if it was not possible to encode an OpCode. /// </summary> /// <param name="mnemonic">The mnemonic of the instruction</param> /// <param name="argNames">The argument values.</param> /// <returns></returns> InvalidOperationException NoOpCodeFoundFor(Mnemonic mnemonic, params object[] argNames) { var builder = new StringBuilder(); builder.Append($"Unable to calculate an OpCode for `{mnemonic.ToString().ToLowerInvariant()}"); for (int i = 0; i < argNames.Length; i++) { builder.Append(i == 0 ? " " : ", "); builder.Append(argNames[i]); // TODO: add pretty print for arguments (registers, memory...) } builder.Append($"`. Combination of arguments and/or current bitness {Bitness} is not compatible with any existing OpCode encoding."); return new InvalidOperationException(builder.ToString()); } [Flags] enum PrefixFlags { None = 0, Xacquire = 1 << 0, Xrelease = 1 << 1, Lock = 1 << 2, Rep = 1 << 3, Repe = 1 << 4, Repne = 1 << 5, Bnd = 1 << 6, Notrack = 1 << 7, } } } #endif
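// --------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the assembler source above): a caller
// creates an Assembler for a given bitness, emits instructions/data, then calls
// Assemble() with a CodeWriter and a base RIP. Only members defined in this file
// (nop, db, xlatb, Instructions, Assemble) are used; StreamCodeWriter is assumed
// to be the library's stream-backed CodeWriter implementation.
#if ENCODER && BLOCK_ENCODER && CODE_ASSEMBLER
using System;
using System.IO;
using Iced.Intel;

static class AssemblerUsageSketch {
	static void Run() {
		var asm = new Assembler(64);

		asm.nop(5);                                  // 5 bytes of multibyte NOP padding
		asm.db(new byte[] { 0xCC, 0xCC, 0xCC });     // raw data bytes (int3 filler)
		asm.xlatb();

		var stream = new MemoryStream();
		AssemblerResult result = asm.Assemble(new StreamCodeWriter(stream), rip: 0x0000_7FFF_0000_0000UL);

		Console.WriteLine($"Encoded {asm.Instructions.Count} instructions into {stream.Length} bytes");
	}
}
#endif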
/* * CmpCache * * Chris Fallin <[email protected]>, 2010-09-12 */ using System; using System.Collections.Generic; using System.Diagnostics; /* The coherence timing model is a compromise between 100% fidelity (full protocol FSMs) and speed/simplicity. The model has a functional/timing split: state is updated immediately in the functional model (cache contents and block states), and a chain (actually, DAG) of packets with dependencies is generated for each transaction. This chain of packets is then sent through the interconnect timing model, respecting dependencies, and the request completes in the timing model when all packets are delivered. Note that when there is no write-contention, and there are no memory-ordering races, this model should give 100% fidelity. It gives up accuracy in the case where write contention and/or interconnect reordering hit corner cases of the protocol; however, the hope is that these cases will be rare (if the protocol is well-behaved). Functional-level behavior such as ping-ponging is still captured; only behavior such as protocol-level retries (in a NACK-based recovery scheme) or unfairness due to interconnect delay in write contention are not captured. The great strength of this approach is that we *need not work out* the corner cases -- this gives robustness and speed, and allows implementations to be very simple, by considering transactions as atomic DAGs of packets that never execute simultaneously with any other transaction on the same block. The sequence for a request is: - A CmpCache_Txn (transaction) is generated for a read-miss, write-miss, or write-upgrade. The functional state is updated here, and the deltas (nodes that were invalidated, downgraded; type of transaction; whether it missed in shared cache) are recorded in the txn for timing to use. The functional update *must* happen immediately: if it waits until the packets are actually delivered (i.e., the timing model says that things are actually done), then multiple transactions might start with the same (out-of-date) state without awareness of each other. - The subclass of CmpCache that implements the protocol implements gen_proto(). This routine takes the transaction and generates a DAG of packets. - Packets flow through the interconnect. When each is delivered, it iterates over its wakeup list and decrements the remaining-deps counts on each dependent packet. If this reaches zero on a packet, that packet is sent. Also, the remaining-packets count on the transaction is decremented at each delivery. - When all of a transaction's packets are delivered, it is complete. 
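   As a concrete example, the simplest transaction is a read miss that hits in the
   shared cache while no private cache holds the block: the protocol generator emits a
   two-packet chain, a control request from the requestor to its shared-cache slice,
   and a data response back to the requestor that depends on the request and carries
   the DONE flag. The transaction completes when that response is delivered. Richer
   cases (invalidations, cache-to-cache transfers, memory fills with shared-cache
   evictions) only add more nodes and edges to the same DAG.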
*/ namespace ICSimulator { struct CmpCache_Owners { ulong[] bitmask; const int ULONG_BITS = 64; // maximum of 4096 caches public CmpCache_Owners(int junk) { this.bitmask = new ulong[ULONG_BITS]; for(int i=0; i < ULONG_BITS; i++) bitmask[i] = 0; } public void set(int i) { int r = i/ULONG_BITS; bitmask[r] |= ((ulong)1) << (i % ULONG_BITS); } public void unset(int i) { int r = i/ULONG_BITS; bitmask[r] &= ~( ((ulong)1) << (i % ULONG_BITS) ); } public void reset() { for(int i=0; i<ULONG_BITS; i++) bitmask[i] =0; } public bool is_set(int i) { int r = i/ULONG_BITS; return (bitmask[r] & ( ((ulong)1) << (i % ULONG_BITS) )) != 0; } public bool others_set(int i) { int r = i/ULONG_BITS; // Special check for the bits in the same ulong for(int j=0; j< ULONG_BITS; j++) { if(r==j) { if( (bitmask[j] & ~( ((ulong)1) << (i % ULONG_BITS) )) != 0) return true; } else if(bitmask[j] != 0) return true; } return false; } public bool any_set() { for(int i=0; i<ULONG_BITS; i++) if(bitmask[i] != 0) return true; return false; } } class CmpCache_State { public CmpCache_Owners owners = new CmpCache_Owners(0); // bitmask of owners public int excl; // single node that has exclusive grant, -1 otherwise public bool modified; // exclusive copy is modified? public bool sh_dirty; // copy in shared cache is dirty? public CmpCache_State() { excl = -1; modified = false; sh_dirty = false; } } // a DAG of CmpCache_Pkt instances is created for each CmpCache_Txn. The set // of packets, with dependencies, is the work required by the cache coherence // protocol to complete the transaction. public class CmpCache_Pkt { public bool send; // send a packet at this node (nodes can also be join-points w/o send) public int from, to; public ulong id; public int flits; public bool off_crit; public int vc_class; public bool done; // critical-path completion after this packet? public bool mem; // virtual node for going to memory public ulong mem_addr; public bool mem_write; public int mem_requestor; public ulong delay; // delay before sending (e.g., to model cache response latency) // out-edges public List<CmpCache_Pkt> wakeup; // packets that depend on this one // in-edges public int deps; // number of packets for which we are still waiting // associated txn public CmpCache_Txn txn; public CmpCache_Pkt () { send = false; from = to = 0; id = 0; done = false; off_crit = false; mem = false; mem_addr = 0; delay = 0; deps = 0; txn = null; } } // a transaction is one client-initiated operation that requires a protocol // interaction (read or write miss, or upgrade). // // A generic cache-coherence transaction is classified along these axes: // // Control ops: // - data grant: we grant a lease (either shared or excl) to zero or one // nodes per transaction. Could be due to miss or upgrade // (i.e., may or may not transfer data). // - data rescind: we rescind a lease (downgrade or invalidate) from these // nodes. Could be due to another node's upgrade or due to // a shared-cache replacement. // - additionally, we distinguish data rescinds due to private-cache invalidates // from those due to shared-cache evictions that require private cache invals. 
// // Data ops: // - data sh->prv: transfer data from shared to private (normal data grant) // - data prv->prv: transfer data from private to private (cache-to-cache xfer) // - data prv->sh: transfer data from private to shared (writeback) // - data mem->sh: transfer data from memory to shared (sh cache miss) // - data sh->mem: transfer data from shared to memory (sh cache writeback) // // The dependency ordering of the above is: // // (CASE 1) not in requestor's cache, present in other private caches: // // control request (client -> shared) // inval data rescinds (others -> shared or client directly) if exclusive req // inval prv->sh / prv->prv (data writeback if others invalidated) // -OR- // transfer prv->prv // WB of evicted block if dirty // // (CASE 2) not present in other private caches, but present in sh cache: // // control request (client->shared) // data grant (sh->prv), transfer (sh->prv) // data transfer (writeback) prv->sh upon replacement (must hit, due to inclusive cache) // (CASE 3) not present in other private caches, not present in sh cache: // // control request (client->mem) // mem request (shared->mem) // mem response // data grant/transfer to prv, AND inval: // - inval requests to owners of evicted sh-cache block, and WB to mem if clean, or WB prv->mem if dirty // public class CmpCache_Txn { /* initiating node */ public int node; /* protocol packet DAG */ public CmpCache_Pkt pkts; public int n_pkts; /* updated at each packet arrival */ public int n_pkts_remaining; /* timing completion callback */ public Simulator.Ready cb; }; public class CmpCache { ulong m_prvdelay; // private-cache probe delay (both local accesses and invalidates) ulong m_shdelay; // probe delay at shared cache (for first access) ulong m_opdelay; // pass-through delay once an operation is in progress int m_datapkt_size; bool m_sh_perfect; // shared cache perfect? 
Dictionary<ulong, CmpCache_State> m_perf_sh; int m_blkshift; int m_N; Sets<CmpCache_State> m_sh; Sets<bool>[] m_prv; // address mapping for shared cache slices //int map_addr(ulong addr) { return Simulator.network.mapping.homeNode(addr >> Config.cache_block).ID; } int map_addr(int node, ulong addr) { return Simulator.controller.mapCache(node, addr >> Config.cache_block); } // address mapping for memory controllers //int map_addr_mem(ulong addr) { return Simulator.network.mapping.memNode(addr >> Config.cache_block).ID; } int map_addr_mem(int node, ulong addr) { return Simulator.controller.mapMC(node, addr >> Config.cache_block); } // closest out of 'nodes' set int closest(int node, CmpCache_Owners nodes) { int best = -1; int best_dist = m_N; Coord here = new Coord(node); for (int i = 0; i < m_N; i++) if (nodes.is_set(i)) { int dist = (int)Simulator.distance(new Coord(i), here); if (dist < best_dist) { best = i; best_dist = dist; } } return best; } public CmpCache() { m_N = Config.N; m_blkshift = Config.cache_block; m_prv = new Sets<bool>[m_N]; for (int i = 0; i < m_N; i++) m_prv[i] = new Sets<bool>(m_blkshift, 1 << Config.coherent_cache_assoc, 1 << (Config.coherent_cache_size - Config.cache_block - Config.coherent_cache_assoc)); if (!Config.simple_nocoher) { if (Config.sh_cache_perfect) m_perf_sh = new Dictionary<ulong, CmpCache_State>(); else m_sh = new Sets<CmpCache_State>(m_blkshift, 1 << Config.sh_cache_assoc, (1 << (Config.sh_cache_size - Config.cache_block - Config.sh_cache_assoc)) * m_N); } m_prvdelay = (ulong)Config.cohcache_lat; m_shdelay = (ulong)Config.shcache_lat; m_opdelay = (ulong)Config.cacheop_lat; m_datapkt_size = Config.router.dataPacketSize; m_sh_perfect = Config.sh_cache_perfect; } public void access(int node, ulong addr, bool write, Simulator.Ready cb, out bool L1hit, out bool L1upgr, out bool L1ev, out bool L1wb, out bool L2access, out bool L2hit, out bool L2ev, out bool L2wb, out bool c2c) { CmpCache_Txn txn = null; int sh_slice = map_addr(node, addr); // ------------- first, we probe the cache (private, and shared if necessary) to // determine current state. // probe private cache CmpCache_State state; bool prv_state; bool prv_hit = m_prv[node].probe(addr, out prv_state); bool sh_hit = false; if (m_sh_perfect) { ulong blk = addr >> m_blkshift; sh_hit = true; if (m_perf_sh.ContainsKey(blk)) state = m_perf_sh[blk]; else { state = new CmpCache_State(); m_perf_sh[blk] = state; } } else sh_hit = m_sh.probe(addr, out state); bool prv_excl = sh_hit ? (state.excl == node) : false; if (prv_hit) // we always update the timestamp on the private cache m_prv[node].update(addr, Simulator.CurrentRound); // out-params L1hit = prv_hit; L1upgr = L1hit && !prv_excl; L2hit = sh_hit; c2c = false; // will be set below for appropriate cases L1ev = false; // will be set below L1wb = false; // will be set below L2ev = false; // will be set below L2wb = false; // will be set below L2access = false; // will be set below // ----------------- now, we execute one of four cases: // 1a. present in private cache, with appropriate ownership. // 1b. present in private cache, but not excl (for a write) // 2. not present in private cache, but in shared cache. // 3. not present in private or shared cache. // // in each case, we update functional state and generate the packet DAG as we go. 
if (prv_hit && (!write || prv_excl)) // CASE 1a: present in prv cache, have excl if write { // just set modified-bit in state, then we're done (no protocol interaction) if (write) state.modified = true; } else if (prv_hit && write && !prv_excl) // CASE 1b: present in prv cache, need upgr { txn = new CmpCache_Txn(); txn.node = node; // request packet CmpCache_Pkt req_pkt = add_ctl_pkt(txn, node, sh_slice, false); CmpCache_Pkt done_pkt = null; // present in others? if (state.owners.others_set(node)) { done_pkt = do_inval(txn, state, req_pkt, node, addr); } else { // not present in others, but we didn't have excl -- send empty grant // (could happen if others have evicted and we are the only one left) done_pkt = add_ctl_pkt(txn, sh_slice, node, true); done_pkt.delay = m_shdelay; add_dep(req_pkt, done_pkt); } state.owners.reset(); state.owners.set(node); state.excl = node; state.modified = true; } else if (!prv_hit && sh_hit) // CASE 2: not in prv cache, but in sh cache { txn = new CmpCache_Txn(); txn.node = node; // update functional shared state if (!m_sh_perfect) m_sh.update(addr, Simulator.CurrentRound); // request packet CmpCache_Pkt req_pkt = add_ctl_pkt(txn, node, sh_slice, false); CmpCache_Pkt done_pkt = null; if (state.owners.any_set()) // in other caches? { if (write) // need to invalidate? { if (state.excl != -1) // someone else has exclusive -- c-to-c xfer { c2c = true; // out-param CmpCache_Pkt xfer_req = add_ctl_pkt(txn, sh_slice, state.excl, false); CmpCache_Pkt xfer_dat = add_data_pkt(txn, state.excl, node, true); done_pkt = xfer_dat; xfer_req.delay = m_shdelay; xfer_dat.delay = m_prvdelay; add_dep(req_pkt, xfer_req); add_dep(xfer_req, xfer_dat); bool evicted_state; m_prv[state.excl].inval(addr, out evicted_state); } else // others have it -- inval to all, c-to-c from closest { int close = closest(node, state.owners); if (close != -1) c2c = true; // out-param done_pkt = do_inval(txn, state, req_pkt, node, addr, close); } // for a write, we need exclusive -- update state state.owners.reset(); state.owners.set(node); state.excl = node; state.modified = true; } else // just a read -- joining sharer set, c-to-c from closest { if (state.excl != -1) { CmpCache_Pkt xfer_req = add_ctl_pkt(txn, sh_slice, state.excl, false); CmpCache_Pkt xfer_dat = add_data_pkt(txn, state.excl, node, true); done_pkt = xfer_dat; c2c = true; // out-param xfer_req.delay = m_shdelay; xfer_dat.delay = m_prvdelay; add_dep(req_pkt, xfer_req); add_dep(xfer_req, xfer_dat); // downgrade must also trigger writeback if (state.modified) { CmpCache_Pkt wb_dat = add_data_pkt(txn, state.excl, sh_slice, false); add_dep(xfer_req, wb_dat); state.modified = false; state.sh_dirty = true; } } else { int close = closest(node, state.owners); if (close != -1) c2c = true; // out-param CmpCache_Pkt xfer_req = add_ctl_pkt(txn, sh_slice, close, false); CmpCache_Pkt xfer_dat = add_data_pkt(txn, close, node, true); done_pkt = xfer_dat; xfer_req.delay = m_shdelay; xfer_dat.delay = m_prvdelay; add_dep(req_pkt, xfer_req); add_dep(xfer_req, xfer_dat); } state.owners.set(node); state.excl = -1; } } else { // not in other prv caches, need to get from shared slice L2access = true; CmpCache_Pkt dat_resp = add_data_pkt(txn, sh_slice, node, true); done_pkt = dat_resp; add_dep(req_pkt, done_pkt); dat_resp.delay = m_shdelay; state.owners.reset(); state.owners.set(node); state.excl = node; state.modified = write; } // insert into private cache, get evicted block (if any) ulong evict_addr; bool evict_data; bool evicted = m_prv[node].insert(addr, 
true, out evict_addr, out evict_data, Simulator.CurrentRound); // add either a writeback or a release packet if (evicted) { L1ev = true; do_evict(txn, done_pkt, node, evict_addr, out L1wb); } } else if (!prv_hit && !sh_hit) // CASE 3: not in prv or shared cache { // here, we need to go to memory Debug.Assert(!m_sh_perfect); txn = new CmpCache_Txn(); txn.node = node; L2access = true; // request packet CmpCache_Pkt req_pkt = add_ctl_pkt(txn, node, sh_slice, false); // cache response packet CmpCache_Pkt resp_pkt = add_data_pkt(txn, sh_slice, node, true); resp_pkt.delay = m_opdelay; // req already active -- just a pass-through op delay here // memory request packet int mem_slice = map_addr_mem(node, addr); CmpCache_Pkt memreq_pkt = add_ctl_pkt(txn, sh_slice, mem_slice, false); memreq_pkt.delay = m_shdelay; // memory-access virtual node CmpCache_Pkt mem_access = add_ctl_pkt(txn, 0, 0, false); mem_access.send = false; mem_access.mem = true; mem_access.mem_addr = addr; mem_access.mem_write = false; // cache-line fill mem_access.mem_requestor = node; // memory response packet CmpCache_Pkt memresp_pkt = add_data_pkt(txn, mem_slice, sh_slice, false); // connect up the critical path first add_dep(req_pkt, memreq_pkt); add_dep(memreq_pkt, mem_access); add_dep(mem_access, memresp_pkt); add_dep(memresp_pkt, resp_pkt); // now, handle replacement in the shared cache... CmpCache_State new_state = new CmpCache_State(); new_state.owners.reset(); new_state.owners.set(node); new_state.excl = node; new_state.modified = write; new_state.sh_dirty = false; ulong sh_evicted_addr; CmpCache_State sh_evicted_state; bool evicted = m_sh.insert(addr, new_state, out sh_evicted_addr, out sh_evicted_state, Simulator.CurrentRound); if (evicted) { // shared-cache eviction (different from the private-cache evictions elsewhere): // we must evict any private-cache copies, because we model an inclusive hierarchy. 
L2ev = true; CmpCache_Pkt prv_evict_join = add_joinpt(txn, false); if (sh_evicted_state.excl != -1) // evicted block lives only in one prv cache { // invalidate request to prv cache before sh cache does eviction CmpCache_Pkt prv_invl = add_ctl_pkt(txn, sh_slice, sh_evicted_state.excl, false); add_dep(memresp_pkt, prv_invl); CmpCache_Pkt prv_wb; prv_invl.delay = m_opdelay; if (sh_evicted_state.modified) { // writeback prv_wb = add_data_pkt(txn, sh_evicted_state.excl, sh_slice, false); prv_wb.delay = m_prvdelay; sh_evicted_state.sh_dirty = true; } else { // simple ACK prv_wb = add_ctl_pkt(txn, sh_evicted_state.excl, sh_slice, false); prv_wb.delay = m_prvdelay; } add_dep(prv_invl, prv_wb); add_dep(prv_wb, prv_evict_join); bool prv_evicted_dat; m_prv[sh_evicted_state.excl].inval(sh_evicted_addr, out prv_evicted_dat); } else if (sh_evicted_state.owners.any_set()) // evicted block has greater-than-one sharer set { for (int i = 0; i < m_N; i++) if (sh_evicted_state.owners.is_set(i)) { CmpCache_Pkt prv_invl = add_ctl_pkt(txn, sh_slice, i, false); CmpCache_Pkt prv_ack = add_ctl_pkt(txn, i, sh_slice, false); prv_invl.delay = m_opdelay; prv_ack.delay = m_prvdelay; add_dep(memresp_pkt, prv_invl); add_dep(prv_invl, prv_ack); add_dep(prv_ack, prv_evict_join); bool prv_evicted_dat; m_prv[i].inval(sh_evicted_addr, out prv_evicted_dat); } } else // evicted block has no owners (was only in shared cache) { add_dep(memresp_pkt, prv_evict_join); } // now writeback to memory, if we were dirty if (sh_evicted_state.sh_dirty) { CmpCache_Pkt mem_wb = add_data_pkt(txn, sh_slice, mem_slice, false); mem_wb.delay = m_opdelay; add_dep(prv_evict_join, mem_wb); CmpCache_Pkt mem_wb_op = add_ctl_pkt(txn, 0, 0, false); mem_wb_op.send = false; mem_wb_op.mem = true; mem_wb_op.mem_addr = sh_evicted_addr; mem_wb_op.mem_write = true; mem_wb_op.mem_requestor = node; add_dep(mem_wb, mem_wb_op); L2wb = true; } } // ...and insert and handle replacement in the private cache ulong evict_addr; bool evict_data; bool prv_evicted = m_prv[node].insert(addr, true, out evict_addr, out evict_data, Simulator.CurrentRound); // add either a writeback or a release packet if (prv_evicted) { L1ev = true; do_evict(txn, resp_pkt, node, evict_addr, out L1wb); } } else // shouldn't happen. Debug.Assert(false); // now start the transaction, if one was needed if (txn != null) { txn.cb = cb; assignVCclasses(txn.pkts); // start running the protocol DAG. It may be an empty graph (for a silent upgr), in // which case the deferred start (after cache delay) Simulator.Defer(delegate() { start_pkts(txn); }, Simulator.CurrentRound + m_prvdelay); } // no transaction -- just the cache access delay. schedule deferred callback. else { Simulator.Defer(cb, Simulator.CurrentRound + m_prvdelay); } } // evict a block from given node, and construct either writeback or release packet. // updates functional state accordingly. 
void do_evict(CmpCache_Txn txn, CmpCache_Pkt init_dep, int node, ulong evict_addr, out bool wb) { ulong blk = evict_addr >> m_blkshift; int sh_slice = map_addr(node, evict_addr); CmpCache_State evicted_st; if (m_sh_perfect) { Debug.Assert(m_perf_sh.ContainsKey(blk)); evicted_st = m_perf_sh[blk]; } else { bool hit = m_sh.probe(evict_addr, out evicted_st); Debug.Assert(hit); // inclusive sh cache -- MUST be present in sh cache } if(evicted_st.excl == node && evicted_st.modified) { CmpCache_Pkt wb_pkt = add_data_pkt(txn, node, sh_slice, false); wb_pkt.delay = m_opdelay; // pass-through delay: operation already in progress add_dep(init_dep, wb_pkt); evicted_st.owners.reset(); evicted_st.excl = -1; evicted_st.sh_dirty = true; wb = true; } else { CmpCache_Pkt release_pkt = add_ctl_pkt(txn, node, sh_slice, false); release_pkt.delay = m_opdelay; add_dep(init_dep, release_pkt); evicted_st.owners.unset(node); if (evicted_st.excl == node) evicted_st.excl = -1; wb = false; } if (m_sh_perfect && !evicted_st.owners.any_set()) m_perf_sh.Remove(blk); } // construct a set of invalidation packets, all depending on init_dep, and // joining at a join-point that we return. Also invalidate the given addr // in the other prv caches. CmpCache_Pkt do_inval(CmpCache_Txn txn, CmpCache_State state, CmpCache_Pkt init_dep, int node, ulong addr) { return do_inval(txn, state, init_dep, node, addr, -1); } CmpCache_Pkt do_inval(CmpCache_Txn txn, CmpCache_State state, CmpCache_Pkt init_dep, int node, ulong addr, int c2c) { int sh_slice = map_addr(node, addr); // join-point (virtual packet). this is the completion point (DONE flag) CmpCache_Pkt invl_join = add_joinpt(txn, true); // invalidate from shared slice to each other owner for (int i = 0; i < m_N; i++) if (state.owners.is_set(i) && i != node) { CmpCache_Pkt invl_pkt = add_ctl_pkt(txn, sh_slice, i, false); invl_pkt.delay = m_shdelay; CmpCache_Pkt invl_resp = (c2c == i) ? add_data_pkt(txn, i, node, false) : add_ctl_pkt(txn, i, node, false); invl_resp.delay = m_prvdelay; add_dep(init_dep, invl_pkt); add_dep(invl_pkt, invl_resp); add_dep(invl_resp, invl_join); // invalidate in this prv cache. bool evicted_data; m_prv[i].inval(addr, out evicted_data); } return invl_join; } ulong pkt_id = 0; CmpCache_Pkt _add_pkt(CmpCache_Txn txn, int from, int to, bool data, bool send, bool done) { Debug.Assert(to >= 0 && to < m_N); CmpCache_Pkt pkt = new CmpCache_Pkt(); pkt.wakeup = new List<CmpCache_Pkt>(); pkt.id = pkt_id++; pkt.from = from; pkt.to = to; pkt.txn = txn; pkt.flits = data ? 
m_datapkt_size : 1; pkt.vc_class = 0; // gets filled in once DAG is complete pkt.done = done; pkt.send = send; pkt.deps = 0; pkt.delay = 0; pkt.mem_addr = 0; txn.n_pkts++; txn.n_pkts_remaining++; if (txn.pkts == null) txn.pkts = pkt; return pkt; } CmpCache_Pkt add_ctl_pkt(CmpCache_Txn txn, int from, int to, bool done) { return _add_pkt(txn, from, to, false, true, done); } CmpCache_Pkt add_data_pkt(CmpCache_Txn txn, int from, int to, bool done) { return _add_pkt(txn, from, to, true, true, done); } CmpCache_Pkt add_joinpt(CmpCache_Txn txn, bool done) { return _add_pkt(txn, 0, 0, false, false, done); } void add_dep(CmpCache_Pkt from, CmpCache_Pkt to) { from.wakeup.Add(to); to.deps++; } void start_pkts(CmpCache_Txn txn) { if (txn.n_pkts_remaining > 0) send_pkt(txn, txn.pkts); else txn.cb(); } void send_pkt(CmpCache_Txn txn, CmpCache_Pkt pkt) { if (pkt.delay > 0) { ulong due = Simulator.CurrentRound + pkt.delay; pkt.delay = 0; Simulator.Defer(delegate() { send_pkt(txn, pkt); }, due); } else if (pkt.send) { send_noc(txn.node, pkt.from, pkt.to, pkt.flits, delegate() { pkt_callback(txn, pkt); }, pkt.off_crit, pkt.vc_class); } else if (pkt.mem) { access_mem(pkt.mem_requestor, pkt.mem_addr, pkt.mem_write, delegate() { pkt_callback(txn, pkt); }); } else pkt_callback(txn, pkt); } void pkt_callback(CmpCache_Txn txn, CmpCache_Pkt pkt) { txn.n_pkts_remaining--; if (pkt.done) txn.cb(); foreach (CmpCache_Pkt dep in pkt.wakeup) { if (pkt.done || pkt.off_crit) dep.off_crit = true; dep.deps--; if (dep.deps == 0) send_pkt(txn, dep); } } void send_noc(int reqNode, int from, int to, int flits, Simulator.Ready cb, bool off_crit, int vc) { int cl = off_crit ? 2 : // packet class (used for split queues): 0 = ctl, 1 = data, 2 = off-crit (writebacks) (flits > 1 ? 1 : 0); CachePacket p = new CachePacket(reqNode, from, to, flits, cl, vc, cb); Simulator.network.nodes[from].queuePacket(p); } void access_mem(int requestor, ulong addr, bool write, Simulator.Ready cb) { Request req = new Request(requestor, addr, write); int node = map_addr_mem(requestor, addr); Simulator.network.nodes[node].mem.access(req, cb); } private Queue<CmpCache_Pkt> workQ = new Queue<CmpCache_Pkt>(); // avoid alloc'ing this for each call void assignVCclasses(CmpCache_Pkt root) { // basic idea: we traverse the DAG using a work-list algorithm, assigning VC classes as follows: // - any network packet node sets the VC of its successors to at least its own VC plus 1. // - any data packet gets VC at least 4. // - non-network-packet nodes carry VC numbers anyway to propagate dependence information. // - VC classes start at 0 and increment as this algo runs. workQ.Enqueue(root); while (workQ.Count > 0) { CmpCache_Pkt pkt = workQ.Dequeue(); if (pkt.flits > 1) pkt.vc_class = Math.Max(4, pkt.vc_class); int succ = pkt.send ? pkt.vc_class + 1 : pkt.vc_class; foreach (CmpCache_Pkt s in pkt.wakeup) { int old = s.vc_class; s.vc_class = Math.Max(succ, s.vc_class); if (s.vc_class > old) workQ.Enqueue(s); } } } } }
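// --------------------------------------------------------------------------------
// Illustrative sketch (not part of the simulator above): the protocol DAG is driven
// by simple reference counting -- add_dep() bumps a deps counter on the target and
// records it in the source's wakeup list, and pkt_callback() decrements deps on each
// successor, launching it once the count reaches zero; the transaction is complete
// when every packet has been delivered. The toy graph below restates that
// bookkeeping with plain recursion instead of NoC packets; all names are hypothetical.
using System;
using System.Collections.Generic;

internal sealed class DagNode
{
    public string Name;
    public int Deps;                                   // in-edges not yet satisfied
    public readonly List<DagNode> Wakeup = new List<DagNode>();
}

internal static class DagSketch
{
    private static void AddDep(DagNode from, DagNode to) { from.Wakeup.Add(to); to.Deps++; }

    private static void Deliver(DagNode n, ref int remaining)
    {
        Console.WriteLine($"delivered: {n.Name}");
        remaining--;
        foreach (DagNode dep in n.Wakeup)
            if (--dep.Deps == 0)
                Deliver(dep, ref remaining);           // the simulator would enqueue into the NoC here

    }

    private static void Main()
    {
        var req  = new DagNode { Name = "ctl req -> sh slice" };
        var xfer = new DagNode { Name = "data prv -> prv" };
        var wb   = new DagNode { Name = "writeback prv -> sh" };
        var done = new DagNode { Name = "join (DONE)" };

        AddDep(req, xfer);
        AddDep(xfer, wb);
        AddDep(xfer, done);
        AddDep(wb, done);                              // join waits on both predecessors

        int remaining = 4;
        Deliver(req, ref remaining);                   // delivering the root kicks off the chain
        Console.WriteLine(remaining == 0 ? "transaction complete" : "still pending");
    }
}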
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.InteropServices; using System.Threading; namespace System.Net.Sockets { // Note on asynchronous behavior here: // The asynchronous socket operations here generally do the following: // (1) If the operation queue is empty, try to perform the operation immediately, non-blocking. // If this completes (i.e. does not return EWOULDBLOCK), then we return the results immediately // for both success (SocketError.Success) or failure. // No callback will happen; callers are expected to handle these synchronous completions themselves. // (2) If EWOULDBLOCK is returned, or the queue is not empty, then we enqueue an operation to the // appropriate queue and return SocketError.IOPending. // Enqueuing itself may fail because the socket is closed before the operation can be enqueued; // in this case, we return SocketError.OperationAborted (which matches what Winsock would return in this case). // (3) When the queue completes the operation, it will post a work item to the threadpool // to call the callback with results (either success or failure). // Synchronous operations generally do the same, except that instead of returning IOPending, // they block on an event handle until the operation is processed by the queue. // Also, synchronous methods return SocketError.Interrupted when enqueuing fails // (which again matches Winsock behavior). internal sealed class SocketAsyncContext { private abstract class AsyncOperation { private enum State { Waiting = 0, Running = 1, Complete = 2, Cancelled = 3 } private int _state; // Actually AsyncOperation.State. #if DEBUG private int _callbackQueued; // When non-zero, the callback has been queued. #endif public AsyncOperation Next; protected object CallbackOrEvent; public SocketError ErrorCode; public byte[] SocketAddress; public int SocketAddressLen; public ManualResetEventSlim Event { private get { return (ManualResetEventSlim)CallbackOrEvent; } set { CallbackOrEvent = value; } } public AsyncOperation() { _state = (int)State.Waiting; Next = this; } public bool TryComplete(SocketAsyncContext context) { Debug.Assert(_state == (int)State.Waiting, $"Unexpected _state: {_state}"); return DoTryComplete(context); } public bool TryCompleteAsync(SocketAsyncContext context) { return TryCompleteOrAbortAsync(context, abort: false); } public void AbortAsync() { bool completed = TryCompleteOrAbortAsync(null, abort: true); Debug.Assert(completed, $"Expected TryCompleteOrAbortAsync to return true"); } private bool TryCompleteOrAbortAsync(SocketAsyncContext context, bool abort) { Debug.Assert(context != null || abort, $"Unexpected values: context={context}, abort={abort}"); State oldState = (State)Interlocked.CompareExchange(ref _state, (int)State.Running, (int)State.Waiting); if (oldState == State.Cancelled) { // This operation has been cancelled. The canceller is responsible for // correctly updating any state that would have been handled by // AsyncOperation.Abort. 
return true; } Debug.Assert(oldState != State.Complete && oldState != State.Running, $"Unexpected oldState: {oldState}"); bool completed; if (abort) { Abort(); ErrorCode = SocketError.OperationAborted; completed = true; } else { completed = DoTryComplete(context); } if (completed) { var @event = CallbackOrEvent as ManualResetEventSlim; if (@event != null) { @event.Set(); } else { Debug.Assert(_state != (int)State.Cancelled, $"Unexpected _state: {_state}"); #if DEBUG Debug.Assert(Interlocked.CompareExchange(ref _callbackQueued, 1, 0) == 0, $"Unexpected _callbackQueued: {_callbackQueued}"); #endif ThreadPool.QueueUserWorkItem(o => ((AsyncOperation)o).InvokeCallback(), this); } Volatile.Write(ref _state, (int)State.Complete); return true; } Volatile.Write(ref _state, (int)State.Waiting); return false; } public bool Wait(int timeout) { if (Event.Wait(timeout)) { return true; } var spinWait = new SpinWait(); for (;;) { int state = Interlocked.CompareExchange(ref _state, (int)State.Cancelled, (int)State.Waiting); switch ((State)state) { case State.Running: // A completion attempt is in progress. Keep busy-waiting. spinWait.SpinOnce(); break; case State.Complete: // A completion attempt succeeded. Consider this operation as having completed within the timeout. return true; case State.Waiting: // This operation was successfully cancelled. return false; } } } protected abstract void Abort(); protected abstract bool DoTryComplete(SocketAsyncContext context); protected abstract void InvokeCallback(); } // These two abstract classes differentiate the operations that go in the // read queue vs the ones that go in the write queue. private abstract class ReadOperation : AsyncOperation { } private abstract class WriteOperation : AsyncOperation { } private sealed class SendOperation : WriteOperation { public byte[] Buffer; public int Offset; public int Count; public SocketFlags Flags; public int BytesTransferred; public IList<ArraySegment<byte>> Buffers; public int BufferIndex; protected sealed override void Abort() { } public Action<int, byte[], int, SocketFlags, SocketError> Callback { private get { return (Action<int, byte[], int, SocketFlags, SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected sealed override void InvokeCallback() { Callback(BytesTransferred, SocketAddress, SocketAddressLen, SocketFlags.None, ErrorCode); } protected override bool DoTryComplete(SocketAsyncContext context) { return SocketPal.TryCompleteSendTo(context._socket, Buffer, Buffers, ref BufferIndex, ref Offset, ref Count, Flags, SocketAddress, SocketAddressLen, ref BytesTransferred, out ErrorCode); } } private sealed class ReceiveOperation : ReadOperation { public byte[] Buffer; public int Offset; public int Count; public SocketFlags Flags; public int BytesTransferred; public SocketFlags ReceivedFlags; public IList<ArraySegment<byte>> Buffers; protected sealed override void Abort() { } public Action<int, byte[], int, SocketFlags, SocketError> Callback { private get { return (Action<int, byte[], int, SocketFlags, SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected sealed override void InvokeCallback() { Callback(BytesTransferred, SocketAddress, SocketAddressLen, ReceivedFlags, ErrorCode); } protected override bool DoTryComplete(SocketAsyncContext context) { return SocketPal.TryCompleteReceiveFrom(context._socket, Buffer, Buffers, Offset, Count, Flags, SocketAddress, ref SocketAddressLen, out BytesTransferred, out ReceivedFlags, out ErrorCode); } } private sealed class 
ReceiveMessageFromOperation : ReadOperation { public byte[] Buffer; public int Offset; public int Count; public SocketFlags Flags; public int BytesTransferred; public SocketFlags ReceivedFlags; public IList<ArraySegment<byte>> Buffers; public bool IsIPv4; public bool IsIPv6; public IPPacketInformation IPPacketInformation; protected sealed override void Abort() { } public Action<int, byte[], int, SocketFlags, IPPacketInformation, SocketError> Callback { private get { return (Action<int, byte[], int, SocketFlags, IPPacketInformation, SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected override bool DoTryComplete(SocketAsyncContext context) { return SocketPal.TryCompleteReceiveMessageFrom(context._socket, Buffer, Buffers, Offset, Count, Flags, SocketAddress, ref SocketAddressLen, IsIPv4, IsIPv6, out BytesTransferred, out ReceivedFlags, out IPPacketInformation, out ErrorCode); } protected override void InvokeCallback() { Callback(BytesTransferred, SocketAddress, SocketAddressLen, ReceivedFlags, IPPacketInformation, ErrorCode); } } private sealed class AcceptOperation : ReadOperation { public IntPtr AcceptedFileDescriptor; public Action<IntPtr, byte[], int, SocketError> Callback { private get { return (Action<IntPtr, byte[], int, SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected override void Abort() { AcceptedFileDescriptor = (IntPtr)(-1); } protected override bool DoTryComplete(SocketAsyncContext context) { bool completed = SocketPal.TryCompleteAccept(context._socket, SocketAddress, ref SocketAddressLen, out AcceptedFileDescriptor, out ErrorCode); Debug.Assert(ErrorCode == SocketError.Success || AcceptedFileDescriptor == (IntPtr)(-1), $"Unexpected values: ErrorCode={ErrorCode}, AcceptedFileDescriptor={AcceptedFileDescriptor}"); return completed; } protected override void InvokeCallback() { Callback(AcceptedFileDescriptor, SocketAddress, SocketAddressLen, ErrorCode); } } private sealed class ConnectOperation : WriteOperation { public Action<SocketError> Callback { private get { return (Action<SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected override void Abort() { } protected override bool DoTryComplete(SocketAsyncContext context) { bool result = SocketPal.TryCompleteConnect(context._socket, SocketAddressLen, out ErrorCode); context._socket.RegisterConnectResult(ErrorCode); return result; } protected override void InvokeCallback() { Callback(ErrorCode); } } private sealed class SendFileOperation : WriteOperation { public SafeFileHandle FileHandle; public long Offset; public long Count; public long BytesTransferred; protected override void Abort() { } public Action<long, SocketError> Callback { private get { return (Action<long, SocketError>)CallbackOrEvent; } set { CallbackOrEvent = value; } } protected override void InvokeCallback() { Callback(BytesTransferred, ErrorCode); } protected override bool DoTryComplete(SocketAsyncContext context) { return SocketPal.TryCompleteSendFile(context._socket, FileHandle, ref Offset, ref Count, ref BytesTransferred, out ErrorCode); } } private enum QueueState { Clear = 0, Set = 1, Stopped = 2, } private struct OperationQueue<TOperation> where TOperation : AsyncOperation { private object _queueLock; private AsyncOperation _tail; public QueueState State { get; set; } public bool IsStopped { get { return State == QueueState.Stopped; } } public bool IsEmpty { get { return _tail == null; } } public object QueueLock { get { return _queueLock; } } public void Init() { 
Debug.Assert(_queueLock == null); _queueLock = new object(); } public void Enqueue(TOperation operation) { Debug.Assert(!IsStopped, "Expected !IsStopped"); Debug.Assert(operation.Next == operation, "Expected operation.Next == operation"); if (!IsEmpty) { operation.Next = _tail.Next; _tail.Next = operation; } _tail = operation; } private bool TryDequeue(out TOperation operation) { if (_tail == null) { operation = null; return false; } AsyncOperation head = _tail.Next; if (head == _tail) { _tail = null; } else { _tail.Next = head.Next; } head.Next = null; operation = (TOperation)head; return true; } private void Requeue(TOperation operation) { // Insert at the head of the queue Debug.Assert(!IsStopped, "Expected !IsStopped"); Debug.Assert(operation.Next == null, "Operation already in queue"); if (IsEmpty) { operation.Next = operation; _tail = operation; } else { operation.Next = _tail.Next; _tail.Next = operation; } } public void Complete(SocketAsyncContext context) { lock (_queueLock) { if (IsStopped) return; State = QueueState.Set; TOperation op; while (TryDequeue(out op)) { if (!op.TryCompleteAsync(context)) { Requeue(op); return; } } } } public void StopAndAbort() { lock (_queueLock) { State = QueueState.Stopped; TOperation op; while (TryDequeue(out op)) { op.AbortAsync(); } } } } private readonly SafeCloseSocket _socket; private OperationQueue<ReadOperation> _receiveQueue; private OperationQueue<WriteOperation> _sendQueue; private SocketAsyncEngine.Token _asyncEngineToken; private Interop.Sys.SocketEvents _registeredEvents; private bool _nonBlockingSet; private readonly object _registerLock = new object(); public SocketAsyncContext(SafeCloseSocket socket) { _socket = socket; _receiveQueue.Init(); _sendQueue.Init(); } private void Register(Interop.Sys.SocketEvents events) { lock (_registerLock) { Debug.Assert((_registeredEvents & events) == Interop.Sys.SocketEvents.None, $"Unexpected values: _registeredEvents={_registeredEvents}, events={events}"); if (!_asyncEngineToken.WasAllocated) { _asyncEngineToken = new SocketAsyncEngine.Token(this); } events |= _registeredEvents; Interop.Error errorCode; if (!_asyncEngineToken.TryRegister(_socket, _registeredEvents, events, out errorCode)) { if (errorCode == Interop.Error.ENOMEM || errorCode == Interop.Error.ENOSPC) { throw new OutOfMemoryException(); } else { throw new InternalException(); } } _registeredEvents = events; } } public void Close() { // Drain queues _sendQueue.StopAndAbort(); _receiveQueue.StopAndAbort(); lock (_registerLock) { // Freeing the token will prevent any future event delivery. This socket will be unregistered // from the event port automatically by the OS when it's closed. _asyncEngineToken.Free(); } } public void SetNonBlocking() { // // Our sockets may start as blocking, and later transition to non-blocking, either because the user // explicitly requested non-blocking mode, or because we need non-blocking mode to support async // operations. We never transition back to blocking mode, to avoid problems synchronizing that // transition with the async infrastructure. // // Note that there's no synchronization here, so we may set the non-blocking option multiple times // in a race. This should be fine. 
// if (!_nonBlockingSet) { if (Interop.Sys.Fcntl.SetIsNonBlocking(_socket, 1) != 0) { throw new SocketException((int)SocketPal.GetSocketErrorForErrorCode(Interop.Sys.GetLastError())); } _nonBlockingSet = true; } } private bool TryBeginOperation<TOperation>(ref OperationQueue<TOperation> queue, TOperation operation, Interop.Sys.SocketEvents events, bool maintainOrder, out bool isStopped) where TOperation : AsyncOperation { // Exactly one of the two queue locks must be held by the caller Debug.Assert(Monitor.IsEntered(_sendQueue.QueueLock) ^ Monitor.IsEntered(_receiveQueue.QueueLock)); switch (queue.State) { case QueueState.Stopped: isStopped = true; return false; case QueueState.Clear: break; case QueueState.Set: if (queue.IsEmpty || !maintainOrder) { isStopped = false; queue.State = QueueState.Clear; return false; } break; } if ((_registeredEvents & events) == Interop.Sys.SocketEvents.None) { Register(events); } queue.Enqueue(operation); isStopped = false; return true; } public SocketError Accept(byte[] socketAddress, ref int socketAddressLen, int timeout, out IntPtr acceptedFd) { Debug.Assert(socketAddress != null, "Expected non-null socketAddress"); Debug.Assert(socketAddressLen > 0, $"Unexpected socketAddressLen: {socketAddressLen}"); Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); SocketError errorCode; if (SocketPal.TryCompleteAccept(_socket, socketAddress, ref socketAddressLen, out acceptedFd, out errorCode)) { Debug.Assert(errorCode == SocketError.Success || acceptedFd == (IntPtr)(-1), $"Unexpected values: errorCode={errorCode}, acceptedFd={acceptedFd}"); return errorCode; } using (var @event = new ManualResetEventSlim(false, 0)) { var operation = new AcceptOperation { Event = @event, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen }; bool isStopped; while (true) { lock (_receiveQueue.QueueLock) { if (TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: false, isStopped: out isStopped)) { break; } } if (isStopped) { acceptedFd = (IntPtr)(-1); return SocketError.Interrupted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; acceptedFd = operation.AcceptedFileDescriptor; return operation.ErrorCode; } } if (!operation.Wait(timeout)) { acceptedFd = (IntPtr)(-1); return SocketError.TimedOut; } socketAddressLen = operation.SocketAddressLen; acceptedFd = operation.AcceptedFileDescriptor; return operation.ErrorCode; } } public SocketError AcceptAsync(byte[] socketAddress, ref int socketAddressLen, out IntPtr acceptedFd, Action<IntPtr, byte[], int, SocketError> callback) { Debug.Assert(socketAddress != null, "Expected non-null socketAddress"); Debug.Assert(socketAddressLen > 0, $"Unexpected socketAddressLen: {socketAddressLen}"); Debug.Assert(callback != null, "Expected non-null callback"); SetNonBlocking(); SocketError errorCode; if (SocketPal.TryCompleteAccept(_socket, socketAddress, ref socketAddressLen, out acceptedFd, out errorCode)) { Debug.Assert(errorCode == SocketError.Success || acceptedFd == (IntPtr)(-1), $"Unexpected values: errorCode={errorCode}, acceptedFd={acceptedFd}"); return errorCode; } var operation = new AcceptOperation { Callback = callback, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen }; bool isStopped; while (true) { lock (_receiveQueue.QueueLock) { if (TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: false, isStopped: out isStopped)) { break; } } if (isStopped) { return 
SocketError.OperationAborted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; acceptedFd = operation.AcceptedFileDescriptor; return operation.ErrorCode; } } return SocketError.IOPending; } public SocketError Connect(byte[] socketAddress, int socketAddressLen, int timeout) { Debug.Assert(socketAddress != null, "Expected non-null socketAddress"); Debug.Assert(socketAddressLen > 0, $"Unexpected socketAddressLen: {socketAddressLen}"); Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); SocketError errorCode; if (SocketPal.TryStartConnect(_socket, socketAddress, socketAddressLen, out errorCode)) { _socket.RegisterConnectResult(errorCode); return errorCode; } using (var @event = new ManualResetEventSlim(false, 0)) { var operation = new ConnectOperation { Event = @event, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen }; bool isStopped; while (true) { lock (_sendQueue.QueueLock) { if (TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: false, isStopped: out isStopped)) { break; } } if (isStopped) { return SocketError.Interrupted; } if (operation.TryComplete(this)) { return operation.ErrorCode; } } return operation.Wait(timeout) ? operation.ErrorCode : SocketError.TimedOut; } } public SocketError ConnectAsync(byte[] socketAddress, int socketAddressLen, Action<SocketError> callback) { Debug.Assert(socketAddress != null, "Expected non-null socketAddress"); Debug.Assert(socketAddressLen > 0, $"Unexpected socketAddressLen: {socketAddressLen}"); Debug.Assert(callback != null, "Expected non-null callback"); SetNonBlocking(); SocketError errorCode; if (SocketPal.TryStartConnect(_socket, socketAddress, socketAddressLen, out errorCode)) { _socket.RegisterConnectResult(errorCode); return errorCode; } var operation = new ConnectOperation { Callback = callback, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen }; bool isStopped; while (true) { lock (_sendQueue.QueueLock) { if (TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: false, isStopped: out isStopped)) { break; } } if (isStopped) { return SocketError.OperationAborted; } if (operation.TryComplete(this)) { return operation.ErrorCode; } } return SocketError.IOPending; } public SocketError Receive(byte[] buffer, int offset, int count, ref SocketFlags flags, int timeout, out int bytesReceived) { int socketAddressLen = 0; return ReceiveFrom(buffer, offset, count, ref flags, null, ref socketAddressLen, timeout, out bytesReceived); } public SocketError ReceiveAsync(byte[] buffer, int offset, int count, SocketFlags flags, out int bytesReceived, out SocketFlags receivedFlags, Action<int, byte[], int, SocketFlags, SocketError> callback) { int socketAddressLen = 0; return ReceiveFromAsync(buffer, offset, count, flags, null, ref socketAddressLen, out bytesReceived, out receivedFlags, callback); } public SocketError ReceiveFrom(byte[] buffer, int offset, int count, ref SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, int timeout, out int bytesReceived) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { ReceiveOperation operation; lock (_receiveQueue.QueueLock) { SocketFlags receivedFlags; SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveFrom(_socket, buffer, offset, count, flags, socketAddress, ref socketAddressLen, out bytesReceived, out receivedFlags, out errorCode)) { 
flags = receivedFlags; return errorCode; } @event = new ManualResetEventSlim(false, 0); operation = new ReceiveOperation { Event = @event, Buffer = buffer, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return signaled ? operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError ReceiveFromAsync(byte[] buffer, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesReceived, out SocketFlags receivedFlags, Action<int, byte[], int, SocketFlags, SocketError> callback) { SetNonBlocking(); lock (_receiveQueue.QueueLock) { SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveFrom(_socket, buffer, offset, count, flags, socketAddress, ref socketAddressLen, out bytesReceived, out receivedFlags, out errorCode)) { // Synchronous success or failure return errorCode; } var operation = new ReceiveOperation { Callback = callback, Buffer = buffer, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.OperationAborted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; receivedFlags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.IOPending; } } public SocketError Receive(IList<ArraySegment<byte>> buffers, ref SocketFlags flags, int timeout, out int bytesReceived) { return ReceiveFrom(buffers, ref flags, null, 0, timeout, out bytesReceived); } public SocketError ReceiveAsync(IList<ArraySegment<byte>> buffers, SocketFlags flags, out int bytesReceived, out SocketFlags receivedFlags, Action<int, byte[], int, SocketFlags, SocketError> callback) { int socketAddressLen = 0; return ReceiveFromAsync(buffers, flags, null, ref socketAddressLen, out bytesReceived, out receivedFlags, callback); } public SocketError ReceiveFrom(IList<ArraySegment<byte>> buffers, ref SocketFlags flags, byte[] socketAddress, int socketAddressLen, int timeout, out int bytesReceived) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { ReceiveOperation operation; lock (_receiveQueue.QueueLock) { SocketFlags receivedFlags; SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveFrom(_socket, buffers, flags, socketAddress, ref socketAddressLen, out bytesReceived, out receivedFlags, out errorCode)) { flags = receivedFlags; return errorCode; } @event = new 
ManualResetEventSlim(false, 0); operation = new ReceiveOperation { Event = @event, Buffers = buffers, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return signaled ? operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError ReceiveFromAsync(IList<ArraySegment<byte>> buffers, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesReceived, out SocketFlags receivedFlags, Action<int, byte[], int, SocketFlags, SocketError> callback) { SetNonBlocking(); ReceiveOperation operation; lock (_receiveQueue.QueueLock) { SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveFrom(_socket, buffers, flags, socketAddress, ref socketAddressLen, out bytesReceived, out receivedFlags, out errorCode)) { // Synchronous success or failure return errorCode; } operation = new ReceiveOperation { Callback = callback, Buffers = buffers, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.OperationAborted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; receivedFlags = operation.ReceivedFlags; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.IOPending; } } public SocketError ReceiveMessageFrom( byte[] buffer, IList<ArraySegment<byte>> buffers, int offset, int count, ref SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, bool isIPv4, bool isIPv6, int timeout, out IPPacketInformation ipPacketInformation, out int bytesReceived) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { ReceiveMessageFromOperation operation; lock (_receiveQueue.QueueLock) { SocketFlags receivedFlags; SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveMessageFrom(_socket, buffer, buffers, offset, count, flags, socketAddress, ref socketAddressLen, isIPv4, isIPv6, out bytesReceived, out receivedFlags, out ipPacketInformation, out errorCode)) { flags = receivedFlags; return errorCode; } @event = new ManualResetEventSlim(false, 0); operation = new ReceiveMessageFromOperation { Event = @event, Buffer = buffer, Buffers = buffers, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, IsIPv4 = isIPv4, IsIPv6 = isIPv6, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { socketAddressLen = 
operation.SocketAddressLen; flags = operation.ReceivedFlags; ipPacketInformation = operation.IPPacketInformation; bytesReceived = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; ipPacketInformation = operation.IPPacketInformation; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); socketAddressLen = operation.SocketAddressLen; flags = operation.ReceivedFlags; ipPacketInformation = operation.IPPacketInformation; bytesReceived = operation.BytesTransferred; return signaled ? operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError ReceiveMessageFromAsync(byte[] buffer, IList<ArraySegment<byte>> buffers, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, bool isIPv4, bool isIPv6, out int bytesReceived, out SocketFlags receivedFlags, out IPPacketInformation ipPacketInformation, Action<int, byte[], int, SocketFlags, IPPacketInformation, SocketError> callback) { SetNonBlocking(); lock (_receiveQueue.QueueLock) { SocketError errorCode; if (_receiveQueue.IsEmpty && SocketPal.TryCompleteReceiveMessageFrom(_socket, buffer, buffers, offset, count, flags, socketAddress, ref socketAddressLen, isIPv4, isIPv6, out bytesReceived, out receivedFlags, out ipPacketInformation, out errorCode)) { // Synchronous success or failure return errorCode; } var operation = new ReceiveMessageFromOperation { Callback = callback, Buffer = buffer, Buffers = buffers, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, IsIPv4 = isIPv4, IsIPv6 = isIPv6, }; bool isStopped; while (!TryBeginOperation(ref _receiveQueue, operation, Interop.Sys.SocketEvents.Read, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { ipPacketInformation = default(IPPacketInformation); bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.OperationAborted; } if (operation.TryComplete(this)) { socketAddressLen = operation.SocketAddressLen; receivedFlags = operation.ReceivedFlags; ipPacketInformation = operation.IPPacketInformation; bytesReceived = operation.BytesTransferred; return operation.ErrorCode; } } ipPacketInformation = default(IPPacketInformation); bytesReceived = 0; receivedFlags = SocketFlags.None; return SocketError.IOPending; } } public SocketError Send(byte[] buffer, int offset, int count, SocketFlags flags, int timeout, out int bytesSent) { return SendTo(buffer, offset, count, flags, null, 0, timeout, out bytesSent); } public SocketError SendAsync(byte[] buffer, int offset, int count, SocketFlags flags, out int bytesSent, Action<int, byte[], int, SocketFlags, SocketError> callback) { int socketAddressLen = 0; return SendToAsync(buffer, offset, count, flags, null, ref socketAddressLen, out bytesSent, callback); } public SocketError SendTo(byte[] buffer, int offset, int count, SocketFlags flags, byte[] socketAddress, int socketAddressLen, int timeout, out int bytesSent) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { SendOperation operation; lock (_sendQueue.QueueLock) { bytesSent = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendTo(_socket, buffer, ref offset, ref count, flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode)) { return 
errorCode; } @event = new ManualResetEventSlim(false, 0); operation = new SendOperation { Event = @event, Buffer = buffer, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { bytesSent = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { bytesSent = operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); bytesSent = operation.BytesTransferred; return signaled ? operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError SendToAsync(byte[] buffer, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesSent, Action<int, byte[], int, SocketFlags, SocketError> callback) { SetNonBlocking(); lock (_sendQueue.QueueLock) { bytesSent = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendTo(_socket, buffer, ref offset, ref count, flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode)) { // Synchronous success or failure return errorCode; } var operation = new SendOperation { Callback = callback, Buffer = buffer, Offset = offset, Count = count, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { return SocketError.OperationAborted; } if (operation.TryComplete(this)) { bytesSent = operation.BytesTransferred; return operation.ErrorCode; } } return SocketError.IOPending; } } public SocketError Send(IList<ArraySegment<byte>> buffers, SocketFlags flags, int timeout, out int bytesSent) { return SendTo(buffers, flags, null, 0, timeout, out bytesSent); } public SocketError SendAsync(IList<ArraySegment<byte>> buffers, SocketFlags flags, out int bytesSent, Action<int, byte[], int, SocketFlags, SocketError> callback) { int socketAddressLen = 0; return SendToAsync(buffers, flags, null, ref socketAddressLen, out bytesSent, callback); } public SocketError SendTo(IList<ArraySegment<byte>> buffers, SocketFlags flags, byte[] socketAddress, int socketAddressLen, int timeout, out int bytesSent) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { SendOperation operation; lock (_sendQueue.QueueLock) { bytesSent = 0; int bufferIndex = 0; int offset = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendTo(_socket, buffers, ref bufferIndex, ref offset, flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode)) { return errorCode; } @event = new ManualResetEventSlim(false, 0); operation = new SendOperation { Event = @event, Buffers = buffers, BufferIndex = bufferIndex, Offset = offset, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { bytesSent = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { bytesSent = 
operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); bytesSent = operation.BytesTransferred; return signaled ? operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError SendToAsync(IList<ArraySegment<byte>> buffers, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesSent, Action<int, byte[], int, SocketFlags, SocketError> callback) { SetNonBlocking(); lock (_sendQueue.QueueLock) { bytesSent = 0; int bufferIndex = 0; int offset = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendTo(_socket, buffers, ref bufferIndex, ref offset, flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode)) { // Synchronous success or failure return errorCode; } var operation = new SendOperation { Callback = callback, Buffers = buffers, BufferIndex = bufferIndex, Offset = offset, Flags = flags, SocketAddress = socketAddress, SocketAddressLen = socketAddressLen, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { return SocketError.OperationAborted; } if (operation.TryComplete(this)) { bytesSent = operation.BytesTransferred; return operation.ErrorCode; } } return SocketError.IOPending; } } public SocketError SendFile(SafeFileHandle fileHandle, long offset, long count, int timeout, out long bytesSent) { Debug.Assert(timeout == -1 || timeout > 0, $"Unexpected timeout: {timeout}"); ManualResetEventSlim @event = null; try { SendFileOperation operation; lock (_sendQueue.QueueLock) { bytesSent = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendFile(_socket, fileHandle, ref offset, ref count, ref bytesSent, out errorCode)) { return errorCode; } @event = new ManualResetEventSlim(false, 0); operation = new SendFileOperation { Event = @event, FileHandle = fileHandle, Offset = offset, Count = count, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { bytesSent = operation.BytesTransferred; return SocketError.Interrupted; } if (operation.TryComplete(this)) { bytesSent = operation.BytesTransferred; return operation.ErrorCode; } } } bool signaled = operation.Wait(timeout); bytesSent = operation.BytesTransferred; return signaled ? 
operation.ErrorCode : SocketError.TimedOut; } finally { if (@event != null) @event.Dispose(); } } public SocketError SendFileAsync(SafeFileHandle fileHandle, long offset, long count, out long bytesSent, Action<long, SocketError> callback) { SetNonBlocking(); lock (_sendQueue.QueueLock) { bytesSent = 0; SocketError errorCode; if (_sendQueue.IsEmpty && SocketPal.TryCompleteSendFile(_socket, fileHandle, ref offset, ref count, ref bytesSent, out errorCode)) { // Synchronous success or failure return errorCode; } var operation = new SendFileOperation { Callback = callback, FileHandle = fileHandle, Offset = offset, Count = count, BytesTransferred = bytesSent }; bool isStopped; while (!TryBeginOperation(ref _sendQueue, operation, Interop.Sys.SocketEvents.Write, maintainOrder: true, isStopped: out isStopped)) { if (isStopped) { return SocketError.OperationAborted; } if (operation.TryComplete(this)) { bytesSent = operation.BytesTransferred; return operation.ErrorCode; } } return SocketError.IOPending; } } public unsafe void HandleEvents(Interop.Sys.SocketEvents events) { if ((events & Interop.Sys.SocketEvents.Error) != 0) { // Set the Read and Write flags as well; the processing for these events // will pick up the error. events |= Interop.Sys.SocketEvents.Read | Interop.Sys.SocketEvents.Write; } if ((events & Interop.Sys.SocketEvents.Read) != 0) { _receiveQueue.Complete(this); } if ((events & Interop.Sys.SocketEvents.Write) != 0) { _sendQueue.Complete(this); } } } }
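// --- Illustrative sketch (not part of the class above) -------------------------------------
// The comment at the top of SocketAsyncContext describes the completion contract for the
// *Async methods: any return value other than SocketError.IOPending is a synchronous
// completion and the callback will not fire, while SocketError.IOPending means the operation
// was queued and the callback reports the final result later. The helper below sketches a
// caller that funnels both paths into a single handler. It assumes it is compiled into the
// same assembly (SocketAsyncContext is internal); the method and delegate names are made up
// for the example.
using System;
using System.Net.Sockets;

internal static class ReceiveSketch
{
    internal static void ReceiveIntoBuffer(SocketAsyncContext context, byte[] buffer,
        Action<int, SocketError> onDone)
    {
        int bytesReceived;
        SocketFlags receivedFlags;

        SocketError error = context.ReceiveAsync(
            buffer, 0, buffer.Length, SocketFlags.None,
            out bytesReceived, out receivedFlags,
            // Callback path: only invoked when the call returned IOPending.
            (transferred, address, addressLen, flags, callbackError) =>
                onDone(transferred, callbackError));

        if (error != SocketError.IOPending)
        {
            // Synchronous completion (success or failure): no callback will run,
            // so report the result directly from the out parameters.
            onDone(bytesReceived, error);
        }
    }
}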
using System.Collections.Generic; using System.Text; namespace YAF.Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AttributeSource = YAF.Lucene.Net.Util.AttributeSource; using IBits = YAF.Lucene.Net.Util.IBits; using BytesRef = YAF.Lucene.Net.Util.BytesRef; /// <summary> /// A <see cref="FilterAtomicReader"/> contains another <see cref="AtomicReader"/>, which it /// uses as its basic source of data, possibly transforming the data along the /// way or providing additional functionality. The class /// <see cref="FilterAtomicReader"/> itself simply implements all abstract methods /// of <see cref="IndexReader"/> with versions that pass all requests to the /// contained index reader. Subclasses of <see cref="FilterAtomicReader"/> may /// further override some of these methods and may also provide additional /// methods and fields. /// <para/><b>NOTE</b>: If you override <see cref="LiveDocs"/>, you will likely need /// to override <see cref="NumDocs"/> as well and vice-versa. /// <para/><b>NOTE</b>: If this <see cref="FilterAtomicReader"/> does not change the /// content the contained reader, you could consider overriding /// <see cref="IndexReader.CoreCacheKey"/> so that <see cref="Search.IFieldCache"/> and /// <see cref="Search.CachingWrapperFilter"/> share the same entries for this atomic reader /// and the wrapped one. <see cref="IndexReader.CombinedCoreAndDeletesKey"/> could be /// overridden as well if the <see cref="LiveDocs"/> are not changed /// either. /// </summary> public class FilterAtomicReader : AtomicReader { /// <summary> /// Get the wrapped instance by <paramref name="reader"/> as long as this reader is /// an intance of <see cref="FilterAtomicReader"/>. /// </summary> public static AtomicReader Unwrap(AtomicReader reader) { while (reader is FilterAtomicReader) { reader = ((FilterAtomicReader)reader).m_input; } return reader; } /// <summary> /// Base class for filtering <see cref="Index.Fields"/> /// implementations. /// </summary> public class FilterFields : Fields { /// <summary> /// The underlying <see cref="Index.Fields"/> instance. </summary> protected readonly Fields m_input; /// <summary> /// Creates a new <see cref="FilterFields"/>. </summary> /// <param name="input"> the underlying <see cref="Index.Fields"/> instance. </param> public FilterFields(Fields input) { this.m_input = input; } public override IEnumerator<string> GetEnumerator() { return m_input.GetEnumerator(); } public override Terms GetTerms(string field) { return m_input.GetTerms(field); } public override int Count { get { return m_input.Count; } } } /// <summary> /// Base class for filtering <see cref="Terms"/> implementations. 
/// <para/><b>NOTE</b>: If the order of terms and documents is not changed, and if /// these terms are going to be intersected with automata, you could consider /// overriding <see cref="Terms.Intersect"/> for better performance. /// </summary> public class FilterTerms : Terms { /// <summary> /// The underlying <see cref="Terms"/> instance. </summary> protected readonly Terms m_input; /// <summary> /// Creates a new <see cref="FilterTerms"/> </summary> /// <param name="input"> the underlying <see cref="Terms"/> instance. </param> public FilterTerms(Terms input) { this.m_input = input; } public override TermsEnum GetIterator(TermsEnum reuse) { return m_input.GetIterator(reuse); } public override IComparer<BytesRef> Comparer { get { return m_input.Comparer; } } public override long Count { get { return m_input.Count; } } public override long SumTotalTermFreq { get { return m_input.SumTotalTermFreq; } } public override long SumDocFreq { get { return m_input.SumDocFreq; } } public override int DocCount { get { return m_input.DocCount; } } public override bool HasFreqs { get { return m_input.HasFreqs; } } public override bool HasOffsets { get { return m_input.HasOffsets; } } public override bool HasPositions { get { return m_input.HasPositions; } } public override bool HasPayloads { get { return m_input.HasPayloads; } } } /// <summary> /// Base class for filtering <see cref="TermsEnum"/> implementations. </summary> public class FilterTermsEnum : TermsEnum { /// <summary> /// The underlying <see cref="TermsEnum"/> instance. </summary> protected internal readonly TermsEnum m_input; /// <summary> /// Creates a new <see cref="FilterTermsEnum"/> </summary> /// <param name="input"> the underlying <see cref="TermsEnum"/> instance. </param> public FilterTermsEnum(TermsEnum input) { this.m_input = input; } public override AttributeSource Attributes { get { return m_input.Attributes; } } public override SeekStatus SeekCeil(BytesRef text) { return m_input.SeekCeil(text); } public override void SeekExact(long ord) { m_input.SeekExact(ord); } public override BytesRef Next() { return m_input.Next(); } public override BytesRef Term { get { return m_input.Term; } } public override long Ord { get { return m_input.Ord; } } public override int DocFreq { get { return m_input.DocFreq; } } public override long TotalTermFreq { get { return m_input.TotalTermFreq; } } public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) { return m_input.Docs(liveDocs, reuse, flags); } public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags) { return m_input.DocsAndPositions(liveDocs, reuse, flags); } public override IComparer<BytesRef> Comparer { get { return m_input.Comparer; } } } /// <summary> /// Base class for filtering <see cref="DocsEnum"/> implementations. </summary> public class FilterDocsEnum : DocsEnum { /// <summary> /// The underlying <see cref="DocsEnum"/> instance. /// </summary> protected internal DocsEnum m_input; /// <summary> /// Create a new <see cref="FilterDocsEnum"/> </summary> /// <param name="input"> the underlying <see cref="DocsEnum"/> instance. 
</param> public FilterDocsEnum(DocsEnum input) { this.m_input = input; } public override AttributeSource Attributes { get { return m_input.Attributes; } } public override int DocID { get { return m_input.DocID; } } public override int Freq { get { return m_input.Freq; } } public override int NextDoc() { return m_input.NextDoc(); } public override int Advance(int target) { return m_input.Advance(target); } public override long GetCost() { return m_input.GetCost(); } } /// <summary> /// Base class for filtering <see cref="DocsAndPositionsEnum"/> implementations. </summary> public class FilterDocsAndPositionsEnum : DocsAndPositionsEnum { /// <summary> /// The underlying <see cref="DocsAndPositionsEnum"/> instance. </summary> protected internal readonly DocsAndPositionsEnum m_input; /// <summary> /// Create a new <see cref="FilterDocsAndPositionsEnum"/> </summary> /// <param name="input"> the underlying <see cref="DocsAndPositionsEnum"/> instance. </param> public FilterDocsAndPositionsEnum(DocsAndPositionsEnum input) { this.m_input = input; } public override AttributeSource Attributes { get { return m_input.Attributes; } } public override int DocID { get { return m_input.DocID; } } public override int Freq { get { return m_input.Freq; } } public override int NextDoc() { return m_input.NextDoc(); } public override int Advance(int target) { return m_input.Advance(target); } public override int NextPosition() { return m_input.NextPosition(); } public override int StartOffset { get { return m_input.StartOffset; } } public override int EndOffset { get { return m_input.EndOffset; } } public override BytesRef GetPayload() { return m_input.GetPayload(); } public override long GetCost() { return m_input.GetCost(); } } /// <summary> /// The underlying <see cref="AtomicReader"/>. </summary> protected readonly AtomicReader m_input; /// <summary> /// Construct a <see cref="FilterAtomicReader"/> based on the specified base reader. /// <para/> /// Note that base reader is closed if this <see cref="FilterAtomicReader"/> is closed. /// </summary> /// <param name="input"> specified base reader. 
</param> public FilterAtomicReader(AtomicReader input) : base() { this.m_input = input; input.RegisterParentReader(this); } public override IBits LiveDocs { get { EnsureOpen(); return m_input.LiveDocs; } } public override FieldInfos FieldInfos { get { return m_input.FieldInfos; } } public override Fields GetTermVectors(int docID) { EnsureOpen(); return m_input.GetTermVectors(docID); } public override int NumDocs { get { // Don't call ensureOpen() here (it could affect performance) return m_input.NumDocs; } } public override int MaxDoc { get { // Don't call ensureOpen() here (it could affect performance) return m_input.MaxDoc; } } public override void Document(int docID, StoredFieldVisitor visitor) { EnsureOpen(); m_input.Document(docID, visitor); } protected internal override void DoClose() { m_input.Dispose(); } public override Fields Fields { get { EnsureOpen(); return m_input.Fields; } } public override string ToString() { StringBuilder buffer = new StringBuilder("FilterAtomicReader("); buffer.Append(m_input); buffer.Append(')'); return buffer.ToString(); } public override NumericDocValues GetNumericDocValues(string field) { EnsureOpen(); return m_input.GetNumericDocValues(field); } public override BinaryDocValues GetBinaryDocValues(string field) { EnsureOpen(); return m_input.GetBinaryDocValues(field); } public override SortedDocValues GetSortedDocValues(string field) { EnsureOpen(); return m_input.GetSortedDocValues(field); } public override SortedSetDocValues GetSortedSetDocValues(string field) { EnsureOpen(); return m_input.GetSortedSetDocValues(field); } public override NumericDocValues GetNormValues(string field) { EnsureOpen(); return m_input.GetNormValues(field); } public override IBits GetDocsWithField(string field) { EnsureOpen(); return m_input.GetDocsWithField(field); } public override void CheckIntegrity() { EnsureOpen(); m_input.CheckIntegrity(); } } }
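// --- Illustrative sketch (not part of the Lucene.Net source above) -------------------------
// FilterAtomicReader exists so a subclass can intercept a handful of members while the base
// class keeps forwarding everything else to the wrapped reader. The minimal subclass below
// is an example under assumptions (the class name and counter are invented): it counts
// stored-document loads by overriding Document() and otherwise relies entirely on the base
// class delegation to the wrapped AtomicReader.
using System.Threading;
using YAF.Lucene.Net.Index;

public class CountingAtomicReader : FilterAtomicReader
{
    private int _documentLoads;

    public CountingAtomicReader(AtomicReader input)
        : base(input)   // the base class forwards all other members to 'input'
    {
    }

    /// <summary>Number of times a stored document has been loaded through this reader.</summary>
    public int DocumentLoads
    {
        get { return Volatile.Read(ref _documentLoads); }
    }

    public override void Document(int docID, StoredFieldVisitor visitor)
    {
        Interlocked.Increment(ref _documentLoads);
        base.Document(docID, visitor);   // delegate the actual work to the wrapped reader
    }
}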
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Reflection; using Xunit; namespace System.Linq.Expressions.Tests { public static class BlockTests { #region Test methods [Fact] // [Issue(4020, "https://github.com/dotnet/corefx/issues/4020")] public static void CheckBlockClosureVariableInitializationTest() { foreach (var kv in BlockClosureVariableInitialization()) { VerifyBlockClosureVariableInitialization(kv.Key, kv.Value); } } private static IEnumerable<KeyValuePair<Expression, object>> BlockClosureVariableInitialization() { { var p = Expression.Parameter(typeof(int)); var q = Expression.Parameter(typeof(Func<int>)); var l = Expression.Lambda<Func<int>>(p); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(int)); } { var p = Expression.Parameter(typeof(int)); var q = Expression.Parameter(typeof(Action<int>)); var x = Expression.Parameter(typeof(int)); var l = Expression.Lambda<Action<int>>(Expression.Assign(p, x), x); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(int)); } { var p = Expression.Parameter(typeof(TimeSpan)); var q = Expression.Parameter(typeof(Func<TimeSpan>)); var l = Expression.Lambda<Func<TimeSpan>>(p); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(TimeSpan)); } { var p = Expression.Parameter(typeof(TimeSpan)); var q = Expression.Parameter(typeof(Action<TimeSpan>)); var x = Expression.Parameter(typeof(TimeSpan)); var l = Expression.Lambda<Action<TimeSpan>>(Expression.Assign(p, x), x); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(TimeSpan)); } { var p = Expression.Parameter(typeof(string)); var q = Expression.Parameter(typeof(Func<string>)); var l = Expression.Lambda<Func<string>>(p); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(string)); } { var p = Expression.Parameter(typeof(string)); var q = Expression.Parameter(typeof(Action<string>)); var x = Expression.Parameter(typeof(string)); var l = Expression.Lambda<Action<string>>(Expression.Assign(p, x), x); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(string)); } { var p = Expression.Parameter(typeof(int?)); var q = Expression.Parameter(typeof(Func<int?>)); var l = Expression.Lambda<Func<int?>>(p); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(int?)); } { var p = Expression.Parameter(typeof(int?)); var q = Expression.Parameter(typeof(Action<int?>)); var x = Expression.Parameter(typeof(int?)); var l = Expression.Lambda<Action<int?>>(Expression.Assign(p, x), x); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(int?)); } { var p = Expression.Parameter(typeof(TimeSpan?)); var q = Expression.Parameter(typeof(Func<TimeSpan?>)); var l = Expression.Lambda<Func<TimeSpan?>>(p); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(TimeSpan?)); } { var p = 
Expression.Parameter(typeof(TimeSpan?)); var q = Expression.Parameter(typeof(Action<TimeSpan?>)); var x = Expression.Parameter(typeof(TimeSpan?)); var l = Expression.Lambda<Action<TimeSpan?>>(Expression.Assign(p, x), x); yield return new KeyValuePair<Expression, object>(Expression.Block(new[] { p, q }, Expression.Assign(q, l), p), default(TimeSpan?)); } } #endregion #region Test verifiers private static void VerifyBlockClosureVariableInitialization(Expression e, object o) { Expression<Func<object>> f = Expression.Lambda<Func<object>>( Expression.Convert(e, typeof(object))); Func<object> c = f.Compile(); Assert.Equal(o, c()); #if FEATURE_INTERPRET Func<object> i = f.Compile(true); Assert.Equal(o, i()); #endif } #endregion private class ParameterChangingVisitor : ExpressionVisitor { protected override Expression VisitParameter(ParameterExpression node) { return Expression.Parameter(node.IsByRef ? node.Type.MakeByRefType() : node.Type, node.Name); } } [Fact] public static void VisitChangingOnlyParmeters() { var block = Expression.Block( new[] { Expression.Parameter(typeof(int)), Expression.Parameter(typeof(string)) }, Expression.Empty() ); Assert.NotSame(block, new ParameterChangingVisitor().Visit(block)); } [Fact] public static void VisitChangingOnlyParmetersMultiStatementBody() { var block = Expression.Block( new[] { Expression.Parameter(typeof(int)), Expression.Parameter(typeof(string)) }, Expression.Empty(), Expression.Empty() ); Assert.NotSame(block, new ParameterChangingVisitor().Visit(block)); } [Fact] public static void VisitChangingOnlyParmetersTyped() { var block = Expression.Block( typeof(object), new[] { Expression.Parameter(typeof(int)), Expression.Parameter(typeof(string)) }, Expression.Constant("") ); Assert.NotSame(block, new ParameterChangingVisitor().Visit(block)); } [Fact] public static void EmptyBlockCompiled() { var block = Expression.Block(); Assert.Equal(typeof(void), block.Type); Action nop = Expression.Lambda<Action>(block).Compile(false); nop(); } [Fact] public static void EmptyBlockIntepreted() { var block = Expression.Block(); Assert.Equal(typeof(void), block.Type); Action nop = Expression.Lambda<Action>(block).Compile(true); nop(); } [Fact] public static void EmptyBlockExplicitTypeCompiled() { var block = Expression.Block(typeof(void)); Assert.Equal(typeof(void), block.Type); Action nop = Expression.Lambda<Action>(block).Compile(false); nop(); } [Fact] public static void EmptyBlockExplicitTypeInterpreted() { var block = Expression.Block(typeof(void)); Assert.Equal(typeof(void), block.Type); Action nop = Expression.Lambda<Action>(block).Compile(true); nop(); } [Fact] public static void EmptyBlockWrongExplicitType() { Assert.Throws<ArgumentException>(() => Expression.Block(typeof(int))); } [Fact] public static void EmptyScopeCompiled() { var scope = Expression.Block(new[] { Expression.Parameter(typeof(int), "x") }, new Expression[0]); Assert.Equal(typeof(void), scope.Type); Action nop = Expression.Lambda<Action>(scope).Compile(false); nop(); } [Fact] public static void EmptyScopeIntepreted() { var scope = Expression.Block(new[] { Expression.Parameter(typeof(int), "x") }, new Expression[0]); Assert.Equal(typeof(void), scope.Type); Action nop = Expression.Lambda<Action>(scope).Compile(true); nop(); } [Fact] public static void EmptyScopeExplicitTypeCompiled() { var scope = Expression.Block(typeof(void), new[] { Expression.Parameter(typeof(int), "x") }, new Expression[0]); Assert.Equal(typeof(void), scope.Type); Action nop = 
Expression.Lambda<Action>(scope).Compile(false); nop(); } [Fact] public static void EmptyScopeExplicitTypeInterpreted() { var scope = Expression.Block(typeof(void), new[] { Expression.Parameter(typeof(int), "x") }, new Expression[0]); Assert.Equal(typeof(void), scope.Type); Action nop = Expression.Lambda<Action>(scope).Compile(true); nop(); } [Fact] public static void EmptyScopeExplicitWrongType() { Assert.Throws<ArgumentException>(() => Expression.Block( typeof(int), new[] { Expression.Parameter(typeof(int), "x") }, new Expression[0])); } } }
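// --- Illustrative sketch (not part of the test class above) --------------------------------
// The tests above verify that variables declared by Expression.Block come back as their
// type's default value even when a lambda inside the block captures them and forces them
// into a closure. The small console-style example below demonstrates the same behavior
// directly with the public expression-tree API; the class name is invented for the example.
using System;
using System.Linq.Expressions;

public static class BlockDefaultValueSketch
{
    public static void Main()
    {
        // block(int p, Func<int> q) { q = () => p; return p; }
        ParameterExpression p = Expression.Parameter(typeof(int), "p");
        ParameterExpression q = Expression.Parameter(typeof(Func<int>), "q");

        BlockExpression body = Expression.Block(
            new[] { p, q },
            Expression.Assign(q, Expression.Lambda<Func<int>>(p)),  // capture p in a closure
            p);                                                      // block value is p itself

        Func<int> compiled = Expression.Lambda<Func<int>>(body).Compile();
        Console.WriteLine(compiled());   // prints 0: p is default(int) despite the closure
    }
}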
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Drawing.Text; using System.Globalization; using System.Runtime.InteropServices; using Gdip = System.Drawing.SafeNativeMethods.Gdip; namespace System.Drawing { /// <summary> /// Abstracts a group of type faces having a similar basic design but having certain variation in styles. /// </summary> public sealed partial class FontFamily : MarshalByRefObject, IDisposable { private const int NeutralLanguage = 0; private IntPtr _nativeFamily; private bool _createDefaultOnFail; #if DEBUG private static object s_lockObj = new object(); private static int s_idCount = 0; private int _id; #endif [SuppressMessage("Microsoft.Security", "CA2106:SecureAsserts")] private void SetNativeFamily(IntPtr family) { Debug.Assert(_nativeFamily == IntPtr.Zero, "Setting GDI+ native font family when already initialized."); _nativeFamily = family; #if DEBUG lock (s_lockObj) { _id = ++s_idCount; } #endif } internal FontFamily(IntPtr family) => SetNativeFamily(family); /// <summary> /// Initializes a new instance of the <see cref='FontFamily'/> class with the specified name. /// /// The <paramref name="createDefaultOnFail"/> parameter determines how errors are handled when creating a /// font based on a font family that does not exist on the end user's system at run time. If this parameter is /// true, then a fall-back font will always be used instead. If this parameter is false, an exception will be thrown. /// </summary> internal FontFamily(string name, bool createDefaultOnFail) { _createDefaultOnFail = createDefaultOnFail; CreateFontFamily(name, null); } /// <summary> /// Initializes a new instance of the <see cref='FontFamily'/> class with the specified name. /// </summary> public FontFamily(string name) => CreateFontFamily(name, null); /// <summary> /// Initializes a new instance of the <see cref='FontFamily'/> class in the specified /// <see cref='FontCollection'/> and with the specified name. /// </summary> public FontFamily(string name, FontCollection fontCollection) => CreateFontFamily(name, fontCollection); // Creates the native font family object. // Note: GDI+ creates singleton font family objects (from the corresponding font file) and reference count them. private void CreateFontFamily(string name, FontCollection fontCollection) { IntPtr fontfamily = IntPtr.Zero; IntPtr nativeFontCollection = (fontCollection == null) ? IntPtr.Zero : fontCollection._nativeFontCollection; int status = Gdip.GdipCreateFontFamilyFromName(name, new HandleRef(fontCollection, nativeFontCollection), out fontfamily); if (status != Gdip.Ok) { if (_createDefaultOnFail) { fontfamily = GetGdipGenericSansSerif(); // This throws if failed. } else { // Special case this incredibly common error message to give more information. if (status == Gdip.FontFamilyNotFound) { throw new ArgumentException(SR.Format(SR.GdiplusFontFamilyNotFound, name)); } else if (status == Gdip.NotTrueTypeFont) { throw new ArgumentException(SR.Format(SR.GdiplusNotTrueTypeFont, name)); } else { throw Gdip.StatusException(status); } } } SetNativeFamily(fontfamily); } /// <summary> /// Initializes a new instance of the <see cref='FontFamily'/> class from the specified generic font family. 
/// </summary> public FontFamily(GenericFontFamilies genericFamily) { IntPtr nativeFamily = IntPtr.Zero; int status; switch (genericFamily) { case GenericFontFamilies.Serif: status = Gdip.GdipGetGenericFontFamilySerif(out nativeFamily); break; case GenericFontFamilies.SansSerif: status = Gdip.GdipGetGenericFontFamilySansSerif(out nativeFamily); break; case GenericFontFamilies.Monospace: default: status = Gdip.GdipGetGenericFontFamilyMonospace(out nativeFamily); break; } Gdip.CheckStatus(status); SetNativeFamily(nativeFamily); } ~FontFamily() => Dispose(false); internal IntPtr NativeFamily => _nativeFamily; /// <summary> /// Converts this <see cref='FontFamily'/> to a human-readable string. /// </summary> public override string ToString() => $"[{GetType().Name}: Name={Name}]"; /// <summary> /// Gets a hash code for this <see cref='FontFamily'/>. /// </summary> public override int GetHashCode() => GetName(NeutralLanguage).GetHashCode(); private static int CurrentLanguage => CultureInfo.CurrentUICulture.LCID; /// <summary> /// Disposes of this <see cref='FontFamily'/>. /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } private void Dispose(bool disposing) { if (_nativeFamily != IntPtr.Zero) { try { #if DEBUG int status = !Gdip.Initialized ? Gdip.Ok : #endif Gdip.GdipDeleteFontFamily(new HandleRef(this, _nativeFamily)); #if DEBUG Debug.Assert(status == Gdip.Ok, "GDI+ returned an error status: " + status.ToString(CultureInfo.InvariantCulture)); #endif } catch (Exception ex) when (!ClientUtils.IsCriticalException(ex)) { } finally { _nativeFamily = IntPtr.Zero; } } } /// <summary> /// Gets the name of this <see cref='FontFamily'/>. /// </summary> public string Name => GetName(CurrentLanguage); /// <summary> /// Returns the name of this <see cref='FontFamily'/> in the specified language. /// </summary> public unsafe string GetName(int language) { char* name = stackalloc char[32]; // LF_FACESIZE is 32 int status = Gdip.GdipGetFamilyName(new HandleRef(this, NativeFamily), name, language); Gdip.CheckStatus(status); return Marshal.PtrToStringUni((IntPtr)name); } /// <summary> /// Returns an array that contains all of the <see cref='FontFamily'/> objects associated with the current /// graphics context. /// </summary> public static FontFamily[] Families => new InstalledFontCollection().Families; /// <summary> /// Gets a generic SansSerif <see cref='FontFamily'/>. /// </summary> public static FontFamily GenericSansSerif => new FontFamily(GetGdipGenericSansSerif()); private static IntPtr GetGdipGenericSansSerif() { IntPtr nativeFamily = IntPtr.Zero; int status = Gdip.GdipGetGenericFontFamilySansSerif(out nativeFamily); Gdip.CheckStatus(status); return nativeFamily; } /// <summary> /// Gets a generic Serif <see cref='FontFamily'/>. /// </summary> public static FontFamily GenericSerif => new FontFamily(GenericFontFamilies.Serif); /// <summary> /// Gets a generic monospace <see cref='FontFamily'/>. /// </summary> public static FontFamily GenericMonospace => new FontFamily(GenericFontFamilies.Monospace); /// <summary> /// Returns an array that contains all of the <see cref='FontFamily'/> objects associated with the specified /// graphics context. 
/// </summary> [Obsolete("Do not use method GetFamilies, use property Families instead")] public static FontFamily[] GetFamilies(Graphics graphics) { if (graphics == null) { throw new ArgumentNullException(nameof(graphics)); } return new InstalledFontCollection().Families; } /// <summary> /// Indicates whether the specified <see cref='FontStyle'/> is available. /// </summary> public bool IsStyleAvailable(FontStyle style) { int bresult; int status = Gdip.GdipIsStyleAvailable(new HandleRef(this, NativeFamily), style, out bresult); Gdip.CheckStatus(status); return bresult != 0; } /// <summary> /// Gets the size of the Em square for the specified style in font design units. /// </summary> public int GetEmHeight(FontStyle style) { int result = 0; int status = Gdip.GdipGetEmHeight(new HandleRef(this, NativeFamily), style, out result); Gdip.CheckStatus(status); return result; } /// <summary> /// Returns the ascender metric for Windows. /// </summary> public int GetCellAscent(FontStyle style) { int result = 0; int status = Gdip.GdipGetCellAscent(new HandleRef(this, NativeFamily), style, out result); Gdip.CheckStatus(status); return result; } /// <summary> /// Returns the descender metric for Windows. /// </summary> public int GetCellDescent(FontStyle style) { int result = 0; int status = Gdip.GdipGetCellDescent(new HandleRef(this, NativeFamily), style, out result); Gdip.CheckStatus(status); return result; } /// <summary> /// Returns the distance between two consecutive lines of text for this <see cref='FontFamily'/> with the /// specified <see cref='FontStyle'/>. /// </summary> public int GetLineSpacing(FontStyle style) { int result = 0; int status = Gdip.GdipGetLineSpacing(new HandleRef(this, NativeFamily), style, out result); Gdip.CheckStatus(status); return result; } } }
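// Usage sketch (illustration only, not part of System.Drawing itself): reading the
// design-unit metrics that FontFamily exposes. The scaling note reflects the usual
// GDI+ convention; the helper class name is hypothetical.
using System;
using System.Drawing;
using System.Drawing.Text;

internal static class FontFamilyMetricsSketch
{
    internal static void Run()
    {
        using (FontFamily family = new FontFamily(GenericFontFamilies.SansSerif))
        {
            int emHeight = family.GetEmHeight(FontStyle.Regular);   // em square, design units
            int ascent = family.GetCellAscent(FontStyle.Regular);   // ascender, design units
            int descent = family.GetCellDescent(FontStyle.Regular); // descender, design units
            int spacing = family.GetLineSpacing(FontStyle.Regular); // line-to-line distance

            // To express a design-unit metric at a concrete Font size, scale by
            // font.Size / emHeight (e.g. ascentInPoints = ascent * font.Size / emHeight).
            Console.WriteLine($"{family.Name}: em={emHeight} ascent={ascent} descent={descent} spacing={spacing}");
        }
    }
}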
using System; using System.Collections.Generic; using System.Linq; #if __UNIFIED__ using Foundation; using UIKit; #else using MonoTouch.Foundation; using MonoTouch.UIKit; using CGRect = global::System.Drawing.RectangleF; using CGSize = global::System.Drawing.SizeF; using CGPoint = global::System.Drawing.PointF; using nfloat = global::System.Single; using nint = global::System.Int32; using nuint = global::System.UInt32; #endif using SWTableViewCells; namespace SWTableViewCellSample { partial class ViewController : UITableViewController { private string[] sections; private List<string>[] testArray; private CellDelegate cellDelegate; private bool useCustomCells; public ViewController (IntPtr handle) : base (handle) { } public override void ViewDidLoad () { base.ViewDidLoad (); // set up the data Random rnd = new Random (); sections = UILocalizedIndexedCollation.CurrentCollation ().SectionIndexTitles; testArray = ( from section in sections group section by section into g select Enumerable.Range (1, rnd.Next (5) + 1).Select (i => i.ToString ()).ToList () ).ToArray (); cellDelegate = new CellDelegate (testArray, TableView); NavigationItem.LeftBarButtonItem = EditButtonItem; TableView.RowHeight = 90; NavigationItem.Title = "Pull to Toggle Cell Type"; // Setup refresh control for example app UIRefreshControl refreshControl = new UIRefreshControl (); refreshControl.ValueChanged += (sender, args) => { refreshControl.BeginRefreshing (); useCustomCells = !useCustomCells; if (useCustomCells) { RefreshControl.TintColor = UIColor.Yellow; } else { RefreshControl.TintColor = UIColor.Blue; } TableView.ReloadData (); refreshControl.EndRefreshing (); }; refreshControl.TintColor = UIColor.Blue; TableView.AddSubview (refreshControl); RefreshControl = refreshControl; useCustomCells = false; } public override nint NumberOfSections (UITableView tableView) { return testArray.Length; } public override nint RowsInSection (UITableView tableView, nint section) { return testArray [section].Count (); } public override string TitleForHeader (UITableView tableView, nint section) { return sections [section]; } public override UITableViewCell GetCell (UITableView tableView, NSIndexPath indexPath) { if (useCustomCells) { UMTableViewCell cell = (UMTableViewCell)tableView.DequeueReusableCell ("UMCell"); // we assume that only new cells have no delegate if (cell.Delegate == null) { cell.Delegate = cellDelegate; // optionally specify a width that each set of utility buttons will share cell.SetLeftUtilityButtons (LeftButtons (), 32.0f); cell.SetRightUtilityButtons (RightButtons (), 58.0f); } cell.Label.Text = string.Format ("Section: {0}, Seat: {1}", indexPath.Section, indexPath.Row); return cell; } else { SWTableViewCell cell = (SWTableViewCell)tableView.DequeueReusableCell ("Cell"); // we assume that only new cells have no delegate if (cell.Delegate == null) { cell.Delegate = cellDelegate; cell.RightUtilityButtons = RightButtons (); cell.LeftUtilityButtons = LeftButtons (); } cell.TextLabel.Text = string.Format ("Seat: {0}", testArray [indexPath.Section] [indexPath.Row]); cell.DetailTextLabel.Text = string.Format ("Details for seat {1} in section: {0}.", indexPath.Section, indexPath.Row); return cell; } } public override void RowSelected (UITableView tableView, NSIndexPath indexPath) { Console.WriteLine ("cell selected at index path {0}:{1}", indexPath.Section, indexPath.Row); Console.WriteLine ("selected cell index path is {0}", TableView.IndexPathForSelectedRow); if (!tableView.Editing) { tableView.DeselectRow (indexPath, 
true); } } static UIButton[] RightButtons () { NSMutableArray rightUtilityButtons = new NSMutableArray (); rightUtilityButtons.AddUtilityButton (UIColor.FromRGBA (0.78f, 0.78f, 0.8f, 1.0f), "More"); rightUtilityButtons.AddUtilityButton (UIColor.FromRGBA (1.0f, 0.231f, 0.188f, 1.0f), "Delete"); return NSArray.FromArray<UIButton> (rightUtilityButtons); } static UIButton[] LeftButtons () { NSMutableArray leftUtilityButtons = new NSMutableArray (); leftUtilityButtons.AddUtilityButton (UIColor.FromRGBA (0.07f, 0.75f, 0.16f, 1.0f), UIImage.FromBundle ("check.png")); leftUtilityButtons.AddUtilityButton (UIColor.FromRGBA (1.0f, 1.0f, 0.35f, 1.0f), UIImage.FromBundle ("clock.png")); leftUtilityButtons.AddUtilityButton (UIColor.FromRGBA (1.0f, 0.231f, 0.188f, 1.0f), UIImage.FromBundle ("cross.png")); leftUtilityButtons.AddUtilityButton (UIColor.FromRGBA (0.55f, 0.27f, 0.07f, 1.0f), UIImage.FromBundle ("list.png")); return NSArray.FromArray<UIButton> (leftUtilityButtons); } class CellDelegate : SWTableViewCellDelegate { private readonly List<string>[] testArray; private readonly UITableView tableView; public CellDelegate (List<string>[] testArray, UITableView tableView) { this.testArray = testArray; this.tableView = tableView; } public override void ScrollingToState (SWTableViewCell cell, SWCellState state) { switch (state) { case SWCellState.Center: Console.WriteLine ("utility buttons closed"); break; case SWCellState.Left: Console.WriteLine ("left utility buttons open"); break; case SWCellState.Right: Console.WriteLine ("right utility buttons open"); break; } } public override void DidTriggerLeftUtilityButton (SWTableViewCell cell, nint index) { Console.WriteLine ("Left button {0} was pressed.", index); new UIAlertView ("Left Utility Buttons", string.Format ("Left button {0} was pressed.", index), null, "OK", null).Show (); } public override void DidTriggerRightUtilityButton (SWTableViewCell cell, nint index) { Console.WriteLine ("Right button {0} was pressed.", index); switch (index) { case 0: // More button was pressed Console.WriteLine ("More button was pressed"); new UIAlertView ("Hello", "More more more", null, "cancel", null).Show (); cell.HideUtilityButtons (true); break; case 1: // Delete button was pressed NSIndexPath cellIndexPath = tableView.IndexPathForCell (cell); testArray [cellIndexPath.Section].RemoveAt (cellIndexPath.Row); tableView.DeleteRows (new[] { cellIndexPath }, UITableViewRowAnimation.Left); break; } } public override bool ShouldHideUtilityButtonsOnSwipe (SWTableViewCell cell) { // allow just one cell's utility button to be open at once return true; } public override bool CanSwipeToState (SWTableViewCell cell, SWCellState state) { switch (state) { case SWCellState.Left: // set to false to disable all left utility buttons appearing return true; case SWCellState.Right: // set to false to disable all right utility buttons appearing return true; } return true; } } } }
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Reflection; using log4net; using OpenMetaverse; namespace OpenSim.Framework { /// <summary> /// Represents an item in a task inventory /// </summary> public class TaskInventoryItem : ICloneable { // private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); /// <summary> /// XXX This should really be factored out into some constants class. 
/// </summary> private const uint FULL_MASK_PERMISSIONS_GENERAL = 2147483647; private UUID _assetID = UUID.Zero; private uint _baseMask = FULL_MASK_PERMISSIONS_GENERAL; private uint _creationDate = 0; private UUID _creatorID = UUID.Zero; private string _creatorData = String.Empty; private string _description = String.Empty; private uint _everyoneMask = FULL_MASK_PERMISSIONS_GENERAL; private uint _flags = 0; private UUID _groupID = UUID.Zero; private uint _groupMask = FULL_MASK_PERMISSIONS_GENERAL; private int _invType = 0; private UUID _itemID = UUID.Zero; private UUID _lastOwnerID = UUID.Zero; private string _name = String.Empty; private uint _nextOwnerMask = FULL_MASK_PERMISSIONS_GENERAL; private UUID _ownerID = UUID.Zero; private uint _ownerMask = FULL_MASK_PERMISSIONS_GENERAL; private UUID _parentID = UUID.Zero; //parent folder id private UUID _parentPartID = UUID.Zero; // SceneObjectPart this is inside private UUID _permsGranter; private int _permsMask; private int _type = 0; private UUID _oldID; private UUID _loadedID = UUID.Zero; private bool _ownerChanged = false; public UUID AssetID { get { return _assetID; } set { _assetID = value; } } public uint BasePermissions { get { return _baseMask; } set { _baseMask = value; } } public uint CreationDate { get { return _creationDate; } set { _creationDate = value; } } public UUID CreatorID { get { return _creatorID; } set { _creatorID = value; } } public string CreatorData // = <profile url>;<name> { get { return _creatorData; } set { _creatorData = value; } } /// <summary> /// Used by the DB layer to retrieve / store the entire user identification. /// The identification can either be a simple UUID or a string of the form /// uuid[;profile_url[;name]] /// </summary> public string CreatorIdentification { get { if (!string.IsNullOrEmpty(_creatorData)) return _creatorID.ToString() + ';' + _creatorData; else return _creatorID.ToString(); } set { if ((value == null) || (value != null && value == string.Empty)) { _creatorData = string.Empty; return; } if (!value.Contains(";")) // plain UUID { UUID uuid = UUID.Zero; UUID.TryParse(value, out uuid); _creatorID = uuid; } else // <uuid>[;<endpoint>[;name]] { string name = "Unknown User"; string[] parts = value.Split(';'); if (parts.Length >= 1) { UUID uuid = UUID.Zero; UUID.TryParse(parts[0], out uuid); _creatorID = uuid; } if (parts.Length >= 2) _creatorData = parts[1]; if (parts.Length >= 3) name = parts[2]; _creatorData += ';' + name; } } } public string Description { get { return _description; } set { _description = value; } } public uint EveryonePermissions { get { return _everyoneMask; } set { _everyoneMask = value; } } public uint Flags { get { return _flags; } set { _flags = value; } } public UUID GroupID { get { return _groupID; } set { _groupID = value; } } public uint GroupPermissions { get { return _groupMask; } set { _groupMask = value; } } public int InvType { get { return _invType; } set { _invType = value; } } public UUID ItemID { get { return _itemID; } set { _itemID = value; } } public UUID OldItemID { get { return _oldID; } set { _oldID = value; } } public UUID LoadedItemID { get { return _loadedID; } set { _loadedID = value; } } public UUID LastOwnerID { get { return _lastOwnerID; } set { _lastOwnerID = value; } } public string Name { get { return _name; } set { _name = value; } } public uint NextPermissions { get { return _nextOwnerMask; } set { _nextOwnerMask = value; } } public UUID OwnerID { get { return _ownerID; } set { _ownerID = value; } } public uint CurrentPermissions { 
get { return _ownerMask; } set { _ownerMask = value; } } public UUID ParentID { get { return _parentID; } set { _parentID = value; } } public UUID ParentPartID { get { return _parentPartID; } set { _parentPartID = value; } } public UUID PermsGranter { get { return _permsGranter; } set { _permsGranter = value; } } public int PermsMask { get { return _permsMask; } set { _permsMask = value; } } public int Type { get { return _type; } set { _type = value; } } public bool OwnerChanged { get { return _ownerChanged; } set { _ownerChanged = value; // m_log.DebugFormat( // "[TASK INVENTORY ITEM]: Owner changed set {0} for {1} {2} owned by {3}", // _ownerChanged, Name, ItemID, OwnerID); } } /// <summary> /// This used ONLY during copy. It can't be relied on at other times! /// </summary> /// <remarks> /// For true script running status, use IEntityInventory.TryGetScriptInstanceRunning() for now. /// </remarks> public bool ScriptRunning { get; set; } // See ICloneable #region ICloneable Members public Object Clone() { return MemberwiseClone(); } #endregion /// <summary> /// Reset the UUIDs for this item. /// </summary> /// <param name="partID">The new part ID to which this item belongs</param> public void ResetIDs(UUID partID) { LoadedItemID = OldItemID; OldItemID = ItemID; ItemID = UUID.Random(); ParentPartID = partID; ParentID = partID; } public TaskInventoryItem() { ScriptRunning = true; CreationDate = (uint)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds; } } }
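// Usage sketch (illustration only, not part of OpenSim.Framework): how the
// CreatorIdentification string round-trips the uuid[;profile_url[;name]] form described
// above, and how ResetIDs rehomes an item to a new part. The UUID, URL, and the
// InvType value are made up for illustration.
using System;
using OpenMetaverse;
using OpenSim.Framework;

internal static class TaskInventoryItemSketch
{
    internal static void Run()
    {
        TaskInventoryItem item = new TaskInventoryItem
        {
            Name = "ExampleScript",
            InvType = 10 // script inventory type (assumption)
        };

        // Creator identification: plain UUID, or UUID plus optional endpoint and name.
        item.CreatorIdentification = "11111111-2222-3333-4444-555555555555;http://example.org/profile;Example User";
        Console.WriteLine(item.CreatorID);   // parsed UUID
        Console.WriteLine(item.CreatorData); // "http://example.org/profile;Example User"

        // Rehome the item into a new SceneObjectPart: the old ItemID is remembered,
        // a fresh ItemID is generated, and both parent fields point at the new part.
        UUID newPartId = UUID.Random();
        item.ResetIDs(newPartId);
        Console.WriteLine(item.OldItemID);
        Console.WriteLine(item.ParentPartID == newPartId); // True
    }
}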
// // (C) Copyright 2003-2011 by Autodesk, Inc. // // Permission to use, copy, modify, and distribute this software in // object code form for any purpose and without fee is hereby granted, // provided that the above copyright notice appears in all copies and // that both that copyright notice and the limited warranty and // restricted rights notice below appear in all supporting // documentation. // // AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS. // AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF // MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC. // DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE // UNINTERRUPTED OR ERROR FREE. // // Use, duplication, or disclosure by the U.S. Government is subject to // restrictions set forth in FAR 52.227-19 (Commercial Computer // Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii) // (Rights in Technical Data and Computer Software), as applicable. // namespace Revit.SDK.Samples.AreaReinParameters.CS { using System; using System.Collections.Generic; using System.Text; using System.Windows.Forms; using System.ComponentModel; using Autodesk.Revit; using Autodesk.Revit.DB; using Autodesk.Revit.DB.Structure; /// <summary> /// can be the datasource of propertygrid /// </summary> class FloorAreaReinData : IAreaReinData { //member Parameter m_layoutRule; //top major layer Parameter m_topMajorBarType = null; Parameter m_topMajorHookType = null; Parameter m_topMajorHookOrientation = null; //top minor layer Parameter m_topMinorBarType = null; Parameter m_topMinorHookType = null; Parameter m_topMinorHookOrientation = null; //bottom major layer Parameter m_bottomMajorBarType = null; Parameter m_bottomMajorHookType = null; Parameter m_bottomMajorHookOrientation = null; //bottom minor layer Parameter m_bottomMinorBarType = null; Parameter m_bottomMinorHookType = null; Parameter m_bottomMinorHookOrientation = null; /// <summary> /// fill in data with given AreaReinforcement /// </summary> /// <param name="areaRein"></param> /// <returns></returns> public bool FillInData(AreaReinforcement areaRein) { bool flag = false; //member m_layoutRule = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_LAYOUT_RULE); flag = (m_layoutRule != null); //top major layer m_topMajorBarType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_BAR_TYPE_TOP_DIR_1); m_topMajorHookType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_TYPE_TOP_DIR_1); m_topMajorHookOrientation = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_ORIENT_TOP_DIR_1); flag &= (m_topMajorBarType != null) && (m_topMajorHookOrientation != null) && (m_topMajorHookType != null); //top minor layer m_topMinorBarType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_BAR_TYPE_TOP_DIR_2); m_topMinorHookType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_TYPE_TOP_DIR_2); m_topMinorHookOrientation = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_ORIENT_TOP_DIR_2); flag &= (m_topMinorBarType != null) && (m_topMinorHookOrientation != null) && (m_topMinorHookType != null); //bottom major layer m_bottomMajorBarType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_BAR_TYPE_BOTTOM_DIR_1); m_bottomMajorHookType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_TYPE_BOTTOM_DIR_1); m_bottomMajorHookOrientation = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_ORIENT_BOTTOM_DIR_1); flag &= (m_bottomMajorBarType != null) && (m_bottomMajorHookOrientation != null) && (m_bottomMajorHookType != 
null); //bottom minor layer m_bottomMinorBarType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_BAR_TYPE_BOTTOM_DIR_2); m_bottomMinorHookType = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_TYPE_BOTTOM_DIR_2); m_bottomMinorHookOrientation = areaRein.get_Parameter( BuiltInParameter.REBAR_SYSTEM_HOOK_ORIENT_BOTTOM_DIR_2); flag &= (m_bottomMinorBarType != null) && (m_bottomMinorHookOrientation != null) && (m_bottomMinorHookType != null); return flag; } /// <summary> /// layout rule /// </summary> [Category("Construction")] public LayoutRules Layout_Rule { get { int index = m_layoutRule.AsInteger(); return (LayoutRules)index; } set { int index = (int)value; m_layoutRule.Set(index); } } #region top major layer [CategoryAttribute("Top Major Layer"), TypeConverter(typeof(BarTypeItem))] public Autodesk.Revit.DB.ElementId Top_Major_Bar_Type { get { return m_topMajorBarType.AsElementId(); } set { m_topMajorBarType.Set(value); } } [CategoryAttribute("Top Major Layer"), TypeConverter(typeof(HookTypeItem))] public Autodesk.Revit.DB.ElementId Top_Major_Hook_Type { get { return m_topMajorHookType.AsElementId(); } set { m_topMajorHookType.Set(value); } } [CategoryAttribute("Top Major Layer")] public FloorHookOrientations Top_Major_Hook_Orientation { get { int index = m_topMajorHookOrientation.AsInteger(); return (FloorHookOrientations)index; } set { int index = (int)value; m_topMajorHookOrientation.Set(index); } } #endregion #region top minor layer [CategoryAttribute("Top Minor Layer"), TypeConverter(typeof(BarTypeItem))] public Autodesk.Revit.DB.ElementId Top_Minor_Bar_Type { get { return m_topMinorBarType.AsElementId(); } set { m_topMinorBarType.Set(value); } } [CategoryAttribute("Top Minor Layer"), TypeConverter(typeof(HookTypeItem))] public Autodesk.Revit.DB.ElementId Top_Minor_Hook_Type { get { return m_topMinorHookType.AsElementId(); } set { m_topMinorHookType.Set(value); } } [CategoryAttribute("Top Minor Layer")] public FloorHookOrientations Top_Minor_Hook_Orientation { get { int index = m_topMinorHookOrientation.AsInteger(); return (FloorHookOrientations)index; } set { int index = (int)value; m_topMinorHookOrientation.Set(index); } } #endregion #region bottom major layer [CategoryAttribute("Bottom Major Layer"), TypeConverter(typeof(BarTypeItem))] public Autodesk.Revit.DB.ElementId Bottom_Major_Bar_Type { get { return m_bottomMajorBarType.AsElementId(); } set { m_bottomMajorBarType.Set(value); } } [CategoryAttribute("Bottom Major Layer"), TypeConverter(typeof(HookTypeItem))] public Autodesk.Revit.DB.ElementId Bottom_Major_Hook_Type { get { return m_bottomMajorHookType.AsElementId(); } set { m_bottomMajorHookType.Set(value); } } [CategoryAttribute("Bottom Major Layer")] public FloorHookOrientations Bottom_Major_Hook_Orientation { get { int index = m_bottomMajorHookOrientation.AsInteger(); return (FloorHookOrientations)index; } set { int index = (int)value; m_bottomMajorHookOrientation.Set(index); } } #endregion #region bottom minor layer [CategoryAttribute("Bottom Minor Layer"), TypeConverter(typeof(BarTypeItem))] public Autodesk.Revit.DB.ElementId Bottom_Minor_Bar_Type { get { return m_bottomMinorBarType.AsElementId(); } set { m_bottomMinorBarType.Set(value); } } [CategoryAttribute("Bottom Minor Layer"), TypeConverter(typeof(HookTypeItem))] public Autodesk.Revit.DB.ElementId Bottom_Minor_Hook_Type { get { return m_bottomMinorHookType.AsElementId(); } set { m_bottomMinorHookType.Set(value); } } [CategoryAttribute("Bottom Minor Layer")] public FloorHookOrientations 
Bottom_Minor_Hook_Orientation { get { int index = m_bottomMinorHookOrientation.AsInteger(); return (FloorHookOrientations)index; } set { int index = (int)value; m_bottomMinorHookOrientation.Set(index); } } #endregion } }
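// Usage sketch (illustration only, not part of the SDK sample above): how the
// FloorAreaReinData is meant to be consumed - fill it from a selected AreaReinforcement
// and hand it to a Windows Forms PropertyGrid, as the class comment suggests. The form
// and the way the AreaReinforcement is obtained are assumptions.
using System.Windows.Forms;
using Autodesk.Revit.DB.Structure;

namespace Revit.SDK.Samples.AreaReinParameters.CS
{
    internal static class FloorAreaReinDataSketch
    {
        internal static void Show(AreaReinforcement areaRein)
        {
            FloorAreaReinData data = new FloorAreaReinData();
            if (!data.FillInData(areaRein))
            {
                MessageBox.Show("Not all rebar system parameters could be found.");
                return;
            }

            // The [Category] and TypeConverter attributes on the properties drive the grid layout.
            using (Form form = new Form { Text = "Area Reinforcement Parameters" })
            using (PropertyGrid grid = new PropertyGrid { Dock = DockStyle.Fill, SelectedObject = data })
            {
                form.Controls.Add(grid);
                form.ShowDialog();
            }
        }
    }
}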
#region license // Copyright 2014 JetBrains s.r.o. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #endregion using JetBrains.DataFlow; using JetBrains.ReSharper.Plugins.AngularJS.Psi.AngularJs.Parsing.Tree; using JetBrains.ReSharper.Psi.AngularJs.Parsing; using JetBrains.ReSharper.Psi.ExtensionsAPI.Tree; using JetBrains.ReSharper.Psi.JavaScript.Impl.Tree; using JetBrains.ReSharper.Psi.JavaScript.Parsing; using JetBrains.ReSharper.Psi.Parsing; namespace JetBrains.ReSharper.Plugins.AngularJS.Psi.AngularJs.Parsing { public class AngularJsTreeBuilder : JavaScriptTreeBuilder { // ReSharper disable InconsistentNaming private CompositeNodeType FILTER_EXPRESSION; private CompositeNodeType FILTER_ARGUMENT_LIST; private CompositeNodeType REPEAT_EXPRESSION; // ReSharper restore InconsistentNaming public AngularJsTreeBuilder(ILexer lexer, Lifetime lifetime) : base(lexer, lifetime) { } protected override void InitElementTypes() { FILTER_EXPRESSION = AngularJsElementType.FILTER_EXPRESSION; FILTER_ARGUMENT_LIST = AngularJsElementType.FILTER_ARGUMENT_LIST; REPEAT_EXPRESSION = AngularJsElementType.REPEAT_EXPRESSION; base.InitElementTypes(); } public override void ParseStatement() { var tokenType = GetTokenType(); if (tokenType == TokenType.SEMICOLON) { ParseEmptyStatement(); return; } if (CanBeIdentifier(tokenType)) { var nextTokenType = LookAhead(1); if (nextTokenType == TokenType.EQ) { ParseVariableStatement(); return; } if (nextTokenType == TokenType.IN_KEYWORD) { ParseInStatement(); return; } } if (tokenType == TokenType.LPARENTH) { if (ParseInStatement()) return; } // TODO: Check ExpressionFirst if (!Builder.Eof() && ExpressionFirst[tokenType]) { ParseExpressionStatement(); } else { var mark = Mark(); if (!Builder.Eof()) Advance(); Builder.Error(mark, "Unexpected token"); } } private void ParseEmptyStatement() { var mark = Mark(); ExpectToken(TokenType.SEMICOLON); Builder.DoneBeforeWhitespaces(mark, EMPTY_STATEMENT, null); } private new void ParseVariableStatement() { var mark = Mark(); ParseVariableDeclaration(); ParseOptionalSemiColon(); Builder.DoneBeforeWhitespaces(mark, VARIABLE_STATEMENT, null); } private void ParseVariableDeclaration() { var mark = Mark(); base.ParseIdentifierExpression(); if (GetTokenType() == TokenType.EQ) { Advance(); ParseExpression(); } Builder.DoneBeforeWhitespaces(mark, VARIABLE_DECLARATION, null); } private void ParseOptionalSemiColon() { if (!Builder.Eof()) ExpectToken(TokenType.SEMICOLON); } private bool ParseInStatement() { var mark = Mark(); if (!ParseInExpression()) { Builder.Drop(mark); return false; } Builder.DoneBeforeWhitespaces(mark, EXPRESSION_STATEMENT, null); return true; } private bool ParseInExpression() { var mark = Mark(); if (CanBeIdentifier(GetTokenType())) { var m = Mark(); base.ParseIdentifierExpression(); Builder.DoneBeforeWhitespaces(m, REFERENCE_EXPRESSION, null); } else if (GetTokenType() == TokenType.LPARENTH) { var m = Mark(); ParseKeyValue(); if (GetTokenType() != TokenType.IN_KEYWORD) { Builder.RollbackTo(mark); return false; } 
Builder.DoneBeforeWhitespaces(m, PARENTHESIZED_EXPRESSION, null); } else { Builder.ErrorBeforeWhitespaces("Unexpected token", CommentsOrWhiteSpacesTokens); return false; } ExpectToken(TokenType.IN_KEYWORD); ParseExpression(); if (GetTokenType() == AngularJsTokenType.TRACK_BY_KEYWORD) { Advance(); ParseExpression(); } Builder.DoneBeforeWhitespaces(mark, REPEAT_EXPRESSION, null); return true; } private void ParseKeyValue() { ExpectToken(TokenType.LPARENTH); var mark = Mark(); if (CanBeIdentifier(GetTokenType())) { var m = Mark(); base.ParseIdentifierExpression(); Builder.DoneBeforeWhitespaces(m, REFERENCE_EXPRESSION, null); } else { Builder.ErrorBeforeWhitespaces("Expected identifier", CommentsOrWhiteSpacesTokens); } ExpectToken(TokenType.COMMA); if (CanBeIdentifier(GetTokenType())) { var m = Mark(); base.ParseIdentifierExpression(); Builder.DoneBeforeWhitespaces(m, REFERENCE_EXPRESSION, null); } else { Builder.ErrorBeforeWhitespaces("Expected identifier", CommentsOrWhiteSpacesTokens); } Builder.DoneBeforeWhitespaces(mark, COMPOUND_EXPRESSION, null); ExpectToken(TokenType.RPARENTH); } private void ParseExpressionStatement() { var mark = Mark(); ParseExpression(); ParseOptionalSemiColon(); Builder.DoneBeforeWhitespaces(mark, EXPRESSION_STATEMENT, null); } private void ParseExpression() { ParseFilterChainExpression(); } private void ParseFilterChainExpression() { var mark = Mark(); ParseAssignmentExpression(); if (GetTokenType() == TokenType.PIPE) { while (GetTokenType() == TokenType.PIPE) { ParseFilterExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); Builder.Precede(mark); } } Builder.Drop(mark); } private void ParseFilterExpression() { ExpectToken(TokenType.PIPE); SkipWhitespaces(); var mark = Builder.Mark(); ParseFilterIdentifierExpression(); ParseFilterArgumentList(); Builder.DoneBeforeWhitespaces(mark, FILTER_EXPRESSION, null); } private void ParseFilterIdentifierExpression() { var mark = Builder.Mark(); base.ParseIdentifierExpression(); Builder.DoneBeforeWhitespaces(mark, REFERENCE_EXPRESSION, null); } private void ParseFilterArgumentList() { var mark = Builder.Mark(); while (GetTokenType() == TokenType.COLON) { ExpectToken(TokenType.COLON); // TODO: ParseExpression? This prevents recursive filter chains, // but requires knowledge of what ParseExpressionStatement and // ParseFilterChainExpression do ParseAssignmentExpression(); } if (Builder.IsEmpty(mark)) { Builder.Drop(mark); return; } Builder.DoneBeforeWhitespaces(mark, FILTER_ARGUMENT_LIST, null); } private new void ParseAssignmentExpression() { var mark = Mark(); if (!ParseTernaryExpression()) { Builder.Drop(mark); return; } if (GetTokenType() != TokenType.EQ) { Builder.Drop(mark); return; } Advance(); ParseAssignmentExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); } private bool ParseTernaryExpression() { if (!ParseLogicalOrExpression()) return false; if (GetTokenType() == JavaScriptTokenType.QUESTION) { var mark = Builder.PrecedeCurrent(); // This pattern might be wrong - we backtrack too much? // The real JS parser doesn't check return values. Does this handle // parse failures better? 
if (ExpectToken(JavaScriptTokenType.QUESTION) && ParseTernaryExpression() && ExpectToken(JavaScriptTokenType.COLON) && ParseTernaryExpression()) { Builder.DoneBeforeWhitespaces(mark, CONDITIONAL_TERNARY_EXPRESSION, null); return true; } Builder.Drop(mark); return false; } return true; } private bool ParseLogicalOrExpression() { if (!ParseLogicalAndExpression()) return false; if (GetTokenType() == JavaScriptTokenType.PIPE2) { var mark = Builder.PrecedeCurrent(); if (ExpectToken(JavaScriptTokenType.PIPE2) && ParseLogicalOrExpression()) // TODO: AngularJs parser uses a loop instead of recursion { Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); return true; } Builder.Drop(mark); return false; } return true; } private bool ParseLogicalAndExpression() { if (!ParseEqualityExpression()) return false; if (GetTokenType() == JavaScriptTokenType.AMPER2) { var mark = Builder.PrecedeCurrent(); if (ExpectToken(JavaScriptTokenType.AMPER2) && ParseLogicalAndExpression()) // TODO: AngularJs parser uses a loop instead of recursion { Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); return true; } Builder.Drop(mark); return false; } return true; } private bool ParseEqualityExpression() { if (!ParseRelationalExpression()) return false; var tokenType = GetTokenType(); if (tokenType == JavaScriptTokenType.EQ2 || tokenType == JavaScriptTokenType.NOTEQ || tokenType == JavaScriptTokenType.EQ3 || tokenType == JavaScriptTokenType.NOTEQ2) { var mark = Builder.PrecedeCurrent(); Advance(); // Past operator ParseEqualityExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); } return true; } private bool ParseRelationalExpression() { if (!ParseAdditiveExpression()) return false; var tokenType = GetTokenType(); if (tokenType == JavaScriptTokenType.LT || tokenType == JavaScriptTokenType.GT || tokenType == JavaScriptTokenType.LTEQ || tokenType == JavaScriptTokenType.GTEQ) { var mark = Builder.PrecedeCurrent(); Advance(); // Past operator ParseRelationalExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); } return true; } private bool ParseAdditiveExpression() { if (!ParseMultiplicativeExpression()) return false; var tokenType = GetTokenType(); if (tokenType == JavaScriptTokenType.PLUS || tokenType == JavaScriptTokenType.MINUS) { var mark = Builder.PrecedeCurrent(); Advance(); // Past operator ParseAdditiveExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); } return true; } private bool ParseMultiplicativeExpression() { if (!ParsePrefixPostfixExpression()) return false; var tokenType = GetTokenType(); // 8.2 uses STAR. 
This change might break if (tokenType == JavaScriptTokenType.ASTERISK || tokenType == JavaScriptTokenType.DIVIDE || tokenType == JavaScriptTokenType.PERCENT) { var mark = Builder.PrecedeCurrent(); Advance(); // Past operator ParseMultiplicativeExpression(); Builder.DoneBeforeWhitespaces(mark, BINARY_EXPRESSION, null); } return true; } private new bool ParsePrefixPostfixExpression() { var tokenType = GetTokenType(); if (tokenType == TokenType.PLUS || tokenType == TokenType.MINUS || tokenType == TokenType.EXCLAMATION) { return ParsePrefixExpression(); } return ParsePostfixExpression(); } private bool ParsePrefixExpression() { var tokenType = GetTokenType(); if (tokenType == TokenType.PLUS || tokenType == TokenType.MINUS || tokenType == TokenType.EXCLAMATION) { var mark = Mark(); Advance(); ParsePrefixPostfixExpression(); Builder.DoneBeforeWhitespaces(mark, PREFIX_EXPRESSION, null); } return true; } private bool ParsePostfixExpression() { return ParseMemberExpression(); } private new bool ParseMemberExpression() { if (MemberExpressionFirst[GetTokenType()]) { ParseMemberExpressionInner(); } else { if (!base.ParseIdentifierExpression()) return false; var ident = Builder.PrecedeCurrent(); Builder.DoneBeforeWhitespaces(ident, REFERENCE_EXPRESSION, null); } // TODO: Implement ParseArgumentListAux, since it calls ParseJavaScriptExpression return base.ParseMemberExpressionFollows(stopAtInvocation: false); } private void ParseMemberExpressionInner() { var tokenType = GetTokenType(); if (JavaScriptTokenType.LITERALS[tokenType]) { ParseLiteralExpression(); } else if (tokenType == TokenType.LBRACKET) { ParseArrayLiteral(); } else if (tokenType == TokenType.LBRACE) { ParseObjectLiteral(); } else if (tokenType == TokenType.LPARENTH) { ParseParenthesizedExpression(); } else { Builder.ErrorBeforeWhitespaces("Primary expression expected", CommentsOrWhiteSpacesTokens); } } private void ParseLiteralExpression() { if (JavaScriptTokenType.LITERALS[GetTokenType()]) { int mark = Mark(); Advance(); Builder.DoneBeforeWhitespaces(mark, LITERAL_EXPRESSION, null); } } // Same as JavaScriptTreeBuilder's implementation, but calls out ParseExpression // rather than the private ParseJavaScriptExpression private void ParseArrayLiteral() { var mark = Mark(); ExpectToken(TokenType.LBRACKET); var tokenType = GetTokenType(); if (!Builder.Eof() && ExpressionFirst[tokenType]) ParseExpression(); while (GetTokenType() == TokenType.COMMA) { Advance(); if (!Builder.Eof() && ExpressionFirst[GetTokenType()]) { ParseExpression(); } } ExpectToken(TokenType.RBRACKET); Builder.DoneBeforeWhitespaces(mark, ARRAY_LITERAL, null); } private new void ParseParenthesizedExpression() { var mark = Mark(); ExpectToken(TokenType.LPARENTH); ParseCompoundExpression(); ExpectToken(TokenType.RPARENTH); Builder.DoneBeforeWhitespaces(mark, PARENTHESIZED_EXPRESSION, null); } private new void ParseCompoundExpression() { var mark = Mark(); ParseExpression(); while (GetTokenType() == TokenType.COMMA) { Advance(); ParseExpression(); } Builder.DoneBeforeWhitespaces(mark, COMPOUND_EXPRESSION, null); } } }
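// Technique sketch (illustration only, independent of the ReSharper PSI APIs above):
// the right-recursive descent shape used by ParseAdditiveExpression,
// ParseMultiplicativeExpression, etc., reduced to a tiny evaluator over string tokens.
// As the TODO comments above note, the AngularJs parser itself uses loops instead of
// recursion (giving left-associative trees); this recursive form mirrors what the tree
// builder currently does, which makes '-' and '/' chains associate to the right.
using System;
using System.Collections.Generic;

internal sealed class TinyPrecedenceParser
{
    private readonly List<string> _tokens;
    private int _pos;

    internal TinyPrecedenceParser(IEnumerable<string> tokens)
    {
        _tokens = new List<string>(tokens);
    }

    private string Current => _pos < _tokens.Count ? _tokens[_pos] : null;
    private string Advance() => _tokens[_pos++];

    // additive := multiplicative (('+' | '-') additive)?
    internal double ParseAdditive()
    {
        double left = ParseMultiplicative();
        if (Current == "+" || Current == "-")
        {
            string op = Advance();
            double right = ParseAdditive(); // recursion, mirroring the tree builder
            return op == "+" ? left + right : left - right;
        }
        return left;
    }

    // multiplicative := primary (('*' | '/') multiplicative)?
    private double ParseMultiplicative()
    {
        double left = ParsePrimary();
        if (Current == "*" || Current == "/")
        {
            string op = Advance();
            double right = ParseMultiplicative();
            return op == "*" ? left * right : left / right;
        }
        return left;
    }

    private double ParsePrimary() => double.Parse(Advance());
}

// e.g. new TinyPrecedenceParser(new[] { "1", "+", "2", "*", "3" }).ParseAdditive() == 7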
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Diagnostics; using System.Collections; using System.Collections.Generic; using System.Threading; #if ES_BUILD_PCL using System.Threading.Tasks; #endif #if ES_BUILD_STANDALONE namespace Microsoft.Diagnostics.Tracing #else namespace System.Diagnostics.Tracing #endif { /// <summary> /// Provides the ability to collect statistics through EventSource /// /// See https://github.com/dotnet/corefx/blob/master/src/System.Diagnostics.Tracing/documentation/EventCounterTutorial.md /// for a tutorial guide. /// /// See https://github.com/dotnet/corefx/blob/master/src/System.Diagnostics.Tracing/tests/BasicEventSourceTest/TestEventCounter.cs /// which shows tests, which are also useful in seeing actual use. /// </summary> public class EventCounter : IDisposable { /// <summary> /// Initializes a new instance of the <see cref="EventCounter"/> class. /// EVentCounters live as long as the EventSource that they are attached to unless they are /// explicitly Disposed. /// </summary> /// <param name="name">The name.</param> /// <param name="eventSource">The event source.</param> public EventCounter(string name, EventSource eventSource) { if (name == null) { throw new ArgumentNullException(nameof(name)); } if (eventSource == null) { throw new ArgumentNullException(nameof(eventSource)); } InitializeBuffer(); _name = name; _group = EventCounterGroup.GetEventCounterGroup(eventSource); _group.Add(this); _min = float.PositiveInfinity; _max = float.NegativeInfinity; } /// <summary> /// Writes 'value' to the stream of values tracked by the counter. This updates the sum and other statistics that will /// be logged on the next timer interval. /// </summary> /// <param name="value">The value.</param> public void WriteMetric(float value) { Enqueue(value); } /// <summary> /// Removes the counter from set that the EventSource will report on. After being disposed, this /// counter will do nothing and its resource will be reclaimed if all references to it are removed. /// If an EventCounter is not explicitly disposed it will be cleaned up automatically when the /// EventSource it is attached to dies. /// </summary> public void Dispose() { var group = _group; if (group != null) { group.Remove(this); _group = null; } } public override string ToString() { return "EventCounter '" + _name + "' Count " + _count + " Mean " + (((double)_sum) / _count).ToString("n3"); } #region private implementation private readonly string _name; private EventCounterGroup _group; #region Buffer Management // Values buffering private const int BufferedSize = 10; private const float UnusedBufferSlotValue = float.NegativeInfinity; private const int UnsetIndex = -1; private volatile float[] _bufferedValues; private volatile int _bufferedValuesIndex; // arbitrarily we use _bufferfValues as the lock object. private object MyLock { get { return _bufferedValues; } } private void InitializeBuffer() { _bufferedValues = new float[BufferedSize]; for (int i = 0; i < _bufferedValues.Length; i++) { _bufferedValues[i] = UnusedBufferSlotValue; } } private void Enqueue(float value) { // It is possible that two threads read the same bufferedValuesIndex, but only one will be able to write the slot, so that is okay. 
int i = _bufferedValuesIndex; while (true) { float result = Interlocked.CompareExchange(ref _bufferedValues[i], value, UnusedBufferSlotValue); i++; if (_bufferedValues.Length <= i) { // It is possible that two threads both think the buffer is full, but only one get to actually flush it, the other // will eventually enter this code path and potentially calling Flushing on a buffer that is not full, and that's okay too. lock (MyLock) // Lock the counter Flush(); i = 0; } if (result == UnusedBufferSlotValue) { // CompareExchange succeeded _bufferedValuesIndex = i; return; } } } private void Flush() { Debug.Assert(Monitor.IsEntered(MyLock)); for (int i = 0; i < _bufferedValues.Length; i++) { var value = Interlocked.Exchange(ref _bufferedValues[i], UnusedBufferSlotValue); if (value != UnusedBufferSlotValue) { OnMetricWritten(value); } } _bufferedValuesIndex = 0; } #endregion // Buffer Management #region Statistics Calculation // Statistics private int _count; private float _sum; private float _sumSquared; private float _min; private float _max; private void OnMetricWritten(float value) { Debug.Assert(Monitor.IsEntered(MyLock)); _sum += value; _sumSquared += value * value; if (value > _max) _max = value; if (value < _min) _min = value; _count++; } internal EventCounterPayload GetEventCounterPayload() { lock (MyLock) // Lock the counter { Flush(); EventCounterPayload result = new EventCounterPayload(); result.Name = _name; result.Count = _count; if (0 < _count) { result.Mean = _sum / _count; result.StandardDeviation = (float)Math.Sqrt(_sumSquared / _count - _sum * _sum / _count / _count); } else { result.Mean = 0; result.StandardDeviation = 0; } result.Min = _min; result.Max = _max; ResetStatistics(); return result; } } private void ResetStatistics() { Debug.Assert(Monitor.IsEntered(MyLock)); _count = 0; _sum = 0; _sumSquared = 0; _min = float.PositiveInfinity; _max = float.NegativeInfinity; } #endregion // Statistics Calculation #endregion // private implementation } #region internal supporting classes [EventData] internal class EventCounterPayload : IEnumerable<KeyValuePair<string, object>> { public string Name { get; set; } public float Mean { get; set; } public float StandardDeviation { get; set; } public int Count { get; set; } public float Min { get; set; } public float Max { get; set; } public float IntervalSec { get; internal set; } #region Implementation of the IEnumerable interface public IEnumerator<KeyValuePair<string, object>> GetEnumerator() { return ForEnumeration.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return ForEnumeration.GetEnumerator(); } private IEnumerable<KeyValuePair<string, object>> ForEnumeration { get { yield return new KeyValuePair<string, object>("Name", Name); yield return new KeyValuePair<string, object>("Mean", Mean); yield return new KeyValuePair<string, object>("StandardDeviation", StandardDeviation); yield return new KeyValuePair<string, object>("Count", Count); yield return new KeyValuePair<string, object>("Min", Min); yield return new KeyValuePair<string, object>("Max", Max); } } #endregion // Implementation of the IEnumerable interface } internal class EventCounterGroup { private readonly EventSource _eventSource; private readonly List<EventCounter> _eventCounters; internal EventCounterGroup(EventSource eventSource) { _eventSource = eventSource; _eventCounters = new List<EventCounter>(); RegisterCommandCallback(); } internal void Add(EventCounter eventCounter) { lock (this) // Lock the EventCounterGroup _eventCounters.Add(eventCounter); } 
internal void Remove(EventCounter eventCounter) { lock (this) // Lock the EventCounterGroup _eventCounters.Remove(eventCounter); } #region EventSource Command Processing private void RegisterCommandCallback() { _eventSource.EventCommandExecuted += OnEventSourceCommand; } private void OnEventSourceCommand(object sender, EventCommandEventArgs e) { if (e.Command == EventCommand.Enable || e.Command == EventCommand.Update) { string valueStr; float value; if (e.Arguments.TryGetValue("EventCounterIntervalSec", out valueStr) && float.TryParse(valueStr, out value)) { // Recursion through EventSource callbacks possible. When we enable the timer // we synchonously issue a EventSource.Write event, which in turn can call back // to user code (in an EventListener) while holding this lock. This is dangerous // because it mean this code might inadvertantly participate in a lock loop. // The scenario seems very unlikely so we ignore that problem for now. lock (this) // Lock the EventCounterGroup { EnableTimer(value); } } } } #endregion // EventSource Command Processing #region Global EventCounterGroup Array management // We need eventCounters to 'attach' themselves to a particular EventSource. // this table provides the mapping from EventSource -> EventCounterGroup // which represents this 'attached' information. private static WeakReference<EventCounterGroup>[] s_eventCounterGroups; private static readonly object s_eventCounterGroupsLock = new object(); private static void EnsureEventSourceIndexAvailable(int eventSourceIndex) { Debug.Assert(Monitor.IsEntered(s_eventCounterGroupsLock)); if (EventCounterGroup.s_eventCounterGroups == null) { EventCounterGroup.s_eventCounterGroups = new WeakReference<EventCounterGroup>[eventSourceIndex + 1]; } else if (eventSourceIndex >= EventCounterGroup.s_eventCounterGroups.Length) { WeakReference<EventCounterGroup>[] newEventCounterGroups = new WeakReference<EventCounterGroup>[eventSourceIndex + 1]; Array.Copy(EventCounterGroup.s_eventCounterGroups, 0, newEventCounterGroups, 0, EventCounterGroup.s_eventCounterGroups.Length); EventCounterGroup.s_eventCounterGroups = newEventCounterGroups; } } internal static EventCounterGroup GetEventCounterGroup(EventSource eventSource) { lock (s_eventCounterGroupsLock) { int eventSourceIndex = EventListener.EventSourceIndex(eventSource); EnsureEventSourceIndexAvailable(eventSourceIndex); WeakReference<EventCounterGroup> weakRef = EventCounterGroup.s_eventCounterGroups[eventSourceIndex]; EventCounterGroup ret = null; if (weakRef == null || !weakRef.TryGetTarget(out ret)) { ret = new EventCounterGroup(eventSource); EventCounterGroup.s_eventCounterGroups[eventSourceIndex] = new WeakReference<EventCounterGroup>(ret); } return ret; } } #endregion // Global EventCounterGroup Array management #region Timer Processing private DateTime _timeStampSinceCollectionStarted; private int _pollingIntervalInMilliseconds; private Timer _pollingTimer; private void DisposeTimer() { Debug.Assert(Monitor.IsEntered(this)); if (_pollingTimer != null) { _pollingTimer.Dispose(); _pollingTimer = null; } } private void EnableTimer(float pollingIntervalInSeconds) { Debug.Assert(Monitor.IsEntered(this)); if (pollingIntervalInSeconds <= 0) { DisposeTimer(); _pollingIntervalInMilliseconds = 0; } else if (_pollingIntervalInMilliseconds == 0 || pollingIntervalInSeconds * 1000 < _pollingIntervalInMilliseconds) { Debug.WriteLine("Polling interval changed at " + DateTime.UtcNow.ToString("mm.ss.ffffff")); _pollingIntervalInMilliseconds = (int)(pollingIntervalInSeconds * 
1000); DisposeTimer(); _timeStampSinceCollectionStarted = DateTime.UtcNow; // Don't capture the current ExecutionContext and its AsyncLocals onto the timer causing them to live forever bool restoreFlow = false; try { if (!ExecutionContext.IsFlowSuppressed()) { ExecutionContext.SuppressFlow(); restoreFlow = true; } _pollingTimer = new Timer(s => ((EventCounterGroup)s).OnTimer(null), this, _pollingIntervalInMilliseconds, _pollingIntervalInMilliseconds); } finally { // Restore the current ExecutionContext if (restoreFlow) ExecutionContext.RestoreFlow(); } } // Always fire the timer event (so you see everything up to this time). OnTimer(null); } private void OnTimer(object state) { Debug.WriteLine("Timer fired at " + DateTime.UtcNow.ToString("mm.ss.ffffff")); lock (this) // Lock the EventCounterGroup { if (_eventSource.IsEnabled()) { DateTime now = DateTime.UtcNow; TimeSpan elapsed = now - _timeStampSinceCollectionStarted; foreach (var eventCounter in _eventCounters) { EventCounterPayload payload = eventCounter.GetEventCounterPayload(); payload.IntervalSec = (float)elapsed.TotalSeconds; _eventSource.Write("EventCounters", new EventSourceOptions() { Level = EventLevel.LogAlways }, new PayloadType(payload)); } _timeStampSinceCollectionStarted = now; } else { DisposeTimer(); } } } /// <summary> /// This is the payload that is sent in the with EventSource.Write /// </summary> [EventData] class PayloadType { public PayloadType(EventCounterPayload payload) { Payload = payload; } public EventCounterPayload Payload { get; set; } } #region PCL timer hack #if ES_BUILD_PCL internal delegate void TimerCallback(object state); internal sealed class Timer : CancellationTokenSource, IDisposable { private int _period; private TimerCallback _callback; private object _state; internal Timer(TimerCallback callback, object state, int dueTime, int period) { _callback = callback; _state = state; _period = period; Schedule(dueTime); } private void Schedule(int dueTime) { Task.Delay(dueTime, Token).ContinueWith(OnTimer, null, CancellationToken.None, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Default); } private void OnTimer(Task t, object s) { Schedule(_period); _callback(_state); } public new void Dispose() { base.Cancel(); } } #endif #endregion // PCL timer hack #endregion // Timer Processing } #endregion // internal supporting classes }
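// Usage sketch (illustration only, not part of the tracing implementation above): the
// intended producer-side pattern - one EventCounter per metric, attached to an
// EventSource and fed via WriteMetric. The EventSource name and metric are made up.
// Statistics are only emitted when a listener enables the source with an
// "EventCounterIntervalSec" argument, which drives the timer logic shown above.
using System.Diagnostics.Tracing;

[EventSource(Name = "Demo-RequestMetrics")]
internal sealed class RequestMetricsEventSource : EventSource
{
    public static readonly RequestMetricsEventSource Log = new RequestMetricsEventSource();

    private readonly EventCounter _requestMilliseconds;

    private RequestMetricsEventSource()
    {
        _requestMilliseconds = new EventCounter("request-time-ms", this);
    }

    public void RequestCompleted(float elapsedMilliseconds)
    {
        // Each call only buffers the value; min/max/mean/stddev are computed and
        // written as an "EventCounters" event on each polling interval.
        _requestMilliseconds.WriteMetric(elapsedMilliseconds);
    }
}

// e.g. RequestMetricsEventSource.Log.RequestCompleted(12.5f);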
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the ec2-2015-04-15.normal.json service model. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.EC2.Model { /// <summary> /// Container for the parameters to the CreateVolume operation. /// Creates an EBS volume that can be attached to an instance in the same Availability /// Zone. The volume is created in the regional endpoint that you send the HTTP request /// to. For more information see <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html">Regions /// and Endpoints</a>. /// /// /// <para> /// You can create a new empty volume or restore a volume from an EBS snapshot. Any AWS /// Marketplace product codes from the snapshot are propagated to the volume. /// </para> /// /// <para> /// You can create encrypted volumes with the <code>Encrypted</code> parameter. Encrypted /// volumes may only be attached to instances that support Amazon EBS encryption. Volumes /// that are created from encrypted snapshots are also automatically encrypted. For more /// information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html">Amazon /// EBS Encryption</a> in the <i>Amazon Elastic Compute Cloud User Guide</i>. /// </para> /// /// <para> /// For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-creating-volume.html">Creating /// or Restoring an Amazon EBS Volume</a> in the <i>Amazon Elastic Compute Cloud User /// Guide</i>. /// </para> /// </summary> public partial class CreateVolumeRequest : AmazonEC2Request { private string _availabilityZone; private bool? _encrypted; private int? _iops; private string _kmsKeyId; private int? _size; private string _snapshotId; private VolumeType _volumeType; /// <summary> /// Empty constructor used to set properties independently even when a simple constructor is available /// </summary> public CreateVolumeRequest() { } /// <summary> /// Instantiates CreateVolumeRequest with the parameterized properties /// </summary> /// <param name="availabilityZone">The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a> to list the Availability Zones that are currently available to you.</param> /// <param name="size">The size of the volume, in GiBs. Constraints: <code>1-1024</code> for <code>standard</code> volumes, <code>1-16384</code> for <code>gp2</code> volumes, and <code>4-16384</code> for <code>io1</code> volumes. If you specify a snapshot, the volume size must be equal to or larger than the snapshot size. 
Default: If you're creating the volume from a snapshot and don't specify a volume size, the default is the snapshot size.</param> public CreateVolumeRequest(string availabilityZone, int size) { _availabilityZone = availabilityZone; _size = size; } /// <summary> /// Instantiates CreateVolumeRequest with the parameterized properties /// </summary> /// <param name="availabilityZone">The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a> to list the Availability Zones that are currently available to you.</param> /// <param name="snapshotId">The snapshot from which to create the volume.</param> public CreateVolumeRequest(string availabilityZone, string snapshotId) { _availabilityZone = availabilityZone; _snapshotId = snapshotId; } /// <summary> /// Gets and sets the property AvailabilityZone. /// <para> /// The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a> /// to list the Availability Zones that are currently available to you. /// </para> /// </summary> public string AvailabilityZone { get { return this._availabilityZone; } set { this._availabilityZone = value; } } // Check to see if AvailabilityZone property is set internal bool IsSetAvailabilityZone() { return this._availabilityZone != null; } /// <summary> /// Gets and sets the property Encrypted. /// <para> /// Specifies whether the volume should be encrypted. Encrypted Amazon EBS volumes may /// only be attached to instances that support Amazon EBS encryption. Volumes that are /// created from encrypted snapshots are automatically encrypted. There is no way to create /// an encrypted volume from an unencrypted snapshot or vice versa. If your AMI uses encrypted /// volumes, you can only launch it on supported instance types. For more information, /// see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html">Amazon /// EBS Encryption</a> in the <i>Amazon Elastic Compute Cloud User Guide</i>. /// </para> /// </summary> public bool Encrypted { get { return this._encrypted.GetValueOrDefault(); } set { this._encrypted = value; } } // Check to see if Encrypted property is set internal bool IsSetEncrypted() { return this._encrypted.HasValue; } /// <summary> /// Gets and sets the property Iops. /// <para> /// Only valid for Provisioned IOPS (SSD) volumes. The number of I/O operations per second /// (IOPS) to provision for the volume, with a maximum ratio of 30 IOPS/GiB. /// </para> /// /// <para> /// Constraint: Range is 100 to 20000 for Provisioned IOPS (SSD) volumes /// </para> /// </summary> public int Iops { get { return this._iops.GetValueOrDefault(); } set { this._iops = value; } } // Check to see if Iops property is set internal bool IsSetIops() { return this._iops.HasValue; } /// <summary> /// Gets and sets the property KmsKeyId. /// <para> /// The full ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) /// to use when creating the encrypted volume. This parameter is only required if you /// want to use a non-default CMK; if this parameter is not specified, the default CMK /// for EBS is used. The ARN contains the <code>arn:aws:kms</code> namespace, followed /// by the region of the CMK, the AWS account ID of the CMK owner, the <code>key</code> /// namespace, and then the CMK ID. For example, arn:aws:kms:<i>us-east-1</i>:<i>012345678910</i>:key/<i>abcd1234-a123-456a-a12b-a123b4cd56ef</i>. /// If a <code>KmsKeyId</code> is specified, the <code>Encrypted</code> flag must also /// be set. 
/// </para> /// </summary> public string KmsKeyId { get { return this._kmsKeyId; } set { this._kmsKeyId = value; } } // Check to see if KmsKeyId property is set internal bool IsSetKmsKeyId() { return this._kmsKeyId != null; } /// <summary> /// Gets and sets the property Size. /// <para> /// The size of the volume, in GiBs. /// </para> /// /// <para> /// Constraints: <code>1-1024</code> for <code>standard</code> volumes, <code>1-16384</code> /// for <code>gp2</code> volumes, and <code>4-16384</code> for <code>io1</code> volumes. /// If you specify a snapshot, the volume size must be equal to or larger than the snapshot /// size. /// </para> /// /// <para> /// Default: If you're creating the volume from a snapshot and don't specify a volume /// size, the default is the snapshot size. /// </para> /// </summary> public int Size { get { return this._size.GetValueOrDefault(); } set { this._size = value; } } // Check to see if Size property is set internal bool IsSetSize() { return this._size.HasValue; } /// <summary> /// Gets and sets the property SnapshotId. /// <para> /// The snapshot from which to create the volume. /// </para> /// </summary> public string SnapshotId { get { return this._snapshotId; } set { this._snapshotId = value; } } // Check to see if SnapshotId property is set internal bool IsSetSnapshotId() { return this._snapshotId != null; } /// <summary> /// Gets and sets the property VolumeType. /// <para> /// The volume type. This can be <code>gp2</code> for General Purpose (SSD) volumes, <code>io1</code> /// for Provisioned IOPS (SSD) volumes, or <code>standard</code> for Magnetic volumes. /// </para> /// /// <para> /// Default: <code>standard</code> /// </para> /// </summary> public VolumeType VolumeType { get { return this._volumeType; } set { this._volumeType = value; } } // Check to see if VolumeType property is set internal bool IsSetVolumeType() { return this._volumeType != null; } } }
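// --- Hedged usage sketch (not part of the generated file above) -------------
// Illustrates the constraints documented in the request class: Iops applies to
// io1 volumes (max 30 IOPS/GiB), and KmsKeyId requires Encrypted. The
// Availability Zone and snapshot ID are placeholders, and the synchronous
// CreateVolume call assumes the .NET Framework build of the SDK; other targets
// expose an async variant instead.
using Amazon.EC2;
using Amazon.EC2.Model;

class CreateVolumeSketch
{
    static void CreateVolumes(IAmazonEC2 ec2)
    {
        // Empty 100 GiB General Purpose (SSD) volume, encrypted with the default CMK.
        var gp2Request = new CreateVolumeRequest("us-east-1a", 100)
        {
            VolumeType = VolumeType.Gp2,
            Encrypted = true
        };

        // Provisioned IOPS volume restored from a snapshot; 3000 IOPS on 100 GiB
        // stays at the documented 30 IOPS/GiB ceiling.
        var io1Request = new CreateVolumeRequest("us-east-1a", "snap-0123456789abcdef0")
        {
            VolumeType = VolumeType.Io1,
            Size = 100,
            Iops = 3000
        };

        CreateVolumeResponse gp2Response = ec2.CreateVolume(gp2Request);
        CreateVolumeResponse io1Response = ec2.CreateVolume(io1Request);
    }
}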
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; namespace System.Linq.Expressions.Tests { public abstract class IncDecAssignTests { protected static class Unreadable<T> { public static T WriteOnly { set { } } } public static IEnumerable<T?> NullableSequence<T>(IEnumerable<T> source) where T : struct { return source.Select(i => (T?)i).Concat(Enumerable.Repeat(default(T?), 1)); } public static IEnumerable<short> Int16s { get { return new short[] { 0, 1, 2, short.MinValue, short.MaxValue }; } } public static IEnumerable<object[]> Int16sAndDecrements() { return Int16s.Select(i => new object[] { typeof(short), i, (short)(i - 1) }); } public static IEnumerable<object[]> Int16sAndIncrements() { return Int16s.Select(i => new object[] { typeof(short), i, (short)(i + 1) }); } public static IEnumerable<short?> NullableInt16s { get { return NullableSequence(Int16s); } } public static IEnumerable<object[]> NullableInt16sAndDecrements() { return NullableInt16s.Select(i => new object[] { typeof(short?), i, (short?)(i - 1) }); } public static IEnumerable<object[]> NullableInt16sAndIncrements() { return NullableInt16s.Select(i => new object[] { typeof(short?), i, (short?)(i + 1) }); } public static IEnumerable<ushort> UInt16s { get { return new ushort[] { 0, 1, ushort.MaxValue }; } } public static IEnumerable<object[]> UInt16sAndDecrements() { return UInt16s.Select(i => new object[] { typeof(ushort), i, (ushort)(i - 1) }); } public static IEnumerable<object[]> UInt16sAndIncrements() { return UInt16s.Select(i => new object[] { typeof(ushort), i, (ushort)(i + 1) }); } public static IEnumerable<ushort?> NullableUInt16s { get { return NullableSequence(UInt16s); } } public static IEnumerable<object[]> NullableUInt16sAndDecrements() { return NullableUInt16s.Select(i => new object[] { typeof(ushort?), i, (ushort?)(i - 1) }); } public static IEnumerable<object[]> NullableUInt16sAndIncrements() { return NullableUInt16s.Select(i => new object[] { typeof(ushort?), i, (ushort?)(i + 1) }); } public static IEnumerable<int> Int32s { get { return new[] { 0, 1, 2, int.MinValue, int.MaxValue }; } } public static IEnumerable<object[]> Int32sAndDecrements() { return Int32s.Select(i => new object[] { typeof(int), i, i - 1 }); } public static IEnumerable<object[]> Int32sAndIncrements() { return Int32s.Select(i => new object[] { typeof(int), i, i + 1 }); } public static IEnumerable<int?> NullableInt32s { get { return NullableSequence(Int32s); } } public static IEnumerable<object[]> NullableInt32sAndDecrements() { return NullableInt32s.Select(i => new object[] { typeof(int?), i, i - 1 }); } public static IEnumerable<object[]> NullableInt32sAndIncrements() { return NullableInt32s.Select(i => new object[] { typeof(int?), i, i + 1 }); } public static IEnumerable<uint> UInt32s { get { return new[] { 0U, 1U, 2U, (uint)int.MaxValue, 1U + int.MaxValue, uint.MaxValue }; } } public static IEnumerable<object[]> UInt32sAndDecrements() { return UInt32s.Select(i => new object[] { typeof(uint), i, i - 1 }); } public static IEnumerable<object[]> UInt32sAndIncrements() { return UInt32s.Select(i => new object[] { typeof(uint), i, i + 1 }); } public static IEnumerable<uint?> NullableUInt32s { get { return NullableSequence(UInt32s); } } public static IEnumerable<object[]> NullableUInt32sAndDecrements() { return NullableUInt32s.Select(i => new 
object[] { typeof(uint?), i, i - 1 }); } public static IEnumerable<object[]> NullableUInt32sAndIncrements() { return NullableUInt32s.Select(i => new object[] { typeof(uint?), i, i + 1 }); } public static IEnumerable<long> Int64s { get { return new[] { 0L, 1L, 2L, long.MinValue, long.MaxValue }; } } public static IEnumerable<object[]> Int64sAndDecrements() { return Int64s.Select(i => new object[] { typeof(long), i, i - 1 }); } public static IEnumerable<object[]> Int64sAndIncrements() { return Int64s.Select(i => new object[] { typeof(long), i, i + 1 }); } public static IEnumerable<long?> NullableInt64s { get { return NullableSequence(Int64s); } } public static IEnumerable<object[]> NullableInt64sAndDecrements() { return NullableInt64s.Select(i => new object[] { typeof(long?), i, i - 1 }); } public static IEnumerable<object[]> NullableInt64sAndIncrements() { return NullableInt64s.Select(i => new object[] { typeof(long?), i, i + 1 }); } public static IEnumerable<ulong> UInt64s { get { return new[] { 0UL, 1UL, 2U, (ulong)long.MaxValue, 1UL + long.MaxValue, ulong.MaxValue }; } } public static IEnumerable<object[]> UInt64sAndDecrements() { return UInt64s.Select(i => new object[] { typeof(ulong), i, i - 1 }); } public static IEnumerable<object[]> UInt64sAndIncrements() { return UInt64s.Select(i => new object[] { typeof(ulong), i, i + 1 }); } public static IEnumerable<ulong?> NullableUInt64s { get { return NullableSequence(UInt64s); } } public static IEnumerable<object[]> NullableUInt64sAndDecrements() { return NullableUInt64s.Select(i => new object[] { typeof(ulong?), i, i - 1 }); } public static IEnumerable<object[]> NullableUInt64sAndIncrements() { return NullableUInt64s.Select(i => new object[] { typeof(ulong?), i, i + 1 }); } public static IEnumerable<decimal> Decimals { get { return new[] { 0m, 1m, -1m, decimal.MinValue + 1, decimal.MaxValue - 1 }; } } public static IEnumerable<object[]> DecimalsAndDecrements() { return Decimals.Select(i => new object[] { typeof(decimal), i, i - 1 }); } public static IEnumerable<object[]> DecimalsAndIncrements() { return Decimals.Select(i => new object[] { typeof(decimal), i, i + 1 }); } public static IEnumerable<decimal?> NullableDecimals { get { return NullableSequence(Decimals); } } public static IEnumerable<object[]> NullableDecimalsAndDecrements() { return NullableDecimals.Select(i => new object[] { typeof(decimal?), i, i - 1 }); } public static IEnumerable<object[]> NullableDecimalsAndIncrements() { return NullableDecimals.Select(i => new object[] { typeof(decimal?), i, i + 1 }); } public static IEnumerable<float> Singles { get { return new[] { 0F, 1F, float.MinValue, float.MaxValue, float.NegativeInfinity, float.PositiveInfinity }; } } public static IEnumerable<object[]> SinglesAndDecrements() { return Singles.Select(i => new object[] { typeof(float), i, i - 1 }); } public static IEnumerable<object[]> SinglesAndIncrements() { return Singles.Select(i => new object[] { typeof(float), i, i + 1 }); } public static IEnumerable<float?> NullableSingles { get { return NullableSequence(Singles); } } public static IEnumerable<object[]> NullableSinglesAndDecrements() { return NullableSingles.Select(i => new object[] { typeof(float?), i, i - 1 }); } public static IEnumerable<object[]> NullableSinglesAndIncrements() { return NullableSingles.Select(i => new object[] { typeof(float?), i, i + 1 }); } public static IEnumerable<double> Doubles { get { return new[] { 0F, 1F, double.MinValue, double.MaxValue, double.NegativeInfinity, double.PositiveInfinity }; } } public 
static IEnumerable<object[]> DoublesAndDecrements() { return Doubles.Select(i => new object[] { typeof(double), i, i - 1 }); } public static IEnumerable<object[]> DoublesAndIncrements() { return Doubles.Select(i => new object[] { typeof(double), i, i + 1 }); } public static IEnumerable<double?> NullableDoubles { get { return NullableSequence(Doubles); } } public static IEnumerable<object[]> NullableDoublesAndDecrements() { return NullableDoubles.Select(i => new object[] { typeof(double?), i, i - 1 }); } public static IEnumerable<object[]> NullableDoublesAndIncrements() { return NullableDoubles.Select(i => new object[] { typeof(double?), i, i + 1 }); } public static IEnumerable<object[]> DecrementOverflowingValues() { yield return new object[] { decimal.MinValue }; } public static IEnumerable<object[]> IncrementOverflowingValues() { yield return new object[] { decimal.MaxValue }; } public static IEnumerable<object[]> UnincrementableAndUndecrementableTypes() { yield return new object[] { typeof(string) }; yield return new object[] { typeof(DateTime) }; yield return new object[] { typeof(Uri) }; yield return new object[] { typeof(Tuple<string, int>) }; } protected static string SillyMethod(string value) { return value == null ? null : "Eggplant"; } protected static string GetString(int x) { return x.ToString(); } protected class TestPropertyClass<T> { public static T TestStatic { get; set; } public T TestInstance { get; set; } } } }
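// --- Hedged usage sketch (not part of the test helpers above) ---------------
// Shows the kind of assertion the (type, start value, expected value) member
// data above is meant to feed: build an increment-assign expression, compile
// it, and compare against the precomputed expected result. The xUnit theory
// wiring is omitted and the method name is hypothetical.
using System;
using System.Linq.Expressions;

class IncDecAssignSketch
{
    static void PreIncrementProducesExpected(int start, int expected)
    {
        ParameterExpression variable = Expression.Variable(typeof(int), "x");
        BlockExpression body = Expression.Block(
            new[] { variable },
            Expression.Assign(variable, Expression.Constant(start)),
            Expression.PreIncrementAssign(variable)); // block value = incremented x

        Func<int> compiled = Expression.Lambda<Func<int>>(body).Compile();
        if (compiled() != expected)
            throw new Exception("Increment result did not match the expected value.");
    }
}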
#region Licence...

/*
The MIT License (MIT)

Copyright (c) 2014 Oleg Shilo

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#endregion

using System;
using System.Linq;
using System.Linq.Expressions;
using IO = System.IO;

namespace WixSharp
{
    /// <summary>
    /// Collection of 'utility' routines.
    /// </summary>
    public static class Utils
    {
        /// <summary>
        /// Combines two path strings.
        /// <para>
        /// It is a fix for unexpected behavior of System.IO.Path.Combine: Path.Combine(@"C:\Test", @"\Docs\readme.txt") returns @"\Docs\readme.txt";
        /// </para>
        /// </summary>
        /// <param name="path1">The path1.</param>
        /// <param name="path2">The path2.</param>
        /// <returns></returns>
        public static string PathCombine(string path1, string path2)
        {
            var p1 = (path1 ?? "").ExpandEnvVars();
            var p2 = (path2 ?? "").ExpandEnvVars();

            if (p2.Length == 0)
            {
                return p1;
            }
            else if (p2.Length == 1 && p2[0] == IO.Path.DirectorySeparatorChar)
            {
                return p1;
            }
            else if (p2[0] == IO.Path.DirectorySeparatorChar)
            {
                if (p2[0] != p2[1])
                    return IO.Path.Combine(p1, p2.Substring(1));
            }

            return IO.Path.Combine(p1, p2);
        }

        internal static string MakeRelative(this string filePath, string referencePath)
        {
            //1 - 'Uri.MakeRelativeUri' doesn't work without *.config file
            //2 - Substring doesn't work for paths containing ..\..\

            char dirSeparator = IO.Path.DirectorySeparatorChar;

            Func<string, string[]> split = path => IO.Path.GetFullPath(path).Trim(dirSeparator).Split(dirSeparator);

            string[] absParts = split(filePath);
            string[] relParts = split(referencePath);

            int commonElementsLength = 0;
            do
            {
                if (string.Compare(absParts[commonElementsLength], relParts[commonElementsLength], true) != 0)
                    break;
            }
            while (++commonElementsLength < Math.Min(absParts.Length, relParts.Length));

            if (commonElementsLength == 0)
                //throw new ArgumentException("The two paths don't have common root.");
                return IO.Path.GetFullPath(filePath);

            var result = relParts.Skip(commonElementsLength)
                                 .Select(x => "..")
                                 .Concat(absParts.Skip(commonElementsLength))
                                 .ToArray();

            return string.Join(dirSeparator.ToString(), result);
        }

        internal static string[] AllConstStringValues<T>()
        {
            var fields = typeof(T).GetFields()
                                  .Where(f => f.IsStatic && f.IsPublic && f.IsLiteral && f.FieldType == typeof(string))
                                  .Select(f => f.GetValue(null) as string)
                                  .ToArray();

            return fields;
        }

        internal static string GetTempDirectory()
        {
            string tempDir = IO.Path.GetTempFileName();

            // GetTempFileName creates a zero-byte file; remove it so the same name can be reused for a directory.
            if (IO.File.Exists(tempDir))
                IO.File.Delete(tempDir);

            if (!IO.Directory.Exists(tempDir))
                IO.Directory.CreateDirectory(tempDir);

            return tempDir;
        }

        internal static string
OriginalAssemblyFile(string file) { //need to do it in a separate domain as we do not want to lock the assembly return (string)ExecuteInTempDomain<AsmReflector>(asm => { return asm.OriginalAssemblyFile(file); }); } internal static void ExecuteInTempDomain<T>(Action<T> action) where T : MarshalByRefObject { ExecuteInTempDomain<T>(asm => { action(asm); return null; }); } internal static object ExecuteInTempDomain<T>(Func<T, object> action) where T : MarshalByRefObject { var domain = AppDomain.CurrentDomain.Clone(); AppDomain.CurrentDomain.AssemblyResolve += Domain_AssemblyResolve; domain.AssemblyResolve += Domain_AssemblyResolve; try { var obj = domain.CreateInstanceFromAndUnwrap<T>(); var result = action(obj); return result; } finally { domain.AssemblyResolve -= Domain_AssemblyResolve; AppDomain.CurrentDomain.AssemblyResolve -= Domain_AssemblyResolve; domain.Unload(); } } static System.Reflection.Assembly Domain_AssemblyResolve(object sender, ResolveEventArgs args) { if (Compiler.AssemblyResolve != null) return Compiler.AssemblyResolve(sender, args); else return DefaultDomain_AssemblyResolve(sender, args); } static System.Reflection.Assembly DefaultDomain_AssemblyResolve(object sender, ResolveEventArgs args) { //args.Name -> "mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" string asmName = args.Name.Split(',').First() + ".dll"; string wixSharpAsmLocation = IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); string potentialAsm = IO.Path.Combine(wixSharpAsmLocation, asmName); if (IO.File.Exists(potentialAsm)) try { return System.Reflection.Assembly.LoadFrom(potentialAsm); } catch { } return null; } internal static void Unload(this AppDomain domain) { AppDomain.Unload(domain); } internal static T CreateInstanceFromAndUnwrap<T>(this AppDomain domain) { return (T)domain.CreateInstanceFromAndUnwrap(typeof(T).Assembly.Location, typeof(T).ToString()); } internal static AppDomain Clone(this AppDomain domain, string name = null) { //return AppDomain.CreateDomain(name ?? Guid.NewGuid().ToString(), null, new AppDomainSetup()); var setup = new AppDomainSetup(); setup.ApplicationBase = IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); setup.ShadowCopyFiles = "true"; setup.ShadowCopyDirectories = setup.ApplicationBase; setup.PrivateBinPath = AppDomain.CurrentDomain.BaseDirectory; return AppDomain.CreateDomain(name ?? Guid.NewGuid().ToString(), null, setup); } internal static void EnsureFileDir(string file) { var dir = IO.Path.GetDirectoryName(file); if (!IO.Directory.Exists(dir)) IO.Directory.CreateDirectory(dir); } /// <summary> /// Gets the program files directory. /// </summary> /// <value> /// The program files directory. /// </value> internal static string ProgramFilesDirectory { get { string programFilesDir = Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles); if ("".GetType().Assembly.Location.Contains("Framework64")) programFilesDir += " (x86)"; //for x64 systems return programFilesDir; } } /// <summary> /// Returns the hash code for the instance of a string. It uses deterministic hash-code generation algorithm, /// which produces the same result on x86 and x64 OSs (ebject.GetHashCode doesn't). 
/// </summary> /// <param name="s">The string.</param> /// <returns></returns> public static int GetHashCode32(this string s) { char[] chars = s.ToCharArray(); int lastCharInd = chars.Length - 1; int num1 = 0x15051505; int num2 = num1; int ind = 0; while (ind <= lastCharInd) { char ch = chars[ind]; char nextCh = ++ind > lastCharInd ? '\0' : chars[ind]; num1 = (((num1 << 5) + num1) + (num1 >> 0x1b)) ^ (nextCh << 16 | ch); if (++ind > lastCharInd) break; ch = chars[ind]; nextCh = ++ind > lastCharInd ? '\0' : chars[ind++]; num2 = (((num2 << 5) + num2) + (num2 >> 0x1b)) ^ (nextCh << 16 | ch); } return num1 + num2 * 0x5d588b65; } } }
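// --- Hedged usage sketch (not part of the file above) -----------------------
// Demonstrates the Path.Combine quirk that PathCombine works around, and the
// deterministic string hash. The values in the trailing comments are the
// intended results, not captured output.
using System;
using IO = System.IO;
using WixSharp;

class WixSharpUtilsSketch
{
    static void Main()
    {
        // IO.Path.Combine drops the first path when the second is rooted:
        Console.WriteLine(IO.Path.Combine(@"C:\Test", @"\Docs\readme.txt"));    // \Docs\readme.txt
        Console.WriteLine(Utils.PathCombine(@"C:\Test", @"\Docs\readme.txt"));  // C:\Test\Docs\readme.txt

        // GetHashCode32 yields the same value on x86 and x64, unlike string.GetHashCode.
        int id = "MyProduct.Component".GetHashCode32();
        Console.WriteLine(id);
    }
}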
// Copyright 2018 Esri. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. // You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific // language governing permissions and limitations under the License. using ArcGISRuntime.Samples.Managers; using Esri.ArcGISRuntime.Geometry; using Esri.ArcGISRuntime.Mapping; using Esri.ArcGISRuntime.Symbology; using Esri.ArcGISRuntime.UI; using Esri.ArcGISRuntime.UI.GeoAnalysis; using System; using System.Timers; using System.Drawing; using System.Windows; namespace ArcGISRuntime.WPF.Samples.LineOfSightGeoElement { [ArcGISRuntime.Samples.Shared.Attributes.Sample( name: "Line of sight (geoelement)", category: "Analysis", description: "Show a line of sight between two moving objects.", instructions: "A line of sight will display between a point on the Empire State Building (observer) and a taxi (target).", tags: new[] { "3D", "line of sight", "visibility", "visibility analysis" })] [ArcGISRuntime.Samples.Shared.Attributes.OfflineData("3af5cfec0fd24dac8d88aea679027cb9")] public partial class LineOfSightGeoElement { // URL of the elevation service - provides elevation component of the scene private readonly Uri _elevationUri = new Uri("https://elevation3d.arcgis.com/arcgis/rest/services/WorldElevation3D/Terrain3D/ImageServer"); // URL of the building service - provides builidng models private readonly Uri _buildingsUri = new Uri("https://tiles.arcgis.com/tiles/z2tnIkrLQ2BRzr6P/arcgis/rest/services/New_York_LoD2_3D_Buildings/SceneServer/layers/0"); // Starting point of the observation point private readonly MapPoint _observerPoint = new MapPoint(-73.984988, 40.748131, 20, SpatialReferences.Wgs84); // Graphic to represent the observation point private Graphic _observerGraphic; // Graphic to represent the observed target private Graphic _taxiGraphic; // Line of Sight Analysis private GeoElementLineOfSight _geoLine; // For taxi animation - four points in a loop private readonly MapPoint[] _points = { new MapPoint(-73.984513, 40.748469, SpatialReferences.Wgs84), new MapPoint(-73.985068, 40.747786, SpatialReferences.Wgs84), new MapPoint(-73.983452, 40.747091, SpatialReferences.Wgs84), new MapPoint(-73.982961, 40.747762, SpatialReferences.Wgs84) }; // For taxi animation - tracks animation state private int _pointIndex = 0; private int _frameIndex = 0; private readonly int _frameMax = 150; public LineOfSightGeoElement() { InitializeComponent(); // Create the UI, setup the control references and execute initialization Initialize(); } private async void Initialize() { // Create scene Scene myScene = new Scene(BasemapStyle.ArcGISImagery) { // Set initial viewpoint InitialViewpoint = new Viewpoint(_observerPoint, 1600) }; // Create the elevation source ElevationSource myElevationSource = new ArcGISTiledElevationSource(_elevationUri); // Add the elevation source to the scene myScene.BaseSurface.ElevationSources.Add(myElevationSource); // Create the building scene layer ArcGISSceneLayer mySceneLayer = new ArcGISSceneLayer(_buildingsUri); // Add the building layer to the scene myScene.OperationalLayers.Add(mySceneLayer); // Add the observer to the scene // Create a graphics overlay with relative surface placement; 
relative surface placement allows the Z position of the observation point to be adjusted GraphicsOverlay overlay = new GraphicsOverlay() { SceneProperties = new LayerSceneProperties(SurfacePlacement.Relative) }; // Create the symbol that will symbolize the observation point SimpleMarkerSceneSymbol symbol = new SimpleMarkerSceneSymbol(SimpleMarkerSceneSymbolStyle.Sphere, Color.Red, 10, 10, 10, SceneSymbolAnchorPosition.Bottom); // Create the observation point graphic from the point and symbol _observerGraphic = new Graphic(_observerPoint, symbol); // Add the observer to the overlay overlay.Graphics.Add(_observerGraphic); // Add the overlay to the scene MySceneView.GraphicsOverlays.Add(overlay); try { // Add the taxi to the scene // Create the model symbol for the taxi ModelSceneSymbol taxiSymbol = await ModelSceneSymbol.CreateAsync(new Uri(GetModelUri())); // Set the anchor position for the mode; ensures that the model appears above the ground taxiSymbol.AnchorPosition = SceneSymbolAnchorPosition.Bottom; // Create the graphic from the taxi starting point and the symbol _taxiGraphic = new Graphic(_points[0], taxiSymbol); // Add the taxi graphic to the overlay overlay.Graphics.Add(_taxiGraphic); // Create GeoElement Line of sight analysis (taxi to building) // Create the analysis _geoLine = new GeoElementLineOfSight(_observerGraphic, _taxiGraphic) { // Apply an offset to the target. This helps avoid some false negatives TargetOffsetZ = 2 }; // Create the analysis overlay AnalysisOverlay myAnalysisOverlay = new AnalysisOverlay(); // Add the analysis to the overlay myAnalysisOverlay.Analyses.Add(_geoLine); // Add the analysis overlay to the scene MySceneView.AnalysisOverlays.Add(myAnalysisOverlay); // Create a timer; this will enable animating the taxi Timer animationTimer = new Timer(60) { Enabled = true, AutoReset = true }; // Move the taxi every time the timer expires animationTimer.Elapsed += AnimationTimer_Elapsed; // Start the timer animationTimer.Start(); // Subscribe to TargetVisible events; allows for updating the UI and selecting the taxi when it is visible _geoLine.TargetVisibilityChanged += Geoline_TargetVisibilityChanged; // Add the scene to the view MySceneView.Scene = myScene; } catch (Exception e) { MessageBox.Show(e.ToString(), "Error"); } } private void AnimationTimer_Elapsed(object sender, ElapsedEventArgs e) { // Note: the contents of this function are solely related to animating the taxi // Increment the frame counter _frameIndex++; // Reset the frame counter once one segment of the path has been travelled if (_frameIndex == _frameMax) { _frameIndex = 0; // Start navigating toward the next point _pointIndex++; // Restart if finished circuit if (_pointIndex == _points.Length) { _pointIndex = 0; } } // Get the point the taxi is travelling from MapPoint starting = _points[_pointIndex]; // Get the point the taxi is travelling to MapPoint ending = _points[(_pointIndex + 1) % _points.Length]; // Calculate the progress based on the current frame double progress = _frameIndex / (double)_frameMax; // Calculate the position of the taxi when it is {progress}% of the way through MapPoint intermediatePoint = InterpolatedPoint(starting, ending, progress); // Update the taxi geometry _taxiGraphic.Geometry = intermediatePoint; // Update the taxi rotation. 
GeodeticDistanceResult distance = GeometryEngine.DistanceGeodetic(starting, ending, LinearUnits.Meters, AngularUnits.Degrees, GeodeticCurveType.Geodesic); ((ModelSceneSymbol)_taxiGraphic.Symbol).Heading = distance.Azimuth1; } private MapPoint InterpolatedPoint(MapPoint firstPoint, MapPoint secondPoint, double progress) { // This function returns a MapPoint that is the result of travelling {progress}% of the way from {firstPoint} to {secondPoint} // Get the difference between the two points MapPoint difference = new MapPoint(secondPoint.X - firstPoint.X, secondPoint.Y - firstPoint.Y, secondPoint.Z - firstPoint.Z, SpatialReferences.Wgs84); // Scale the difference by the progress towards the destination MapPoint scaled = new MapPoint(difference.X * progress, difference.Y * progress, difference.Z * progress); // Add the scaled progress to the starting point return new MapPoint(firstPoint.X + scaled.X, firstPoint.Y + scaled.Y, firstPoint.Z + scaled.Z); } private async void Geoline_TargetVisibilityChanged(object sender, EventArgs e) { // This is needed because Runtime delivers notifications from a different thread that doesn't have access to UI controls await Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal, (Action)UpdateUiAndSelection); } private void UpdateUiAndSelection() { switch (_geoLine.TargetVisibility) { case LineOfSightTargetVisibility.Obstructed: MyStatusLabel.Text = "Status: Obstructed"; _taxiGraphic.IsSelected = false; break; case LineOfSightTargetVisibility.Visible: MyStatusLabel.Text = "Status: Visible"; _taxiGraphic.IsSelected = true; break; default: case LineOfSightTargetVisibility.Unknown: MyStatusLabel.Text = "Status: Unknown"; _taxiGraphic.IsSelected = false; break; } } private static string GetModelUri() { // Returns the taxi model return DataManager.GetDataFolder("3af5cfec0fd24dac8d88aea679027cb9", "dolmus.3ds"); } private void HeightSlider_ValueChanged(object sender, System.Windows.RoutedPropertyChangedEventArgs<double> e) { // Update the height of the observer based on the slider value // Constrain the min and max to 20 and 150 units double minHeight = 20; double maxHeight = 150; // Scale the slider value; its default range is 0-10 double value = e.NewValue / 10; // Get the current point MapPoint oldPoint = (MapPoint)_observerGraphic.Geometry; // Create a new point with the same (x,y) but updated z MapPoint newPoint = new MapPoint(oldPoint.X, oldPoint.Y, (maxHeight - minHeight) * value + minHeight); // Apply the updated geometry to the observer point _observerGraphic.Geometry = newPoint; } } }
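// --- Hedged sketch (not part of the sample above) ----------------------------
// The taxi animation above is plain frame-driven linear interpolation:
// progress = frame / frameMax, position = start + (end - start) * progress.
// This standalone version shows just that math with simple value types, with
// no dependency on the ArcGIS Runtime API.
using System;

struct Position3
{
    public double X, Y, Z;
    public Position3(double x, double y, double z) { X = x; Y = y; Z = z; }
}

static class TaxiPathSketch
{
    // Returns the point {progress} (0..1) of the way from start to end.
    public static Position3 Interpolate(Position3 start, Position3 end, double progress)
    {
        return new Position3(
            start.X + (end.X - start.X) * progress,
            start.Y + (end.Y - start.Y) * progress,
            start.Z + (end.Z - start.Z) * progress);
    }

    public static Position3 PositionForFrame(Position3 start, Position3 end, int frame, int frameMax)
    {
        return Interpolate(start, end, frame / (double)frameMax);
    }
}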
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Xml; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Reflection; using System.Text; using System.Threading; using OpenMetaverse; using Nini.Config; using OpenSim.Framework.Servers.HttpServer; using log4net; namespace OpenSim.Framework.Console { public class ConsoleConnection { public int last; public long lastLineSeen; } // A console that uses REST interfaces // public class RemoteConsole : CommandConsole { private IHttpServer m_Server = null; private IConfigSource m_Config = null; private List<string> m_Scrollback = new List<string>(); private ManualResetEvent m_DataEvent = new ManualResetEvent(false); private List<string> m_InputData = new List<string>(); private long m_LineNumber = 0; private Dictionary<UUID, ConsoleConnection> m_Connections = new Dictionary<UUID, ConsoleConnection>(); private string m_UserName = String.Empty; private string m_Password = String.Empty; public RemoteConsole(string defaultPrompt) : base(defaultPrompt) { } public void ReadConfig(IConfigSource config) { m_Config = config; IConfig netConfig = m_Config.Configs["Network"]; if (netConfig == null) return; m_UserName = netConfig.GetString("ConsoleUser", String.Empty); m_Password = netConfig.GetString("ConsolePass", String.Empty); } public void SetServer(IHttpServer server) { m_Server = server; m_Server.AddHTTPHandler("/StartSession/", HandleHttpStartSession); m_Server.AddHTTPHandler("/CloseSession/", HandleHttpCloseSession); m_Server.AddHTTPHandler("/SessionCommand/", HandleHttpSessionCommand); } public override void Output(string text, string level) { lock (m_Scrollback) { while (m_Scrollback.Count >= 1000) m_Scrollback.RemoveAt(0); m_LineNumber++; m_Scrollback.Add(String.Format("{0}", m_LineNumber)+":"+level+":"+text); } System.Console.WriteLine(text.Trim()); } public override void Output(string text) { Output(text, "normal"); } public override string ReadLine(string p, bool isCommand, bool e) { 
m_DataEvent.WaitOne(); lock (m_InputData) { if (m_InputData.Count == 0) { m_DataEvent.Reset(); return ""; } string cmdinput = m_InputData[0]; m_InputData.RemoveAt(0); if (m_InputData.Count == 0) m_DataEvent.Reset(); if (isCommand) { string[] cmd = Commands.Resolve(Parser.Parse(cmdinput)); if (cmd.Length != 0) { int i; for (i=0 ; i < cmd.Length ; i++) { if (cmd[i].Contains(" ")) cmd[i] = "\"" + cmd[i] + "\""; } return String.Empty; } } return cmdinput; } } private void DoExpire() { List<UUID> expired = new List<UUID>(); lock (m_Connections) { foreach (KeyValuePair<UUID, ConsoleConnection> kvp in m_Connections) { if (System.Environment.TickCount - kvp.Value.last > 500000) expired.Add(kvp.Key); } foreach (UUID id in expired) { m_Connections.Remove(id); CloseConnection(id); } } } private Hashtable HandleHttpStartSession(Hashtable request) { DoExpire(); Hashtable post = DecodePostString(request["body"].ToString()); Hashtable reply = new Hashtable(); reply["str_response_string"] = ""; reply["int_response_code"] = 401; reply["content_type"] = "text/plain"; if (m_UserName == String.Empty) return reply; if (post["USER"] == null || post["PASS"] == null) return reply; if (m_UserName != post["USER"].ToString() || m_Password != post["PASS"].ToString()) { return reply; } ConsoleConnection c = new ConsoleConnection(); c.last = System.Environment.TickCount; c.lastLineSeen = 0; UUID sessionID = UUID.Random(); lock (m_Connections) { m_Connections[sessionID] = c; } string uri = "/ReadResponses/" + sessionID.ToString() + "/"; m_Server.AddPollServiceHTTPHandler(uri, HandleHttpPoll, new PollServiceEventArgs(null, HasEvents, GetEvents, NoEvents, sessionID)); XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ConsoleSession", ""); xmldoc.AppendChild(rootElement); XmlElement id = xmldoc.CreateElement("", "SessionID", ""); id.AppendChild(xmldoc.CreateTextNode(sessionID.ToString())); rootElement.AppendChild(id); XmlElement prompt = xmldoc.CreateElement("", "Prompt", ""); prompt.AppendChild(xmldoc.CreateTextNode(DefaultPrompt)); rootElement.AppendChild(prompt); rootElement.AppendChild(MainConsole.Instance.Commands.GetXml(xmldoc)); reply["str_response_string"] = xmldoc.InnerXml; reply["int_response_code"] = 200; reply["content_type"] = "text/xml"; return reply; } private Hashtable HandleHttpPoll(Hashtable request) { return new Hashtable(); } private Hashtable HandleHttpCloseSession(Hashtable request) { DoExpire(); Hashtable post = DecodePostString(request["body"].ToString()); Hashtable reply = new Hashtable(); reply["str_response_string"] = ""; reply["int_response_code"] = 404; reply["content_type"] = "text/plain"; if (post["ID"] == null) return reply; UUID id; if (!UUID.TryParse(post["ID"].ToString(), out id)) return reply; lock (m_Connections) { if (m_Connections.ContainsKey(id)) { m_Connections.Remove(id); CloseConnection(id); } } XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ConsoleSession", ""); xmldoc.AppendChild(rootElement); XmlElement res = xmldoc.CreateElement("", "Result", ""); res.AppendChild(xmldoc.CreateTextNode("OK")); rootElement.AppendChild(res); reply["str_response_string"] = xmldoc.InnerXml; reply["int_response_code"] = 200; reply["content_type"] = "text/plain"; return reply; } private Hashtable 
HandleHttpSessionCommand(Hashtable request) { DoExpire(); Hashtable post = DecodePostString(request["body"].ToString()); Hashtable reply = new Hashtable(); reply["str_response_string"] = ""; reply["int_response_code"] = 404; reply["content_type"] = "text/plain"; if (post["ID"] == null) return reply; UUID id; if (!UUID.TryParse(post["ID"].ToString(), out id)) return reply; lock (m_Connections) { if (!m_Connections.ContainsKey(id)) return reply; } if (post["COMMAND"] == null || post["COMMAND"].ToString() == String.Empty) return reply; lock (m_InputData) { m_DataEvent.Set(); m_InputData.Add(post["COMMAND"].ToString()); } XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ConsoleSession", ""); xmldoc.AppendChild(rootElement); XmlElement res = xmldoc.CreateElement("", "Result", ""); res.AppendChild(xmldoc.CreateTextNode("OK")); rootElement.AppendChild(res); reply["str_response_string"] = xmldoc.InnerXml; reply["int_response_code"] = 200; reply["content_type"] = "text/plain"; return reply; } private Hashtable DecodePostString(string data) { Hashtable result = new Hashtable(); string[] terms = data.Split(new char[] {'&'}); foreach (string term in terms) { string[] elems = term.Split(new char[] {'='}); if (elems.Length == 0) continue; string name = System.Web.HttpUtility.UrlDecode(elems[0]); string value = String.Empty; if (elems.Length > 1) value = System.Web.HttpUtility.UrlDecode(elems[1]); result[name] = value; } return result; } public void CloseConnection(UUID id) { try { string uri = "/ReadResponses/" + id.ToString() + "/"; m_Server.RemovePollServiceHTTPHandler("", uri); } catch (Exception) { } } private bool HasEvents(UUID RequestID, UUID sessionID) { ConsoleConnection c = null; lock (m_Connections) { if (!m_Connections.ContainsKey(sessionID)) return false; c = m_Connections[sessionID]; } c.last = System.Environment.TickCount; if (c.lastLineSeen < m_LineNumber) return true; return false; } private Hashtable GetEvents(UUID RequestID, UUID sessionID, string request) { ConsoleConnection c = null; lock (m_Connections) { if (!m_Connections.ContainsKey(sessionID)) return NoEvents(RequestID, UUID.Zero); c = m_Connections[sessionID]; } c.last = System.Environment.TickCount; if (c.lastLineSeen >= m_LineNumber) return NoEvents(RequestID, UUID.Zero); Hashtable result = new Hashtable(); XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ConsoleSession", ""); lock (m_Scrollback) { long startLine = m_LineNumber - m_Scrollback.Count; long sendStart = startLine; if (sendStart < c.lastLineSeen) sendStart = c.lastLineSeen; for (long i = sendStart ; i < m_LineNumber ; i++) { XmlElement res = xmldoc.CreateElement("", "Line", ""); long line = i + 1; res.SetAttribute("Number", line.ToString()); res.AppendChild(xmldoc.CreateTextNode(m_Scrollback[(int)(i - startLine)])); rootElement.AppendChild(res); } } c.lastLineSeen = m_LineNumber; xmldoc.AppendChild(rootElement); result["str_response_string"] = xmldoc.InnerXml; result["int_response_code"] = 200; result["content_type"] = "application/xml"; result["keepalive"] = false; result["reusecontext"] = false; return result; } private Hashtable NoEvents(UUID RequestID, UUID id) { Hashtable result = new Hashtable(); XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = 
xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ConsoleSession", ""); xmldoc.AppendChild(rootElement); result["str_response_string"] = xmldoc.InnerXml; result["int_response_code"] = 200; result["content_type"] = "text/xml"; result["keepalive"] = false; result["reusecontext"] = false; return result; } } }
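// --- Hedged client sketch (not part of the console implementation above) ----
// Illustrates the REST handshake the handlers above expect: form-encoded
// USER/PASS posted to /StartSession/, then ID/COMMAND posted to
// /SessionCommand/. The base address is a placeholder, error handling is
// minimal, and the SessionID element name is taken from the XML the handler
// builds above.
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using System.Xml;

static class RemoteConsoleClientSketch
{
    public static async Task SendCommandAsync(string baseUrl, string user, string pass, string command)
    {
        using (var http = new HttpClient { BaseAddress = new Uri(baseUrl) })
        {
            var login = new FormUrlEncodedContent(new Dictionary<string, string>
            {
                ["USER"] = user,
                ["PASS"] = pass
            });
            HttpResponseMessage startResponse = await http.PostAsync("/StartSession/", login);
            string sessionXml = await startResponse.Content.ReadAsStringAsync();

            var doc = new XmlDocument();
            doc.LoadXml(sessionXml);
            string sessionId = doc.SelectSingleNode("//SessionID").InnerText;

            var cmd = new FormUrlEncodedContent(new Dictionary<string, string>
            {
                ["ID"] = sessionId,
                ["COMMAND"] = command
            });
            await http.PostAsync("/SessionCommand/", cmd);
        }
    }
}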
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Buffers; using System.Diagnostics; using System.Runtime.InteropServices; using Internal.Runtime.CompilerServices; namespace System.Text.Unicode { public static class Utf8 { /* * OperationStatus-based APIs for transcoding of chunked data. * This method is similar to Encoding.UTF8.GetBytes / GetChars but has a * different calling convention, different error handling mechanisms, and * different performance characteristics. * * If 'replaceInvalidSequences' is true, the method will replace any ill-formed * subsequence in the source with U+FFFD when transcoding to the destination, * then it will continue processing the remainder of the buffers. Otherwise * the method will return OperationStatus.InvalidData. * * If the method does return an error code, the out parameters will represent * how much of the data was successfully transcoded, and the location of the * ill-formed subsequence can be deduced from these values. * * If 'replaceInvalidSequences' is true, the method is guaranteed never to return * OperationStatus.InvalidData. If 'isFinalBlock' is true, the method is * guaranteed never to return OperationStatus.NeedMoreData. */ /// <summary> /// Transcodes the UTF-16 <paramref name="source"/> buffer to <paramref name="destination"/> as UTF-8. /// </summary> /// <remarks> /// If <paramref name="replaceInvalidSequences"/> is <see langword="true"/>, invalid UTF-16 sequences /// in <paramref name="source"/> will be replaced with U+FFFD in <paramref name="destination"/>, and /// this method will not return <see cref="OperationStatus.InvalidData"/>. /// </remarks> public static unsafe OperationStatus FromUtf16(ReadOnlySpan<char> source, Span<byte> destination, out int charsRead, out int bytesWritten, bool replaceInvalidSequences = true, bool isFinalBlock = true) { // Throwaway span accesses - workaround for https://github.com/dotnet/coreclr/issues/23437 _ = source.Length; _ = destination.Length; fixed (char* pOriginalSource = &MemoryMarshal.GetReference(source)) fixed (byte* pOriginalDestination = &MemoryMarshal.GetReference(destination)) { // We're going to bulk transcode as much as we can in a loop, iterating // every time we see bad data that requires replacement. OperationStatus operationStatus = OperationStatus.Done; char* pInputBufferRemaining = pOriginalSource; byte* pOutputBufferRemaining = pOriginalDestination; while (!source.IsEmpty) { // We've pinned the spans at the entry point to this method. // It's safe for us to use Unsafe.AsPointer on them during this loop. operationStatus = Utf8Utility.TranscodeToUtf8( pInputBuffer: (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source)), inputLength: source.Length, pOutputBuffer: (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)), outputBytesRemaining: destination.Length, pInputBufferRemaining: out pInputBufferRemaining, pOutputBufferRemaining: out pOutputBufferRemaining); // If we finished the operation entirely or we ran out of space in the destination buffer, // or if we need more input data and the caller told us that there's possibly more data // coming, return immediately. 
if (operationStatus <= OperationStatus.DestinationTooSmall || (operationStatus == OperationStatus.NeedMoreData && !isFinalBlock)) { break; } // We encountered invalid data, or we need more data but the caller told us we're // at the end of the stream. In either case treat this as truly invalid. // If the caller didn't tell us to replace invalid sequences, return immediately. if (!replaceInvalidSequences) { operationStatus = OperationStatus.InvalidData; // status code may have been NeedMoreData - force to be error break; } // We're going to attempt to write U+FFFD to the destination buffer. // Do we even have enough space to do so? destination = destination.Slice((int)(pOutputBufferRemaining - (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)))); if (2 >= (uint)destination.Length) { operationStatus = OperationStatus.DestinationTooSmall; break; } destination[0] = 0xEF; // U+FFFD = [ EF BF BD ] in UTF-8 destination[1] = 0xBF; destination[2] = 0xBD; destination = destination.Slice(3); // Invalid UTF-16 sequences are always of length 1. Just skip the next character. source = source.Slice((int)(pInputBufferRemaining - (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source))) + 1); operationStatus = OperationStatus.Done; // we patched the error - if we're about to break out of the loop this is a success case pInputBufferRemaining = (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source)); pOutputBufferRemaining = (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)); } // Not possible to make any further progress - report to our caller how far we got. charsRead = (int)(pInputBufferRemaining - pOriginalSource); bytesWritten = (int)(pOutputBufferRemaining - pOriginalDestination); return operationStatus; } } /// <summary> /// Transcodes the UTF-8 <paramref name="source"/> buffer to <paramref name="destination"/> as UTF-16. /// </summary> /// <remarks> /// If <paramref name="replaceInvalidSequences"/> is <see langword="true"/>, invalid UTF-8 sequences /// in <paramref name="source"/> will be replaced with U+FFFD in <paramref name="destination"/>, and /// this method will not return <see cref="OperationStatus.InvalidData"/>. /// </remarks> public static unsafe OperationStatus ToUtf16(ReadOnlySpan<byte> source, Span<char> destination, out int bytesRead, out int charsWritten, bool replaceInvalidSequences = true, bool isFinalBlock = true) { // Throwaway span accesses - workaround for https://github.com/dotnet/coreclr/issues/23437 _ = source.Length; _ = destination.Length; // We'll be mutating these values throughout our loop. fixed (byte* pOriginalSource = &MemoryMarshal.GetReference(source)) fixed (char* pOriginalDestination = &MemoryMarshal.GetReference(destination)) { // We're going to bulk transcode as much as we can in a loop, iterating // every time we see bad data that requires replacement. OperationStatus operationStatus = OperationStatus.Done; byte* pInputBufferRemaining = pOriginalSource; char* pOutputBufferRemaining = pOriginalDestination; while (!source.IsEmpty) { // We've pinned the spans at the entry point to this method. // It's safe for us to use Unsafe.AsPointer on them during this loop. 
operationStatus = Utf8Utility.TranscodeToUtf16( pInputBuffer: (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source)), inputLength: source.Length, pOutputBuffer: (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)), outputCharsRemaining: destination.Length, pInputBufferRemaining: out pInputBufferRemaining, pOutputBufferRemaining: out pOutputBufferRemaining); // If we finished the operation entirely or we ran out of space in the destination buffer, // or if we need more input data and the caller told us that there's possibly more data // coming, return immediately. if (operationStatus <= OperationStatus.DestinationTooSmall || (operationStatus == OperationStatus.NeedMoreData && !isFinalBlock)) { break; } // We encountered invalid data, or we need more data but the caller told us we're // at the end of the stream. In either case treat this as truly invalid. // If the caller didn't tell us to replace invalid sequences, return immediately. if (!replaceInvalidSequences) { operationStatus = OperationStatus.InvalidData; // status code may have been NeedMoreData - force to be error break; } // We're going to attempt to write U+FFFD to the destination buffer. // Do we even have enough space to do so? destination = destination.Slice((int)(pOutputBufferRemaining - (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)))); if (destination.IsEmpty) { operationStatus = OperationStatus.DestinationTooSmall; break; } destination[0] = (char)UnicodeUtility.ReplacementChar; destination = destination.Slice(1); // Now figure out how many bytes of the source we must skip over before we should retry // the operation. This might be more than 1 byte. source = source.Slice((int)(pInputBufferRemaining - (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source)))); Debug.Assert(!source.IsEmpty, "Expected 'Done' if source is fully consumed."); Rune.DecodeFromUtf8(source, out _, out int bytesConsumedJustNow); source = source.Slice(bytesConsumedJustNow); operationStatus = OperationStatus.Done; // we patched the error - if we're about to break out of the loop this is a success case pInputBufferRemaining = (byte*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(source)); pOutputBufferRemaining = (char*)Unsafe.AsPointer(ref MemoryMarshal.GetReference(destination)); } // Not possible to make any further progress - report to our caller how far we got. bytesRead = (int)(pInputBufferRemaining - pOriginalSource); charsWritten = (int)(pOutputBufferRemaining - pOriginalDestination); return operationStatus; } } } }
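// --- Hedged usage sketch (not part of the implementation above) -------------
// Shows the OperationStatus-based calling convention of the methods defined
// above: transcode what fits, then inspect how much of each buffer was
// consumed and produced. Buffer sizes here are arbitrary.
using System;
using System.Buffers;
using System.Text.Unicode;

static class Utf8TranscodeSketch
{
    public static void RoundTrip()
    {
        ReadOnlySpan<char> text = "héllo".AsSpan();
        Span<byte> utf8 = stackalloc byte[16];

        OperationStatus status = Utf8.FromUtf16(text, utf8, out int charsRead, out int bytesWritten);
        // Done means everything fit; DestinationTooSmall means call again with the
        // unconsumed tail of the source and a fresh destination buffer.

        Span<char> utf16 = stackalloc char[16];
        status = Utf8.ToUtf16(utf8.Slice(0, bytesWritten), utf16, out int bytesRead, out int charsWritten);
    }
}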
/* Project Orleans Cloud Service SDK ver. 1.0 Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Security.Cryptography; using System.Text; namespace Orleans.Runtime { /// <summary> /// The Utils class contains a variety of utility methods for use in application and grain code. /// </summary> public static class Utils { /// <summary> /// Returns a human-readable text string that describes an IEnumerable collection of objects. /// </summary> /// <typeparam name="T">The type of the list elements.</typeparam> /// <param name="collection">The IEnumerable to describe.</param> /// <returns>A string assembled by wrapping the string descriptions of the individual /// elements with square brackets and separating them with commas.</returns> public static string EnumerableToString<T>(IEnumerable<T> collection, Func<T, string> toString = null, string separator = ", ", bool putInBrackets = true) { if (collection == null) { if (putInBrackets) return "[]"; else return "null"; } var sb = new StringBuilder(); if (putInBrackets) sb.Append("["); var enumerator = collection.GetEnumerator(); bool firstDone = false; while (enumerator.MoveNext()) { T value = enumerator.Current; string val; if (toString != null) val = toString(value); else val = value == null ? "null" : value.ToString(); if (firstDone) { sb.Append(separator); sb.Append(val); } else { sb.Append(val); firstDone = true; } } if (putInBrackets) sb.Append("]"); return sb.ToString(); } /// <summary> /// Returns a human-readable text string that describes a dictionary that maps objects to objects. /// </summary> /// <typeparam name="T1">The type of the dictionary keys.</typeparam> /// <typeparam name="T2">The type of the dictionary elements.</typeparam> /// <param name="separateWithNewLine">Whether the elements should appear separated by a new line.</param> /// <param name="dict">The dictionary to describe.</param> /// <returns>A string assembled by wrapping the string descriptions of the individual /// pairs with square brackets and separating them with commas. 
/// Each key-value pair is represented as the string description of the key followed by /// the string description of the value, /// separated by " -> ", and enclosed in curly brackets.</returns> public static string DictionaryToString<T1, T2>(ICollection<KeyValuePair<T1, T2>> dict, Func<T2, string> toString = null, string separator = null) { if (dict == null || dict.Count == 0) { return "[]"; } if (separator == null) { separator = Environment.NewLine; } var sb = new StringBuilder("["); var enumerator = dict.GetEnumerator(); int index = 0; while (enumerator.MoveNext()) { var pair = enumerator.Current; sb.Append("{"); sb.Append(pair.Key); sb.Append(" -> "); string val; if (toString != null) val = toString(pair.Value); else val = pair.Value == null ? "null" : pair.Value.ToString(); sb.Append(val); sb.Append("}"); if (index++ < dict.Count - 1) sb.Append(separator); } sb.Append("]"); return sb.ToString(); } public static string TimeSpanToString(TimeSpan timeSpan) { //00:03:32.8289777 return String.Format("{0}h:{1}m:{2}s.{3}ms", timeSpan.Hours, timeSpan.Minutes, timeSpan.Seconds, timeSpan.Milliseconds); } public static long TicksToMilliSeconds(long ticks) { return (long)TimeSpan.FromTicks(ticks).TotalMilliseconds; } public static float AverageTicksToMilliSeconds(float ticks) { return (float)TimeSpan.FromTicks((long)ticks).TotalMilliseconds; } /// <summary> /// Parse a Uri as an IPEndpoint. /// </summary> /// <param name="uri">The input Uri</param> /// <returns></returns> public static System.Net.IPEndPoint ToIPEndPoint(this Uri uri) { switch (uri.Scheme) { case "gwy.tcp": return new System.Net.IPEndPoint(System.Net.IPAddress.Parse(uri.Host), uri.Port); } return null; } /// <summary> /// Parse a Uri as a Silo address, including the IPEndpoint and generation identifier. /// </summary> /// <param name="uri">The input Uri</param> /// <returns></returns> public static SiloAddress ToSiloAddress(this Uri uri) { switch (uri.Scheme) { case "gwy.tcp": return SiloAddress.New(uri.ToIPEndPoint(), uri.Segments.Length > 1 ? int.Parse(uri.Segments[1]) : 0); } return null; } /// <summary> /// Represent an IP end point in the gateway URI format.. /// </summary> /// <param name="ep">The input IP end point</param> /// <returns></returns> public static Uri ToGatewayUri(this System.Net.IPEndPoint ep) { return new Uri(string.Format("gwy.tcp://{0}:{1}/0", ep.Address, ep.Port)); } /// <summary> /// Represent a silo address in the gateway URI format. /// </summary> /// <param name="address">The input silo address</param> /// <returns></returns> public static Uri ToGatewayUri(this SiloAddress address) { return new Uri(string.Format("gwy.tcp://{0}:{1}/{2}", address.Endpoint.Address, address.Endpoint.Port, address.Generation)); } /// <summary> /// Calculates an integer hash value based on the consistent identity hash of a string. /// </summary> /// <param name="text">The string to hash.</param> /// <returns>An integer hash for the string.</returns> public static int CalculateIdHash(string text) { SHA256 sha = SHA256.Create(); // This is one implementation of the abstract class SHA1. int hash = 0; try { byte[] data = Encoding.Unicode.GetBytes(text); byte[] result = sha.ComputeHash(data); for (int i = 0; i < result.Length; i += 4) { int tmp = (result[i] << 24) | (result[i + 1] << 16) | (result[i + 2] << 8) | (result[i + 3]); hash = hash ^ tmp; } } finally { sha.Dispose(); } return hash; } /// <summary> /// Calculates a Guid hash value based on the consistent identity a string. 
/// </summary>
/// <param name="text">The string to hash.</param>
/// <returns>A Guid hash for the string.</returns>
internal static Guid CalculateGuidHash(string text)
{
    SHA256 sha = SHA256.Create(); // This is one implementation of the abstract class SHA256.
    byte[] hash = new byte[16];
    try
    {
        byte[] data = Encoding.Unicode.GetBytes(text);
        byte[] result = sha.ComputeHash(data);
        for (int i = 0; i < result.Length; i++)
        {
            byte tmp = (byte)(hash[i % 16] ^ result[i]);
            hash[i % 16] = tmp;
        }
    }
    finally
    {
        sha.Dispose();
    }
    return new Guid(hash);
}

public static bool TryFindException(Exception original, Type targetType, out Exception target)
{
    if (original.GetType() == targetType)
    {
        target = original;
        return true;
    }
    else if (original is AggregateException)
    {
        var baseEx = original.GetBaseException();
        if (baseEx.GetType() == targetType)
        {
            target = baseEx;
            return true;
        }
        else
        {
            var newEx = ((AggregateException)original).Flatten();
            foreach (var exc in newEx.InnerExceptions)
            {
                if (exc.GetType() == targetType)
                {
                    target = newEx;
                    return true;
                }
            }
        }
    }
    target = null;
    return false;
}

public static void SafeExecute(Action action, Logger logger = null, string caller = null)
{
    SafeExecute(action, logger, caller == null ? (Func<string>)null : () => caller);
}

// A function to safely execute an action without letting any exception escape.
// The callerGetter function is called only in the faulty case (no string is generated in the success case).
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
public static void SafeExecute(Action action, Logger logger, Func<string> callerGetter)
{
    try
    {
        action();
    }
    catch (Exception exc)
    {
        try
        {
            if (logger != null)
            {
                string caller = null;
                if (callerGetter != null)
                {
                    try
                    {
                        caller = callerGetter();
                    }
                    catch (Exception) { }
                }
                foreach (var e in exc.FlattenAggregate())
                {
                    logger.Warn((int)ErrorCode.Runtime_Error_100325,
                        String.Format("Ignoring {0} exception thrown from an action called by {1}.", e.GetType().FullName, caller ?? String.Empty), exc);
                }
            }
        }
        catch (Exception)
        {
            // now really, really ignore.
        }
    }
}

/// <summary>
/// Get the last <paramref name="count"/> characters of a string.
/// </summary>
/// <param name="s"></param>
/// <param name="count"></param>
/// <returns></returns>
public static string Tail(this string s, int count)
{
    return s.Substring(Math.Max(0, s.Length - count));
}

public static TimeSpan Since(DateTime start)
{
    return DateTime.UtcNow.Subtract(start);
}

public static List<T> ObjectToList<T>(object data)
{
    if (data is List<T>) return (List<T>)data;
    T[] dataArray;
    if (data is ArrayList)
    {
        dataArray = (T[])(data as ArrayList).ToArray(typeof(T));
    }
    else if (data is ICollection<T>)
    {
        dataArray = (data as ICollection<T>).ToArray();
    }
    else
    {
        throw new InvalidCastException(string.Format(
            "Cannot convert type {0} to type List<{1}>",
            TypeUtils.GetFullName(data.GetType()),
            TypeUtils.GetFullName(typeof(T))));
    }
    var list = new List<T>();
    list.AddRange(dataArray);
    return list;
}

public static List<Exception> FlattenAggregate(this Exception exc)
{
    var result = new List<Exception>();
    // Note: for an AggregateException this flattens only InnerException (the first
    // inner exception); sibling inner exceptions are not included.
    if (exc is AggregateException)
        result.AddRange(exc.InnerException.FlattenAggregate());
    else
        result.Add(exc);

    return result;
}

public static AggregateException Flatten(this ReflectionTypeLoadException rtle)
{
    // if ReflectionTypeLoadException is thrown, we need to provide the
    // LoaderExceptions property in order to make it meaningful.
var all = new List<Exception> { rtle }; all.AddRange(rtle.LoaderExceptions); throw new AggregateException("A ReflectionTypeLoadException has been thrown. The original exception and the contents of the LoaderExceptions property have been aggregated for your convenience.", all); } /// <summary> /// </summary> public static IEnumerable<List<T>> BatchIEnumerable<T>(this IEnumerable<T> sequence, int batchSize) { var batch = new List<T>(batchSize); foreach (var item in sequence) { batch.Add(item); // when we've accumulated enough in the batch, send it out if (batch.Count >= batchSize) { yield return batch; // batch.ToArray(); batch = new List<T>(batchSize); } } if (batch.Count > 0) { yield return batch; //batch.ToArray(); } } internal static MethodInfo GetStaticMethodThroughReflection(string assemblyName, string className, string methodName, Type[] argumentTypes) { var asm = Assembly.Load(assemblyName); if (asm == null) throw new InvalidOperationException(string.Format("Cannot find assembly {0}", assemblyName)); var cl = asm.GetType(className); if (cl == null) throw new InvalidOperationException(string.Format("Cannot find class {0} in assembly {1}", className, assemblyName)); MethodInfo method; method = argumentTypes == null ? cl.GetMethod(methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static) : cl.GetMethod(methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static, null, argumentTypes, null); if (method == null) throw new InvalidOperationException(string.Format("Cannot find static method {0} of class {1} in assembly {2}", methodName, className, assemblyName)); return method; } internal static object InvokeStaticMethodThroughReflection(string assemblyName, string className, string methodName, Type[] argumentTypes, object[] arguments) { var method = GetStaticMethodThroughReflection(assemblyName, className, methodName, argumentTypes); return method.Invoke(null, arguments); } internal static Type LoadTypeThroughReflection(string assemblyName, string className) { var asm = Assembly.Load(assemblyName); if (asm == null) throw new InvalidOperationException(string.Format("Cannot find assembly {0}", assemblyName)); var cl = asm.GetType(className); if (cl == null) throw new InvalidOperationException(string.Format("Cannot find class {0} in assembly {1}", className, assemblyName)); return cl; } } }
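// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the Orleans sources above): a minimal
// example of calling the Utils helpers from application code. The class name
// UtilsUsageExample is hypothetical; the members referenced (EnumerableToString,
// DictionaryToString, CalculateIdHash, SafeExecute, Tail, BatchIEnumerable) are
// the ones defined above.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using Orleans.Runtime;

internal static class UtilsUsageExample
{
    internal static void Run()
    {
        // "[1, 2, 3]" -- elements wrapped in brackets and comma-separated.
        string list = Utils.EnumerableToString(new[] { 1, 2, 3 });

        // "[{a -> 1}, {b -> 2}]" with the default newline separator overridden.
        string dict = Utils.DictionaryToString(
            new Dictionary<string, int> { ["a"] = 1, ["b"] = 2 }, separator: ", ");

        // Stable integer hash of a string identity (XOR of the 4-byte words of its SHA-256).
        int idHash = Utils.CalculateIdHash("my-grain-key");

        // SafeExecute swallows (and optionally logs) any exception thrown by the action.
        Utils.SafeExecute(() => { throw new InvalidOperationException("ignored"); });

        // Tail returns the last N characters; BatchIEnumerable yields lists of at most N items.
        string suffix = "abcdef".Tail(3);                                     // "def"
        var batches = Enumerable.Range(0, 10).BatchIEnumerable(4).ToList();   // sizes 4, 4, 2

        Console.WriteLine($"{list} {dict} {idHash} {suffix} {batches.Count}");
    }
}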
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using Xunit; namespace System.Linq.Tests { public class MinTests : EnumerableTests { [Fact] public void SameResultsRepeatCallsIntQuery() { var q = from x in new[] { 9999, 0, 888, -1, 66, -777, 1, 2, -12345 } where x > Int32.MinValue select x; Assert.Equal(q.Min(), q.Min()); } [Fact] public void SameResultsRepeatCallsStringQuery() { var q = from x in new[] { "!@#$%^", "C", "AAA", "", "Calling Twice", "SoS", String.Empty } where !String.IsNullOrEmpty(x) select x; Assert.Equal(q.Min(), q.Min()); } public static IEnumerable<object[]> Min_Int_TestData() { yield return new object[] { Enumerable.Repeat(42, 1), 42 }; yield return new object[] { Enumerable.Range(1, 10).ToArray(), 1 }; yield return new object[] { new int[] { -1, -10, 10, 200, 1000 }, -10 }; yield return new object[] { new int[] { 3000, 100, 200, 1000 }, 100 }; yield return new object[] { new int[] { 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat(int.MinValue, 1)), int.MinValue }; yield return new object[] { Enumerable.Repeat(20, 1), 20 }; yield return new object[] { Enumerable.Repeat(-2, 5), -2 }; yield return new object[] { Enumerable.Range(1, 10).ToArray(), 1 }; yield return new object[] { new int[] { 6, 9, 10, 7, 8 }, 6 }; yield return new object[] { new int[] { 6, 9, 10, 0, -5 }, -5 }; yield return new object[] { new int[] { 6, 0, 9, 0, 10, 0 }, 0 }; } [Theory] [MemberData(nameof(Min_Int_TestData))] public void Min_Int(IEnumerable<int> source, int expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_Int_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<int>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<int>)null).Min(x => x)); } [Fact] public void Min_Int_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<int>().Min(x => x)); } public static IEnumerable<object[]> Min_Long_TestData() { yield return new object[] { Enumerable.Repeat(42L, 1), 42L }; yield return new object[] { Enumerable.Range(1, 10).Select(i => (long)i).ToArray(), 1L }; yield return new object[] { new long[] { -1, -10, 10, 200, 1000 }, -10L }; yield return new object[] { new long[] { 3000, 100, 200, 1000 }, 100L }; yield return new object[] { new long[] { 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat(long.MinValue, 1)), long.MinValue }; yield return new object[] { Enumerable.Repeat(int.MaxValue + 10L, 1), int.MaxValue + 10L }; yield return new object[] { Enumerable.Repeat(500L, 5), 500L }; yield return new object[] { new long[] { -250, 49, 130, 47, 28 }, -250L }; yield return new object[] { new long[] { 6, 9, 10, 0, -int.MaxValue - 50L }, -int.MaxValue - 50L }; yield return new object[] { new long[] { 6, -5, 9, -5, 10, -5 }, -5 }; } [Theory] [MemberData(nameof(Min_Long_TestData))] public void Min_Long(IEnumerable<long> source, long expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_Long_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<long>)null).Min()); 
AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<long>)null).Min(x => x)); } [Fact] public void Min_Long_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<long>().Min(x => x)); } public static IEnumerable<object[]> Min_Float_TestData() { yield return new object[] { Enumerable.Repeat(42f, 1), 42f }; yield return new object[] { Enumerable.Range(1, 10).Select(i => (float)i).ToArray(), 1f }; yield return new object[] { new float[] { -1, -10, 10, 200, 1000 }, -10f }; yield return new object[] { new float[] { 3000, 100, 200, 1000 }, 100 }; yield return new object[] { new float[] { 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat(float.MinValue, 1)), float.MinValue }; yield return new object[] { Enumerable.Repeat(5.5f, 1), 5.5f }; yield return new object[] { Enumerable.Repeat(float.NaN, 5), float.NaN }; yield return new object[] { new float[] { -2.5f, 4.9f, 130f, 4.7f, 28f }, -2.5f}; yield return new object[] { new float[] { 6.8f, 9.4f, 10f, 0, -5.6f }, -5.6f }; yield return new object[] { new float[] { -5.5f, float.NegativeInfinity, 9.9f, float.NegativeInfinity }, float.NegativeInfinity }; yield return new object[] { new float[] { float.NaN, 6.8f, 9.4f, 10f, 0, -5.6f }, float.NaN }; yield return new object[] { new float[] { 6.8f, 9.4f, 10f, 0, -5.6f, float.NaN }, float.NaN }; yield return new object[] { new float[] { float.NaN, float.NegativeInfinity }, float.NaN }; yield return new object[] { new float[] { float.NegativeInfinity, float.NaN }, float.NaN }; // In .NET Core, Enumerable.Min shortcircuits if it finds any float.NaN in the array, // as nothing can be less than float.NaN. See https://github.com/dotnet/corefx/pull/2426. // Without this optimization, we would iterate through int.MaxValue elements, which takes // a long time. if (!PlatformDetection.IsFullFramework) { yield return new object[] { Enumerable.Repeat(float.NaN, int.MaxValue), float.NaN }; } yield return new object[] { Enumerable.Repeat(float.NaN, 3), float.NaN }; // Normally NaN < anything is false, as is anything < NaN // However, this leads to some irksome outcomes in Min and Max. // If we use those semantics then Min(NaN, 5.0) is NaN, but // Min(5.0, NaN) is 5.0! To fix this, we impose a total // ordering where NaN is smaller than every value, including // negative infinity. 
yield return new object[] { Enumerable.Range(1, 10).Select(i => (float)i).Concat(Enumerable.Repeat(float.NaN, 1)).ToArray(), float.NaN }; yield return new object[] { new float[] { -1F, -10, float.NaN, 10, 200, 1000 }, float.NaN }; yield return new object[] { new float[] { float.MinValue, 3000F, 100, 200, float.NaN, 1000 }, float.NaN }; } [Theory] [MemberData(nameof(Min_Float_TestData))] public void Min_Float(IEnumerable<float> source, float expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_Float_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<float>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<float>)null).Min(x => x)); } [Fact] public void Min_Float_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<float>().Min(x => x)); } public static IEnumerable<object[]> Min_Double_TestData() { yield return new object[] { Enumerable.Repeat(42.0, 1), 42.0 }; yield return new object[] { Enumerable.Range(1, 10).Select(i => (double)i).ToArray(), 1.0 }; yield return new object[] { new double[] { -1, -10, 10, 200, 1000 }, -10.0 }; yield return new object[] { new double[] { 3000, 100, 200, 1000 }, 100.0 }; yield return new object[] { new double[] { 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat(double.MinValue, 1)), double.MinValue }; yield return new object[] { Enumerable.Repeat(5.5, 1), 5.5 }; yield return new object[] { new double[] { -2.5, 4.9, 130, 4.7, 28 }, -2.5 }; yield return new object[] { new double[] { 6.8, 9.4, 10, 0, -5.6 }, -5.6 }; yield return new object[] { new double[] { -5.5, double.NegativeInfinity, 9.9, double.NegativeInfinity }, double.NegativeInfinity }; // In .NET Core, Enumerable.Min shortcircuits if it finds any double.NaN in the array, // as nothing can be less than double.NaN. See https://github.com/dotnet/corefx/pull/2426. // Without this optimization, we would iterate through int.MaxValue elements, which takes // a long time. if (!PlatformDetection.IsFullFramework) { yield return new object[] { Enumerable.Repeat(double.NaN, int.MaxValue), double.NaN }; } yield return new object[] { Enumerable.Repeat(double.NaN, 3), double.NaN }; yield return new object[] { new double[] { double.NaN, 6.8, 9.4, 10, 0, -5.6 }, double.NaN }; yield return new object[] { new double[] { 6.8, 9.4, 10, 0, -5.6, double.NaN }, double.NaN }; yield return new object[] { new double[] { double.NaN, double.NegativeInfinity }, double.NaN }; yield return new object[] { new double[] { double.NegativeInfinity, double.NaN }, double.NaN }; // Normally NaN < anything is false, as is anything < NaN // However, this leads to some irksome outcomes in Min and Max. // If we use those semantics then Min(NaN, 5.0) is NaN, but // Min(5.0, NaN) is 5.0! To fix this, we impose a total // ordering where NaN is smaller than every value, including // negative infinity. 
yield return new object[] { Enumerable.Range(1, 10).Select(i => (double)i).Concat(Enumerable.Repeat(double.NaN, 1)).ToArray(), double.NaN }; yield return new object[] { new double[] { -1, -10, double.NaN, 10, 200, 1000 }, double.NaN }; yield return new object[] { new double[] { double.MinValue, 3000F, 100, 200, double.NaN, 1000 }, double.NaN }; } [Theory] [MemberData(nameof(Min_Double_TestData))] public void Min_Double(IEnumerable<double> source, double expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_Double_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<double>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<double>)null).Min(x => x)); } [Fact] public void Min_Double_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<double>().Min(x => x)); } public static IEnumerable<object[]> Min_Decimal_TestData() { yield return new object[] { Enumerable.Repeat(42m, 1), 42m }; yield return new object[] { Enumerable.Range(1, 10).Select(i => (decimal)i).ToArray(), 1m }; yield return new object[] { new decimal[] { -1, -10, 10, 200, 1000 }, -10m }; yield return new object[] { new decimal[] { 3000, 100, 200, 1000 }, 100m }; yield return new object[] { new decimal[] { 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat(decimal.MinValue, 1)), decimal.MinValue }; yield return new object[] { Enumerable.Repeat(5.5m, 1), 5.5m }; yield return new object[] { Enumerable.Repeat(-3.4m, 5), -3.4m }; yield return new object[] { new decimal[] { -2.5m, 4.9m, 130m, 4.7m, 28m }, -2.5m }; yield return new object[] { new decimal[] { 6.8m, 9.4m, 10m, 0m, 0m, decimal.MinValue }, decimal.MinValue }; yield return new object[] { new decimal[] { -5.5m, 0m, 9.9m, -5.5m, 5m }, -5.5m }; } [Theory] [MemberData(nameof(Min_Decimal_TestData))] public void Min_Decimal(IEnumerable<decimal> source, decimal expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_Decimal_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<decimal>().Min(x => x)); } [Fact] public void Min_Decimal_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<decimal>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<decimal>)null).Min(x => x)); } public static IEnumerable<object[]> Min_NullableInt_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => (int?)i).ToArray(), 1 }; yield return new object[] { new int?[] { null, -1, -10, 10, 200, 1000 }, -10 }; yield return new object[] { new int?[] { null, 3000, 100, 200, 1000 }, 100 }; yield return new object[] { new int?[] { null, 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat((int?)int.MinValue, 1)), int.MinValue }; yield return new object[] { Enumerable.Repeat(default(int?), 100), null }; yield return new object[] { Enumerable.Repeat((int?)42, 1), 42 }; yield return new object[] { Enumerable.Empty<int?>(), null }; yield return new object[] { Enumerable.Repeat((int?)20, 1), 20 }; yield return new object[] { Enumerable.Repeat(default(int?), 5), null }; yield return new 
object[] { new int?[] { 6, null, 9, 10, null, 7, 8 }, 6 }; yield return new object[] { new int?[] { null, null, null, null, null, -5 }, -5 }; yield return new object[] { new int?[] { 6, null, null, 0, 9, 0, 10, 0 }, 0 }; } [Theory] [MemberData(nameof(Min_NullableInt_TestData))] public void Min_NullableInt(IEnumerable<int?> source, int? expected) { Assert.Equal(expected, source.Min()); } [Theory, MemberData(nameof(Min_NullableInt_TestData))] public void Min_NullableIntRunOnce(IEnumerable<int?> source, int? expected) { Assert.Equal(expected, source.RunOnce().Min()); } [Fact] public void Min_NullableInt_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<int?>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<int?>)null).Min(x => x)); } public static IEnumerable<object[]> Min_NullableLong_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => (long?)i).ToArray(), 1L }; yield return new object[] { new long?[] { null, -1, -10, 10, 200, 1000 }, -10L }; yield return new object[] { new long?[] { null, 3000, 100, 200, 1000 }, 100L }; yield return new object[] { new long?[] { null, 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat((long?)long.MinValue, 1)), long.MinValue }; yield return new object[] { Enumerable.Repeat(default(long?), 100), null }; yield return new object[] { Enumerable.Repeat((long?)42, 1), 42L }; yield return new object[] { Enumerable.Empty<long?>(), null }; yield return new object[] { Enumerable.Repeat((long?)long.MaxValue, 1), long.MaxValue }; yield return new object[] { Enumerable.Repeat(default(long?), 5), null }; yield return new object[] { new long?[] { long.MinValue, null, 9, 10, null, 7, 8 }, long.MinValue }; yield return new object[] { new long?[] { null, null, null, null, null, -long.MaxValue }, -long.MaxValue }; yield return new object[] { new long?[] { 6, null, null, 0, 9, 0, 10, 0 }, 0L }; } [Theory] [MemberData(nameof(Min_NullableLong_TestData))] public void Min_NullableLong(IEnumerable<long?> source, long? 
expected) { Assert.Equal(expected, source.Min()); } [Fact] public void Min_NullableLong_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<long?>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<long?>)null).Min(x => x)); } public static IEnumerable<object[]> Min_NullableFloat_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => (float?)i).ToArray(), 1f }; yield return new object[] { new float?[] { null, -1, -10, 10, 200, 1000 }, -10f }; yield return new object[] { new float?[] { null, 3000, 100, 200, 1000 }, 100f }; yield return new object[] { new float?[] { null, 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat((float?)float.MinValue, 1)), float.MinValue }; yield return new object[] { Enumerable.Repeat(default(float?), 100), null }; yield return new object[] { Enumerable.Repeat((float?)42, 1), 42f }; yield return new object[] { Enumerable.Empty<float?>(), null }; yield return new object[] { Enumerable.Repeat((float?)float.MinValue, 1), float.MinValue }; yield return new object[] { Enumerable.Repeat(default(float?), 100), null }; yield return new object[] { new float?[] { -4.50f, null, 10.98f, null, 7.5f, 8.6f }, -4.5f }; yield return new object[] { new float?[] { null, null, null, null, null, 0f }, 0f }; yield return new object[] { new float?[] { 6.4f, null, null, -0.5f, 9.4f, -0.5f, 10.9f, -0.5f }, -0.5f }; yield return new object[] { new float?[] { float.NaN, 6.8f, 9.4f, 10f, 0, null, -5.6f }, float.NaN }; yield return new object[] { new float?[] { 6.8f, 9.4f, 10f, 0, null, -5.6f, float.NaN }, float.NaN }; yield return new object[] { new float?[] { float.NaN, float.NegativeInfinity }, float.NaN }; yield return new object[] { new float?[] { float.NegativeInfinity, float.NaN }, float.NaN }; yield return new object[] { new float?[] { float.NaN, null, null, null }, float.NaN }; yield return new object[] { new float?[] { null, null, null, float.NaN }, float.NaN }; yield return new object[] { new float?[] { null, float.NaN, null }, float.NaN }; // In .NET Core, Enumerable.Min shortcircuits if it finds any float.NaN in the array, // as nothing can be less than float.NaN. See https://github.com/dotnet/corefx/pull/2426. // Without this optimization, we would iterate through int.MaxValue elements, which takes // a long time. if (!PlatformDetection.IsFullFramework) { yield return new object[] { Enumerable.Repeat((float?)float.NaN, int.MaxValue), float.NaN }; } yield return new object[] { Enumerable.Repeat((float?)float.NaN, 3), float.NaN }; } [Theory] [MemberData(nameof(Min_NullableFloat_TestData))] public void Min_NullableFloat(IEnumerable<float?> source, float? 
expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_NullableFloat_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<float?>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<float?>)null).Min(x => x)); } public static IEnumerable<object[]> Min_NullableDouble_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => (double?)i).ToArray(), 1.0 }; yield return new object[] { new double?[] { null, -1, -10, 10, 200, 1000 }, -10.0 }; yield return new object[] { new double?[] { null, 3000, 100, 200, 1000 }, 100.0 }; yield return new object[] { new double?[] { null, 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat((double?)double.MinValue, 1)), double.MinValue }; yield return new object[] { Enumerable.Repeat(default(double?), 100), null }; yield return new object[] { Enumerable.Repeat((double?)42, 1), 42.0 }; yield return new object[] { Enumerable.Empty<double?>(), null }; yield return new object[] { Enumerable.Repeat((double?)double.MinValue, 1), double.MinValue }; yield return new object[] { Enumerable.Repeat(default(double?), 5), null }; yield return new object[] { new double?[] { -4.50, null, 10.98, null, 7.5, 8.6 }, -4.5 }; yield return new object[] { new double?[] { null, null, null, null, null, 0 }, 0.0 }; yield return new object[] { new double?[] { 6.4, null, null, -0.5, 9.4, -0.5, 10.9, -0.5 }, -0.5 }; yield return new object[] { new double?[] { double.NaN, 6.8, 9.4, 10.0, 0.0, null, -5.6 }, double.NaN }; yield return new object[] { new double?[] { 6.8, 9.4, 10, 0.0, null, -5.6f, double.NaN }, double.NaN }; yield return new object[] { new double?[] { double.NaN, double.NegativeInfinity }, double.NaN }; yield return new object[] { new double?[] { double.NegativeInfinity, double.NaN }, double.NaN }; yield return new object[] { new double?[] { double.NaN, null, null, null }, double.NaN }; yield return new object[] { new double?[] { null, null, null, double.NaN }, double.NaN }; yield return new object[] { new double?[] { null, double.NaN, null }, double.NaN }; // In .NET Core, Enumerable.Min shortcircuits if it finds any double.NaN in the array, // as nothing can be less than double.NaN. See https://github.com/dotnet/corefx/pull/2426. // Without this optimization, we would iterate through int.MaxValue elements, which takes // a long time. if (!PlatformDetection.IsFullFramework) { yield return new object[] { Enumerable.Repeat((double?)double.NaN, int.MaxValue), double.NaN }; } yield return new object[] { Enumerable.Repeat((double?)double.NaN, 3), double.NaN }; } [Theory] [MemberData(nameof(Min_NullableDouble_TestData))] public void Min_NullableDouble(IEnumerable<double?> source, double? 
expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_NullableDouble_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<double?>)null).Min()); } public static IEnumerable<object[]> Min_NullableDecimal_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => (decimal?)i).ToArray(), 1m }; yield return new object[] { new decimal?[] { null, -1, -10, 10, 200, 1000 }, -10m }; yield return new object[] { new decimal?[] { null, 3000, 100, 200, 1000 }, 100m }; yield return new object[] { new decimal?[] { null, 3000, 100, 200, 1000 }.Concat(Enumerable.Repeat((decimal?)decimal.MinValue, 1)), decimal.MinValue }; yield return new object[] { Enumerable.Repeat(default(decimal?), 100), null }; yield return new object[] { Enumerable.Repeat((decimal?)42, 1), 42m }; yield return new object[] { Enumerable.Empty<decimal?>(), null }; yield return new object[] { Enumerable.Repeat((decimal?)decimal.MaxValue, 1), decimal.MaxValue }; yield return new object[] { Enumerable.Repeat(default(decimal?), 5), null }; yield return new object[] { new decimal?[] { -4.50m, null, null, 10.98m, null, 7.5m, 8.6m }, -4.5m }; yield return new object[] { new decimal?[] { null, null, null, null, null, 0m }, 0m }; yield return new object[] { new decimal?[] { 6.4m, null, null, decimal.MinValue, 9.4m, decimal.MinValue, 10.9m, decimal.MinValue }, decimal.MinValue }; } [Theory] [MemberData(nameof(Min_NullableDecimal_TestData))] public void Min_NullableDecimal(IEnumerable<decimal?> source, decimal? expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_NullableDecimal_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<decimal?>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<decimal?>)null).Min(x => x)); } public static IEnumerable<object[]> Min_DateTime_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => new DateTime(2000, 1, i)).ToArray(), new DateTime(2000, 1, 1) }; yield return new object[] { new DateTime[] { new DateTime(2000, 12, 1), new DateTime(2000, 1, 1), new DateTime(2000, 1, 12) }, new DateTime(2000, 1, 1) }; DateTime[] hundred = new DateTime[] { new DateTime(3000, 1, 1), new DateTime(100, 1, 1), new DateTime(200, 1, 1), new DateTime(1000, 1, 1) }; yield return new object[] { hundred, new DateTime(100, 1, 1) }; yield return new object[] { hundred.Concat(Enumerable.Repeat(DateTime.MinValue, 1)), DateTime.MinValue }; } [Theory] [MemberData(nameof(Min_DateTime_TestData))] public void Min_DateTime(IEnumerable<DateTime> source, DateTime expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Fact] public void Min_DateTime_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<DateTime>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<DateTime>)null).Min(x => x)); } [Fact] public void Min_DateTime_EmptySource_ThrowsInvalidOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<DateTime>().Min()); Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<DateTime>().Min(x => x)); } public static IEnumerable<object[]> Min_String_TestData() { yield return new object[] { Enumerable.Range(1, 10).Select(i => 
i.ToString()).ToArray(), "1" }; yield return new object[] { new string[] { "Alice", "Bob", "Charlie", "Eve", "Mallory", "Trent", "Victor" }, "Alice" }; yield return new object[] { new string[] { null, "Charlie", null, "Victor", "Trent", null, "Eve", "Alice", "Mallory", "Bob" }, "Alice" }; yield return new object[] { Enumerable.Empty<string>(), null }; yield return new object[] { Enumerable.Repeat("Hello", 1), "Hello" }; yield return new object[] { Enumerable.Repeat("hi", 5), "hi" }; yield return new object[] { new string[] { "aaa", "abcd", "bark", "temp", "cat" }, "aaa" }; yield return new object[] { new string[] { null, null, null, null, "aAa" }, "aAa" }; yield return new object[] { new string[] { "ooo", "www", "www", "ooo", "ooo", "ppp" }, "ooo" }; yield return new object[] { Enumerable.Repeat(default(string), 5), null }; } [Theory] [MemberData(nameof(Min_String_TestData))] public void Min_String(IEnumerable<string> source, string expected) { Assert.Equal(expected, source.Min()); Assert.Equal(expected, source.Min(x => x)); } [Theory, MemberData(nameof(Min_String_TestData))] public void Min_StringRunOnce(IEnumerable<string> source, string expected) { Assert.Equal(expected, source.RunOnce().Min()); Assert.Equal(expected, source.RunOnce().Min(x => x)); } [Fact] public void Min_String_NullSource_ThrowsArgumentNullException() { AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<string>)null).Min()); AssertExtensions.Throws<ArgumentNullException>("source", () => ((IEnumerable<string>)null).Min(x => x)); } [Fact] public void Min_Int_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=10 }, new { name="John", num=-105 }, new { name="Bob", num=-30 } }; Assert.Equal(-105, source.Min(e => e.num)); } [Fact] public void Min_Int_NullSelector_ThrowsArgumentNullException() { Func<int, int> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int>().Min(selector)); } [Fact] public void Min_Long_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=10L }, new { name="John", num=long.MinValue }, new { name="Bob", num=-10L } }; Assert.Equal(long.MinValue, source.Min(e => e.num)); } [Fact] public void Min_Long_NullSelector_ThrowsArgumentNullException() { Func<long, long> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long>().Min(selector)); } [Fact] public void Min_Float_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=-45.5f }, new { name="John", num=-132.5f }, new { name="Bob", num=20.45f } }; Assert.Equal(-132.5f, source.Min(e => e.num)); } [Fact] public void Min_Float_NullSelector_ThrowsArgumentNullException() { Func<float, float> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float>().Min(selector)); } [Fact] public void Min_Double_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=-45.5 }, new { name="John", num=-132.5 }, new { name="Bob", num=20.45 } }; Assert.Equal(-132.5, source.Min(e => e.num)); } [Fact] public void Min_Double_NullSelector_ThrowsArgumentNullException() { Func<double, double> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double>().Min(selector)); } [Fact] public void Min_Decimal_WithSelectorAccessingProperty() { var source = new[] { new {name="Tim", num=100.45m}, new {name="John", num=10.5m}, new {name="Bob", num=0.05m} }; Assert.Equal(0.05m, source.Min(e => 
e.num)); } [Fact] public void Min_Decimal_NullSelector_ThrowsArgumentNullException() { Func<decimal, decimal> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal>().Min(selector)); } [Fact] public void Min_NullableInt_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=(int?)10 }, new { name="John", num=default(int?) }, new { name="Bob", num=(int?)-30 } }; Assert.Equal(-30, source.Min(e => e.num)); } [Fact] public void Min_NullableInt_NullSelector_ThrowsArgumentNullException() { Func<int?, int?> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int?>().Min(selector)); } [Fact] public void Min_NullableLong_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=default(long?) }, new { name="John", num=(long?)long.MinValue }, new { name="Bob", num=(long?)-10L } }; Assert.Equal(long.MinValue, source.Min(e => e.num)); } [Fact] public void Min_NullableLong_NullSelector_ThrowsArgumentNullException() { Func<long?, long?> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long?>().Min(selector)); } [Fact] public void Min_NullableFloat_WithSelectorAccessingProperty() { var source = new[] { new {name="Tim", num=(float?)-45.5f}, new {name="John", num=(float?)-132.5f}, new {name="Bob", num=default(float?)} }; Assert.Equal(-132.5f, source.Min(e => e.num)); } [Fact] public void Min_NullableFloat_NullSelector_ThrowsArgumentNullException() { Func<float?, float?> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float?>().Min(selector)); } [Fact] public void Min_NullableDouble_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=(double?)-45.5 }, new { name="John", num=(double?)-132.5 }, new { name="Bob", num=default(double?) } }; Assert.Equal(-132.5, source.Min(e => e.num)); } [Fact] public void Min_NullableDouble_NullSelector_ThrowsArgumentNullException() { Func<double?, double?> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double?>().Min(selector)); } [Fact] public void Min_NullableDecimal_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=(decimal?)100.45m }, new { name="John", num=(decimal?)10.5m }, new { name="Bob", num=default(decimal?) } }; Assert.Equal(10.5m, source.Min(e => e.num)); } [Fact] public void Min_NullableDecimal_NullSelector_ThrowsArgumentNullException() { Func<decimal?, decimal?> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal?>().Min(selector)); } [Fact] public void Min_DateTime_NullSelector_ThrowsArgumentNullException() { Func<DateTime, DateTime> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<DateTime>().Min(selector)); } [Fact] public void Min_String_WithSelectorAccessingProperty() { var source = new[] { new { name="Tim", num=100.45m }, new { name="John", num=10.5m }, new { name="Bob", num=0.05m } }; Assert.Equal("Bob", source.Min(e => e.name)); } [Fact] public void Min_String_NullSelector_ThrowsArgumentNullException() { Func<string, string> selector = null; AssertExtensions.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<string>().Min(selector)); } [Fact] public void Min_Bool_EmptySource_ThrowsInvalodOperationException() { Assert.Throws<InvalidOperationException>(() => Enumerable.Empty<bool>().Min()); } } }
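// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the corefx test suite above): how an extra
// Min test following the same [Theory]/[MemberData] pattern could look. The
// class name MinTestsSketch and its data rows are hypothetical; the xUnit
// attributes and LINQ calls are the same ones used by MinTests above.
// ---------------------------------------------------------------------------
using System.Collections.Generic;
using System.Linq;
using Xunit;

namespace System.Linq.Tests
{
    public class MinTestsSketch
    {
        public static IEnumerable<object[]> Min_Int_Sketch_TestData()
        {
            // Each row is { source, expected }; the theory below consumes both.
            yield return new object[] { new int[] { 5, 3, 9 }, 3 };
            yield return new object[] { Enumerable.Range(-4, 10).ToArray(), -4 };
        }

        [Theory]
        [MemberData(nameof(Min_Int_Sketch_TestData))]
        public void Min_WithAndWithoutSelector_AgreeOnResult(IEnumerable<int> source, int expected)
        {
            // The no-selector and identity-selector overloads must agree.
            Assert.Equal(expected, source.Min());
            Assert.Equal(expected, source.Min(x => x));
        }

        [Fact]
        public void Min_WithSelector_UsesProjectedValues()
        {
            var items = new[] { new { Name = "a", Value = 7 }, new { Name = "b", Value = -2 } };
            Assert.Equal(-2, items.Min(i => i.Value));
        }
    }
}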
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.ComponentModel.Design { using Microsoft.Win32; using System; using System.Collections.Generic; using System.Diagnostics; using System.Security.Permissions; /// <summary> /// This is a simple implementation of IServiceContainer. /// </summary> public class ServiceContainer : IServiceContainer, IDisposable { private ServiceCollection<object> _services; private IServiceProvider _parentProvider; private static Type[] s_defaultServices = new Type[] { typeof(IServiceContainer), typeof(ServiceContainer) }; private static TraceSwitch s_TRACESERVICE = new TraceSwitch("TRACESERVICE", "ServiceProvider: Trace service provider requests."); /// <summary> /// Creates a new service object container. /// </summary> public ServiceContainer() { } /// <summary> /// Creates a new service object container. /// </summary> public ServiceContainer(IServiceProvider parentProvider) { _parentProvider = parentProvider; } /// <summary> /// Retrieves the parent service container, or null /// if there is no parent container. /// </summary> private IServiceContainer Container { get { IServiceContainer container = null; if (_parentProvider != null) { container = (IServiceContainer)_parentProvider.GetService(typeof(IServiceContainer)); } return container; } } /// <summary> /// This property returns the default services that are implemented directly on this IServiceContainer. /// the default implementation of this property is to return the IServiceContainer and ServiceContainer /// types. You may override this proeprty and return your own types, modifying the default behavior /// of GetService. /// </summary> protected virtual Type[] DefaultServices => s_defaultServices; /// <summary> /// Our collection of services. The service collection is demand /// created here. /// </summary> private ServiceCollection<object> Services => _services ?? (_services = new ServiceCollection<object>()); /// <summary> /// Adds the given service to the service container. /// </summary> public void AddService(Type serviceType, object serviceInstance) { AddService(serviceType, serviceInstance, false); } /// <summary> /// Adds the given service to the service container. /// </summary> public virtual void AddService(Type serviceType, object serviceInstance, bool promote) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, $"Adding service (instance) {serviceType.Name}. Promoting: {promote.ToString()}"); if (promote) { IServiceContainer container = Container; if (container != null) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "Promoting to container"); container.AddService(serviceType, serviceInstance, promote); return; } } // We're going to add this locally. Ensure that the service instance // is correct. 
// if (serviceType == null) throw new ArgumentNullException(nameof(serviceType)); if (serviceInstance == null) throw new ArgumentNullException(nameof(serviceInstance)); if (!(serviceInstance is ServiceCreatorCallback) && !serviceInstance.GetType().IsCOMObject && !serviceType.IsInstanceOfType(serviceInstance)) { throw new ArgumentException(SR.Format(SR.ErrorInvalidServiceInstance, serviceType.FullName)); } if (Services.ContainsKey(serviceType)) { throw new ArgumentException(SR.Format(SR.ErrorServiceExists, serviceType.FullName), nameof(serviceType)); } Services[serviceType] = serviceInstance; } /// <summary> /// Adds the given service to the service container. /// </summary> public void AddService(Type serviceType, ServiceCreatorCallback callback) { AddService(serviceType, callback, false); } /// <summary> /// Adds the given service to the service container. /// </summary> public virtual void AddService(Type serviceType, ServiceCreatorCallback callback, bool promote) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, $"Adding service (callback) {serviceType.Name}. Promoting: {promote.ToString()}"); if (promote) { IServiceContainer container = Container; if (container != null) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "Promoting to container"); container.AddService(serviceType, callback, promote); return; } } // We're going to add this locally. Ensure that the service instance // is correct. // if (serviceType == null) throw new ArgumentNullException(nameof(serviceType)); if (callback == null) throw new ArgumentNullException(nameof(callback)); if (Services.ContainsKey(serviceType)) { throw new ArgumentException(SR.Format(SR.ErrorServiceExists, serviceType.FullName), nameof(serviceType)); } Services[serviceType] = callback; } /// <summary> /// Disposes this service container. This also walks all instantiated services within the container /// and disposes any that implement IDisposable, and clears the service list. /// </summary> public void Dispose() { Dispose(true); } /// <summary> /// Disposes this service container. This also walks all instantiated services within the container /// and disposes any that implement IDisposable, and clears the service list. /// </summary> protected virtual void Dispose(bool disposing) { if (disposing) { ServiceCollection<object> serviceCollection = _services; _services = null; if (serviceCollection != null) { foreach (object o in serviceCollection.Values) { if (o is IDisposable) { ((IDisposable)o).Dispose(); } } } } } /// <summary> /// Retrieves the requested service. /// </summary> public virtual object GetService(Type serviceType) { object service = null; Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, $"Searching for service {serviceType.Name}"); // Try locally. We first test for services we // implement and then look in our service collection. // Type[] defaults = DefaultServices; for (int idx = 0; idx < defaults.Length; idx++) { if (serviceType.IsEquivalentTo(defaults[idx])) { service = this; break; } } if (service == null) { Services.TryGetValue(serviceType, out service); } // Is the service a creator delegate? // if (service is ServiceCreatorCallback) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "Encountered a callback. Invoking it"); service = ((ServiceCreatorCallback)service)(this, serviceType); Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, $"Callback return object: {(service == null ? 
"(null)" : service.ToString())}"); if (service != null && !service.GetType().IsCOMObject && !serviceType.IsInstanceOfType(service)) { // Callback passed us a bad service. NULL it, rather than throwing an exception. // Callers here do not need to be prepared to handle bad callback implemetations. Debug.Fail($"Object {service.GetType().Name} was returned from a service creator callback but it does not implement the registered type of {serviceType.Name}"); Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "**** Object does not implement service interface"); service = null; } // And replace the callback with our new service. // Services[serviceType] = service; } if (service == null && _parentProvider != null) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "Service unresolved. Trying parent"); service = _parentProvider.GetService(serviceType); } #if DEBUG if (s_TRACESERVICE.TraceVerbose && service == null) { Debug.WriteLine("******************************************"); Debug.WriteLine("FAILED to resolve service " + serviceType.Name); Debug.WriteLine("AT: " + Environment.StackTrace); Debug.WriteLine("******************************************"); } #endif return service; } /// <summary> /// Removes the given service type from the service container. /// </summary> public void RemoveService(Type serviceType) { RemoveService(serviceType, false); } /// <summary> /// Removes the given service type from the service container. /// </summary> public virtual void RemoveService(Type serviceType, bool promote) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, $"Removing service: {serviceType.Name}, Promote: {promote.ToString()}"); if (promote) { IServiceContainer container = Container; if (container != null) { Debug.WriteLineIf(s_TRACESERVICE.TraceVerbose, "Invoking parent container"); container.RemoveService(serviceType, promote); return; } } // We're going to remove this from our local list. // if (serviceType == null) throw new ArgumentNullException(nameof(serviceType)); Services.Remove(serviceType); } /// <summary> /// Use this collection to store mapping from the Type of a service to the object that provides it in a way /// that is aware of embedded types. The comparer for this collection will call Type.IsEquivalentTo(...) /// instead of doing a reference comparison which will fail in type embedding scenarios. To speed the lookup /// performance we will use hash code of Type.FullName. /// </summary> /// <typeparam name="T"></typeparam> private sealed class ServiceCollection<T> : Dictionary<Type, T> { private static EmbeddedTypeAwareTypeComparer s_serviceTypeComparer = new EmbeddedTypeAwareTypeComparer(); private sealed class EmbeddedTypeAwareTypeComparer : IEqualityComparer<Type> { #region IEqualityComparer<Type> Members public bool Equals(Type x, Type y) { return x.IsEquivalentTo(y); } public int GetHashCode(Type obj) { return obj.FullName.GetHashCode(); } #endregion } public ServiceCollection() : base(s_serviceTypeComparer) { } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Buffers; using System.Collections.Generic; using System.Diagnostics; using System.Text; namespace System.Security.Cryptography.Pkcs { internal static class Pkcs12Kdf { private const byte CipherKeyId = 1; private const byte IvId = 2; private const byte MacKeyId = 3; // This is a dictionary representation of the table in // https://tools.ietf.org/html/rfc7292#appendix-B.2 private static readonly Dictionary<HashAlgorithmName, Tuple<int, int>> s_uvLookup = new Dictionary<HashAlgorithmName, Tuple<int, int>> { { HashAlgorithmName.MD5, Tuple.Create(128, 512) }, { HashAlgorithmName.SHA1, Tuple.Create(160, 512) }, { HashAlgorithmName.SHA256, Tuple.Create(256, 512) }, { HashAlgorithmName.SHA384, Tuple.Create(384, 1024) }, { HashAlgorithmName.SHA512, Tuple.Create(512, 1024) }, }; internal static void DeriveCipherKey( ReadOnlySpan<char> password, HashAlgorithmName hashAlgorithm, int iterationCount, ReadOnlySpan<byte> salt, Span<byte> destination) { Derive( password, hashAlgorithm, iterationCount, CipherKeyId, salt, destination); } internal static void DeriveIV( ReadOnlySpan<char> password, HashAlgorithmName hashAlgorithm, int iterationCount, ReadOnlySpan<byte> salt, Span<byte> destination) { Derive( password, hashAlgorithm, iterationCount, IvId, salt, destination); } internal static void DeriveMacKey( ReadOnlySpan<char> password, HashAlgorithmName hashAlgorithm, int iterationCount, ReadOnlySpan<byte> salt, Span<byte> destination) { Derive( password, hashAlgorithm, iterationCount, MacKeyId, salt, destination); } private static void Derive( ReadOnlySpan<char> password, HashAlgorithmName hashAlgorithm, int iterationCount, byte id, ReadOnlySpan<byte> salt, Span<byte> destination) { // https://tools.ietf.org/html/rfc7292#appendix-B.2 Debug.Assert(iterationCount >= 1); if (!s_uvLookup.TryGetValue(hashAlgorithm, out Tuple<int, int> uv)) { throw new CryptographicException(SR.Cryptography_UnknownHashAlgorithm, hashAlgorithm.Name); } (int u, int v) = uv; Debug.Assert(v <= 1024); // 1. Construct a string, D (the "diversifier"), by concatenating v/8 copies of ID. int vBytes = v >> 3; Span<byte> D = stackalloc byte[vBytes]; D.Fill(id); // 2. Concatenate copies of the salt together to create a string S of // length v(ceiling(s/ v)) bits(the final copy of the salt may be // truncated to create S). Note that if the salt is the empty // string, then so is S. int SLen = ((salt.Length - 1 + vBytes) / vBytes) * vBytes; // The password is a null-terminated UTF-16BE version of the input. int passLen = checked((password.Length + 1) * 2); // If password == default then the span represents the null string (as opposed to // an empty string), and the P block should then have size 0 in the next step. if (password == default) { passLen = 0; } // 3. Concatenate copies of the password together to create a string P // of length v(ceiling(p/v)) bits (the final copy of the password // may be truncated to create P). Note that if the password is the // empty string, then so is P. // // (The RFC quote considers the trailing '\0' to be part of the string, // so "empty string" from this RFC means "null string" in C#, and C#'s // "empty string" is not 'empty' in this context.) int PLen = ((passLen - 1 + vBytes) / vBytes) * vBytes; // 4. Set I=S||P to be the concatenation of S and P. 
int ILen = SLen + PLen; Span<byte> I = stackalloc byte[0]; byte[] IRented = null; if (ILen <= 1024) { I = stackalloc byte[ILen]; } else { IRented = ArrayPool<byte>.Shared.Rent(ILen); I = IRented.AsSpan(0, ILen); } IncrementalHash hash = IncrementalHash.CreateHash(hashAlgorithm); try { CircularCopy(salt, I.Slice(0, SLen)); CircularCopyUtf16BE(password, I.Slice(SLen)); int uBytes = u >> 3; Span<byte> hashBuf = stackalloc byte[uBytes]; Span<byte> bBuf = stackalloc byte[vBytes]; // 5. Set c=ceiling(n/u). // 6. For i=1, 2, ..., c, do the following: // (later we're going to start writing A_i values as output, // they mean "while work remains"). while (true) { // A. Set A_i=H^r(D||I). (i.e., the r-th hash of D||I, // H(H(H(... H(D || I)))) hash.AppendData(D); hash.AppendData(I); for (int j = iterationCount; j > 0; j--) { if (!hash.TryGetHashAndReset(hashBuf, out int bytesWritten) || bytesWritten != hashBuf.Length) { Debug.Fail($"Hash output wrote {bytesWritten} bytes when {hashBuf.Length} was expected"); throw new CryptographicException(); } if (j != 1) { hash.AppendData(hashBuf); } } // 7. Concatenate A_1, A_2, ..., A_c together to form a pseudorandom // bit string, A. // // 8. Use the first n bits of A as the output of this entire process. if (hashBuf.Length >= destination.Length) { hashBuf.Slice(0, destination.Length).CopyTo(destination); return; } hashBuf.CopyTo(destination); destination = destination.Slice(hashBuf.Length); // B. Concatenate copies of A_i to create a string B of length v // bits(the final copy of Ai may be truncated to create B). CircularCopy(hashBuf, bBuf); // C. Treating I as a concatenation I_0, I_1, ..., I_(k-1) of v-bit // blocks, where k = ceiling(s / v) + ceiling(p / v), modify I by // setting I_j = (I_j + B + 1) mod 2 ^ v for each j. 
for (int j = (I.Length / vBytes) - 1; j >= 0; j--) { Span<byte> I_j = I.Slice(j * vBytes, vBytes); AddPlusOne(I_j, bBuf); } } } finally { CryptographicOperations.ZeroMemory(I); if (IRented != null) { ArrayPool<byte>.Shared.Return(IRented); } hash.Dispose(); } } private static void AddPlusOne(Span<byte> into, Span<byte> addend) { Debug.Assert(into.Length == addend.Length); int carry = 1; for (int i = into.Length - 1; i >= 0; i--) { int tmp = carry + into[i] + addend[i]; into[i] = (byte)tmp; carry = tmp >> 8; } } private static void CircularCopy(ReadOnlySpan<byte> bytes, Span<byte> destination) { Debug.Assert(bytes.Length > 0); while (destination.Length > 0) { if (destination.Length >= bytes.Length) { bytes.CopyTo(destination); destination = destination.Slice(bytes.Length); } else { bytes.Slice(0, destination.Length).CopyTo(destination); return; } } } private static void CircularCopyUtf16BE(ReadOnlySpan<char> password, Span<byte> destination) { int fullCopyLen = password.Length * 2; Encoding bigEndianUnicode = System.Text.Encoding.BigEndianUnicode; Debug.Assert(destination.Length % 2 == 0); while (destination.Length > 0) { if (destination.Length >= fullCopyLen) { int count = bigEndianUnicode.GetBytes(password, destination); if (count != fullCopyLen) { Debug.Fail($"Unexpected written byte count ({count} vs {fullCopyLen})"); throw new CryptographicException(); } destination = destination.Slice(count); Span<byte> nullTerminator = destination.Slice(0, Math.Min(2, destination.Length)); nullTerminator.Clear(); destination = destination.Slice(nullTerminator.Length); } else { ReadOnlySpan<char> trimmed = password.Slice(0, destination.Length / 2); int count = bigEndianUnicode.GetBytes(trimmed, destination); if (count != destination.Length) { Debug.Fail($"Partial copy wrote {count} bytes of {destination.Length} expected"); throw new CryptographicException(); } return; } } } } }
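// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the Pkcs12Kdf source above): how the KDF
// might be invoked from code in the same assembly (the class and its methods
// are internal). The iteration count and buffer sizes are made-up example
// values; the parameters mirror the RFC 7292 appendix B.2 inputs: password,
// hash algorithm, iteration count, salt, and an output buffer whose length
// determines how much key material is derived.
// ---------------------------------------------------------------------------
using System;
using System.Security.Cryptography;

namespace System.Security.Cryptography.Pkcs
{
    internal static class Pkcs12KdfSketch
    {
        internal static byte[] DeriveAes256KeyAndIv(ReadOnlySpan<char> password, ReadOnlySpan<byte> salt)
        {
            // 32 bytes of cipher key (ID = 1) followed by 16 bytes of IV (ID = 2),
            // both derived from the same password and salt with 2048 iterations.
            byte[] keyAndIv = new byte[32 + 16];

            Pkcs12Kdf.DeriveCipherKey(password, HashAlgorithmName.SHA256, 2048, salt, keyAndIv.AsSpan(0, 32));
            Pkcs12Kdf.DeriveIV(password, HashAlgorithmName.SHA256, 2048, salt, keyAndIv.AsSpan(32, 16));

            return keyAndIv;
        }
    }
}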
using System; using System.Collections; using System.Text; using System.Collections.Generic; using UnityEngine; // Source: UIToolkit -- https://github.com/prime31/UIToolkit/blob/master/Assets/Plugins/MiniJSON.cs // Based on the JSON parser from // http://techblog.procurios.nl/k/618/news/view/14605/14863/How-do-I-write-my-own-parser-for-JSON.html /// <summary> /// This class encodes and decodes JSON strings. /// Spec. details, see http://www.json.org/ /// /// JSON uses Arrays and Objects. These correspond here to the datatypes ArrayList and Hashtable. /// All numbers are parsed to doubles. /// </summary> public class NGUIJson { private const int TOKEN_NONE = 0; private const int TOKEN_CURLY_OPEN = 1; private const int TOKEN_CURLY_CLOSE = 2; private const int TOKEN_SQUARED_OPEN = 3; private const int TOKEN_SQUARED_CLOSE = 4; private const int TOKEN_COLON = 5; private const int TOKEN_COMMA = 6; private const int TOKEN_STRING = 7; private const int TOKEN_NUMBER = 8; private const int TOKEN_TRUE = 9; private const int TOKEN_FALSE = 10; private const int TOKEN_NULL = 11; private const int BUILDER_CAPACITY = 2000; /// <summary> /// On decoding, this value holds the position at which the parse failed (-1 = no error). /// </summary> protected static int lastErrorIndex = -1; protected static string lastDecode = ""; /// <summary> /// Parse the specified JSon file, loading sprite information for the specified atlas. /// </summary> public static void LoadSpriteData (UIAtlas atlas, TextAsset asset) { if (asset == null || atlas == null) return; string jsonString = asset.text; Hashtable decodedHash = jsonDecode(jsonString) as Hashtable; if (decodedHash == null) { Debug.LogWarning("Unable to parse Json file: " + asset.name); return; } atlas.coordinates = UIAtlas.Coordinates.Pixels; List<UIAtlas.Sprite> oldSprites = atlas.spriteList; atlas.spriteList = new List<UIAtlas.Sprite>(); Hashtable frames = (Hashtable)decodedHash["frames"]; foreach (System.Collections.DictionaryEntry item in frames) { UIAtlas.Sprite newSprite = new UIAtlas.Sprite(); newSprite.name = item.Key.ToString(); bool exists = false; // Check to see if this sprite exists foreach (UIAtlas.Sprite oldSprite in oldSprites) { if (oldSprite.name.Equals(newSprite.name, StringComparison.OrdinalIgnoreCase)) { exists = true; break; } } // Get rid of the extension if the sprite doesn't exist // The extension is kept for backwards compatibility so it's still possible to update older atlases. 
if (!exists) newSprite.name = newSprite.name.Replace(".png", ""); // Extract the info we need from the TexturePacker json file, mainly uvRect and size Hashtable table = (Hashtable)item.Value; Hashtable frame = (Hashtable)table["frame"]; int frameX = int.Parse(frame["x"].ToString()); int frameY = int.Parse(frame["y"].ToString()); int frameW = int.Parse(frame["w"].ToString()); int frameH = int.Parse(frame["h"].ToString()); // Read the rotation value newSprite.rotated = (bool)table["rotated"]; // Fill in the proper values if (newSprite.rotated) { newSprite.outer = new Rect(frameX, frameY, frameH, frameW); newSprite.inner = new Rect(frameX, frameY, frameH, frameW); } else { newSprite.outer = new Rect(frameX, frameY, frameW, frameH); newSprite.inner = new Rect(frameX, frameY, frameW, frameH); } // Support for trimmed sprites Hashtable sourceSize = (Hashtable)table["sourceSize"]; Hashtable spriteSize = (Hashtable)table["spriteSourceSize"]; if (spriteSize != null && sourceSize != null) { // TODO: Account for rotated sprites if (frameW > 0) { float spriteX = int.Parse(spriteSize["x"].ToString()); float spriteW = int.Parse(spriteSize["w"].ToString()); float sourceW = int.Parse(sourceSize["w"].ToString()); newSprite.paddingLeft = spriteX / frameW; newSprite.paddingRight = (sourceW - (spriteX + spriteW)) / frameW; } if (frameH > 0) { float spriteY = int.Parse(spriteSize["y"].ToString()); float spriteH = int.Parse(spriteSize["h"].ToString()); float sourceH = int.Parse(sourceSize["h"].ToString()); newSprite.paddingTop = spriteY / frameH; newSprite.paddingBottom = (sourceH - (spriteY + spriteH)) / frameH; } } // If the sprite was present before, see if we can copy its inner rect foreach (UIAtlas.Sprite oldSprite in oldSprites) { if (oldSprite.name.Equals(newSprite.name, StringComparison.OrdinalIgnoreCase)) { CopyInnerRect(oldSprite, newSprite); } } // Add this new sprite atlas.spriteList.Add(newSprite); } // Sort imported sprites alphabetically atlas.spriteList.Sort(CompareSprites); Debug.Log("Imported " + atlas.spriteList.Count + " sprites"); // Unload the asset asset = null; Resources.UnloadUnusedAssets(); } /// <summary> /// Sprite comparison function for sorting. /// </summary> static int CompareSprites (UIAtlas.Sprite a, UIAtlas.Sprite b) { return a.name.CompareTo(b.name); } /// <summary> /// Copy the inner rectangle from one sprite to another. 
/// </summary> static void CopyInnerRect (UIAtlas.Sprite oldSprite, UIAtlas.Sprite newSprite) { float offsetX = oldSprite.inner.xMin - oldSprite.outer.xMin; float offsetY = oldSprite.inner.yMin - oldSprite.outer.yMin; float sizeX = oldSprite.inner.width; float sizeY = oldSprite.inner.height; if (Mathf.Approximately(newSprite.outer.width, oldSprite.outer.width)) { // The sprite has not been rotated or it's a square newSprite.inner = new Rect(newSprite.outer.xMin + offsetX, newSprite.outer.yMin + offsetY, sizeX, sizeY); } else if (Mathf.Approximately(newSprite.outer.width, oldSprite.outer.height)) { // The sprite was rotated since the last time it was imported newSprite.inner = new Rect(newSprite.outer.xMin + offsetY, newSprite.outer.yMin + offsetX, sizeY, sizeX); } } /// <summary> /// Parses the string json into a value /// </summary> /// <param name="json">A JSON string.</param> /// <returns>An ArrayList, a Hashtable, a double, a string, null, true, or false</returns> public static object jsonDecode( string json ) { // save the string for debug information NGUIJson.lastDecode = json; if( json != null ) { char[] charArray = json.ToCharArray(); int index = 0; bool success = true; object value = NGUIJson.parseValue( charArray, ref index, ref success ); if( success ) NGUIJson.lastErrorIndex = -1; else NGUIJson.lastErrorIndex = index; return value; } else { return null; } } /// <summary> /// Converts a Hashtable / ArrayList / Dictionary(string,string) object into a JSON string /// </summary> /// <param name="json">A Hashtable / ArrayList</param> /// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns> public static string jsonEncode( object json ) { var builder = new StringBuilder( BUILDER_CAPACITY ); var success = NGUIJson.serializeValue( json, builder ); return ( success ? builder.ToString() : null ); } /// <summary> /// On decoding, this function returns whether the last parse succeeded (true when the error index is -1). /// </summary> /// <returns></returns> public static bool lastDecodeSuccessful() { return ( NGUIJson.lastErrorIndex == -1 ); } /// <summary> /// On decoding, this function returns the position at which the parse failed (-1 = no error). /// </summary> /// <returns></returns> public static int getLastErrorIndex() { return NGUIJson.lastErrorIndex; } /// <summary> /// If a decoding error occurred, this function returns a piece of the JSON string /// at which the error took place. To ease debugging.
/// </summary> /// <returns></returns> public static string getLastErrorSnippet() { if( NGUIJson.lastErrorIndex == -1 ) { return ""; } else { int startIndex = NGUIJson.lastErrorIndex - 5; int endIndex = NGUIJson.lastErrorIndex + 15; if( startIndex < 0 ) startIndex = 0; if( endIndex >= NGUIJson.lastDecode.Length ) endIndex = NGUIJson.lastDecode.Length - 1; return NGUIJson.lastDecode.Substring( startIndex, endIndex - startIndex + 1 ); } } #region Parsing protected static Hashtable parseObject( char[] json, ref int index ) { Hashtable table = new Hashtable(); int token; // { nextToken( json, ref index ); bool done = false; while( !done ) { token = lookAhead( json, index ); if( token == NGUIJson.TOKEN_NONE ) { return null; } else if( token == NGUIJson.TOKEN_COMMA ) { nextToken( json, ref index ); } else if( token == NGUIJson.TOKEN_CURLY_CLOSE ) { nextToken( json, ref index ); return table; } else { // name string name = parseString( json, ref index ); if( name == null ) { return null; } // : token = nextToken( json, ref index ); if( token != NGUIJson.TOKEN_COLON ) return null; // value bool success = true; object value = parseValue( json, ref index, ref success ); if( !success ) return null; table[name] = value; } } return table; } protected static ArrayList parseArray( char[] json, ref int index ) { ArrayList array = new ArrayList(); // [ nextToken( json, ref index ); bool done = false; while( !done ) { int token = lookAhead( json, index ); if( token == NGUIJson.TOKEN_NONE ) { return null; } else if( token == NGUIJson.TOKEN_COMMA ) { nextToken( json, ref index ); } else if( token == NGUIJson.TOKEN_SQUARED_CLOSE ) { nextToken( json, ref index ); break; } else { bool success = true; object value = parseValue( json, ref index, ref success ); if( !success ) return null; array.Add( value ); } } return array; } protected static object parseValue( char[] json, ref int index, ref bool success ) { switch( lookAhead( json, index ) ) { case NGUIJson.TOKEN_STRING: return parseString( json, ref index ); case NGUIJson.TOKEN_NUMBER: return parseNumber( json, ref index ); case NGUIJson.TOKEN_CURLY_OPEN: return parseObject( json, ref index ); case NGUIJson.TOKEN_SQUARED_OPEN: return parseArray( json, ref index ); case NGUIJson.TOKEN_TRUE: nextToken( json, ref index ); return Boolean.Parse( "TRUE" ); case NGUIJson.TOKEN_FALSE: nextToken( json, ref index ); return Boolean.Parse( "FALSE" ); case NGUIJson.TOKEN_NULL: nextToken( json, ref index ); return null; case NGUIJson.TOKEN_NONE: break; } success = false; return null; } protected static string parseString( char[] json, ref int index ) { string s = ""; char c; eatWhitespace( json, ref index ); // " c = json[index++]; bool complete = false; while( !complete ) { if( index == json.Length ) break; c = json[index++]; if( c == '"' ) { complete = true; break; } else if( c == '\\' ) { if( index == json.Length ) break; c = json[index++]; if( c == '"' ) { s += '"'; } else if( c == '\\' ) { s += '\\'; } else if( c == '/' ) { s += '/'; } else if( c == 'b' ) { s += '\b'; } else if( c == 'f' ) { s += '\f'; } else if( c == 'n' ) { s += '\n'; } else if( c == 'r' ) { s += '\r'; } else if( c == 't' ) { s += '\t'; } else if( c == 'u' ) { int remainingLength = json.Length - index; if( remainingLength >= 4 ) { char[] unicodeCharArray = new char[4]; Array.Copy( json, index, unicodeCharArray, 0, 4 ); // Drop in the HTML markup for the unicode character s += "&#x" + new string( unicodeCharArray ) + ";"; /* uint codePoint = UInt32.Parse(new string(unicodeCharArray), 
NumberStyles.HexNumber); // convert the integer codepoint to a unicode char and add to string s += Char.ConvertFromUtf32((int)codePoint); */ // skip 4 chars index += 4; } else { break; } } } else { s += c; } } if( !complete ) return null; return s; } protected static double parseNumber( char[] json, ref int index ) { eatWhitespace( json, ref index ); int lastIndex = getLastIndexOfNumber( json, index ); int charLength = ( lastIndex - index ) + 1; char[] numberCharArray = new char[charLength]; Array.Copy( json, index, numberCharArray, 0, charLength ); index = lastIndex + 1; return Double.Parse( new string( numberCharArray ) ); // , CultureInfo.InvariantCulture); } protected static int getLastIndexOfNumber( char[] json, int index ) { int lastIndex; for( lastIndex = index; lastIndex < json.Length; lastIndex++ ) if( "0123456789+-.eE".IndexOf( json[lastIndex] ) == -1 ) { break; } return lastIndex - 1; } protected static void eatWhitespace( char[] json, ref int index ) { for( ; index < json.Length; index++ ) if( " \t\n\r".IndexOf( json[index] ) == -1 ) { break; } } protected static int lookAhead( char[] json, int index ) { int saveIndex = index; return nextToken( json, ref saveIndex ); } protected static int nextToken( char[] json, ref int index ) { eatWhitespace( json, ref index ); if( index == json.Length ) { return NGUIJson.TOKEN_NONE; } char c = json[index]; index++; switch( c ) { case '{': return NGUIJson.TOKEN_CURLY_OPEN; case '}': return NGUIJson.TOKEN_CURLY_CLOSE; case '[': return NGUIJson.TOKEN_SQUARED_OPEN; case ']': return NGUIJson.TOKEN_SQUARED_CLOSE; case ',': return NGUIJson.TOKEN_COMMA; case '"': return NGUIJson.TOKEN_STRING; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case '-': return NGUIJson.TOKEN_NUMBER; case ':': return NGUIJson.TOKEN_COLON; } index--; int remainingLength = json.Length - index; // false if( remainingLength >= 5 ) { if( json[index] == 'f' && json[index + 1] == 'a' && json[index + 2] == 'l' && json[index + 3] == 's' && json[index + 4] == 'e' ) { index += 5; return NGUIJson.TOKEN_FALSE; } } // true if( remainingLength >= 4 ) { if( json[index] == 't' && json[index + 1] == 'r' && json[index + 2] == 'u' && json[index + 3] == 'e' ) { index += 4; return NGUIJson.TOKEN_TRUE; } } // null if( remainingLength >= 4 ) { if( json[index] == 'n' && json[index + 1] == 'u' && json[index + 2] == 'l' && json[index + 3] == 'l' ) { index += 4; return NGUIJson.TOKEN_NULL; } } return NGUIJson.TOKEN_NONE; } #endregion #region Serialization protected static bool serializeObjectOrArray( object objectOrArray, StringBuilder builder ) { if( objectOrArray is Hashtable ) { return serializeObject( (Hashtable)objectOrArray, builder ); } else if( objectOrArray is ArrayList ) { return serializeArray( (ArrayList)objectOrArray, builder ); } else { return false; } } protected static bool serializeObject( Hashtable anObject, StringBuilder builder ) { builder.Append( "{" ); IDictionaryEnumerator e = anObject.GetEnumerator(); bool first = true; while( e.MoveNext() ) { string key = e.Key.ToString(); object value = e.Value; if( !first ) { builder.Append( ", " ); } serializeString( key, builder ); builder.Append( ":" ); if( !serializeValue( value, builder ) ) { return false; } first = false; } builder.Append( "}" ); return true; } protected static bool serializeDictionary( Dictionary<string,string> dict, StringBuilder builder ) { builder.Append( "{" ); bool first = true; foreach( var kv in dict ) { if( !first ) builder.Append( ", " ); 
serializeString( kv.Key, builder ); builder.Append( ":" ); serializeString( kv.Value, builder ); first = false; } builder.Append( "}" ); return true; } protected static bool serializeArray( ArrayList anArray, StringBuilder builder ) { builder.Append( "[" ); bool first = true; for( int i = 0; i < anArray.Count; i++ ) { object value = anArray[i]; if( !first ) { builder.Append( ", " ); } if( !serializeValue( value, builder ) ) { return false; } first = false; } builder.Append( "]" ); return true; } protected static bool serializeValue( object value, StringBuilder builder ) { // Type t = value.GetType(); // Debug.Log("type: " + t.ToString() + " isArray: " + t.IsArray); if( value == null ) { builder.Append( "null" ); } else if( value.GetType().IsArray ) { serializeArray( new ArrayList( (ICollection)value ), builder ); } else if( value is string ) { serializeString( (string)value, builder ); } else if( value is Char ) { serializeString( Convert.ToString( (char)value ), builder ); } else if( value is Hashtable ) { serializeObject( (Hashtable)value, builder ); } else if( value is Dictionary<string,string> ) { serializeDictionary( (Dictionary<string,string>)value, builder ); } else if( value is ArrayList ) { serializeArray( (ArrayList)value, builder ); } else if( ( value is Boolean ) && ( (Boolean)value == true ) ) { builder.Append( "true" ); } else if( ( value is Boolean ) && ( (Boolean)value == false ) ) { builder.Append( "false" ); } else if( value.GetType().IsPrimitive ) { serializeNumber( Convert.ToDouble( value ), builder ); } else { return false; } return true; } protected static void serializeString( string aString, StringBuilder builder ) { builder.Append( "\"" ); char[] charArray = aString.ToCharArray(); for( int i = 0; i < charArray.Length; i++ ) { char c = charArray[i]; if( c == '"' ) { builder.Append( "\\\"" ); } else if( c == '\\' ) { builder.Append( "\\\\" ); } else if( c == '\b' ) { builder.Append( "\\b" ); } else if( c == '\f' ) { builder.Append( "\\f" ); } else if( c == '\n' ) { builder.Append( "\\n" ); } else if( c == '\r' ) { builder.Append( "\\r" ); } else if( c == '\t' ) { builder.Append( "\\t" ); } else { int codepoint = Convert.ToInt32( c ); if( ( codepoint >= 32 ) && ( codepoint <= 126 ) ) { builder.Append( c ); } else { builder.Append( "\\u" + Convert.ToString( codepoint, 16 ).PadLeft( 4, '0' ) ); } } } builder.Append( "\"" ); } protected static void serializeNumber( double number, StringBuilder builder ) { builder.Append( Convert.ToString( number ) ); // , CultureInfo.InvariantCulture)); } #endregion }
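// --------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original NGUIJson source).
// The decoder above returns loosely typed containers: Hashtable for JSON
// objects, ArrayList for arrays, and double for every number, so callers
// cast as they walk the result. A minimal Unity-style example of decoding,
// inspecting, and re-encoding; the JSON literal and the example class name
// are made up for illustration only.
// --------------------------------------------------------------------------
using System.Collections;
using UnityEngine;

public class NGUIJsonUsageExample : MonoBehaviour
{
    void Start()
    {
        string json = "{\"frames\":{\"icon.png\":{\"rotated\":false}},\"count\":2}";

        // Objects come back as Hashtables; a failed parse returns null and
        // records the error position for the helper methods below.
        Hashtable root = NGUIJson.jsonDecode(json) as Hashtable;
        if (root == null || !NGUIJson.lastDecodeSuccessful())
        {
            Debug.LogWarning("Parse failed near: " + NGUIJson.getLastErrorSnippet());
            return;
        }

        double count = (double)root["count"];          // all numbers are doubles
        Hashtable frames = (Hashtable)root["frames"];  // nested object
        Debug.Log("count=" + count + ", frames=" + frames.Count);

        // Round-trip back to a JSON string.
        Debug.Log(NGUIJson.jsonEncode(root));
    }
}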
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.Authorization { using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; using Microsoft.Azure; using Microsoft.Azure.Management; using Microsoft.Rest; using Microsoft.Rest.Azure; using Microsoft.Rest.Serialization; using Models; using Newtonsoft.Json; /// <summary> /// ClassicAdministratorsOperations operations. /// </summary> internal partial class ClassicAdministratorsOperations : IServiceOperations<AuthorizationManagementClient>, IClassicAdministratorsOperations { /// <summary> /// Initializes a new instance of the ClassicAdministratorsOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal ClassicAdministratorsOperations(AuthorizationManagementClient client) { if (client == null) { throw new ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the AuthorizationManagementClient /// </summary> public AuthorizationManagementClient Client { get; private set; } /// <summary> /// Gets service administrator, account administrator, and co-administrators for the subscription. /// </summary> /// <param name='apiVersion'> /// The API version to use for this operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<ClassicAdministrator>>> ListWithHttpMessagesAsync(string apiVersion, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (apiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "apiVersion"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Authorization/classicAdministrators").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<ClassicAdministrator>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await 
_httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Page<ClassicAdministrator>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets service administrator, account administrator, and co-administrators for the subscription. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<ClassicAdministrator>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<ClassicAdministrator>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<Page<ClassicAdministrator>>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, 
_result); } return _result; } } }
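// --------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated client). It assumes
// the usual AutoRest conventions: AuthorizationManagementClient has a
// constructor taking ServiceClientCredentials and exposes this operations
// group as a ClassicAdministrators property; the "2015-06-01" api-version
// and the Name property on the ClassicAdministrator model are likewise
// assumptions for the sake of the example.
// --------------------------------------------------------------------------
using System;
using System.Threading.Tasks;
using Microsoft.Azure.Management.Authorization;
using Microsoft.Azure.Management.Authorization.Models;
using Microsoft.Rest;
using Microsoft.Rest.Azure;

public static class ClassicAdministratorsListingExample
{
    public static async Task ListAllAsync(ServiceClientCredentials credentials, string subscriptionId)
    {
        var client = new AuthorizationManagementClient(credentials) { SubscriptionId = subscriptionId };

        // First page: ListWithHttpMessagesAsync builds the subscription-scoped URL
        // and deserializes the response body into a Page<ClassicAdministrator>.
        AzureOperationResponse<IPage<ClassicAdministrator>> response =
            await client.ClassicAdministrators.ListWithHttpMessagesAsync("2015-06-01");

        IPage<ClassicAdministrator> page = response.Body;
        while (true)
        {
            foreach (ClassicAdministrator admin in page)
            {
                Console.WriteLine(admin.Name);
            }

            // Follow the continuation link until the service stops returning one.
            if (string.IsNullOrEmpty(page.NextPageLink))
            {
                break;
            }

            page = (await client.ClassicAdministrators.ListNextWithHttpMessagesAsync(page.NextPageLink)).Body;
        }
    }
}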
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Text; using OmniSharp.FileWatching; using OmniSharp.Models; using OmniSharp.Models.ChangeBuffer; using OmniSharp.Models.UpdateBuffer; namespace OmniSharp.Roslyn { public class BufferManager { private readonly OmniSharpWorkspace _workspace; private readonly IDictionary<string, IEnumerable<DocumentId>> _transientDocuments = new Dictionary<string, IEnumerable<DocumentId>>(StringComparer.OrdinalIgnoreCase); private readonly ISet<DocumentId> _transientDocumentIds = new HashSet<DocumentId>(); private readonly object _lock = new object(); private readonly IFileSystemWatcher _fileSystemWatcher; private readonly Action<string, FileChangeType> _onFileChanged; public BufferManager(OmniSharpWorkspace workspace, IFileSystemWatcher fileSystemWatcher) { _workspace = workspace; _workspace.WorkspaceChanged += OnWorkspaceChanged; _fileSystemWatcher = fileSystemWatcher; _onFileChanged = OnFileChanged; } public async Task UpdateBufferAsync(Request request) { var buffer = request.Buffer; var changes = request.Changes; if (request is UpdateBufferRequest updateRequest && updateRequest.FromDisk) { buffer = File.ReadAllText(updateRequest.FileName); } if (request.FileName == null || (buffer == null && changes == null)) { return; } var solution = _workspace.CurrentSolution; var documentIds = solution.GetDocumentIdsWithFilePath(request.FileName); if (!documentIds.IsEmpty) { if (changes == null) { var sourceText = SourceText.From(buffer); foreach (var documentId in documentIds) { solution = solution.WithDocumentText(documentId, sourceText); } } else { foreach (var documentId in documentIds) { var document = solution.GetDocument(documentId); var sourceText = await document.GetTextAsync(); foreach (var change in request.Changes) { var startOffset = sourceText.Lines.GetPosition(new LinePosition(change.StartLine, change.StartColumn)); var endOffset = sourceText.Lines.GetPosition(new LinePosition(change.EndLine, change.EndColumn)); sourceText = sourceText.WithChanges(new[] { new TextChange(new TextSpan(startOffset, endOffset - startOffset), change.NewText) }); } solution = solution.WithDocumentText(documentId, sourceText); } } _workspace.TryApplyChanges(solution); } else if (buffer != null) { TryAddTransientDocument(request.FileName, buffer); } } public async Task UpdateBufferAsync(ChangeBufferRequest request) { if (request.FileName == null) { return; } var solution = _workspace.CurrentSolution; var documentIds = solution.GetDocumentIdsWithFilePath(request.FileName); if (!documentIds.IsEmpty) { foreach (var documentId in documentIds) { var document = solution.GetDocument(documentId); var sourceText = await document.GetTextAsync(); var startOffset = sourceText.Lines.GetPosition(new LinePosition(request.StartLine, request.StartColumn)); var endOffset = sourceText.Lines.GetPosition(new LinePosition(request.EndLine, request.EndColumn)); sourceText = sourceText.WithChanges(new[] { new TextChange(new TextSpan(startOffset, endOffset - startOffset), request.NewText) }); solution = solution.WithDocumentText(documentId, sourceText); } _workspace.TryApplyChanges(solution); } else { // TODO@joh ensure the edit is an insert at offset zero TryAddTransientDocument(request.FileName, request.NewText); } } private bool TryAddTransientDocument(string fileName, string fileContent) { if (string.IsNullOrWhiteSpace(fileName)) { return false; } var projects = 
FindProjectsByFileName(fileName); if (!projects.Any()) { if (fileName.EndsWith(".cs") && _workspace.TryAddMiscellaneousDocument(fileName, LanguageNames.CSharp) != null) { _fileSystemWatcher.Watch(fileName, OnFileChanged); return true; } return false; } else { var sourceText = SourceText.From(fileContent); var documentInfos = new List<DocumentInfo>(); foreach (var project in projects) { var id = DocumentId.CreateNewId(project.Id); var version = VersionStamp.Create(); var documentInfo = DocumentInfo.Create( id, fileName, filePath: fileName, loader: TextLoader.From(TextAndVersion.Create(sourceText, version))); documentInfos.Add(documentInfo); } lock (_lock) { var documentIds = documentInfos.Select(document => document.Id); _transientDocuments[fileName] = documentIds; _transientDocumentIds.UnionWith(documentIds); } foreach (var documentInfo in documentInfos) { _workspace.AddDocument(documentInfo); } } return true; } private void OnFileChanged(string filePath, FileChangeType changeType) { if (changeType == FileChangeType.Unspecified && !File.Exists(filePath) || changeType == FileChangeType.Delete) { _workspace.TryRemoveMiscellaneousDocument(filePath); } } private IEnumerable<Project> FindProjectsByFileName(string fileName) { var fileInfo = new FileInfo(fileName); var dirInfo = fileInfo.Directory; var candidates = _workspace.CurrentSolution.Projects .Where(project => !String.IsNullOrWhiteSpace (project.FilePath)) .GroupBy(project => new FileInfo(project.FilePath).Directory.FullName) .ToDictionary(grouping => grouping.Key, grouping => grouping.ToList()); List<Project> projects = null; while (dirInfo != null) { if (candidates.TryGetValue(dirInfo.FullName, out projects)) { return projects; } dirInfo = dirInfo.Parent; } return Array.Empty<Project>(); } private void OnWorkspaceChanged(object sender, WorkspaceChangeEventArgs args) { string fileName = null; if (args.Kind == WorkspaceChangeKind.DocumentAdded) { fileName = args.NewSolution.GetDocument(args.DocumentId).FilePath; } else if (args.Kind == WorkspaceChangeKind.DocumentRemoved) { fileName = args.OldSolution.GetDocument(args.DocumentId).FilePath; } if (fileName == null) { return; } lock (_lock) { if (_transientDocumentIds.Contains(args.DocumentId)) { return; } if (!_transientDocuments.TryGetValue(fileName, out var documentIds)) { return; } _transientDocuments.Remove(fileName); foreach (var documentId in documentIds) { _workspace.RemoveDocument(documentId); _transientDocumentIds.Remove(documentId); } } } } }
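// --------------------------------------------------------------------------
// Illustrative sketch (not part of OmniSharp). UpdateBufferAsync above turns
// zero-based line/column coordinates into absolute offsets with
// SourceText.Lines.GetPosition before applying a TextChange. The snippet
// below shows just that conversion in isolation; the ApplyChange helper and
// its parameter names are invented for the example.
// --------------------------------------------------------------------------
using System;
using Microsoft.CodeAnalysis.Text;

public static class LineColumnEditExample
{
    public static SourceText ApplyChange(
        SourceText text,
        int startLine, int startColumn,
        int endLine, int endColumn,
        string newText)
    {
        // Convert (line, column) pairs to absolute character offsets.
        int startOffset = text.Lines.GetPosition(new LinePosition(startLine, startColumn));
        int endOffset = text.Lines.GetPosition(new LinePosition(endLine, endColumn));

        // Replace the addressed span, producing a new immutable SourceText.
        return text.WithChanges(new TextChange(new TextSpan(startOffset, endOffset - startOffset), newText));
    }

    public static void Main()
    {
        SourceText text = SourceText.From("class C\n{\n    void M() { }\n}\n");

        // Rename "M" (line 2, columns 9..10, zero-based) to "Method".
        SourceText updated = ApplyChange(text, 2, 9, 2, 10, "Method");
        Console.WriteLine(updated.ToString());
    }
}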
// Borrowed from the Papercut project: papercut.codeplex.com. using System; using System.Collections.Generic; using System.Collections.Specialized; using System.Text; using System.IO; using System.Net.Mime; using System.Net.Mail; namespace SpecsFor.Mvc.Smtp.Mime { /// <summary> /// This class represents a Mime entity. /// </summary> public class MimeEntity { private StringBuilder _encodedMessage; /// <summary> /// Gets the encoded message. /// </summary> /// <value>The encoded message.</value> public StringBuilder EncodedMessage { get { return _encodedMessage; } } private List<MimeEntity> _children; /// <summary> /// Gets the children. /// </summary> /// <value>The children.</value> public List<MimeEntity> Children { get { return _children; } } private ContentType _contentType; /// <summary> /// Gets the type of the content. /// </summary> /// <value>The type of the content.</value> public ContentType ContentType { get { return _contentType; } } private string _mediaSubType; /// <summary> /// Gets the type of the media sub. /// </summary> /// <value>The type of the media sub.</value> public string MediaSubType { get { return _mediaSubType; } } private string _mediaMainType; /// <summary> /// Gets the type of the media main. /// </summary> /// <value>The type of the media main.</value> public string MediaMainType { get { return _mediaMainType; } } private NameValueCollection _headers; /// <summary> /// Gets the headers. /// </summary> /// <value>The headers.</value> public NameValueCollection Headers { get { return _headers; } } private string _mimeVersion; /// <summary> /// Gets or sets the MIME version. /// </summary> /// <value>The MIME version.</value> public string MimeVersion { get { return _mimeVersion; } set { _mimeVersion = value; } } private string _contentId; /// <summary> /// Gets or sets the content id. /// </summary> /// <value>The content id.</value> public string ContentId { get { return _contentId; } set { _contentId = value; } } private string _contentDescription; /// <summary> /// Gets or sets the content description. /// </summary> /// <value>The content description.</value> public string ContentDescription { get { return _contentDescription; } set { _contentDescription = value; } } private ContentDisposition _contentDisposition; /// <summary> /// Gets or sets the content disposition. /// </summary> /// <value>The content disposition.</value> public ContentDisposition ContentDisposition { get { return _contentDisposition; } set { _contentDisposition = value; } } private string _transferEncoding; /// <summary> /// Gets or sets the transfer encoding. /// </summary> /// <value>The transfer encoding.</value> public string TransferEncoding { get { return _transferEncoding; } set { _transferEncoding = value; } } private TransferEncoding _contentTransferEncoding; /// <summary> /// Gets or sets the content transfer encoding. /// </summary> /// <value>The content transfer encoding.</value> public TransferEncoding ContentTransferEncoding { get { return _contentTransferEncoding; } set { _contentTransferEncoding = value; } } /// <summary> /// Gets a value indicating whether this instance has boundary. /// </summary> /// <value> /// <c>true</c> if this instance has boundary; otherwise, <c>false</c>. /// </value> internal bool HasBoundary { get { return (!string.IsNullOrEmpty(_contentType.Boundary)) || (!string.IsNullOrEmpty(_startBoundary)); } } private string _startBoundary; /// <summary> /// Gets the start boundary. 
/// </summary> /// <value>The start boundary.</value> public string StartBoundary { get { if (string.IsNullOrEmpty(_startBoundary) || !string.IsNullOrEmpty(_contentType.Boundary)) { return string.Concat("--", _contentType.Boundary); } return _startBoundary; } } /// <summary> /// Gets the end boundary. /// </summary> /// <value>The end boundary.</value> public string EndBoundary { get { return string.Concat(StartBoundary, "--"); } } private MimeEntity _parent; /// <summary> /// Gets or sets the parent. /// </summary> /// <value>The parent.</value> public MimeEntity Parent { get { return _parent; } set { _parent = value; } } private MemoryStream _content; /// <summary> /// Gets or sets the content. /// </summary> /// <value>The content.</value> public MemoryStream Content { get { return _content; } internal set { _content = value; } } /// <summary> /// Initializes a new instance of the <see cref="MimeEntity"/> class. /// </summary> public MimeEntity() { _children = new List<MimeEntity>(); _headers = new NameValueCollection(); _contentType = MimeReader.GetContentType(string.Empty); _parent = null; _encodedMessage = new StringBuilder(); } /// <summary> /// Initializes a new instance of the <see cref="MimeEntity"/> class. /// </summary> /// <param name="parent">The parent.</param> public MimeEntity(MimeEntity parent) : this() { if (parent == null) { throw new ArgumentNullException("parent"); } _parent = parent; _startBoundary = parent.StartBoundary; } /// <summary> /// Sets the type of the content. /// </summary> /// <param name="contentType">Type of the content.</param> internal void SetContentType(ContentType contentType) { _contentType = contentType; _contentType.MediaType = MimeReader.GetMediaType(contentType.MediaType); _mediaMainType = MimeReader.GetMediaMainType(contentType.MediaType); _mediaSubType = MimeReader.GetMediaSubType(contentType.MediaType); } /// <summary> /// Converts this entity into a MailMessageEx. /// </summary> /// <returns></returns> public MailMessageEx ToMailMessageEx() { return ToMailMessageEx(this); } /// <summary> /// Converts the specified entity into a MailMessageEx. /// </summary> /// <param name="entity">The entity.</param> /// <returns></returns> private MailMessageEx ToMailMessageEx(MimeEntity entity) { if (entity == null) { throw new ArgumentNullException("entity"); } //parse standard headers and create base email. MailMessageEx message = MailMessageEx.CreateMailMessageFromEntity(entity); if (!string.IsNullOrEmpty(entity.ContentType.Boundary)) { message = MailMessageEx.CreateMailMessageFromEntity(entity); BuildMultiPartMessage(entity, message); }//parse multipart message into sub parts. else if (string.Equals(entity.ContentType.MediaType, MediaTypes.MessageRfc822, StringComparison.InvariantCultureIgnoreCase)) { //use the first child to create the multipart message. if (entity.Children.Count == 0) { throw new Exception("Invalid child count on message/rfc822 entity."); } //create the mail message from the first child because it will //contain all of the mail headers. The entity in this state //only contains simple content type headers indicating disposition, type and description. //This means we can't create the mail message from this type as there is no //internet mail headers attached to this entity. message = MailMessageEx.CreateMailMessageFromEntity(entity.Children[0]); BuildMultiPartMessage(entity, message); } //parse nested message. else { message = MailMessageEx.CreateMailMessageFromEntity(entity); BuildSinglePartMessage(entity, message); } //Create single part message.
return message; } /// <summary> /// Builds the single part message. /// </summary> /// <param name="entity">The entity.</param> /// <param name="message">The message.</param> private void BuildSinglePartMessage(MimeEntity entity, MailMessageEx message) { SetMessageBody(message, entity); } /// <summary> /// Gets the body encoding. /// </summary> /// <param name="contentType">Type of the content.</param> public Encoding GetEncoding() { if (string.IsNullOrEmpty(this.ContentType.CharSet)) { return Encoding.ASCII; } else { try { return Encoding.GetEncoding(this.ContentType.CharSet); } catch (ArgumentException) { return Encoding.ASCII; } } } /// <summary> /// Builds the multi part message. /// </summary> /// <param name="entity">The entity.</param> /// <param name="message">The message.</param> private void BuildMultiPartMessage(MimeEntity entity, MailMessageEx message) { foreach (MimeEntity child in entity.Children) { if (string.Equals(child.ContentType.MediaType, MediaTypes.MultipartAlternative, StringComparison.InvariantCultureIgnoreCase) || string.Equals(child.ContentType.MediaType, MediaTypes.MultipartMixed, StringComparison.InvariantCultureIgnoreCase) || string.Equals(child.ContentType.MediaType, MediaTypes.MultipartRelated, StringComparison.InvariantCultureIgnoreCase)) { BuildMultiPartMessage(child, message); } //if the message is mulitpart/alternative or multipart/mixed then the entity will have children needing parsed. else if (!IsAttachment(child) && (string.Equals(child.ContentType.MediaType, MediaTypes.TextPlain) || string.Equals(child.ContentType.MediaType, MediaTypes.TextHtml))) { message.AlternateViews.Add(CreateAlternateView(child)); SetMessageBody(message, child); } //add the alternative views. else if (string.Equals(child.ContentType.MediaType, MediaTypes.MessageRfc822, StringComparison.InvariantCultureIgnoreCase) && string.Equals(child.ContentDisposition.DispositionType, DispositionTypeNames.Attachment, StringComparison.InvariantCultureIgnoreCase)) { message.Children.Add(ToMailMessageEx(child)); } //create a child message and else if (IsAttachment(child)) { message.Attachments.Add(CreateAttachment(child)); } else if (string.Equals(entity.ContentType.MediaType, MediaTypes.MultipartRelated, StringComparison.InvariantCultureIgnoreCase) || string.Equals(entity.ContentType.MediaType, MediaTypes.MultipartMixed, StringComparison.InvariantCultureIgnoreCase)) { message.Attachments.Add(CreateAttachment(child)); } } } private static bool IsAttachment(MimeEntity child) { return (child.ContentDisposition != null) && (string.Equals(child.ContentDisposition.DispositionType, DispositionTypeNames.Attachment, StringComparison.InvariantCultureIgnoreCase)); } /// <summary> /// Sets the message body. /// </summary> /// <param name="message">The message.</param> /// <param name="child">The child.</param> private void SetMessageBody(MailMessageEx message, MimeEntity child) { Encoding encoding = child.GetEncoding(); message.Body = DecodeBytes(child.Content.ToArray(), encoding); message.BodyEncoding = encoding; message.IsBodyHtml = string.Equals(MediaTypes.TextHtml, child.ContentType.MediaType, StringComparison.InvariantCultureIgnoreCase); } /// <summary> /// Decodes the bytes. /// </summary> /// <param name="buffer">The buffer.</param> /// <param name="encoding">The encoding.</param> /// <returns></returns> private string DecodeBytes(byte[] buffer, Encoding encoding) { if (buffer == null) { return null; } if (encoding == null) { encoding = Encoding.UTF7; } //email defaults to 7bit. 
return encoding.GetString(buffer); } /// <summary> /// Creates the alternate view. /// </summary> /// <param name="view">The view.</param> /// <returns></returns> private AlternateView CreateAlternateView(MimeEntity view) { AlternateView alternateView = new AlternateView(view.Content, view.ContentType); alternateView.TransferEncoding = view.ContentTransferEncoding; alternateView.ContentId = TrimBrackets(view.ContentId); return alternateView; } /// <summary> /// Trims the brackets. /// </summary> /// <param name="value">The value.</param> /// <returns></returns> public static string TrimBrackets(string value) { if (value == null) { return value; } if (value.StartsWith("<") && value.EndsWith(">")) { return value.Trim('<', '>'); } return value; } /// <summary> /// Creates the attachment. /// </summary> /// <param name="entity">The entity.</param> /// <returns></returns> private Attachment CreateAttachment(MimeEntity entity) { Attachment attachment = new Attachment(entity.Content, entity.ContentType); if (entity.ContentDisposition != null) { attachment.ContentDisposition.Parameters.Clear(); foreach (string key in entity.ContentDisposition.Parameters.Keys) { attachment.ContentDisposition.Parameters.Add(key, entity.ContentDisposition.Parameters[key]); } attachment.ContentDisposition.CreationDate = entity.ContentDisposition.CreationDate; attachment.ContentDisposition.DispositionType = entity.ContentDisposition.DispositionType; attachment.ContentDisposition.FileName = entity.ContentDisposition.FileName; attachment.ContentDisposition.Inline = entity.ContentDisposition.Inline; attachment.ContentDisposition.ModificationDate = entity.ContentDisposition.ModificationDate; attachment.ContentDisposition.ReadDate = entity.ContentDisposition.ReadDate; attachment.ContentDisposition.Size = entity.ContentDisposition.Size; } if (!string.IsNullOrEmpty(entity.ContentId)) { attachment.ContentId = TrimBrackets(entity.ContentId); } attachment.TransferEncoding = entity.ContentTransferEncoding; return attachment; } } }
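// --------------------------------------------------------------------------
// Illustrative sketch (not part of the Papercut-derived source). TrimBrackets
// above normalizes MIME Content-ID values of the form "<id@host>" to
// "id@host" and passes anything else through unchanged, including null; the
// small console program below just demonstrates that behaviour.
// --------------------------------------------------------------------------
using System;
using SpecsFor.Mvc.Smtp.Mime;

public static class TrimBracketsExample
{
    public static void Main()
    {
        Console.WriteLine(MimeEntity.TrimBrackets("<part1@example.com>")); // part1@example.com
        Console.WriteLine(MimeEntity.TrimBrackets("part1@example.com"));   // unchanged
        Console.WriteLine(MimeEntity.TrimBrackets(null) ?? "(null)");      // (null)
    }
}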
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Threading.Tasks; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.Testing; using Test.Utilities; using Xunit; using VerifyCS = Test.Utilities.CSharpCodeFixVerifier< Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer, Microsoft.CodeQuality.CSharp.Analyzers.ApiDesignGuidelines.CSharpOverrideEqualsAndOperatorEqualsOnValueTypesFixer>; using VerifyVB = Test.Utilities.VisualBasicCodeFixVerifier< Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer, Microsoft.CodeQuality.VisualBasic.Analyzers.ApiDesignGuidelines.BasicOverrideEqualsAndOperatorEqualsOnValueTypesFixer>; namespace Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines.UnitTests { public class OverrideEqualsAndOperatorEqualsOnValueTypesTests { [Fact] public async Task CSharpDiagnosticForBothEqualsAndOperatorEqualsOnStruct() { await VerifyCS.VerifyAnalyzerAsync(@" public struct A { public int X; }", GetCSharpOverrideEqualsDiagnostic(2, 15, "A"), GetCSharpOperatorEqualsDiagnostic(2, 15, "A")); } [WorkItem(895, "https://github.com/dotnet/roslyn-analyzers/issues/895")] [Fact] public async Task CSharpNoDiagnosticForInternalAndPrivateStruct() { await VerifyCS.VerifyAnalyzerAsync(@" internal struct A { public int X; } public class B { private struct C { public int X; } } "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task CSharpNoDiagnosticForEnum() { await VerifyCS.VerifyAnalyzerAsync(@" public enum E { F = 0 } "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task CSharpNoDiagnosticForStructsWithoutMembers() { await VerifyCS.VerifyAnalyzerAsync(@" public struct EmptyStruct { } "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task CSharpNoDiagnosticForEnumerators() { await VerifyCS.VerifyAnalyzerAsync(@" using System; using System.Collections; public struct MyEnumerator : System.Collections.IEnumerator { public object Current { get { throw new NotImplementedException(); } } public bool MoveNext() { throw new NotImplementedException(); } public void Reset() { throw new NotImplementedException(); } } public struct MyGenericEnumerator<T> : System.Collections.Generic.IEnumerator<T> { public T Current { get { throw new NotImplementedException(); } } object IEnumerator.Current { get { throw new NotImplementedException(); } } public void Dispose() { throw new NotImplementedException(); } public bool MoveNext() { throw new NotImplementedException(); } public void Reset() { throw new NotImplementedException(); } } "); } [Fact] public async Task CSharpNoDiagnosticForEqualsOrOperatorEqualsOnClass() { await VerifyCS.VerifyAnalyzerAsync(@" public class A { public int X; }"); } [Fact] public async Task CSharpNoDiagnosticWhenStructImplementsEqualsAndOperatorEquals() { await VerifyCS.VerifyAnalyzerAsync(@" public struct A { public override bool Equals(object other) { return true; } public static bool operator==(A left, A right) { return true; } public static bool operator!=(A left, A right) { return true; } }"); } [Fact] public async Task CSharpDiagnosticWhenEqualsHasWrongSignature() { await VerifyCS.VerifyAnalyzerAsync(@" public struct A { public bool Equals(A other) { return true; } public static bool operator==(A left, A 
right) { return true; } public static bool operator!=(A left, A right) { return true; } }", GetCSharpOverrideEqualsDiagnostic(2, 15, "A")); } [Fact] public async Task CSharpDiagnosticWhenEqualsIsNotAnOverride() { await VerifyCS.VerifyAnalyzerAsync(@" public struct A { public new bool Equals(object other) { return true; } public static bool operator==(A left, A right) { return true; } public static bool operator!=(A left, A right) { return true; } }", GetCSharpOverrideEqualsDiagnostic(2, 15, "A")); } [Fact] public async Task BasicDiagnosticsForEqualsOnStructure() { await VerifyVB.VerifyAnalyzerAsync(@" Public Structure A Public X As Integer End Structure ", GetBasicOverrideEqualsDiagnostic(2, 18, "A"), GetBasicOperatorEqualsDiagnostic(2, 18, "A")); } [WorkItem(895, "https://github.com/dotnet/roslyn-analyzers/issues/895")] [Fact] public async Task BasicNoDiagnosticsForInternalAndPrivateStructure() { await VerifyVB.VerifyAnalyzerAsync(@" Friend Structure A Public X As Integer End Structure Public Class B Private Structure C Public X As Integer End Structure End Class "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task BasicNoDiagnosticForEnum() { await VerifyVB.VerifyAnalyzerAsync(@" Public Enum E F = 0 End Enum "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task BasicNoDiagnosticForStructsWithoutMembers() { await VerifyVB.VerifyAnalyzerAsync(@" Public Structure EmptyStruct End Structure "); } [WorkItem(899, "https://github.com/dotnet/roslyn-analyzers/issues/899")] [Fact] public async Task BasicNoDiagnosticForEnumerators() { await VerifyVB.VerifyAnalyzerAsync(@" Imports System Imports System.Collections Imports System.Collections.Generic Public Structure MyEnumerator Implements IEnumerator Public ReadOnly Property Current As Object Implements IEnumerator.Current Get Throw New NotImplementedException() End Get End Property Public Function MoveNext() As Boolean Implements IEnumerator.MoveNext Throw New NotImplementedException() End Function Public Sub Reset() Implements IEnumerator.Reset Throw New NotImplementedException() End Sub End Structure Public Structure MyGenericEnumerator(Of T) Implements IEnumerator(Of T) Public ReadOnly Property Current As T Implements IEnumerator(Of T).Current Get Throw New NotImplementedException() End Get End Property Private ReadOnly Property IEnumerator_Current() As Object Implements IEnumerator.Current Get Throw New NotImplementedException() End Get End Property Public Sub Dispose() Implements IEnumerator(Of T).Dispose Throw New NotImplementedException() End Sub Public Function MoveNext() As Boolean Implements IEnumerator(Of T).MoveNext Throw New NotImplementedException() End Function Public Sub Reset() Implements IEnumerator(Of T).Reset Throw New NotImplementedException() End Sub End Structure "); } [Fact] public async Task BasicNoDiagnosticForEqualsOnClass() { await VerifyVB.VerifyAnalyzerAsync(@" Public Class A End Class "); } [Fact] public async Task BasicNoDiagnosticWhenStructureImplementsEqualsAndOperatorEquals() { await VerifyVB.VerifyAnalyzerAsync(@" Public Structure A Public Overrides Overloads Function Equals(obj As Object) As Boolean Return True End Function Public Shared Operator =(left As A, right As A) Return True End Operator Public Shared Operator <>(left As A, right As A) Return False End Operator End Structure "); } [Fact] public async Task BasicDiagnosticWhenEqualsHasWrongSignature() { await VerifyVB.VerifyAnalyzerAsync(@" Public Structure 
A Public Overrides Overloads Function {|BC30284:Equals|}(obj As A) As Boolean Return True End Function Public Shared Operator =(left As A, right As A) Return True End Operator Public Shared Operator <>(left As A, right As A) Return False End Operator End Structure ", GetBasicOverrideEqualsDiagnostic(2, 18, "A")); } [Fact] public async Task BasicDiagnosticWhenEqualsIsNotAnOverride() { await VerifyVB.VerifyAnalyzerAsync(@" Public Structure A Public Shadows Function Equals(obj As Object) As Boolean Return True End Function Public Shared Operator =(left As A, right As A) Return True End Operator Public Shared Operator <>(left As A, right As A) Return False End Operator End Structure ", GetBasicOverrideEqualsDiagnostic(2, 18, "A")); } [Fact, WorkItem(2324, "https://github.com/dotnet/roslyn-analyzers/issues/2324")] public async Task CSharp_RefStruct_NoDiagnostic() { await new VerifyCS.Test { TestCode = @" public ref struct A { public int X; }", LanguageVersion = LanguageVersion.CSharp8 }.RunAsync(); } private static DiagnosticResult GetCSharpOverrideEqualsDiagnostic(int line, int column, string typeName) => #pragma warning disable RS0030 // Do not used banned APIs VerifyCS.Diagnostic(OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer.EqualsRule) .WithLocation(line, column) #pragma warning restore RS0030 // Do not used banned APIs .WithArguments(typeName); private static DiagnosticResult GetCSharpOperatorEqualsDiagnostic(int line, int column, string typeName) => #pragma warning disable RS0030 // Do not used banned APIs VerifyCS.Diagnostic(OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer.OpEqualityRule) .WithLocation(line, column) #pragma warning restore RS0030 // Do not used banned APIs .WithArguments(typeName); private static DiagnosticResult GetBasicOverrideEqualsDiagnostic(int line, int column, string typeName) => #pragma warning disable RS0030 // Do not used banned APIs VerifyVB.Diagnostic(OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer.EqualsRule) .WithLocation(line, column) #pragma warning restore RS0030 // Do not used banned APIs .WithArguments(typeName); private static DiagnosticResult GetBasicOperatorEqualsDiagnostic(int line, int column, string typeName) => #pragma warning disable RS0030 // Do not used banned APIs VerifyVB.Diagnostic(OverrideEqualsAndOperatorEqualsOnValueTypesAnalyzer.OpEqualityRule) .WithLocation(line, column) #pragma warning restore RS0030 // Do not used banned APIs .WithArguments(typeName); } }
// Copyright (c) The Avalonia Project. All rights reserved. // Licensed under the MIT license. See licence.md file in the project root for full license information. using System; using System.Collections.Generic; using System.Reactive.Linq; using Moq; using Avalonia.Styling; using Avalonia.UnitTests; using Xunit; using Avalonia.LogicalTree; namespace Avalonia.Controls.UnitTests { public class ControlTests { [Fact] public void Classes_Should_Initially_Be_Empty() { var target = new Control(); Assert.Empty(target.Classes); } [Fact] public void Setting_Parent_Should_Also_Set_InheritanceParent() { var parent = new Decorator(); var target = new TestControl(); parent.Child = target; Assert.Equal(parent, target.Parent); Assert.Equal(parent, target.InheritanceParent); } [Fact] public void Setting_Parent_Should_Not_Set_InheritanceParent_If_Already_Set() { var parent = new Decorator(); var inheritanceParent = new Decorator(); var target = new TestControl(); ((ISetInheritanceParent)target).SetParent(inheritanceParent); parent.Child = target; Assert.Equal(parent, target.Parent); Assert.Equal(inheritanceParent, target.InheritanceParent); } [Fact] public void InheritanceParent_Should_Be_Cleared_When_Removed_From_Parent() { var parent = new Decorator(); var target = new TestControl(); parent.Child = target; parent.Child = null; Assert.Null(target.InheritanceParent); } [Fact] public void InheritanceParent_Should_Be_Cleared_When_Removed_From_Parent_When_Has_Different_InheritanceParent() { var parent = new Decorator(); var inheritanceParent = new Decorator(); var target = new TestControl(); ((ISetInheritanceParent)target).SetParent(inheritanceParent); parent.Child = target; parent.Child = null; Assert.Null(target.InheritanceParent); } [Fact] public void AttachedToLogicalParent_Should_Be_Called_When_Added_To_Tree() { var root = new TestRoot(); var parent = new Border(); var child = new Border(); var grandchild = new Border(); var parentRaised = false; var childRaised = false; var grandchildRaised = false; parent.AttachedToLogicalTree += (s, e) => parentRaised = true; child.AttachedToLogicalTree += (s, e) => childRaised = true; grandchild.AttachedToLogicalTree += (s, e) => grandchildRaised = true; parent.Child = child; child.Child = grandchild; Assert.False(parentRaised); Assert.False(childRaised); Assert.False(grandchildRaised); root.Child = parent; Assert.True(parentRaised); Assert.True(childRaised); Assert.True(grandchildRaised); } [Fact] public void AttachedToLogicalParent_Should_Be_Called_Before_Parent_Change_Signalled() { var root = new TestRoot(); var child = new Border(); var raised = new List<string>(); child.AttachedToLogicalTree += (s, e) => { Assert.Equal(root, child.Parent); raised.Add("attached"); }; child.GetObservable(Control.ParentProperty).Skip(1).Subscribe(_ => raised.Add("parent")); root.Child = child; Assert.Equal(new[] { "attached", "parent" }, raised); } [Fact] public void AttachedToLogicalParent_Should_Not_Be_Called_With_GlobalStyles_As_Root() { var globalStyles = Mock.Of<IGlobalStyles>(); var root = new TestRoot { StylingParent = globalStyles }; var child = new Border(); var raised = false; child.AttachedToLogicalTree += (s, e) => { Assert.Equal(root, e.Root); raised = true; }; root.Child = child; Assert.True(raised); } [Fact] public void DetachedFromLogicalParent_Should_Be_Called_When_Removed_From_Tree() { var root = new TestRoot(); var parent = new Border(); var child = new Border(); var grandchild = new Border(); var parentRaised = false; var childRaised = false; var 
grandchildRaised = false; parent.Child = child; child.Child = grandchild; root.Child = parent; parent.DetachedFromLogicalTree += (s, e) => parentRaised = true; child.DetachedFromLogicalTree += (s, e) => childRaised = true; grandchild.DetachedFromLogicalTree += (s, e) => grandchildRaised = true; root.Child = null; Assert.True(parentRaised); Assert.True(childRaised); Assert.True(grandchildRaised); } [Fact] public void DetachedFromLogicalParent_Should_Not_Be_Called_With_GlobalStyles_As_Root() { var globalStyles = Mock.Of<IGlobalStyles>(); var root = new TestRoot { StylingParent = globalStyles }; var child = new Border(); var raised = false; child.DetachedFromLogicalTree += (s, e) => { Assert.Equal(root, e.Root); raised = true; }; root.Child = child; root.Child = null; Assert.True(raised); } [Fact] public void Adding_Tree_To_IStyleRoot_Should_Style_Controls() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var parent = new Border(); var child = new Border(); var grandchild = new Control(); var styler = new Mock<IStyler>(); AvaloniaLocator.CurrentMutable.Bind<IStyler>().ToConstant(styler.Object); parent.Child = child; child.Child = grandchild; styler.Verify(x => x.ApplyStyles(It.IsAny<IStyleable>()), Times.Never()); root.Child = parent; styler.Verify(x => x.ApplyStyles(parent), Times.Once()); styler.Verify(x => x.ApplyStyles(child), Times.Once()); styler.Verify(x => x.ApplyStyles(grandchild), Times.Once()); } } [Fact] public void Styles_Not_Applied_Until_Initialization_Finished() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var child = new Border(); var styler = new Mock<IStyler>(); AvaloniaLocator.CurrentMutable.Bind<IStyler>().ToConstant(styler.Object); ((ISupportInitialize)child).BeginInit(); root.Child = child; styler.Verify(x => x.ApplyStyles(It.IsAny<IStyleable>()), Times.Never()); ((ISupportInitialize)child).EndInit(); styler.Verify(x => x.ApplyStyles(child), Times.Once()); } } [Fact] public void Adding_To_Logical_Tree_Should_Register_With_NameScope() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var child = new Border(); child.Name = "foo"; root.Child = child; Assert.Same(root.FindControl<Border>("foo"), child); } } [Fact] public void Name_Cannot_Be_Set_After_Added_To_Logical_Tree() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var child = new Border(); root.Child = child; Assert.Throws<InvalidOperationException>(() => child.Name = "foo"); } } [Fact] public void Name_Can_Be_Set_While_Initializing() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var child = new Border(); ((ISupportInitialize)child).BeginInit(); root.Child = child; child.Name = "foo"; Assert.Null(root.FindControl<Border>("foo")); ((ISupportInitialize)child).EndInit(); Assert.Same(root.FindControl<Border>("foo"), child); } } [Fact] public void StyleDetach_Is_Triggered_When_Control_Removed_From_Logical_Tree() { using (AvaloniaLocator.EnterScope()) { var root = new TestRoot(); var child = new Border(); root.Child = child; bool styleDetachTriggered = false; ((IStyleable)child).StyleDetach.Subscribe(_ => styleDetachTriggered = true); root.Child = null; Assert.True(styleDetachTriggered); } } [Fact] public void EndInit_Should_Raise_Initialized() { var root = new TestRoot(); var target = new Border(); var called = false; target.Initialized += (s, e) => called = true; ((ISupportInitialize)target).BeginInit(); root.Child = target; ((ISupportInitialize)target).EndInit(); Assert.True(called); Assert.True(target.IsInitialized); 
} [Fact] public void Attaching_To_Visual_Tree_Should_Raise_Initialized() { var root = new TestRoot(); var target = new Border(); var called = false; target.Initialized += (s, e) => called = true; root.Child = target; Assert.True(called); Assert.True(target.IsInitialized); } [Fact] public void DataContextChanged_Should_Be_Called() { var root = new TestStackPanel { Name = "root", Children = { new TestControl { Name = "a1", Child = new TestControl { Name = "b1", } }, new TestControl { Name = "a2", DataContext = "foo", }, } }; var called = new List<string>(); void Record(object sender, EventArgs e) => called.Add(((Control)sender).Name); root.DataContextChanged += Record; foreach (TestControl c in root.GetLogicalDescendants()) { c.DataContextChanged += Record; } root.DataContext = "foo"; Assert.Equal(new[] { "root", "a1", "b1", }, called); } [Fact] public void DataContext_Notifications_Should_Be_Called_In_Correct_Order() { var root = new TestStackPanel { Name = "root", Children = { new TestControl { Name = "a1", Child = new TestControl { Name = "b1", } }, new TestControl { Name = "a2", DataContext = "foo", }, } }; var called = new List<string>(); foreach (IDataContextEvents c in root.GetSelfAndLogicalDescendants()) { c.DataContextBeginUpdate += (s, e) => called.Add("begin " + ((Control)s).Name); c.DataContextChanged += (s, e) => called.Add("changed " + ((Control)s).Name); c.DataContextEndUpdate += (s, e) => called.Add("end " + ((Control)s).Name); } root.DataContext = "foo"; Assert.Equal( new[] { "begin root", "begin a1", "begin b1", "changed root", "changed a1", "changed b1", "end b1", "end a1", "end root", }, called); } private interface IDataContextEvents { event EventHandler DataContextBeginUpdate; event EventHandler DataContextChanged; event EventHandler DataContextEndUpdate; } private class TestControl : Decorator, IDataContextEvents { public event EventHandler DataContextBeginUpdate; public event EventHandler DataContextEndUpdate; public new IAvaloniaObject InheritanceParent => base.InheritanceParent; protected override void OnDataContextBeginUpdate() { DataContextBeginUpdate?.Invoke(this, EventArgs.Empty); base.OnDataContextBeginUpdate(); } protected override void OnDataContextEndUpdate() { DataContextEndUpdate?.Invoke(this, EventArgs.Empty); base.OnDataContextEndUpdate(); } } private class TestStackPanel : StackPanel, IDataContextEvents { public event EventHandler DataContextBeginUpdate; public event EventHandler DataContextEndUpdate; protected override void OnDataContextBeginUpdate() { DataContextBeginUpdate?.Invoke(this, EventArgs.Empty); base.OnDataContextBeginUpdate(); } protected override void OnDataContextEndUpdate() { DataContextEndUpdate?.Invoke(this, EventArgs.Empty); base.OnDataContextEndUpdate(); } } } }
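// ---------------------------------------------------------------------------------
// Illustrative sketch, not part of the original test file: the DataContextChanged tests
// above wire an inline Record handler to the root and each logical descendant. A reusable
// recorder for that pattern could look like the class below. The name DataContextRecorder
// is hypothetical; it assumes the same using directives as the test file above
// (Avalonia.Controls, Avalonia.LogicalTree, System, System.Collections.Generic).
internal sealed class DataContextRecorder
{
    public List<string> Names { get; } = new List<string>();

    public void Attach(Control root)
    {
        // Subscribe to the root first, then every logical descendant, mirroring the
        // foreach loop in DataContextChanged_Should_Be_Called.
        root.DataContextChanged += Record;

        foreach (Control c in root.GetLogicalDescendants())
        {
            c.DataContextChanged += Record;
        }
    }

    private void Record(object sender, EventArgs e) => Names.Add(((Control)sender).Name);
}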
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Xunit; namespace System.Collections.Immutable.Test { public class ImmutableArrayExtensionsTest { private static readonly ImmutableArray<int> emptyDefault = default(ImmutableArray<int>); private static readonly ImmutableArray<int> empty = ImmutableArray.Create<int>(); private static readonly ImmutableArray<int> oneElement = ImmutableArray.Create(1); private static readonly ImmutableArray<int> manyElements = ImmutableArray.Create(1, 2, 3); private static readonly ImmutableArray<GenericParameterHelper> oneElementRefType = ImmutableArray.Create(new GenericParameterHelper(1)); private static readonly ImmutableArray<string> twoElementRefTypeWithNull = ImmutableArray.Create("1", null); private static readonly ImmutableArray<int>.Builder emptyBuilder = ImmutableArray.Create<int>().ToBuilder(); private static readonly ImmutableArray<int>.Builder oneElementBuilder = ImmutableArray.Create<int>(1).ToBuilder(); private static readonly ImmutableArray<int>.Builder manyElementsBuilder = ImmutableArray.Create<int>(1, 2, 3).ToBuilder(); [Fact] public void Select() { Assert.Equal(new[] { 4, 5, 6 }, ImmutableArrayExtensions.Select(manyElements, n => n + 3)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Select<int, bool>(manyElements, null)); } [Fact] public void SelectEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Select<int, bool>(emptyDefault, null)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Select(emptyDefault, n => true)); } [Fact] public void SelectEmpty() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Select<int, bool>(empty, null)); Assert.False(ImmutableArrayExtensions.Select(empty, n => true).Any()); } [Fact] public void Where() { Assert.Equal(new[] { 2, 3 }, ImmutableArrayExtensions.Where(manyElements, n => n > 1)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Where(manyElements, null)); } [Fact] public void WhereEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Where(emptyDefault, null)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Where(emptyDefault, n => true)); } [Fact] public void WhereEmpty() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Where(empty, null)); Assert.False(ImmutableArrayExtensions.Where(empty, n => true).Any()); } [Fact] public void Any() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Any(oneElement, null)); Assert.True(ImmutableArrayExtensions.Any(oneElement)); Assert.True(ImmutableArrayExtensions.Any(manyElements, n => n == 2)); Assert.False(ImmutableArrayExtensions.Any(manyElements, n => n == 4)); Assert.True(ImmutableArrayExtensions.Any(oneElementBuilder)); } [Fact] public void AnyEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(emptyDefault, n => true)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Any(emptyDefault, null)); } [Fact] public void AnyEmpty() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Any(empty, null)); Assert.False(ImmutableArrayExtensions.Any(empty)); 
Assert.False(ImmutableArrayExtensions.Any(empty, n => true)); } [Fact] public void All() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.All(oneElement, null)); Assert.False(ImmutableArrayExtensions.All(manyElements, n => n == 2)); Assert.True(ImmutableArrayExtensions.All(manyElements, n => n > 0)); } [Fact] public void AllEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.All(emptyDefault, n => true)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.All(emptyDefault, null)); } [Fact] public void AllEmpty() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.All(empty, null)); Assert.True(ImmutableArrayExtensions.All(empty, n => { Assert.True(false); return false; })); // predicate should never be invoked. } [Fact] public void SequenceEqual() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(oneElement, (IEnumerable<int>)null)); foreach (IEqualityComparer<int> comparer in new[] { null, EqualityComparer<int>.Default }) { Assert.True(ImmutableArrayExtensions.SequenceEqual(manyElements, manyElements, comparer)); Assert.True(ImmutableArrayExtensions.SequenceEqual(manyElements, (IEnumerable<int>)manyElements.ToArray(), comparer)); Assert.True(ImmutableArrayExtensions.SequenceEqual(manyElements, ImmutableArray.Create(manyElements.ToArray()), comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, oneElement, comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, (IEnumerable<int>)oneElement.ToArray(), comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, ImmutableArray.Create(oneElement.ToArray()), comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, (IEnumerable<int>)manyElements.Add(1).ToArray(), comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements.Add(1), manyElements.Add(2).ToArray(), comparer)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements.Add(1), (IEnumerable<int>)manyElements.Add(2).ToArray(), comparer)); } Assert.True(ImmutableArrayExtensions.SequenceEqual(manyElements, manyElements, (a, b) => true)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, oneElement, (a, b) => a == b)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements.Add(1), manyElements.Add(2), (a, b) => a == b)); Assert.True(ImmutableArrayExtensions.SequenceEqual(manyElements.Add(1), manyElements.Add(1), (a, b) => a == b)); Assert.False(ImmutableArrayExtensions.SequenceEqual(manyElements, ImmutableArray.Create(manyElements.ToArray()), (a, b) => false)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(oneElement, oneElement, (Func<int, int, bool>)null)); } [Fact] public void SequenceEqualEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(oneElement, emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(emptyDefault, empty)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SequenceEqual(emptyDefault, emptyDefault)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(emptyDefault, emptyDefault, (Func<int, int, bool>)null)); } [Fact] public void SequenceEqualEmpty() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SequenceEqual(empty, (IEnumerable<int>)null)); Assert.True(ImmutableArrayExtensions.SequenceEqual(empty, empty)); 
Assert.True(ImmutableArrayExtensions.SequenceEqual(empty, empty.ToArray())); Assert.True(ImmutableArrayExtensions.SequenceEqual(empty, empty, (a, b) => true)); Assert.True(ImmutableArrayExtensions.SequenceEqual(empty, empty, (a, b) => false)); } [Fact] public void Aggregate() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate(oneElement, null)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate(oneElement, 1, null)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(oneElement, 1, null, null)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(oneElement, 1, (a, b) => a + b, null)); Assert.Equal(Enumerable.Aggregate(manyElements, (a, b) => a * b), ImmutableArrayExtensions.Aggregate(manyElements, (a, b) => a * b)); Assert.Equal(Enumerable.Aggregate(manyElements, 5, (a, b) => a * b), ImmutableArrayExtensions.Aggregate(manyElements, 5, (a, b) => a * b)); Assert.Equal(Enumerable.Aggregate(manyElements, 5, (a, b) => a * b, a => -a), ImmutableArrayExtensions.Aggregate(manyElements, 5, (a, b) => a * b, a => -a)); } [Fact] public void AggregateEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate(emptyDefault, (a, b) => a + b)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate(emptyDefault, 1, (a, b) => a + b)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Aggregate<int, int, int>(emptyDefault, 1, (a, b) => a + b, a => a)); } [Fact] public void AggregateEmpty() { Assert.Equal(0, ImmutableArrayExtensions.Aggregate(empty, (a, b) => a + b)); Assert.Equal(1, ImmutableArrayExtensions.Aggregate(empty, 1, (a, b) => a + b)); Assert.Equal(1, ImmutableArrayExtensions.Aggregate<int, int, int>(empty, 1, (a, b) => a + b, a => a)); } [Fact] public void ElementAt() { // Basis for some assertions that follow Assert.Throws<IndexOutOfRangeException>(() => Enumerable.ElementAt(empty, 0)); Assert.Throws<IndexOutOfRangeException>(() => Enumerable.ElementAt(manyElements, -1)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ElementAt(emptyDefault, 0)); Assert.Throws<IndexOutOfRangeException>(() => ImmutableArrayExtensions.ElementAt(empty, 0)); Assert.Throws<IndexOutOfRangeException>(() => ImmutableArrayExtensions.ElementAt(manyElements, -1)); Assert.Equal(1, ImmutableArrayExtensions.ElementAt(oneElement, 0)); Assert.Equal(3, ImmutableArrayExtensions.ElementAt(manyElements, 2)); } [Fact] public void ElementAtOrDefault() { Assert.Equal(Enumerable.ElementAtOrDefault(manyElements, -1), ImmutableArrayExtensions.ElementAtOrDefault(manyElements, -1)); Assert.Equal(Enumerable.ElementAtOrDefault(manyElements, 3), ImmutableArrayExtensions.ElementAtOrDefault(manyElements, 3)); Assert.Throws<InvalidOperationException>(() => Enumerable.ElementAtOrDefault(emptyDefault, 0)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ElementAtOrDefault(emptyDefault, 0)); Assert.Equal(0, ImmutableArrayExtensions.ElementAtOrDefault(empty, 0)); Assert.Equal(0, ImmutableArrayExtensions.ElementAtOrDefault(empty, 1)); Assert.Equal(1, ImmutableArrayExtensions.ElementAtOrDefault(oneElement, 0)); Assert.Equal(3, ImmutableArrayExtensions.ElementAtOrDefault(manyElements, 2)); } [Fact] public void First() { Assert.Equal(Enumerable.First(oneElement), ImmutableArrayExtensions.First(oneElement)); Assert.Equal(Enumerable.First(oneElement, i => true), 
ImmutableArrayExtensions.First(oneElement, i => true)); Assert.Equal(Enumerable.First(manyElements), ImmutableArrayExtensions.First(manyElements)); Assert.Equal(Enumerable.First(manyElements, i => true), ImmutableArrayExtensions.First(manyElements, i => true)); Assert.Equal(Enumerable.First(oneElementBuilder), ImmutableArrayExtensions.First(oneElementBuilder)); Assert.Equal(Enumerable.First(manyElementsBuilder), ImmutableArrayExtensions.First(manyElementsBuilder)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(empty)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(empty, i => true)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(manyElements, i => false)); } [Fact] public void FirstEmpty() { Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(empty)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.First(empty, null)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.First(emptyBuilder)); } [Fact] public void FirstEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.First(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.First(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.First(emptyDefault, null)); } [Fact] public void FirstOrDefault() { Assert.Equal(Enumerable.FirstOrDefault(oneElement), ImmutableArrayExtensions.FirstOrDefault(oneElement)); Assert.Equal(Enumerable.FirstOrDefault(manyElements), ImmutableArrayExtensions.FirstOrDefault(manyElements)); foreach (bool result in new[] { true, false }) { Assert.Equal(Enumerable.FirstOrDefault(oneElement, i => result), ImmutableArrayExtensions.FirstOrDefault(oneElement, i => result)); Assert.Equal(Enumerable.FirstOrDefault(manyElements, i => result), ImmutableArrayExtensions.FirstOrDefault(manyElements, i => result)); } Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(oneElement, null)); Assert.Equal(Enumerable.FirstOrDefault(oneElementBuilder), ImmutableArrayExtensions.FirstOrDefault(oneElementBuilder)); Assert.Equal(Enumerable.FirstOrDefault(manyElementsBuilder), ImmutableArrayExtensions.FirstOrDefault(manyElementsBuilder)); } [Fact] public void FirstOrDefaultEmpty() { Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(empty)); Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(empty, null)); Assert.Equal(0, ImmutableArrayExtensions.FirstOrDefault(emptyBuilder)); } [Fact] public void FirstOrDefaultEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.FirstOrDefault(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.FirstOrDefault(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.FirstOrDefault(emptyDefault, null)); } [Fact] public void Last() { Assert.Equal(Enumerable.Last(oneElement), ImmutableArrayExtensions.Last(oneElement)); Assert.Equal(Enumerable.Last(oneElement, i => true), ImmutableArrayExtensions.Last(oneElement, i => true)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(oneElement, i => false)); Assert.Equal(Enumerable.Last(manyElements), 
ImmutableArrayExtensions.Last(manyElements)); Assert.Equal(Enumerable.Last(manyElements, i => true), ImmutableArrayExtensions.Last(manyElements, i => true)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(manyElements, i => false)); Assert.Equal(Enumerable.Last(oneElementBuilder), ImmutableArrayExtensions.Last(oneElementBuilder)); Assert.Equal(Enumerable.Last(manyElementsBuilder), ImmutableArrayExtensions.Last(manyElementsBuilder)); } [Fact] public void LastEmpty() { Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(empty)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Last(empty, null)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Last(emptyBuilder)); } [Fact] public void LastEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Last(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Last(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Last(emptyDefault, null)); } [Fact] public void LastOrDefault() { Assert.Equal(Enumerable.LastOrDefault(oneElement), ImmutableArrayExtensions.LastOrDefault(oneElement)); Assert.Equal(Enumerable.LastOrDefault(manyElements), ImmutableArrayExtensions.LastOrDefault(manyElements)); foreach (bool result in new[] { true, false }) { Assert.Equal(Enumerable.LastOrDefault(oneElement, i => result), ImmutableArrayExtensions.LastOrDefault(oneElement, i => result)); Assert.Equal(Enumerable.LastOrDefault(manyElements, i => result), ImmutableArrayExtensions.LastOrDefault(manyElements, i => result)); } Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(oneElement, null)); Assert.Equal(Enumerable.LastOrDefault(oneElementBuilder), ImmutableArrayExtensions.LastOrDefault(oneElementBuilder)); Assert.Equal(Enumerable.LastOrDefault(manyElementsBuilder), ImmutableArrayExtensions.LastOrDefault(manyElementsBuilder)); } [Fact] public void LastOrDefaultEmpty() { Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(empty)); Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(empty, null)); Assert.Equal(0, ImmutableArrayExtensions.LastOrDefault(emptyBuilder)); } [Fact] public void LastOrDefaultEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.LastOrDefault(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.LastOrDefault(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.LastOrDefault(emptyDefault, null)); } [Fact] public void Single() { Assert.Equal(Enumerable.Single(oneElement), ImmutableArrayExtensions.Single(oneElement)); Assert.Equal(Enumerable.Single(oneElement), ImmutableArrayExtensions.Single(oneElement, i => true)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(manyElements)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(manyElements, i => true)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(manyElements, i => false)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(oneElement, i => false)); } [Fact] public void SingleEmpty() { Assert.Throws<InvalidOperationException>(() => 
ImmutableArrayExtensions.Single(empty)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.Single(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Single(empty, null)); } [Fact] public void SingleEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Single(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.Single(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.Single(emptyDefault, null)); } [Fact] public void SingleOrDefault() { Assert.Equal(Enumerable.SingleOrDefault(oneElement), ImmutableArrayExtensions.SingleOrDefault(oneElement)); Assert.Equal(Enumerable.SingleOrDefault(oneElement), ImmutableArrayExtensions.SingleOrDefault(oneElement, i => true)); Assert.Equal(Enumerable.SingleOrDefault(oneElement, i => false), ImmutableArrayExtensions.SingleOrDefault(oneElement, i => false)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.SingleOrDefault(manyElements)); Assert.Throws<InvalidOperationException>(() => ImmutableArrayExtensions.SingleOrDefault(manyElements, i => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(oneElement, null)); } [Fact] public void SingleOrDefaultEmpty() { Assert.Equal(0, ImmutableArrayExtensions.SingleOrDefault(empty)); Assert.Equal(0, ImmutableArrayExtensions.SingleOrDefault(empty, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(empty, null)); } [Fact] public void SingleOrDefaultEmptyDefault() { Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SingleOrDefault(emptyDefault)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.SingleOrDefault(emptyDefault, n => true)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.SingleOrDefault(emptyDefault, null)); } [Fact] public void ToDictionary() { Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(manyElements, (Func<int, int>)null)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(manyElements, (Func<int, int>)null, n => n)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(manyElements, (Func<int, int>)null, n => n, EqualityComparer<int>.Default)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(manyElements, n => n, (Func<int, string>)null)); Assert.Throws<ArgumentNullException>(() => ImmutableArrayExtensions.ToDictionary(manyElements, n => n, (Func<int, string>)null, EqualityComparer<int>.Default)); var stringToString = ImmutableArrayExtensions.ToDictionary(manyElements, n => n.ToString(), n => (n * 2).ToString()); Assert.Equal(stringToString.Count, manyElements.Length); Assert.Equal("2", stringToString["1"]); Assert.Equal("4", stringToString["2"]); Assert.Equal("6", stringToString["3"]); var stringToInt = ImmutableArrayExtensions.ToDictionary(manyElements, n => n.ToString()); Assert.Equal(stringToString.Count, manyElements.Length); Assert.Equal(1, stringToInt["1"]); Assert.Equal(2, stringToInt["2"]); Assert.Equal(3, stringToInt["3"]); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(emptyDefault, n => n)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(emptyDefault, n => n, n => n)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(emptyDefault, n => n, 
EqualityComparer<int>.Default)); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToDictionary(emptyDefault, n => n, n => n, EqualityComparer<int>.Default)); } [Fact] public void ToArray() { Assert.Equal(0, ImmutableArrayExtensions.ToArray(empty).Length); Assert.Throws<NullReferenceException>(() => ImmutableArrayExtensions.ToArray(emptyDefault)); Assert.Equal(manyElements.ToArray(), ImmutableArrayExtensions.ToArray(manyElements)); } } }
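// ---------------------------------------------------------------------------------
// Illustrative usage sketch, not part of the original test file: the *EmptyDefault cases
// above expect NullReferenceException because default(ImmutableArray<T>) wraps a null
// underlying array, so the extension methods dereference null instead of treating the
// value as an empty collection. A properly created empty array behaves normally.
using System;
using System.Collections.Immutable;
using System.Linq;

internal static class ImmutableArrayDefaultDemo
{
    internal static void Run()
    {
        ImmutableArray<int> uninitialized = default(ImmutableArray<int>);
        ImmutableArray<int> empty = ImmutableArray.Create<int>();

        Console.WriteLine(ImmutableArrayExtensions.Any(empty)); // False: a real, zero-length array

        try
        {
            ImmutableArrayExtensions.Any(uninitialized);        // throws: the wrapped array is null
        }
        catch (NullReferenceException)
        {
            Console.WriteLine("default(ImmutableArray<int>) must be assigned before use");
        }
    }
}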
namespace UnityEngine.Rendering.PostProcessing { // Raw, mostly unoptimized implementation of Hable's artist-friendly tonemapping curve // http://filmicworlds.com/blog/filmic-tonemapping-with-piecewise-power-curves/ public class HableCurve { public class Segment { public float offsetX; public float offsetY; public float scaleX; public float scaleY; public float lnA; public float B; public float Eval(float x) { float x0 = (x - offsetX) * scaleX; float y0 = 0f; // log(0) is undefined but our function should evaluate to 0. There are better ways to handle this, // but it's doing it the slow way here for clarity. if (x0 > 0) y0 = Mathf.Exp(lnA + B * Mathf.Log(x0)); return y0 * scaleY + offsetY; } }; struct DirectParams { internal float x0; internal float y0; internal float x1; internal float y1; internal float W; internal float overshootX; internal float overshootY; internal float gamma; }; public float whitePoint { get; private set; } public float inverseWhitePoint { get; private set; } public float x0 { get; private set; } public float x1 { get; private set; } public readonly Segment[] segments = new Segment[3]; public HableCurve() { for (int i = 0; i < 3; i++) segments[i] = new Segment(); } public float Eval(float x) { float normX = x * inverseWhitePoint; int index = (normX < x0) ? 0 : ((normX < x1) ? 1 : 2); var segment = segments[index]; float ret = segment.Eval(normX); return ret; } public void Init(float toeStrength, float toeLength, float shoulderStrength, float shoulderLength, float shoulderAngle, float gamma) { var dstParams = new DirectParams(); // This is not actually the display gamma. It's just a UI space to avoid having to // enter small numbers for the input. const float kPerceptualGamma = 2.2f; // Constraints { toeLength = Mathf.Pow(Mathf.Clamp01(toeLength), kPerceptualGamma); toeStrength = Mathf.Clamp01(toeStrength); shoulderAngle = Mathf.Clamp01(shoulderAngle); shoulderStrength = Mathf.Clamp(shoulderStrength, 1e-5f, 1f - 1e-5f); shoulderLength = Mathf.Max(0f, shoulderLength); gamma = Mathf.Max(1e-5f, gamma); } // Apply base params { // Toe goes from 0 to 0.5 float x0 = toeLength * 0.5f; float y0 = (1f - toeStrength) * x0; // Lerp from 0 to x0 float remainingY = 1f - y0; float initialW = x0 + remainingY; float y1_offset = (1f - shoulderStrength) * remainingY; float x1 = x0 + y1_offset; float y1 = y0 + y1_offset; // Filmic shoulder strength is in F stops float extraW = RuntimeUtilities.Exp2(shoulderLength) - 1f; float W = initialW + extraW; dstParams.x0 = x0; dstParams.y0 = y0; dstParams.x1 = x1; dstParams.y1 = y1; dstParams.W = W; // Bake the linear to gamma space conversion dstParams.gamma = gamma; } dstParams.overshootX = (dstParams.W * 2f) * shoulderAngle * shoulderLength; dstParams.overshootY = 0.5f * shoulderAngle * shoulderLength; InitSegments(dstParams); } void InitSegments(DirectParams srcParams) { var paramsCopy = srcParams; whitePoint = srcParams.W; inverseWhitePoint = 1f / srcParams.W; // normalize params to 1.0 range paramsCopy.W = 1f; paramsCopy.x0 /= srcParams.W; paramsCopy.x1 /= srcParams.W; paramsCopy.overshootX = srcParams.overshootX / srcParams.W; float toeM = 0f; float shoulderM = 0f; { float m, b; AsSlopeIntercept(out m, out b, paramsCopy.x0, paramsCopy.x1, paramsCopy.y0, paramsCopy.y1); float g = srcParams.gamma; // Base function of linear section plus gamma is // y = (mx+b)^g // // which we can rewrite as // y = exp(g*ln(m) + g*ln(x+b/m)) // // and our evaluation function is (skipping the if parts): /* float x0 = (x - offsetX) * scaleX; y0 = exp(m_lnA + 
m_B*log(x0)); return y0*scaleY + m_offsetY; */ var midSegment = segments[1]; midSegment.offsetX = -(b / m); midSegment.offsetY = 0f; midSegment.scaleX = 1f; midSegment.scaleY = 1f; midSegment.lnA = g * Mathf.Log(m); midSegment.B = g; toeM = EvalDerivativeLinearGamma(m, b, g, paramsCopy.x0); shoulderM = EvalDerivativeLinearGamma(m, b, g, paramsCopy.x1); // apply gamma to endpoints paramsCopy.y0 = Mathf.Max(1e-5f, Mathf.Pow(paramsCopy.y0, paramsCopy.gamma)); paramsCopy.y1 = Mathf.Max(1e-5f, Mathf.Pow(paramsCopy.y1, paramsCopy.gamma)); paramsCopy.overshootY = Mathf.Pow(1f + paramsCopy.overshootY, paramsCopy.gamma) - 1f; } this.x0 = paramsCopy.x0; this.x1 = paramsCopy.x1; // Toe section { var toeSegment = segments[0]; toeSegment.offsetX = 0; toeSegment.offsetY = 0f; toeSegment.scaleX = 1f; toeSegment.scaleY = 1f; float lnA, B; SolveAB(out lnA, out B, paramsCopy.x0, paramsCopy.y0, toeM); toeSegment.lnA = lnA; toeSegment.B = B; } // Shoulder section { // Use the simple version that is usually too flat var shoulderSegment = segments[2]; float x0 = (1f + paramsCopy.overshootX) - paramsCopy.x1; float y0 = (1f + paramsCopy.overshootY) - paramsCopy.y1; float lnA, B; SolveAB(out lnA, out B, x0, y0, shoulderM); shoulderSegment.offsetX = (1f + paramsCopy.overshootX); shoulderSegment.offsetY = (1f + paramsCopy.overshootY); shoulderSegment.scaleX = -1f; shoulderSegment.scaleY = -1f; shoulderSegment.lnA = lnA; shoulderSegment.B = B; } // Normalize so that we hit 1.0 at our white point. We wouldn't have do this if we // skipped the overshoot part. { // Evaluate shoulder at the end of the curve float scale = segments[2].Eval(1f); float invScale = 1f / scale; segments[0].offsetY *= invScale; segments[0].scaleY *= invScale; segments[1].offsetY *= invScale; segments[1].scaleY *= invScale; segments[2].offsetY *= invScale; segments[2].scaleY *= invScale; } } // Find a function of the form: // f(x) = e^(lnA + Bln(x)) // where // f(0) = 0; not really a constraint // f(x0) = y0 // f'(x0) = m void SolveAB(out float lnA, out float B, float x0, float y0, float m) { B = (m * x0) / y0; lnA = Mathf.Log(y0) - B * Mathf.Log(x0); } // Convert to y=mx+b void AsSlopeIntercept(out float m, out float b, float x0, float x1, float y0, float y1) { float dy = (y1 - y0); float dx = (x1 - x0); if (dx == 0) m = 1f; else m = dy / dx; b = y0 - x0 * m; } // f(x) = (mx+b)^g // f'(x) = gm(mx+b)^(g-1) float EvalDerivativeLinearGamma(float m, float b, float g, float x) { float ret = g * m * Mathf.Pow(m * x + b, g - 1f); return ret; } } }
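// ---------------------------------------------------------------------------------
// Illustrative usage sketch, not part of the PostProcessing package: Init bakes the six
// artist parameters into the three power-curve segments, then Eval maps a scene-referred
// value into [0, 1] relative to the baked white point. The parameter values below are
// arbitrary placeholders.
using UnityEngine.Rendering.PostProcessing;

internal static class HableCurveExample
{
    internal static float[] SampleCurve()
    {
        var curve = new HableCurve();
        curve.Init(
            toeStrength:      0.5f, // clamped to [0, 1]
            toeLength:        0.5f, // remapped through the 2.2 "perceptual gamma" UI space
            shoulderStrength: 0.5f, // clamped to (0, 1)
            shoulderLength:   2f,   // in F-stops; extends the white point
            shoulderAngle:    1f,   // clamped to [0, 1]
            gamma:            1f);

        // Sample a few scene-referred values; the white point itself maps to ~1
        // thanks to the normalization step at the end of InitSegments.
        return new[]
        {
            curve.Eval(0.18f),
            curve.Eval(1f),
            curve.Eval(curve.whitePoint)
        };
    }
}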
using System; using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Reflection; namespace Umbraco.Core { /// <summary> /// A utility class for type checking, this provides internal caching so that calls to these methods will be faster /// than doing a manual type check in c# /// </summary> internal static class TypeHelper { private static readonly ConcurrentDictionary<Type, FieldInfo[]> GetFieldsCache = new ConcurrentDictionary<Type, FieldInfo[]>(); private static readonly ConcurrentDictionary<Tuple<Type, bool, bool, bool>, PropertyInfo[]> GetPropertiesCache = new ConcurrentDictionary<Tuple<Type, bool, bool, bool>, PropertyInfo[]>(); /// <summary> /// Checks if the method is actually overriding a base method /// </summary> /// <param name="m"></param> /// <returns></returns> public static bool IsOverride(MethodInfo m) { return m.GetBaseDefinition().DeclaringType != m.DeclaringType; } /// <summary> /// Find all assembly references that are referencing the assignTypeFrom Type's assembly found in the assemblyList /// </summary> /// <param name="assignTypeFrom"></param> /// <param name="assemblies"></param> /// <returns></returns> /// <remarks> /// If the assembly of the assignTypeFrom Type is in the App_Code assembly, then we return nothing since things cannot /// reference that assembly, same with the global.asax assembly. /// </remarks> public static Assembly[] GetReferencedAssemblies(Type assignTypeFrom, IEnumerable<Assembly> assemblies) { //check if it is the app_code assembly. //check if it is App_global.asax assembly if (assignTypeFrom.Assembly.IsAppCodeAssembly() || assignTypeFrom.Assembly.IsGlobalAsaxAssembly()) { return Enumerable.Empty<Assembly>().ToArray(); } //find all assembly references that are referencing the current type's assembly since we //should only be scanning those assemblies because any other assembly will definitely not //contain sub type's of the one we're currently looking for return assemblies .Where(assembly => assembly == assignTypeFrom.Assembly || HasReferenceToAssemblyWithName(assembly, assignTypeFrom.Assembly.GetName().Name)) .ToArray(); } /// <summary> /// checks if the assembly has a reference with the same name as the expected assembly name. /// </summary> /// <param name="assembly"></param> /// <param name="expectedAssemblyName"></param> /// <returns></returns> private static bool HasReferenceToAssemblyWithName(Assembly assembly, string expectedAssemblyName) { return assembly .GetReferencedAssemblies() .Select(a => a.Name) .Contains(expectedAssemblyName, StringComparer.Ordinal); } /// <summary> /// Returns true if the type is a class and is not static /// </summary> /// <param name="t"></param> /// <returns></returns> public static bool IsNonStaticClass(Type t) { return t.IsClass && IsStaticClass(t) == false; } /// <summary> /// Returns true if the type is a static class /// </summary> /// <param name="type"></param> /// <returns></returns> /// <remarks> /// In IL a static class is abstract and sealed /// see: http://stackoverflow.com/questions/1175888/determine-if-a-type-is-static /// </remarks> public static bool IsStaticClass(Type type) { return type.IsAbstract && type.IsSealed; } /// <summary> /// Finds a lowest base class amongst a collection of types /// </summary> /// <param name="types"></param> /// <returns></returns> /// <remarks> /// The term 'lowest' refers to the most base class of the type collection. 
/// If a base type is not found amongst the type collection then an invalid attempt is returned. /// </remarks> public static Attempt<Type> GetLowestBaseType(params Type[] types) { if (types.Length == 0) { return Attempt<Type>.Fail(); } if (types.Length == 1) { return Attempt.Succeed(types[0]); } foreach (var curr in types) { var others = types.Except(new[] {curr}); //is the curr type a common denominator for all others ? var isBase = others.All(curr.IsAssignableFrom); //if this type is the base for all others if (isBase) { return Attempt.Succeed(curr); } } return Attempt<Type>.Fail(); } /// <summary> /// Determines whether the type <paramref name="implementation"/> is assignable from the specified implementation <typeparamref name="TContract"/>, /// and caches the result across the application using a <see cref="ConcurrentDictionary{TKey,TValue}"/>. /// </summary> /// <param name="contract">The type of the contract.</param> /// <param name="implementation">The implementation.</param> /// <returns> /// <c>true</c> if [is type assignable from] [the specified contract]; otherwise, <c>false</c>. /// </returns> public static bool IsTypeAssignableFrom(Type contract, Type implementation) { return contract.IsAssignableFrom(implementation); } /// <summary> /// Determines whether the type <paramref name="implementation"/> is assignable from the specified implementation <typeparamref name="TContract"/>, /// and caches the result across the application using a <see cref="ConcurrentDictionary{TKey,TValue}"/>. /// </summary> /// <typeparam name="TContract">The type of the contract.</typeparam> /// <param name="implementation">The implementation.</param> public static bool IsTypeAssignableFrom<TContract>(Type implementation) { return IsTypeAssignableFrom(typeof(TContract), implementation); } /// <summary> /// A cached method to determine whether <paramref name="implementation"/> represents a value type. /// </summary> /// <param name="implementation">The implementation.</param> public static bool IsValueType(Type implementation) { return implementation.IsValueType || implementation.IsPrimitive; } /// <summary> /// A cached method to determine whether <paramref name="implementation"/> is an implied value type (<see cref="Type.IsValueType"/>, <see cref="Type.IsEnum"/> or a string). 
/// </summary> /// <param name="implementation">The implementation.</param> public static bool IsImplicitValueType(Type implementation) { return IsValueType(implementation) || implementation.IsEnum || implementation == typeof (string); } public static bool IsTypeAssignableFrom<TContract>(object implementation) { if (implementation == null) throw new ArgumentNullException("implementation"); return IsTypeAssignableFrom<TContract>(implementation.GetType()); } /// <summary> /// Returns a PropertyInfo from a type /// </summary> /// <param name="type"></param> /// <param name="name"></param> /// <param name="mustRead"></param> /// <param name="mustWrite"></param> /// <param name="includeIndexed"></param> /// <param name="caseSensitive"> </param> /// <returns></returns> public static PropertyInfo GetProperty(Type type, string name, bool mustRead = true, bool mustWrite = true, bool includeIndexed = false, bool caseSensitive = true) { return CachedDiscoverableProperties(type, mustRead, mustWrite, includeIndexed) .FirstOrDefault(x => { if (caseSensitive) return x.Name == name; return x.Name.InvariantEquals(name); }); } /// <summary> /// Returns all public properties including inherited properties even for interfaces /// </summary> /// <param name="type"></param> /// <returns></returns> /// <remarks> /// taken from http://stackoverflow.com/questions/358835/getproperties-to-return-all-properties-for-an-interface-inheritance-hierarchy /// </remarks> public static PropertyInfo[] GetPublicProperties(Type type) { if (type.IsInterface) { var propertyInfos = new List<PropertyInfo>(); var considered = new List<Type>(); var queue = new Queue<Type>(); considered.Add(type); queue.Enqueue(type); while (queue.Count > 0) { var subType = queue.Dequeue(); foreach (var subInterface in subType.GetInterfaces()) { if (considered.Contains(subInterface)) continue; considered.Add(subInterface); queue.Enqueue(subInterface); } var typeProperties = subType.GetProperties( BindingFlags.FlattenHierarchy | BindingFlags.Public | BindingFlags.Instance); var newPropertyInfos = typeProperties .Where(x => !propertyInfos.Contains(x)); propertyInfos.InsertRange(0, newPropertyInfos); } return propertyInfos.ToArray(); } return type.GetProperties(BindingFlags.FlattenHierarchy | BindingFlags.Public | BindingFlags.Instance); } /// <summary> /// Gets (and caches) <see cref="FieldInfo"/> discoverable in the current <see cref="AppDomain"/> for a given <paramref name="type"/>. /// </summary> /// <param name="type">The source.</param> /// <returns></returns> public static FieldInfo[] CachedDiscoverableFields(Type type) { return GetFieldsCache.GetOrAdd( type, x => type .GetFields(BindingFlags.Public | BindingFlags.Instance) .Where(y => !y.IsInitOnly) .ToArray()); } /// <summary> /// Gets (and caches) <see cref="PropertyInfo"/> discoverable in the current <see cref="AppDomain"/> for a given <paramref name="type"/>. 
/// </summary> /// <param name="type">The source.</param> /// <param name="mustRead">true if the properties discovered are readable</param> /// <param name="mustWrite">true if the properties discovered are writable</param> /// <param name="includeIndexed">true if the properties discovered are indexable</param> /// <returns></returns> public static PropertyInfo[] CachedDiscoverableProperties(Type type, bool mustRead = true, bool mustWrite = true, bool includeIndexed = false) { return GetPropertiesCache.GetOrAdd( new Tuple<Type, bool, bool, bool>(type, mustRead, mustWrite, includeIndexed), x => type .GetProperties(BindingFlags.Public | BindingFlags.Instance) .Where(y => (!mustRead || y.CanRead) && (!mustWrite || y.CanWrite) && (includeIndexed || !y.GetIndexParameters().Any())) .ToArray()); } } }
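// ---------------------------------------------------------------------------------
// Illustrative usage sketch, not part of Umbraco.Core: TypeHelper is internal, so calls
// like these would live inside the same assembly. The Animal/Dog/Cat types and the Demo
// method are hypothetical; only the TypeHelper members come from the class above.
using System;
using System.Reflection;

namespace Umbraco.Core
{
    internal static class TypeHelperExample
    {
        private class Animal { public string Name { get; set; } }
        private class Dog : Animal { public bool GoodBoy { get; set; } }
        private class Cat : Animal { }

        internal static void Demo()
        {
            // Case-insensitive lookup through the cached property discovery path.
            PropertyInfo nameProp = TypeHelper.GetProperty(typeof(Dog), "name", caseSensitive: false);

            // Succeeds only when one of the supplied types is a base of all the others,
            // which is why Animal itself must be part of the set here.
            Attempt<Type> lowest = TypeHelper.GetLowestBaseType(typeof(Animal), typeof(Dog), typeof(Cat));

            // Convenience type checks.
            bool assignable = TypeHelper.IsTypeAssignableFrom<Animal>(typeof(Dog)); // true
            bool implicitValue = TypeHelper.IsImplicitValueType(typeof(string));    // true
        }
    }
}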
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.IO; using System.Collections; using System.Globalization; using System.Text; using System.Threading; using System.Runtime.CompilerServices; using Xunit; public class Directory_Move_str_str { public static String s_strDtTmVer = "2000/07/12 10:42"; public static String s_strClassMethod = "Directory.Move(String,String)"; public static String s_strTFName = "Move_str_str.cs"; public static String s_strTFPath = Directory.GetCurrentDirectory(); [Fact] public static void runTest() { int iCountErrors = 0; int iCountTestcases = 0; String strLoc = "Loc_000oo"; String strValue = String.Empty; try { ///////////////////////// START TESTS //////////////////////////// /////////////////////////////////////////////////////////////////// String tempDirName = Path.Combine(TestInfo.CurrentDirectory, "TempDirectory"); String dirName = Path.Combine(TestInfo.CurrentDirectory, "Move_str_str_test_Dir"); DirectoryInfo dir2 = null; if (Directory.Exists(tempDirName)) Directory.Delete(tempDirName, true); if (Directory.Exists(dirName)) Directory.Delete(dirName, true); // [] Argumentnull exception for null arguments //----------------------------------------------------------------- strLoc = "Loc_00001"; iCountTestcases++; try { Directory.Move(null, dirName); iCountErrors++; printerr("Error_00002! Expected exception not thrown"); } catch (ArgumentNullException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00004! Incorrect exception thrown, exc==" + exc.ToString()); } iCountTestcases++; try { Directory.Move(dirName, null); iCountErrors++; printerr("Error_00005! Expected exception not thrown"); } catch (ArgumentNullException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00007! Incorrect exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] ArgumentException for zero length arguments //----------------------------------------------------------------- strLoc = "Loc_00008"; iCountTestcases++; try { Directory.Move(String.Empty, dirName); iCountErrors++; printerr("Error_00008! Expected exception not thrown"); } catch (ArgumentException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00010! Incorrect exception thrown, exc==" + exc.ToString()); } iCountTestcases++; try { Directory.Move(dirName, String.Empty); iCountErrors++; printerr("Error_00011! Expected exception not thrown"); } catch (ArgumentException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00013! Incorrect exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] Try to move a directory that does not exist //----------------------------------------------------------------- strLoc = "Loc_00014"; iCountTestcases++; try { Directory.Move(Path.Combine(TestInfo.CurrentDirectory, "NonExistentDirectory"), dirName); iCountErrors++; printerr("Error_00015! Expected exception not thrown"); } catch (DirectoryNotFoundException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00017! 
Incorrect exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] AccessException when moving onto existing directory //----------------------------------------------------------------- strLoc = "Loc_00018"; iCountTestcases++; try { Directory.Move(TestInfo.CurrentDirectory, TestInfo.CurrentDirectory); iCountErrors++; printerr("Error_00019! Expected exception not thrown"); } catch (IOException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00021! Incorrect exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] Move a directory and check that it is moved //----------------------------------------------------------------- strLoc = "Loc_00022"; Directory.CreateDirectory(dirName); Directory.Move(dirName, tempDirName); iCountTestcases++; if (Directory.Exists(dirName)) { iCountErrors++; printerr("Error_00023! Source directory still there"); } if (!Directory.Exists(tempDirName)) { iCountErrors++; printerr("Error_00024! destination directory missing"); } Directory.Delete(tempDirName); //[]Move directories that end with directory separator //----------------------------------------------------------------- Directory.CreateDirectory(dirName); Directory.Move(dirName + Path.DirectorySeparatorChar, tempDirName + Path.DirectorySeparatorChar); iCountTestcases++; if (Directory.Exists(dirName)) { iCountErrors++; printerr("Error_00023! Source directory still there"); } if (!Directory.Exists(tempDirName)) { iCountErrors++; printerr("Error_00024! destination directory missing"); } Directory.Delete(tempDirName); #if !TEST_WINRT if (Interop.IsWindows) // moving between drive labels { // [] Move to different drive will throw AccessException //----------------------------------------------------------------- strLoc = "Loc_00025"; string fullDirName = Path.GetFullPath(dirName); Directory.CreateDirectory(dirName); iCountTestcases++; try { if (fullDirName.Substring(0, 3) == @"d:\" || fullDirName.Substring(0, 3) == @"D:\") Directory.Move(fullDirName, "C:\\TempDirectory"); else Directory.Move(fullDirName, "D:\\TempDirectory"); Console.WriteLine("Root directory..." + fullDirName.Substring(0, 3)); iCountErrors++; printerr("Error_00026! Expected exception not thrown"); } catch (IOException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00000! Incorrect exception thrown, exc==" + exc.ToString()); } if (Directory.Exists(fullDirName)) Directory.Delete(fullDirName, true); //----------------------------------------------------------------- } #endif // [] Moving Directory with subdirectories //----------------------------------------------------------------- strLoc = "Loc_00028"; dir2 = Directory.CreateDirectory(dirName); dir2.CreateSubdirectory(Path.Combine("SubDir", "SubSubDir")); FailSafeDirectoryOperations.MoveDirectory(dirName, tempDirName); // Directory.Move(dirName, tempDirName); iCountTestcases++; if (Directory.Exists(dirName)) { iCountErrors++; printerr("Error_00029! Source directory still there"); } dir2 = new DirectoryInfo(tempDirName); iCountTestcases++; if (!Directory.Exists(dir2.FullName)) { iCountErrors++; printerr("Error_00030! Destination directory missing"); } iCountTestcases++; if (!Directory.Exists(Path.Combine(dir2.FullName, "SubDir", "SubSubDir"))) { iCountErrors++; printerr("Error_00031! 
Subdirectories not moved"); } dir2.Delete(true); //----------------------------------------------------------------- if (Interop.IsWindows) { // [] wildchars in src directory //----------------------------------------------------------------- strLoc = "Loc_00032"; iCountTestcases++; try { Directory.Move("*", tempDirName); iCountErrors++; printerr("Error_00033! Expected exception not thrown"); } catch (ArgumentException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00035! Incorrect exception thrown, exc==" + exc.ToString()); } } //----------------------------------------------------------------- if (Interop.IsWindows) { // [] wildchars in dest directory //----------------------------------------------------------------- strLoc = "Loc_00036"; iCountTestcases++; try { Directory.Move(TestInfo.CurrentDirectory, "Temp*"); iCountErrors++; printerr("Error_00037! Expected exception not thrown"); } catch (ArgumentException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00039! Incorrect exception thrown, exc==" + exc.ToString()); } } //----------------------------------------------------------------- // [] InvalidPathChars //----------------------------------------------------------------- strLoc = "Loc_00040"; iCountTestcases++; try { Directory.Move(TestInfo.CurrentDirectory, "<MyDirectory\0"); iCountErrors++; printerr("Error_00041! Expected exception not thrown"); } catch (ArgumentException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00043 Incorret exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] PathTooLongException if destination name is too long //----------------------------------------------------------------- strLoc = "Loc_00044"; String str = new string('a', IOInputs.MaxPath); iCountTestcases++; try { Directory.Move(TestInfo.CurrentDirectory, str); iCountErrors++; printerr("Error_00045! Expected exception not thrown"); } catch (PathTooLongException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00047! Incorrect exception thrown, exc==" + exc.ToString()); } //----------------------------------------------------------------- // [] Non-existent drive specified for destination //----------------------------------------------------------------- strLoc = "Loc_00048"; if (Interop.IsWindows) // drive labels { iCountTestcases++; Directory.CreateDirectory(dirName); try { Directory.Move(dirName, "X:\\Temp"); iCountErrors++; printerr("Error_00049! Expected exception not thrown"); } catch (IOException) { } catch (Exception exc) { iCountErrors++; printerr("Error_00051! Incorrect exception thrown, exc==" + exc.ToString()); } Directory.Delete(dirName); } //----------------------------------------------------------------- // [] Use directory names with spaces //----------------------------------------------------------------- strLoc = "Loc_00052"; string destDirName = Path.Combine(TestInfo.CurrentDirectory, "This is my directory"); Directory.CreateDirectory(dirName); Directory.Move(dirName, destDirName); iCountTestcases++; if (!Directory.Exists(destDirName)) { iCountErrors++; printerr("Error_00053! 
Destination directory missing"); } Directory.Delete(destDirName); //----------------------------------------------------------------- // ][ Directory names in different cultures //----------------------------------------------------------------- //----------------------------------------------------------------- if (Directory.Exists(tempDirName)) Directory.Delete(tempDirName, true); if (Directory.Exists(dirName)) Directory.Delete(dirName, true); /////////////////////////////////////////////////////////////////// /////////////////////////// END TESTS ///////////////////////////// } catch (Exception exc_general) { ++iCountErrors; printerr("Error Err_8888yyy! strLoc==" + strLoc + ", exc_general==" + exc_general.ToString()); } //// Finish Diagnostics if (iCountErrors != 0) { Console.WriteLine("FAiL! " + s_strTFName + " ,iCountErrors==" + iCountErrors.ToString()); } Assert.Equal(0, iCountErrors); } public static void printerr(String err, [CallerMemberName] string memberName = "", [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0) { Console.WriteLine("ERROR: ({0}, {1}, {2}) {3}", memberName, filePath, lineNumber, err); } }
using System.Collections.Generic; using System.IO; using EnvDTE; using EnvDTE80; using System.Reflection; using System; using Extensibility; using Microsoft.VisualStudio.CommandBars; using System.Resources; using System.Globalization; using System.Windows.Forms; using System.Diagnostics; using System.Threading; using System.Text; using VSLangProj; namespace Randoop { /// <summary> /// This command opens Windows Explorer in the folder corresponding to the specified /// selected item in the Solution Explorer Window. /// </summary> public class RandoopCommand : CommandBase { /// <summary> /// Initializes a new instance of the <see cref="T:OpenFolderCommand"/> class. /// </summary> /// <param name="application">The application.</param> public RandoopCommand(DTE2 application) : base(application, "RandoopMenuCaption", "RandoopExec") { } /// <summary> /// Opens the selected item folder. /// </summary> public override void Perform() { randoopExe(application); } private void randoopExe(DTE2 application) { { ////////////////////////////////////////////////////////////////////////////////// //step 1. when load randoop_net_addin, the path of "randoop" is defined ////////////////////////////////////////////////////////////////////////////////// string installPath = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location); var randoop_path = installPath + "\\Randoop-NET-release"; ////////////////////////////////////////////////////////////////////////////////// // step 2. create win form (if an item is or is not selected in solution explorer) ////////////////////////////////////////////////////////////////////////////////// UIHierarchy solutionExplorer = application.ToolWindows.SolutionExplorer; var items = solutionExplorer.SelectedItems as Array; var arg = new Arguments(randoop_path); //if (items.Length >= 1) if (items.Length == 1) { /* if (items.Length > 1) { MessageBox.Show("Select only one item.", "ERROR"); return; }*/ UIHierarchyItem item1 = items.GetValue(0) as UIHierarchyItem; var prJItem = item1.Object as ProjectItem; if (prJItem != null) { string prjPath = prJItem.Properties.Item("FullPath").Value.ToString(); if (prjPath.EndsWith(".dll") || prjPath.EndsWith(".exe")) arg.SetDllToTest(prjPath); } } ////////////////////////////////////////////////////////////////////////////////// // step 3. show the win form ////////////////////////////////////////////////////////////////////////////////// arg.ShowDialog(); if (arg.ifContinue() == false) { //MessageBox.Show("not going to execute Randoop."); return; } ////////////////////////////////////////////////////////////////////////////////// // step 4. run Randoop.exe while reporting progress ////////////////////////////////////////////////////////////////////////////////// string exepath = randoop_path + "\\bin\\Randoop.exe"; if (!File.Exists(exepath)) { MessageBox.Show("Can't find Randoop.exe!", "ERROR"); return; } var prg = new Progress(); int totalTime = arg.GetTimeLimit(); prg.getTotalTime(totalTime); prg.setRandoopExe(exepath); prg.setRandoopArg(arg.GetRandoopArg()); /* prg.ShowDialog(); if (prg.isNormal() == false) { return; } */ ////////////////////////////////////////////////////////////////////////////////// // step 5. 
convert all test files to one RandoopTest.cs ////////////////////////////////////////////////////////////////////////////////// //MessageBox.Show("Randoop finishes generating test cases.", "Progress"); // [progress tracking] string out_dir = arg.GetTestFilepath(); int nTestPfile = arg.GetTestNoPerFile(); prg.setOutDir(out_dir); prg.setTestpFile(nTestPfile); //toMSTest objToMsTest = new toMSTest(); //MessageBox.Show("Converting test cases to MSTest format.", "Progress"); // [progress tracking] //objToMsTest.Convert(out_dir, nTestPfile); ////////////////////////////////////////////////////////////////////////////////// // step 6. add/include RandoopTest.cs in a/the Test Project ////////////////////////////////////////////////////////////////////////////////// //MessageBox.Show("Creating a Test Project and add test files ...", "Progress"); // [progress tracking] string dllTest = arg.GetDllToTest(); prg.setObjTested(dllTest); //CreateTestPrj(out_dir, dllTest); //MessageBox.Show("Task Completes!", "Progress"); // [progress tracking] ////////////////////////////////////////////////////////////////////////////////// // final. progress traking complete with the progress bar ////////////////////////////////////////////////////////////////////////////////// prg.ShowDialog(); if (prg.isNormal() == false) { return; } string pathToTestPrj = CreateTestPrj(out_dir, dllTest, application); MessageBox.Show("Test file is created in project: " + pathToTestPrj); //invoke ie to open index.html generated by Randoop System.Diagnostics.Process.Start("IEXPLORE.EXE", pathToTestPrj + "\\index.html"); } return; } private string CreateTestPrj(string outDir, string dll_test, DTE2 application) { try { Solution curSolution = application.Solution; ///////////////////////////////////////////////////////////////////////////////////////////////// // [step a] [TODO] decide if there is a test project already existing in current solution (p2) ///////////////////////////////////////////////////////////////////////////////////////////////// //MessageBox.Show((curSolution.FullName)); //debug //MessageBox.Show(curSolution.GetType().ToString()); //debug //var prjs = curSolution.Projects; //if (prjs == null) //{ // MessageBox.Show(("no project object?")); // return; //} //foreach (Project prj in prjs) //{ // //MessageBox.Show(prj.Kind); //debug // //MessageBox.Show(prj.FullName); //debug // var prjguid = new Guid(prj.Kind); // if (IsVisualCSharpProject(prjguid)) // MessageBox.Show(prj.Name+" is C# project.", "type of project", MessageBoxButtons.OKCancel); //debug // else // { // if(IsTestProject(prjguid)) // MessageBox.Show(prj.Name + " is test project.", "type of project", MessageBoxButtons.OKCancel); //debug // else // MessageBox.Show(prj.Name + "is not a type that we care.", "type of project", MessageBoxButtons.OKCancel); //debug // } //} ////////////////////////////////////////////////////////////////////////////////////////////// // [step b] automatically create a Test Project (particularly for Randoop generated tests) ////////////////////////////////////////////////////////////////////////////////////////////// int existRandoopTest = 0; var prjs = curSolution.Projects; if (prjs == null) { MessageBox.Show("No project in current solution.", "ERROR"); return null; } foreach (Project prj in prjs) { if (prj.Name.Contains("RandoopTestPrj")) existRandoopTest = 1; } //if "RandoopTestPrj" is not existing, create a new one if (existRandoopTest == 0) { string testPrjPath = curSolution.FullName; string prjName = "RandoopTestPrj"; 
//MessageBox.Show(testPrjPath); //debug int index = testPrjPath.LastIndexOf("\\"); testPrjPath = testPrjPath.Substring(0, index + 1) + prjName; //MessageBox.Show(testPrjPath); //debug Solution2 soln = curSolution as Solution2; string csTemplatePath = soln.GetProjectTemplate("TestProject.zip", "CSharp"); //MessageBox.Show(csTemplatePath); //debug curSolution.AddFromTemplate(csTemplatePath, testPrjPath, prjName, false); //IMPORTANT: it always returns NULL } //locate the Randoop Test Project in current solution var allPrjs = curSolution.Projects; int idTestPrj = 1; foreach (Project prj in allPrjs) { if (prj.FullName.Contains("RandoopTestPrj")) break; idTestPrj++; } /////////////////////////////////////////////////////////////////////////////////////// // [step c] add/included converted RandoopTest.cs file under the project created above /////////////////////////////////////////////////////////////////////////////////////// string testFilePath = outDir + "\\RandoopTest.cs"; string testHtmlPath = outDir + "\\index.html"; string testStatPath = outDir + "\\allstats.txt"; bool isAdded1 = false; bool isAdded2 = false; bool isAdded3 = false; Project testPrj = curSolution.Projects.Item(idTestPrj); if (testPrj != null) { foreach (ProjectItem it in testPrj.ProjectItems) { if (it.Name.Contains("RandoopTest.cs") && (isAdded1 == false)) { it.Delete(); testPrj.ProjectItems.AddFromFileCopy(testFilePath); isAdded1 = true; } else { if (it.Name.Contains("index.html") && (isAdded2 == false)) { it.Delete(); testPrj.ProjectItems.AddFromFileCopy(testHtmlPath); isAdded2 = true; } else { if (it.Name.Contains("allstats.txt") && (isAdded3 == false)) { it.Delete(); testPrj.ProjectItems.AddFromFileCopy(testStatPath); isAdded3 = true; } } } } if (!isAdded1) testPrj.ProjectItems.AddFromFileCopy(testFilePath); if (!isAdded2) testPrj.ProjectItems.AddFromFileCopy(testHtmlPath); if (!isAdded3) testPrj.ProjectItems.AddFromFileCopy(testStatPath); //if (testPrj.ProjectItems.Count > 2) { foreach (ProjectItem it in testPrj.ProjectItems) { if (it.Name.Contains("UnitTest1.cs")) it.Delete(); } } //delete original randoop outputs Directory.Delete(outDir, true); //Programmatically add references to project under test if (existRandoopTest == 0) { VSProject selectedVSProject = null; selectedVSProject = (VSProject)testPrj.Object; selectedVSProject.References.Add(dll_test); } return (testPrj.FullName.Replace("\\RandoopTestPrj.csproj", "")); } } catch (System.Exception ex) { System.Windows.Forms.MessageBox.Show("ERROR: " + ex.Message); } return null; } public static bool IsVisualCSharpProject(Guid projectKind) { return projectKind.CompareTo(new Guid("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}")) == 0; } public static bool IsTestProject(Guid projectKind) { return projectKind.CompareTo(new Guid("{3AC096D0-A1C2-E12C-1390-A8335801FDAB}")) == 0; } } }
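A small illustrative sketch (not part of the add-in above): CreateTestPrj locates an existing test project by matching the name "RandoopTestPrj", while the GUID helpers IsVisualCSharpProject/IsTestProject are only exercised in the commented-out debug block. Assuming an EnvDTE80.DTE2 instance is available (as in Perform), a kind-based lookup could look like the following; FindExistingTestProject is a hypothetical helper, not part of the original source.

using System;
using EnvDTE;
using EnvDTE80;

namespace Randoop
{
    internal static class TestProjectLocator
    {
        // Returns the first project whose kind GUID matches the test project type,
        // or null when the solution contains no test project.
        public static Project FindExistingTestProject(DTE2 application)
        {
            foreach (Project prj in application.Solution.Projects)
            {
                Guid kind;
                if (Guid.TryParse(prj.Kind, out kind) && RandoopCommand.IsTestProject(kind))
                {
                    return prj;
                }
            }
            return null;
        }
    }
}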
#region license // Copyright (c) 2004, Rodrigo B. de Oliveira ([email protected]) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * Neither the name of Rodrigo B. de Oliveira nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF // THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion namespace Boo.Lang.Compiler.Steps { using System.Collections; using Boo.Lang; using Boo.Lang.Compiler.Ast; using Boo.Lang.Compiler.TypeSystem; public class ProcessSharedLocals : AbstractTransformerCompilerStep { Method _currentMethod; ClassDefinition _sharedLocalsClass; Hashtable _mappings = new Hashtable(); List _references = new List(); List _shared = new List(); int _closureDepth; override public void Run() { Visit(CompileUnit); } override public void Dispose() { _shared.Clear(); _references.Clear(); _mappings.Clear(); base.Dispose(); } override public void OnField(Field node) { } override public void OnInterfaceDefinition(InterfaceDefinition node) { } override public void OnEnumDefinition(EnumDefinition node) { } override public void OnConstructor(Constructor node) { OnMethod(node); } override public void OnMethod(Method node) { _references.Clear(); _mappings.Clear(); _currentMethod = node; _sharedLocalsClass = null; _closureDepth = 0; Visit(node.Body); CreateSharedLocalsClass(); if (null != _sharedLocalsClass) { //node.DeclaringType.Members.Add(_sharedLocalsClass); TypeSystemServices.AddCompilerGeneratedType(_sharedLocalsClass); Map(); } } override public void OnBlockExpression(BlockExpression node) { ++_closureDepth; Visit(node.Body); --_closureDepth; } override public void OnGeneratorExpression(GeneratorExpression node) { ++_closureDepth; Visit(node.Iterator); Visit(node.Expression); Visit(node.Filter); --_closureDepth; } override public void OnReferenceExpression(ReferenceExpression node) { ILocalEntity local = node.Entity as ILocalEntity; if (null == local) return; if (local.IsPrivateScope) return; _references.Add(node); if (_closureDepth == 0) return; local.IsShared = _currentMethod.Locals.ContainsEntity(local) || _currentMethod.Parameters.ContainsEntity(local); } void Map() { IType type = (IType)_sharedLocalsClass.Entity; InternalLocal locals = CodeBuilder.DeclareLocal(_currentMethod, "___locals", 
type); foreach (ReferenceExpression reference in _references) { IField mapped = (IField)_mappings[reference.Entity]; if (null == mapped) continue; reference.ParentNode.Replace( reference, CodeBuilder.CreateMemberReference( CodeBuilder.CreateReference(locals), mapped)); } Block initializationBlock = new Block(); initializationBlock.Add(CodeBuilder.CreateAssignment( CodeBuilder.CreateReference(locals), CodeBuilder.CreateConstructorInvocation(type.GetConstructors()[0]))); InitializeSharedParameters(initializationBlock, locals); _currentMethod.Body.Statements.Insert(0, initializationBlock); foreach (IEntity entity in _mappings.Keys) { _currentMethod.Locals.RemoveByEntity(entity); } } void InitializeSharedParameters(Block block, InternalLocal locals) { foreach (Node node in _currentMethod.Parameters) { InternalParameter param = (InternalParameter)node.Entity; if (param.IsShared) { block.Add( CodeBuilder.CreateAssignment( CodeBuilder.CreateMemberReference( CodeBuilder.CreateReference(locals), (IField)_mappings[param]), CodeBuilder.CreateReference(param))); } } } void CreateSharedLocalsClass() { _shared.Clear(); CollectSharedLocalEntities(_currentMethod.Locals); CollectSharedLocalEntities(_currentMethod.Parameters); if (_shared.Count > 0) { BooClassBuilder builder = CodeBuilder.CreateClass( string.Format("___locals{0}", _context.AllocIndex())); builder.Modifiers |= TypeMemberModifiers.Internal; builder.AddBaseType(TypeSystemServices.ObjectType); int i=0; foreach (ILocalEntity local in _shared) { Field field = builder.AddInternalField( string.Format("___{0}_{1}", local.Name, i), local.Type); ++i; _mappings[local] = field.Entity; } builder.AddConstructor().Body.Add( CodeBuilder.CreateSuperConstructorInvocation(TypeSystemServices.ObjectType)); _sharedLocalsClass = builder.ClassDefinition; } } void CollectSharedLocalEntities<T>(System.Collections.Generic.IEnumerable<T> nodes) where T : Node { foreach (T node in nodes) { ILocalEntity local = (ILocalEntity)node.Entity; if (local.IsShared) { _shared.Add(local); } } } } }
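To make the intent of ProcessSharedLocals easier to follow, here is a hand-written C# analogue of the transformation it performs on the Boo AST: a local (or parameter) captured by a closure is marked shared, hoisted into a compiler-generated class (named ___locals{n}, with fields named ___{name}_{i}), every reference is rewritten to a field access, and an initialization block is inserted at the top of the method. The names below are illustrative only; the real step builds Boo AST nodes through CodeBuilder rather than emitting source.

using System;

static class SharedLocalsIllustration
{
    // Before the transformation: `count` is an ordinary local captured by a closure.
    static Func<int> Before()
    {
        int count = 0;
        return () => ++count;
    }

    // Roughly what the step produces: the shared local lives in a generated class,
    // and both the method body and the closure go through the same instance.
    sealed class ___locals1
    {
        public int ___count_0;
    }

    static Func<int> After()
    {
        var locals = new ___locals1();      // initialization block inserted at statement 0
        locals.___count_0 = 0;              // original initializer, now a field store
        return () => ++locals.___count_0;   // reference rewritten to a member access
    }
}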
// Licensed to the Apache Software Foundation(ASF) under one // or more contributor license agreements.See the NOTICE file // distributed with this work for additional information // regarding copyright ownership.The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. using System; using System.Collections.Generic; using System.Linq; using System.Security.Cryptography.Xml; using System.Text; using Microsoft.VisualStudio.TestTools.UnitTesting; using Thrift.Collections; namespace Thrift.Tests.Collections { // ReSharper disable once InconsistentNaming [TestClass] public class TCollectionsTests { //TODO: Add tests for IEnumerable with objects and primitive values inside [TestMethod] public void TCollection_List_Equals_Primitive_Test() { var collection1 = new List<int> {1,2,3}; var collection2 = new List<int> {1,2,3}; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_List_Equals_Primitive_Different_Test() { var collection1 = new List<int> { 1, 2, 3 }; var collection2 = new List<int> { 1, 2 }; Assert.IsFalse(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); collection2.Add(4); Assert.IsFalse(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_List_Equals_Objects_Test() { var collection1 = new List<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; var collection2 = new List<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_List_List_Equals_Objects_Test() { var collection1 = new List<List<ExampleClass>> { new List<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } } }; var collection2 = new List<List<ExampleClass>> { new List<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); // SequenceEqual() calls Equals() of the inner list instead of SequenceEqual() } [TestMethod] public void TCollection_List_Equals_OneAndTheSameObject_Test() { var collection1 = new List<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; var collection2 = collection1; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Set_Equals_Primitive_Test() { var collection1 = new THashSet<int> {1,2,3}; var collection2 = new THashSet<int> {1,2,3}; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Set_Equals_Primitive_Different_Test() { var collection1 = new THashSet<int> { 1, 2, 3 }; var 
collection2 = new THashSet<int> { 1, 2 }; Assert.IsFalse(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); collection2.Add(4); Assert.IsFalse(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Set_Equals_Objects_Test() { var collection1 = new THashSet<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; var collection2 = new THashSet<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Set_Set_Equals_Objects_Test() { var collection1 = new THashSet<THashSet<ExampleClass>> { new THashSet<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } } }; var collection2 = new THashSet<THashSet<ExampleClass>> { new THashSet<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); // SequenceEqual() calls Equals() of the inner list instead of SequenceEqual() } [TestMethod] public void TCollection_Set_Equals_OneAndTheSameObject_Test() { var collection1 = new THashSet<ExampleClass> { new ExampleClass { X = 1 }, new ExampleClass { X = 2 } }; var collection2 = collection1; // references to one and the same collection Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Map_Equals_Primitive_Test() { var collection1 = new Dictionary<int, int> { [1] = 1, [2] = 2, [3] = 3 }; var collection2 = new Dictionary<int, int> { [1] = 1, [2] = 2, [3] = 3 }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Map_Equals_Primitive_Different_Test() { var collection1 = new Dictionary<int, int> { [1] = 1, [2] = 2, [3] = 3 }; var collection2 = new Dictionary<int, int> { [1] = 1, [2] = 2 }; Assert.IsFalse(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); collection2[3] = 3; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); collection2[3] = 4; Assert.IsFalse(TCollections.Equals(collection1, collection2)); } [TestMethod] public void TCollection_Map_Equals_Objects_Test() { var collection1 = new Dictionary<int, ExampleClass> { [1] = new ExampleClass { X = 1 }, [-1] = new ExampleClass { X = 2 } }; var collection2 = new Dictionary<int, ExampleClass> { [1] = new ExampleClass { X = 1 }, [-1] = new ExampleClass { X = 2 } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } [TestMethod] public void TCollection_Map_Map_Equals_Objects_Test() { var collection1 = new Dictionary<int, Dictionary<int, ExampleClass>> { [0] = new Dictionary<int, ExampleClass> { [1] = new ExampleClass { X = 1 }, [-1] = new ExampleClass { X = 2 } } }; var collection2 = new Dictionary<int, Dictionary<int, ExampleClass>> { [0] = new Dictionary<int, ExampleClass> { [1] = new ExampleClass { X = 1 }, [-1] = new ExampleClass { X = 2 } } }; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsFalse(collection1.SequenceEqual(collection2)); // SequenceEqual() calls Equals() of 
the inner list instead of SequenceEqual() } [TestMethod] public void TCollection_Map_Equals_OneAndTheSameObject_Test() { var collection1 = new Dictionary<int, ExampleClass> { [1] = new ExampleClass { X = 1 }, [-1] = new ExampleClass { X = 2 } }; var collection2 = collection1; Assert.IsTrue(TCollections.Equals(collection1, collection2)); Assert.IsTrue(collection1.SequenceEqual(collection2)); } private class ExampleClass { public int X { get; set; } // all Thrift-generated classes override Equals(), we do just the same public override bool Equals(object that) { if (!(that is ExampleClass other)) return false; if (ReferenceEquals(this, other)) return true; return this.X == other.X; } // overriding Equals() requires GetHashCode() as well public override int GetHashCode() { int hashcode = 157; unchecked { hashcode = (hashcode * 397) + X.GetHashCode(); } return hashcode; } } } }
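The comments in the nested-collection tests above note that SequenceEqual() compares the inner collections with Equals() (reference equality) rather than element by element, which is why TCollections.Equals and SequenceEqual disagree there. Purely as a point of comparison (not part of the test suite), an explicit element-wise check for one level of nesting can be written with plain LINQ:

using System.Collections.Generic;
using System.Linq;

static class NestedSequenceCompare
{
    // True when both outer lists have the same length and each pair of inner lists
    // is element-wise equal (using the element type's Equals, as SequenceEqual does).
    public static bool NestedSequenceEqual<T>(IReadOnlyList<List<T>> left, IReadOnlyList<List<T>> right)
    {
        return left.Count == right.Count
            && left.Zip(right, (a, b) => a.SequenceEqual(b)).All(equal => equal);
    }
}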
using System; using System.IO; using System.ComponentModel; using System.Collections.Generic; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using ChargeBee.Internal; using ChargeBee.Api; using ChargeBee.Models.Enums; using ChargeBee.Filters.Enums; namespace ChargeBee.Models { public class Transaction : Resource { public Transaction() { } public Transaction(Stream stream) { using (StreamReader reader = new StreamReader(stream)) { JObj = JToken.Parse(reader.ReadToEnd()); apiVersionCheck (JObj); } } public Transaction(TextReader reader) { JObj = JToken.Parse(reader.ReadToEnd()); apiVersionCheck (JObj); } public Transaction(String jsonString) { JObj = JToken.Parse(jsonString); apiVersionCheck (JObj); } #region Methods public static CreateAuthorizationRequest CreateAuthorization() { string url = ApiUtil.BuildUrl("transactions", "create_authorization"); return new CreateAuthorizationRequest(url, HttpMethod.POST); } public static EntityRequest<Type> VoidTransaction(string id) { string url = ApiUtil.BuildUrl("transactions", CheckNull(id), "void"); return new EntityRequest<Type>(url, HttpMethod.POST); } public static RecordRefundRequest RecordRefund(string id) { string url = ApiUtil.BuildUrl("transactions", CheckNull(id), "record_refund"); return new RecordRefundRequest(url, HttpMethod.POST); } public static RefundRequest Refund(string id) { string url = ApiUtil.BuildUrl("transactions", CheckNull(id), "refund"); return new RefundRequest(url, HttpMethod.POST); } public static TransactionListRequest List() { string url = ApiUtil.BuildUrl("transactions"); return new TransactionListRequest(url); } [Obsolete] public static ListRequest TransactionsForCustomer(string id) { string url = ApiUtil.BuildUrl("customers", CheckNull(id), "transactions"); return new ListRequest(url); } [Obsolete] public static ListRequest TransactionsForSubscription(string id) { string url = ApiUtil.BuildUrl("subscriptions", CheckNull(id), "transactions"); return new ListRequest(url); } public static ListRequest PaymentsForInvoice(string id) { string url = ApiUtil.BuildUrl("invoices", CheckNull(id), "payments"); return new ListRequest(url); } public static EntityRequest<Type> Retrieve(string id) { string url = ApiUtil.BuildUrl("transactions", CheckNull(id)); return new EntityRequest<Type>(url, HttpMethod.GET); } public static DeleteOfflineTransactionRequest DeleteOfflineTransaction(string id) { string url = ApiUtil.BuildUrl("transactions", CheckNull(id), "delete_offline_transaction"); return new DeleteOfflineTransactionRequest(url, HttpMethod.POST); } #endregion #region Properties public string Id { get { return GetValue<string>("id", true); } } public string CustomerId { get { return GetValue<string>("customer_id", false); } } public string SubscriptionId { get { return GetValue<string>("subscription_id", false); } } public string GatewayAccountId { get { return GetValue<string>("gateway_account_id", false); } } public string PaymentSourceId { get { return GetValue<string>("payment_source_id", false); } } public PaymentMethodEnum PaymentMethod { get { return GetEnum<PaymentMethodEnum>("payment_method", true); } } public string ReferenceNumber { get { return GetValue<string>("reference_number", false); } } public GatewayEnum Gateway { get { return GetEnum<GatewayEnum>("gateway", true); } } public TypeEnum TransactionType { get { return GetEnum<TypeEnum>("type", true); } } public DateTime? Date { get { return GetDateTime("date", false); } } public DateTime? 
SettledAt { get { return GetDateTime("settled_at", false); } } public decimal? ExchangeRate { get { return GetValue<decimal?>("exchange_rate", false); } } public string CurrencyCode { get { return GetValue<string>("currency_code", true); } } public int? Amount { get { return GetValue<int?>("amount", false); } } public string IdAtGateway { get { return GetValue<string>("id_at_gateway", false); } } public StatusEnum? Status { get { return GetEnum<StatusEnum>("status", false); } } public FraudFlagEnum? FraudFlag { get { return GetEnum<FraudFlagEnum>("fraud_flag", false); } } public InitiatorTypeEnum? InitiatorType { get { return GetEnum<InitiatorTypeEnum>("initiator_type", false); } } public bool? ThreeDSecure { get { return GetValue<bool?>("three_d_secure", false); } } public AuthorizationReasonEnum? AuthorizationReason { get { return GetEnum<AuthorizationReasonEnum>("authorization_reason", false); } } public string ErrorCode { get { return GetValue<string>("error_code", false); } } public string ErrorText { get { return GetValue<string>("error_text", false); } } public DateTime? VoidedAt { get { return GetDateTime("voided_at", false); } } public long? ResourceVersion { get { return GetValue<long?>("resource_version", false); } } public DateTime? UpdatedAt { get { return GetDateTime("updated_at", false); } } public string FraudReason { get { return GetValue<string>("fraud_reason", false); } } public int? AmountUnused { get { return GetValue<int?>("amount_unused", false); } } public string MaskedCardNumber { get { return GetValue<string>("masked_card_number", false); } } public string ReferenceTransactionId { get { return GetValue<string>("reference_transaction_id", false); } } public string RefundedTxnId { get { return GetValue<string>("refunded_txn_id", false); } } public string ReferenceAuthorizationId { get { return GetValue<string>("reference_authorization_id", false); } } public int? 
AmountCapturable { get { return GetValue<int?>("amount_capturable", false); } } public string ReversalTransactionId { get { return GetValue<string>("reversal_transaction_id", false); } } public List<TransactionLinkedInvoice> LinkedInvoices { get { return GetResourceList<TransactionLinkedInvoice>("linked_invoices"); } } public List<TransactionLinkedCreditNote> LinkedCreditNotes { get { return GetResourceList<TransactionLinkedCreditNote>("linked_credit_notes"); } } public List<TransactionLinkedRefund> LinkedRefunds { get { return GetResourceList<TransactionLinkedRefund>("linked_refunds"); } } public List<TransactionLinkedPayment> LinkedPayments { get { return GetResourceList<TransactionLinkedPayment>("linked_payments"); } } public bool Deleted { get { return GetValue<bool>("deleted", true); } } public string Iin { get { return GetValue<string>("iin", false); } } public string Last4 { get { return GetValue<string>("last4", false); } } public string MerchantReferenceId { get { return GetValue<string>("merchant_reference_id", false); } } #endregion #region Requests public class CreateAuthorizationRequest : EntityRequest<CreateAuthorizationRequest> { public CreateAuthorizationRequest(string url, HttpMethod method) : base(url, method) { } public CreateAuthorizationRequest CustomerId(string customerId) { m_params.Add("customer_id", customerId); return this; } public CreateAuthorizationRequest PaymentSourceId(string paymentSourceId) { m_params.AddOpt("payment_source_id", paymentSourceId); return this; } public CreateAuthorizationRequest CurrencyCode(string currencyCode) { m_params.AddOpt("currency_code", currencyCode); return this; } public CreateAuthorizationRequest Amount(int amount) { m_params.Add("amount", amount); return this; } } public class RecordRefundRequest : EntityRequest<RecordRefundRequest> { public RecordRefundRequest(string url, HttpMethod method) : base(url, method) { } public RecordRefundRequest Amount(int amount) { m_params.AddOpt("amount", amount); return this; } public RecordRefundRequest PaymentMethod(ChargeBee.Models.Enums.PaymentMethodEnum paymentMethod) { m_params.Add("payment_method", paymentMethod); return this; } public RecordRefundRequest Date(long date) { m_params.Add("date", date); return this; } public RecordRefundRequest ReferenceNumber(string referenceNumber) { m_params.AddOpt("reference_number", referenceNumber); return this; } public RecordRefundRequest Comment(string comment) { m_params.AddOpt("comment", comment); return this; } } public class RefundRequest : EntityRequest<RefundRequest> { public RefundRequest(string url, HttpMethod method) : base(url, method) { } public RefundRequest Amount(int amount) { m_params.AddOpt("amount", amount); return this; } public RefundRequest Comment(string comment) { m_params.AddOpt("comment", comment); return this; } } public class TransactionListRequest : ListRequestBase<TransactionListRequest> { public TransactionListRequest(string url) : base(url) { } public TransactionListRequest IncludeDeleted(bool includeDeleted) { m_params.AddOpt("include_deleted", includeDeleted); return this; } public StringFilter<TransactionListRequest> Id() { return new StringFilter<TransactionListRequest>("id", this).SupportsMultiOperators(true); } public StringFilter<TransactionListRequest> CustomerId() { return new StringFilter<TransactionListRequest>("customer_id", this).SupportsMultiOperators(true).SupportsPresenceOperator(true); } public StringFilter<TransactionListRequest> SubscriptionId() { return new 
StringFilter<TransactionListRequest>("subscription_id", this).SupportsMultiOperators(true).SupportsPresenceOperator(true); } public StringFilter<TransactionListRequest> PaymentSourceId() { return new StringFilter<TransactionListRequest>("payment_source_id", this).SupportsMultiOperators(true).SupportsPresenceOperator(true); } public EnumFilter<ChargeBee.Models.Enums.PaymentMethodEnum, TransactionListRequest> PaymentMethod() { return new EnumFilter<ChargeBee.Models.Enums.PaymentMethodEnum, TransactionListRequest>("payment_method", this); } public EnumFilter<ChargeBee.Models.Enums.GatewayEnum, TransactionListRequest> Gateway() { return new EnumFilter<ChargeBee.Models.Enums.GatewayEnum, TransactionListRequest>("gateway", this); } public StringFilter<TransactionListRequest> GatewayAccountId() { return new StringFilter<TransactionListRequest>("gateway_account_id", this).SupportsMultiOperators(true); } public StringFilter<TransactionListRequest> IdAtGateway() { return new StringFilter<TransactionListRequest>("id_at_gateway", this); } public StringFilter<TransactionListRequest> ReferenceNumber() { return new StringFilter<TransactionListRequest>("reference_number", this).SupportsPresenceOperator(true); } public EnumFilter<Transaction.TypeEnum, TransactionListRequest> Type() { return new EnumFilter<Transaction.TypeEnum, TransactionListRequest>("type", this); } public TimestampFilter<TransactionListRequest> Date() { return new TimestampFilter<TransactionListRequest>("date", this); } public NumberFilter<int, TransactionListRequest> Amount() { return new NumberFilter<int, TransactionListRequest>("amount", this); } public NumberFilter<int, TransactionListRequest> AmountCapturable() { return new NumberFilter<int, TransactionListRequest>("amount_capturable", this); } public EnumFilter<Transaction.StatusEnum, TransactionListRequest> Status() { return new EnumFilter<Transaction.StatusEnum, TransactionListRequest>("status", this); } public TimestampFilter<TransactionListRequest> UpdatedAt() { return new TimestampFilter<TransactionListRequest>("updated_at", this); } public TransactionListRequest SortByDate(SortOrderEnum order) { m_params.AddOpt("sort_by["+order.ToString().ToLower()+"]","date"); return this; } public TransactionListRequest SortByUpdatedAt(SortOrderEnum order) { m_params.AddOpt("sort_by["+order.ToString().ToLower()+"]","updated_at"); return this; } } public class DeleteOfflineTransactionRequest : EntityRequest<DeleteOfflineTransactionRequest> { public DeleteOfflineTransactionRequest(string url, HttpMethod method) : base(url, method) { } public DeleteOfflineTransactionRequest Comment(string comment) { m_params.AddOpt("comment", comment); return this; } } #endregion public enum TypeEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */ [EnumMember(Value = "authorization")] Authorization, [EnumMember(Value = "payment")] Payment, [EnumMember(Value = "refund")] Refund, [EnumMember(Value = "payment_reversal")] PaymentReversal, } public enum StatusEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. 
We suggest you to upgrade to the latest version */ [EnumMember(Value = "in_progress")] InProgress, [EnumMember(Value = "success")] Success, [EnumMember(Value = "voided")] Voided, [EnumMember(Value = "failure")] Failure, [EnumMember(Value = "timeout")] Timeout, [EnumMember(Value = "needs_attention")] NeedsAttention, } public enum FraudFlagEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */ [EnumMember(Value = "safe")] Safe, [EnumMember(Value = "suspicious")] Suspicious, [EnumMember(Value = "fraudulent")] Fraudulent, } public enum InitiatorTypeEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */ [EnumMember(Value = "customer")] Customer, [EnumMember(Value = "merchant")] Merchant, } public enum AuthorizationReasonEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */ [EnumMember(Value = "blocking_funds")] BlockingFunds, [EnumMember(Value = "verification")] Verification, } #region Subclasses public class TransactionLinkedInvoice : Resource { public string InvoiceId { get { return GetValue<string>("invoice_id", true); } } public int AppliedAmount { get { return GetValue<int>("applied_amount", true); } } public DateTime AppliedAt { get { return (DateTime)GetDateTime("applied_at", true); } } public DateTime? InvoiceDate { get { return GetDateTime("invoice_date", false); } } public int? InvoiceTotal { get { return GetValue<int?>("invoice_total", false); } } public Invoice.StatusEnum InvoiceStatus { get { return GetEnum<Invoice.StatusEnum>("invoice_status", true); } } } public class TransactionLinkedCreditNote : Resource { public string CnId { get { return GetValue<string>("cn_id", true); } } public int AppliedAmount { get { return GetValue<int>("applied_amount", true); } } public DateTime AppliedAt { get { return (DateTime)GetDateTime("applied_at", true); } } public CreditNote.ReasonCodeEnum? CnReasonCode { get { return GetEnum<CreditNote.ReasonCodeEnum>("cn_reason_code", false); } } public string CnCreateReasonCode { get { return GetValue<string>("cn_create_reason_code", false); } } public DateTime? CnDate { get { return GetDateTime("cn_date", false); } } public int? CnTotal { get { return GetValue<int?>("cn_total", false); } } public CreditNote.StatusEnum CnStatus { get { return GetEnum<CreditNote.StatusEnum>("cn_status", true); } } public string CnReferenceInvoiceId { get { return GetValue<string>("cn_reference_invoice_id", true); } } } public class TransactionLinkedRefund : Resource { public string TxnId { get { return GetValue<string>("txn_id", true); } } public Transaction.StatusEnum TxnStatus { get { return GetEnum<Transaction.StatusEnum>("txn_status", true); } } public DateTime TxnDate { get { return (DateTime)GetDateTime("txn_date", true); } } public int TxnAmount { get { return GetValue<int>("txn_amount", true); } } } public class TransactionLinkedPayment : Resource { public enum StatusEnum { UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. 
We suggest you to upgrade to the latest version */ [EnumMember(Value = "in_progress")] InProgress, [EnumMember(Value = "success")] Success, [EnumMember(Value = "voided")] Voided, [EnumMember(Value = "failure")] Failure, [EnumMember(Value = "timeout")] Timeout, [EnumMember(Value = "needs_attention")] NeedsAttention, } public string Id { get { return GetValue<string>("id", true); } } public StatusEnum? Status { get { return GetEnum<StatusEnum>("status", false); } } public int? Amount { get { return GetValue<int?>("amount", false); } } public DateTime? Date { get { return GetDateTime("date", false); } } } #endregion } }
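For orientation, a hedged usage sketch of the fluent request classes defined above. It assumes the usual ChargeBee .NET client setup (ApiConfig.Configure) and the Limit()/Request() members inherited from ListRequestBase, none of which appear in this excerpt; the site name and API key are placeholders.

using ChargeBee.Api;
using ChargeBee.Filters.Enums;
using ChargeBee.Models;

static class TransactionListUsage
{
    static void ListRecentSuccessfulTransactions()
    {
        // Placeholder credentials; Configure() comes from ChargeBee.Api.ApiConfig.
        ApiConfig.Configure("your-site", "your-api-key");

        var result = Transaction.List()
            .Status().Is(Transaction.StatusEnum.Success)   // EnumFilter defined above
            .SortByDate(SortOrderEnum.Desc)                // sort helper defined above
            .Limit(10)                                     // assumed: inherited from ListRequestBase
            .Request();                                    // assumed: executes the HTTP call

        foreach (var entry in result.List)
        {
            Transaction txn = entry.Transaction;
            // e.g. inspect txn.Id, txn.Amount, txn.Date here
        }
    }
}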
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsSubscriptionIdApiVersion { using Azure; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// GroupOperations operations. /// </summary> internal partial class GroupOperations : IServiceOperations<MicrosoftAzureTestUrl>, IGroupOperations { /// <summary> /// Initializes a new instance of the GroupOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal GroupOperations(MicrosoftAzureTestUrl client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the MicrosoftAzureTestUrl /// </summary> public MicrosoftAzureTestUrl Client { get; private set; } /// <summary> /// Provides a resouce group with name 'testgroup101' and location 'West US'. /// </summary> /// <param name='resourceGroupName'> /// Resource Group name 'testgroup101'. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<SampleResourceGroupInner>> GetSampleResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetSampleResourceGroup", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}").ToString(); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (Newtonsoft.Json.JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<SampleResourceGroupInner>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = 
Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<SampleResourceGroupInner>(_responseContent, Client.DeserializationSettings); } catch (Newtonsoft.Json.JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
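A minimal calling sketch for the generated operation above. It assumes the service client exposes this operation group through a Group property and that the returned AzureOperationResponse can be disposed once the body has been read; both follow the usual AutoRest client layout but are not shown in this excerpt.

using System.Threading.Tasks;
using Fixtures.Azure.AcceptanceTestsSubscriptionIdApiVersion;
using Fixtures.Azure.AcceptanceTestsSubscriptionIdApiVersion.Models;

static class GroupOperationsUsage
{
    // `client.Group` is an assumption based on the standard AutoRest client shape;
    // only GetSampleResourceGroupWithHttpMessagesAsync itself appears in the code above.
    static async Task<SampleResourceGroupInner> FetchSampleResourceGroupAsync(MicrosoftAzureTestUrl client)
    {
        using (var response = await client.Group
            .GetSampleResourceGroupWithHttpMessagesAsync("testgroup101")
            .ConfigureAwait(false))
        {
            return response.Body;
        }
    }
}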
// *********************************************************************** // Copyright (c) 2008 Charlie Poole, Rob Prouse // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // *********************************************************************** #if !MONO using System; namespace NUnit.Framework.Assertions { // TODO: Test with Mono [TestFixture, Category("Generics")] public class NullableTypesTests { [Test] public void CanTestForNull() { int? nullInt = null; int? five = 5; Assert.IsNull(nullInt); Assert.IsNotNull(five); Assert.That(nullInt, Is.Null); Assert.That(five, Is.Not.Null); } [Test] public void CanCompareNullableInts() { int? five = 5; int? answer = 2 + 3; Assert.AreEqual(five, answer); Assert.AreEqual(five, 5); Assert.AreEqual(5, five); Assert.That(five, Is.EqualTo(answer)); Assert.That(five, Is.EqualTo(5)); Assert.That(5, Is.EqualTo(five)); // Assert.Greater(five, 3); // Assert.GreaterOrEqual(five, 5); // Assert.Less(3, five); // Assert.LessOrEqual(5, five); Assert.That(five, Is.GreaterThan(3)); Assert.That(five, Is.GreaterThanOrEqualTo(5)); //Assert.That(3, Is.LessThan(five)); //Assert.That(5, Is.LessThanOrEqualTo(five)); } [Test] public void CanCompareNullableDoubles() { double? five = 5.0; double? answer = 2.0 + 3.0; Assert.AreEqual(five, answer); Assert.AreEqual(five, 5.0); Assert.AreEqual(5.0, five); Assert.That(five, Is.EqualTo(answer)); Assert.That(five, Is.EqualTo(5.0)); Assert.That(5.0, Is.EqualTo(five)); // Assert.Greater(five, 3.0); // Assert.GreaterOrEqual(five, 5.0); // Assert.Less(3.0, five); // Assert.LessOrEqual(5.0, five); Assert.That(five, Is.GreaterThan(3.0)); Assert.That(five, Is.GreaterThanOrEqualTo(5.0)); //Assert.That(3.0, Is.LessThan(five)); //Assert.That(5.0, Is.LessThanOrEqualTo(five)); } [Test] public void CanTestForNaN() { double? anNaN = Double.NaN; Assert.IsNaN(anNaN); Assert.That(anNaN, Is.NaN); } [Test] public void CanCompareNullableDecimals() { decimal? five = 5m; decimal? answer = 2m + 3m; Assert.AreEqual(five, answer); Assert.AreEqual(five, 5m); Assert.AreEqual(5m, five); Assert.That(five, Is.EqualTo(answer)); Assert.That(five, Is.EqualTo(5m)); Assert.That(5m, Is.EqualTo(five)); // Assert.Greater(five, 3m); // Assert.GreaterOrEqual(five, 5m); // Assert.Less(3m, five); // Assert.LessOrEqual(5m, five); Assert.That(five, Is.GreaterThan(3m)); Assert.That(five, Is.GreaterThanOrEqualTo(5m)); //Assert.That(3m, Is.LessThan(five)); //Assert.That(5m, Is.LessThanOrEqualTo(five)); } [Test] public void CanCompareWithTolerance() { double? 
five = 5.0; Assert.AreEqual(5.0000001, five, .0001); Assert.That( five, Is.EqualTo(5.0000001).Within(.0001)); float? three = 3.0f; Assert.AreEqual(3.00001f, three, .001); Assert.That( three, Is.EqualTo(3.00001f).Within(.001)); } private enum Colors { Red, Blue, Green } [Test] public void CanCompareNullableEnums() { Colors? color = Colors.Red; Colors? other = Colors.Red; Assert.AreEqual(color, other); Assert.AreEqual(color, Colors.Red); Assert.AreEqual(Colors.Red, color); } [Test] public void CanCompareNullableMixedNumerics() { int? int5 = 5; double? double5 = 5.0; decimal? decimal5 = 5.00m; Assert.AreEqual(int5, double5); Assert.AreEqual(int5, decimal5); Assert.AreEqual(double5, int5); Assert.AreEqual(double5, decimal5); Assert.AreEqual(decimal5, int5); Assert.AreEqual(decimal5, double5); Assert.That(int5, Is.EqualTo(double5)); Assert.That(int5, Is.EqualTo(decimal5)); Assert.That(double5, Is.EqualTo(int5)); Assert.That(double5, Is.EqualTo(decimal5)); Assert.That(decimal5, Is.EqualTo(int5)); Assert.That(decimal5, Is.EqualTo(double5)); Assert.AreEqual(5, double5); Assert.AreEqual(5, decimal5); Assert.AreEqual(5.0, int5); Assert.AreEqual(5.0, decimal5); Assert.AreEqual(5m, int5); Assert.AreEqual(5m, double5); Assert.That(5, Is.EqualTo(double5)); Assert.That(5, Is.EqualTo(decimal5)); Assert.That(5.0, Is.EqualTo(int5)); Assert.That(5.0, Is.EqualTo(decimal5)); Assert.That(5m, Is.EqualTo(int5)); Assert.That(5m, Is.EqualTo(double5)); Assert.AreEqual(double5, 5); Assert.AreEqual(decimal5, 5); Assert.AreEqual(int5, 5.0); Assert.AreEqual(decimal5, 5.0); Assert.AreEqual(int5, 5m); Assert.AreEqual(double5, 5m); Assert.That(double5, Is.EqualTo(5)); Assert.That(decimal5, Is.EqualTo(5)); Assert.That(int5, Is.EqualTo(5.0)); Assert.That(decimal5, Is.EqualTo(5.0)); Assert.That(int5, Is.EqualTo(5m)); Assert.That(double5, Is.EqualTo(5m)); // Assert.Greater(int5, 3.0); // Assert.Greater(int5, 3m); // Assert.Greater(double5, 3); // Assert.Greater(double5, 3m); // Assert.Greater(decimal5, 3); // Assert.Greater(decimal5, 3.0); Assert.That(int5, Is.GreaterThan(3.0)); Assert.That(int5, Is.GreaterThan(3m)); Assert.That(double5, Is.GreaterThan(3)); Assert.That(double5, Is.GreaterThan(3m)); Assert.That(decimal5, Is.GreaterThan(3)); Assert.That(decimal5, Is.GreaterThan(3.0)); // Assert.Less(3.0, int5); // Assert.Less(3m, int5); // Assert.Less(3, double5); // Assert.Less(3m, double5); // Assert.Less(3, decimal5); // Assert.Less(3.0, decimal5); //Assert.That(3.0, Is.LessThan(int5)); //Assert.That(3m, Is.LessThan(int5)); //Assert.That(3, Is.LessThan(double5)); //Assert.That(3m, Is.LessThan(double5)); //Assert.That(3, Is.LessThan(decimal5)); //Assert.That(3.0, Is.LessThan(decimal5)); } private struct MyStruct { int i; string s; public MyStruct(int i, string s) { this.i = i; this.s = s; } } [Test] public void CanCompareNullableStructs() { MyStruct struct1 = new MyStruct(5, "Hello"); MyStruct struct2 = new MyStruct(5, "Hello"); Nullable<MyStruct> one = new MyStruct(5, "Hello"); Nullable<MyStruct> two = new MyStruct(5, "Hello"); Assert.AreEqual(struct1, struct2); // Control Assert.AreEqual(one, two); Assert.AreEqual(one, struct1); Assert.AreEqual(struct2, two); } } } #endif
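The fixture above leaves the classic Assert.Greater/Assert.Less comparisons (and the constraint forms with the nullable on the right) commented out. As a companion note only, and assuming the nullable is known to be non-null at that point, comparing the underlying value keeps the comparison between plain numeric types; the fixture below is hypothetical and not part of the original suite.

#if !MONO
using NUnit.Framework;

namespace NUnit.Framework.Assertions
{
    [TestFixture, Category("Generics")]
    public class NullableComparisonWorkaroundTests
    {
        [Test]
        public void CanCompareUsingUnderlyingValue()
        {
            int? five = 5;

            // Comparing .Value keeps both operands as plain ints.
            Assert.That(five.Value, Is.GreaterThan(3));
            Assert.That(3, Is.LessThan(five.Value));
        }
    }
}
#endif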
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using Microsoft.AspNet.Identity; using Microsoft.Data.Entity; using Microsoft.Dnx.Runtime; using Microsoft.Framework.Configuration; using Microsoft.Framework.DependencyInjection; using PartsUnlimited.Areas.Admin; using System; using System.Collections.Generic; using System.Linq; using System.Security.Claims; using System.Threading.Tasks; namespace PartsUnlimited.Models { public static class SampleData { private static string AdminRoleSectionName = "AdminRole"; private static string DefaultAdminNameKey = "UserName"; private static string DefaultAdminPasswordKey = "Password"; public static async Task InitializePartsUnlimitedDatabaseAsync(IServiceProvider serviceProvider, bool createUser = true) { using (var serviceScope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope()) { var db = serviceScope.ServiceProvider.GetService<PartsUnlimitedContext>(); bool dbNewlyCreated = await db.Database.EnsureCreatedAsync(); //Seeding a database using migrations is not yet supported. (https://github.com/aspnet/EntityFramework/issues/629.) //Add seed data, only if the tables are empty. bool tablesEmpty = !db.Products.Any() && !db.Orders.Any() && !db.Categories.Any() && !db.Stores.Any(); if (dbNewlyCreated || tablesEmpty) { await InsertTestData(serviceProvider); await CreateAdminUser(serviceProvider); } } } public static async Task InsertTestData(IServiceProvider serviceProvider) { var categories = GetCategories().ToList(); await AddOrUpdateAsync(serviceProvider, g => g.Name, categories); var products = GetProducts(categories).ToList(); await AddOrUpdateAsync(serviceProvider, a => a.Title, products); var stores = GetStores().ToList(); await AddOrUpdateAsync(serviceProvider, a => a.Name, stores); var rainchecks = GetRainchecks(stores, products).ToList(); await AddOrUpdateAsync(serviceProvider, a => a.RaincheckId, rainchecks); PopulateOrderHistory(serviceProvider, products); } private static async Task AddOrUpdateAsync<TEntity>( IServiceProvider serviceProvider, Func<TEntity, object> propertyToMatch, IEnumerable<TEntity> entities) where TEntity : class { // Query in a separate context so that we can attach existing entities as modified List<TEntity> existingData; using (var serviceScope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope()) { var db = serviceScope.ServiceProvider.GetService<PartsUnlimitedContext>(); existingData = db.Set<TEntity>().ToList(); } using (var serviceScope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope()) { var db = serviceScope.ServiceProvider.GetService<PartsUnlimitedContext>(); foreach (var item in entities) { db.Entry(item).State = existingData.Any(g => propertyToMatch(g).Equals(propertyToMatch(item))) ? EntityState.Modified : EntityState.Added; } await db.SaveChangesAsync(); } } /// <summary> /// Returns configuration section for AdminRole. 
/// </summary> /// <param name="serviceProvider"></param> /// <returns></returns> private static IConfigurationSection GetAdminRoleConfiguration(IServiceProvider serviceProvider) { var appEnv = serviceProvider.GetService<IApplicationEnvironment>(); var builder = new ConfigurationBuilder().SetBasePath(appEnv.ApplicationBasePath) .AddJsonFile("config.json") .AddEnvironmentVariables(); var configuration = builder.Build(); return configuration.GetSection(AdminRoleSectionName); } /// <summary> /// Creates a store manager user who can manage the inventory. /// </summary> /// <param name="serviceProvider"></param> /// <returns></returns> private static async Task CreateAdminUser(IServiceProvider serviceProvider) { IConfigurationSection configuration = GetAdminRoleConfiguration(serviceProvider); UserManager<ApplicationUser> userManager = serviceProvider.GetService<UserManager<ApplicationUser>>(); var user = await userManager.FindByNameAsync(configuration[DefaultAdminNameKey]); if (user == null) { user = new ApplicationUser { UserName = configuration[DefaultAdminNameKey] }; await userManager.CreateAsync(user, configuration[DefaultAdminPasswordKey]); await userManager.AddClaimAsync(user, new Claim(AdminConstants.ManageStore.Name, AdminConstants.ManageStore.Allowed)); } } /// <summary> /// Generate an enumeration of rainchecks. The random number generator uses a seed to ensure /// that the sequence is consistent, but provides somewhat random looking data. /// </summary> public static IEnumerable<Raincheck> GetRainchecks(IEnumerable<Store> stores, IList<Product> products) { var random = new Random(1234); foreach (var store in stores) { for (var i = 0; i < random.Next(1, 5); i++) { yield return new Raincheck { StoreId = store.StoreId, Name = $"John Smith{random.Next()}", Quantity = random.Next(1, 10), ProductId = products[random.Next(0, products.Count)].ProductId, SalePrice = Math.Round(100 * random.NextDouble(), 2) }; } } } public static IEnumerable<Store> GetStores() { return Enumerable.Range(1, 20).Select(id => new Store { Name = $"Store{id}" }); } public static IEnumerable<Category> GetCategories() { yield return new Category { Name = "Brakes", Description = "Brakes description", ImageUrl = "product_brakes_disc.jpg" }; yield return new Category { Name = "Lighting", Description = "Lighting description", ImageUrl = "product_lighting_headlight.jpg" }; yield return new Category { Name = "Wheels & Tires", Description = "Wheels & Tires description", ImageUrl = "product_wheel_rim.jpg" }; yield return new Category { Name = "Batteries", Description = "Batteries description", ImageUrl = "product_batteries_basic-battery.jpg" }; yield return new Category { Name = "Oil", Description = "Oil description", ImageUrl = "product_oil_premium-oil.jpg" }; } public static void PopulateOrderHistory(IServiceProvider serviceProvider, IEnumerable<Product> products) { var random = new Random(1234); var recomendationCombinations = new[] { new{ Transactions = new []{1, 3, 8}, Multiplier = 60 }, new{ Transactions = new []{2, 6}, Multiplier = 10 }, new{ Transactions = new []{4, 11}, Multiplier = 20 }, new{ Transactions = new []{5, 14}, Multiplier = 10 }, new{ Transactions = new []{6, 16, 18}, Multiplier = 20 }, new{ Transactions = new []{7, 17}, Multiplier = 25 }, new{ Transactions = new []{8, 1}, Multiplier = 5 }, new{ Transactions = new []{10, 17,9}, Multiplier = 15 }, new{ Transactions = new []{11, 5}, Multiplier = 15 }, new{ Transactions = new []{12, 8}, Multiplier = 5 }, new{ Transactions = new []{13, 15}, Multiplier = 50 }, 
new{ Transactions = new []{14, 15}, Multiplier = 30 }, new{ Transactions = new []{16, 18}, Multiplier = 80 } }; IConfigurationSection configuration = GetAdminRoleConfiguration(serviceProvider); string userName = configuration[DefaultAdminNameKey]; using (var serviceScope = serviceProvider.GetRequiredService<IServiceScopeFactory>().CreateScope()) { var db = serviceScope.ServiceProvider.GetService<PartsUnlimitedContext>(); var orders = new List<Order>(); foreach (var combination in recomendationCombinations) { for (int i = 0; i < combination.Multiplier; i++) { var order = new Order { Username = userName, OrderDate = DateTime.Now, Name = $"John Smith{random.Next()}", Address = "15010 NE 36th St", City = "Redmond", State = "WA", PostalCode = "98052", Country = "United States", Phone = "425-703-6214", Email = userName }; db.Orders.Add(order); decimal total = 0; foreach (var id in combination.Transactions) { var product = products.Single(x => x.RecommendationId == id); var orderDetail = GetOrderDetail(product, order); db.OrderDetails.Add(orderDetail); total += orderDetail.UnitPrice; } order.Total = total; } } db.SaveChanges(); } } public static OrderDetail GetOrderDetail(Product product, Order order) { var random = new Random(); int quantity; switch (product.Category.Name) { case "Brakes": case "Wheels & Tires": { quantity = random.Next(1, 5); break; } default: { quantity = random.Next(1, 3); break; } } return new OrderDetail { ProductId = product.ProductId, UnitPrice = product.Price, OrderId = order.OrderId, Quantity = quantity, }; } public static IEnumerable<Product> GetProducts(IEnumerable<Category> categories) { var categoriesMap = categories.ToDictionary(c => c.Name, c => c); yield return new Product { SkuNumber = "LIG-0001", Title = "Halogen Headlights (2 Pack)", Category = categoriesMap["Lighting"], CategoryId = categoriesMap["Lighting"].CategoryId, Price = 38.99M, SalePrice = 38.99M, ProductArtUrl = "product_lighting_headlight.jpg", ProductDetails = "{ \"Light Source\" : \"Halogen\", \"Assembly Required\": \"Yes\", \"Color\" : \"Clear\", \"Interior\" : \"Chrome\", \"Beam\": \"low and high\", \"Wiring harness included\" : \"Yes\", \"Bulbs Included\" : \"No\", \"Includes Parking Signal\" : \"Yes\"}", Description = "Our Halogen Headlights are made to fit majority of vehicles with our universal fitting mold. Product requires some assembly.", Inventory = 10, LeadTime = 0, RecommendationId = 1 }; yield return new Product { SkuNumber = "LIG-0002", Title = "Bugeye Headlights (2 Pack)", Category = categoriesMap["Lighting"], CategoryId = categoriesMap["Lighting"].CategoryId, Price = 48.99M, SalePrice = 48.99M, ProductArtUrl = "product_lighting_bugeye-headlight.jpg", ProductDetails = "{ \"Light Source\" : \"Halogen\", \"Assembly Required\": \"Yes\", \"Color\" : \"Clear\", \"Interior\" : \"Chrome\", \"Beam\": \"low and high\", \"Wiring harness included\" : \"No\", \"Bulbs Included\" : \"Yes\", \"Includes Parking Signal\" : \"Yes\"}", Description = "Our Bugeye Headlights use Halogen light bulbs are made to fit into a standard bugeye slot. 
Product requires some assembly and includes light bulbs.", Inventory = 7, LeadTime = 0, RecommendationId = 2 }; yield return new Product { SkuNumber = "LIG-0003", Title = "Turn Signal Light Bulb", Category = categoriesMap["Lighting"], CategoryId = categoriesMap["Lighting"].CategoryId, Price = 6.49M, SalePrice = 6.49M, ProductArtUrl = "product_lighting_lightbulb.jpg", ProductDetails = "{ \"Color\" : \"Clear\", \"Fit\" : \"Universal\", \"Wattage\" : \"30 Watts\", \"Includes Socket\" : \"Yes\"}", Description = " Clear bulb that with a universal fitting for all headlights/taillights. Simple Installation, low wattage and a clear light for optimal visibility and efficiency.", Inventory = 18, LeadTime = 0, RecommendationId = 3 }; yield return new Product { SkuNumber = "WHE-0001", Title = "Matte Finish Rim", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 75.99M, SalePrice = 75.99M, ProductArtUrl = "product_wheel_rim.jpg", ProductDetails = "{ \"Material\" : \"Aluminum alloy\", \"Design\" : \"Spoke\", \"Spokes\" : \"9\", \"Number of Lugs\" : \"4\", \"Wheel Diameter\" : \"17 in.\", \"Color\" : \"Black\", \"Finish\" : \"Matte\" } ", Description = "A Parts Unlimited favorite, the Matte Finish Rim is affordable low profile style. Fits all low profile tires.", Inventory = 4, LeadTime = 0, RecommendationId = 4 }; yield return new Product { SkuNumber = "WHE-0002", Title = "Blue Performance Alloy Rim", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 88.99M, SalePrice = 88.99M, ProductArtUrl = "product_wheel_rim-blue.jpg", ProductDetails = "{ \"Material\" : \"Aluminum alloy\", \"Design\" : \"Spoke\", \"Spokes\" : \"5\", \"Number of Lugs\" : \"4\", \"Wheel Diameter\" : \"18 in.\", \"Color\" : \"Blue\", \"Finish\" : \"Glossy\" } ", Description = "Stand out from the crowd with a set of aftermarket blue rims to make you vehicle turn heads and at a price that will do the same.", Inventory = 8, LeadTime = 0, RecommendationId = 5 }; yield return new Product { SkuNumber = "WHE-0003", Title = "High Performance Rim", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 99.99M, SalePrice = 99.49M, ProductArtUrl = "product_wheel_rim-red.jpg", ProductDetails = "{ \"Material\" : \"Aluminum alloy\", \"Design\" : \"Spoke\", \"Spokes\" : \"12\", \"Number of Lugs\" : \"5\", \"Wheel Diameter\" : \"18 in.\", \"Color\" : \"Red\", \"Finish\" : \"Matte\" } ", Description = "Light Weight Rims with a twin cross spoke design for stability and reliable performance.", Inventory = 3, LeadTime = 0, RecommendationId = 6 }; yield return new Product { SkuNumber = "WHE-0004", Title = "Wheel Tire Combo", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 72.49M, SalePrice = 72.49M, ProductArtUrl = "product_wheel_tyre-wheel-combo.jpg", ProductDetails = "{ \"Material\" : \"Steel\", \"Design\" : \"Spoke\", \"Spokes\" : \"8\", \"Number of Lugs\" : \"4\", \"Wheel Diameter\" : \"19 in.\", \"Color\" : \"Gray\", \"Finish\" : \"Standard\", \"Pre-Assembled\" : \"Yes\" } ", Description = "For the endurance driver, take advantage of our best wearing tire yet. 
Composite rubber and a heavy duty steel rim.", Inventory = 0, LeadTime = 4, RecommendationId = 7 }; yield return new Product { SkuNumber = "WHE-0005", Title = "Chrome Rim Tire Combo", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 129.99M, SalePrice = 129.99M, ProductArtUrl = "product_wheel_tyre-rim-chrome-combo.jpg", ProductDetails = "{ \"Material\" : \"Aluminum alloy\", \"Design\" : \"Spoke\", \"Spokes\" : \"10\", \"Number of Lugs\" : \"5\", \"Wheel Diameter\" : \"17 in.\", \"Color\" : \"Silver\", \"Finish\" : \"Chrome\", \"Pre-Assembled\" : \"Yes\" } ", Description = "Save time and money with our ever popular wheel and tire combo. Pre-assembled and ready to go.", Inventory = 1, LeadTime = 0, RecommendationId = 8 }; yield return new Product { SkuNumber = "WHE-0006", Title = "Wheel Tire Combo (4 Pack)", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 219.99M, SalePrice = 219.99M, ProductArtUrl = "product_wheel_tyre-wheel-combo-pack.jpg", ProductDetails = "{ \"Material\" : \"Steel\", \"Design\" : \"Spoke\", \"Spokes\" : \"8\", \"Number of Lugs\" : \"5\", \"Wheel Diameter\" : \"19 in.\", \"Color\" : \"Gray\", \"Finish\" : \"Standard\", \"Pre-Assembled\" : \"Yes\" } ", Description = "Having trouble in the wet? Then try our special patent tire on a heavy duty steel rim. These wheels perform excellent in all conditions but were designed specifically for wet weather.", Inventory = 3, LeadTime = 0, RecommendationId = 9 }; yield return new Product { SkuNumber = "BRA-0001", Title = "Disk and Pad Combo", Category = categoriesMap["Wheels & Tires"], CategoryId = categoriesMap["Wheels & Tires"].CategoryId, Price = 25.99M, SalePrice = 25.99M, ProductArtUrl = "product_brakes_disk-pad-combo.jpg", ProductDetails = "{ \"Disk Design\" : \"Cross Drill Slotted\", \" Pad Material\" : \"Ceramic\", \"Construction\" : \"Vented Rotor\", \"Diameter\" : \"10.3 in.\", \"Finish\" : \"Silver Zinc Plated\", \"Hat Finish\" : \"Silver Zinc Plated\", \"Material\" : \"Cast Iron\" }", Description = "Our brake disks and pads perform the best togeather. Better stopping distances without locking up, reduced rust and dusk.", Inventory = 0, LeadTime = 6, RecommendationId = 10 }; yield return new Product { SkuNumber = "BRA-0002", Title = "Brake Rotor", Category = categoriesMap["Brakes"], CategoryId = categoriesMap["Brakes"].CategoryId, Price = 18.99M, SalePrice = 18.99M, ProductArtUrl = "product_brakes_disc.jpg", ProductDetails = "{ \"Disk Design\" : \"Cross Drill Slotted\", \"Construction\" : \"Vented Rotor\", \"Diameter\" : \"10.3 in.\", \"Finish\" : \"Silver Zinc Plated\", \"Hat Finish\" : \"Black E-coating\", \"Material\" : \"Cast Iron\" }", Description = "Our Brake Rotor Performs well in wet coditions with a smooth responsive feel. 
Machined to a high tolerance to ensure all of our Brake Rotors are safe and reliable.", Inventory = 4, LeadTime = 0, RecommendationId = 11 }; yield return new Product { SkuNumber = "BRA-0003", Title = "Brake Disk and Calipers", Category = categoriesMap["Brakes"], CategoryId = categoriesMap["Brakes"].CategoryId, Price = 43.99M, SalePrice = 43.99M, ProductArtUrl = "product_brakes_disc-calipers-red.jpg", ProductDetails = "{\"Disk Design\" : \"Cross Drill Slotted\", \" Pad Material\" : \"Carbon Ceramic\", \"Construction\" : \"Vented Rotor\", \"Diameter\" : \"11.3 in.\", \"Bolt Pattern\": \"6 x 5.31 in.\", \"Finish\" : \"Silver Zinc Plated\", \"Material\" : \"Carbon Alloy\", \"Includes Brake Pads\" : \"Yes\" }", Description = "Upgrading your brakes can increase stopping power, reduce dust and noise. Our Disk Calipers exceed factory specification for the best performance.", Inventory = 2, LeadTime = 0, RecommendationId = 12 }; yield return new Product { SkuNumber = "BAT-0001", Title = "12-Volt Calcium Battery", Category = categoriesMap["Batteries"], CategoryId = categoriesMap["Batteries"].CategoryId, Price = 129.99M, SalePrice = 129.99M, ProductArtUrl = "product_batteries_basic-battery.jpg", ProductDetails = "{ \"Type\": \"Calcium\", \"Volts\" : \"12\", \"Weight\" : \"22.9 lbs\", \"Size\" : \"7.7x5x8.6\", \"Cold Cranking Amps\" : \"510\" }", Description = "Calcium is the most common battery type. It is durable and has a long shelf and service life. They also provide high cold cranking amps.", Inventory = 9, LeadTime = 0, RecommendationId = 13 }; yield return new Product { SkuNumber = "BAT-0002", Title = "Spiral Coil Battery", Category = categoriesMap["Batteries"], CategoryId = categoriesMap["Batteries"].CategoryId, Price = 154.99M, SalePrice = 154.99M, ProductArtUrl = "product_batteries_premium-battery.jpg", ProductDetails = "{ \"Type\": \"Spiral Coil\", \"Volts\" : \"12\", \"Weight\" : \"20.3 lbs\", \"Size\" : \"7.4x5.1x8.5\", \"Cold Cranking Amps\" : \"460\" }", Description = "Spiral Coil batteries are the preferred option for high performance Vehicles where extra toque is need for starting. They are more resistant to heat and higher charge rates than conventional batteries.", Inventory = 3, LeadTime = 0, RecommendationId = 14 }; yield return new Product { SkuNumber = "BAT-0003", Title = "Jumper Leads", Category = categoriesMap["Batteries"], CategoryId = categoriesMap["Batteries"].CategoryId, Price = 16.99M, SalePrice = 16.99M, ProductArtUrl = "product_batteries_jumper-leads.jpg", ProductDetails = "{ \"length\" : \"6ft.\", \"Connection Type\" : \"Alligator Clips\", \"Fit\" : \"Universal\", \"Max Amp's\" : \"750\" }", Description = "Battery Jumper Leads have a built in surge protector and a includes a plastic carry case to keep them safe from corrosion.", Inventory = 6, LeadTime = 0, RecommendationId = 15 }; yield return new Product { SkuNumber = "OIL-0001", Title = "Filter Set", Category = categoriesMap["Oil"], CategoryId = categoriesMap["Oil"].CategoryId, Price = 28.99M, SalePrice = 28.99M, ProductArtUrl = "product_oil_filters.jpg", ProductDetails = "{ \"Filter Type\" : \"Canister and Cartridge\", \"Thread Size\" : \"0.75-16 in.\", \"Anti-Drainback Valve\" : \"Yes\"}", Description = "Ensure that your vehicle's engine has a longer life with our new filter set. 
Trapping more dirt to ensure old freely circulates through your engine.", Inventory = 3, LeadTime = 0, RecommendationId = 16 }; yield return new Product { SkuNumber = "OIL-0002", Title = "Oil and Filter Combo", Category = categoriesMap["Oil"], CategoryId = categoriesMap["Oil"].CategoryId, Price = 34.49M, SalePrice = 34.49M, ProductArtUrl = "product_oil_oil-filter-combo.jpg", ProductDetails = "{ \"Filter Type\" : \"Canister\", \"Thread Size\" : \"0.75-16 in.\", \"Anti-Drainback Valve\" : \"Yes\", \"Size\" : \"1.1 gal.\", \"Synthetic\" : \"No\" }", Description = "This Oil and Oil Filter combo is suitable for all types of passenger and light commercial vehicles. Providing affordable performance through excellent lubrication and breakdown resistance.", Inventory = 5, LeadTime = 0, RecommendationId = 17 }; yield return new Product { SkuNumber = "OIL-0003", Title = "Synthetic Engine Oil", Category = categoriesMap["Oil"], CategoryId = categoriesMap["Oil"].CategoryId, Price = 36.49M, SalePrice = 36.49M, ProductArtUrl = "product_oil_premium-oil.jpg", ProductDetails = "{ \"Size\" : \"1.1 Gal.\" , \"Synthetic \" : \"Yes\"}", Description = "This Oil is designed to reduce sludge deposits and metal friction throughout your cars engine. Provides performance no matter the condition or temperature.", Inventory = 11, LeadTime = 0, RecommendationId = 18 }; } } }
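// --- Illustrative sketch (not part of the seed class above) ---
// A minimal helper showing how the seeding loop above prices a recommendation
// combination: one OrderDetail per recommended product, with the order total
// being the sum of the unit prices (the randomized Quantity is not multiplied
// in, mirroring the loop above). Assumptions: the Product/Order/OrderDetail
// entity types used above are in scope, and "SeedData" stands in for the
// (unnamed here) class that declares GetOrderDetail.
using System.Collections.Generic;
using System.Linq;

public static class CombinationPricingSketch
{
    public static decimal PriceCombination(IEnumerable<Product> products, Order order, int[] recommendationIds)
    {
        decimal total = 0;
        foreach (var id in recommendationIds)
        {
            // RecommendationIds are unique across the products yielded by GetProducts.
            var product = products.Single(p => p.RecommendationId == id);
            var orderDetail = SeedData.GetOrderDetail(product, order); // hypothetical class name
            total += orderDetail.UnitPrice;
        }
        return total;
    }
}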
using System; using System.Data; using System.Configuration; using System.Web; using System.Web.Security; using System.Web.UI; using System.Web.UI.WebControls; using System.Web.UI.WebControls.WebParts; using System.Web.UI.HtmlControls; using System.Text; using DataAccess; // //reference: http://msdn.microsoft.com/en-us/library/c8y19k6h.aspx // //This is our base master page, all other master pages will derive from this //master page. This page will handle db connection and cleanup etc... // //By defining the BaseMasterPage class as abstract, it can't be //created directly and can only serve as the base for another class. // //sequence of events for reference... //Master page controls Init event //Content controls Init event // //Master page Init event //Content page Init event // //Content page Load event //Master page Load event // //Content page PreRender event //Master page PreRender event // //Master page controls PreRender event //Content controls PreRender event // public abstract class BaseMaster : System.Web.UI.MasterPage { //04/16/2012 - Security Updates/////////////////////////// /// <summary> /// ASP .NET session id /// </summary> public string ASPSessionID { get { return Context.Session.SessionID; } } /// <summary> /// time out /// </summary> int m_nTimeOut = 15; public int Timeout { set { m_nTimeOut = value; } get { return m_nTimeOut; } } /// <summary> /// get/set session /// </summary> //DB session id member, this gets set when the user logs in public string DBSessionID { get { string strDBSessionID = ""; if (Session["DBSessionID"] == null) { return strDBSessionID; } strDBSessionID = Session["DBSessionID"].ToString(); return strDBSessionID; } set { Session["DBSessionID"] = value; } } ////////////////////////////////////////////////////////// //data connection member private CDataConnection m_DBConnection; //app specific controller not part of framework... public AppMaster APPMaster; /// <summary> /// get the database connection /// </summary> public CDataConnection DBConn { get { return m_DBConnection; } } public string Key { get; private set; } /// <summary> /// get/set status comment info, it will append... 
this allows us to keep /// a running list of errors if needed /// </summary> private string m_strStatusComment; public string StatusComment { get { return m_strStatusComment; } set { if (m_strStatusComment != "") { string strEnding = ""; if (m_strStatusComment.Length >= 6) { strEnding = m_strStatusComment.Substring(m_strStatusComment.Length - 6); } if (strEnding != "<br />") { Session["StatusComment"] += "<br />"; m_strStatusComment += "<br />"; } } Session["StatusComment"] += value; m_strStatusComment += value; //if the status contains an Oracle error //then show a more user-friendly error instead //example: ORA-00942: table or view does not exist //search for "ORA-" if (m_strStatusComment.ToUpper().IndexOf("ORA-") != -1) { //todo: log the current status in an error table m_strStatusComment = "An error occurred while processing, please contact your system administrator."; m_strStatusComment += "<br />"; Session["StatusComment"] = m_strStatusComment; } } } /// <summary> /// clear the status, called by the master page after we display status info /// </summary> public void ClearStatus() { Session["StatusComment"] = ""; Session["StatusCode"] = 0; m_strStatusComment = ""; m_lStatusCode = 0; } /// <summary> /// get/set status code info /// </summary> private long m_lStatusCode; public long StatusCode { get { return m_lStatusCode; } set { //do not overwrite it if it's already set to an error code, this way //errors will overrule success for display if (m_lStatusCode < 1) //Changed because we have error status codes that are greater than 1. { Session["StatusCode"] = value; m_lStatusCode = value; } } } //use this check to see if the user clicked the //app's main "Save" button... public bool OnMasterSAVE() { //get the postback control string strPostBackControl = Request.Params["__EVENTTARGET"]; if (strPostBackControl != null) { //did the user click the master Save button? if (strPostBackControl.IndexOf("btnMasterSave") > -1) { return true; } } return false; } /// <summary> /// get/set user id /// </summary> //user id member - set when we log in private long m_lFXUserID; public long FXUserID { get { return m_lFXUserID; } set { m_lFXUserID = value; } } //are we still logged in... 
public bool IsLoggedIn() { if (m_lFXUserID < 1) { return false; } return true; } /// <summary> /// get client ip, may be router but better than nothing /// </summary> public string ClientIP { get { return Context.Request.ServerVariables["REMOTE_ADDR"]; } } //sets a string viewstate value public void SetVSValue(string strKey, string strValue) { ViewState[strKey] = strValue; } //sets a bool viewstate value public void SetVSValue(string strKey, bool bValue) { ViewState[strKey] = bValue; } //sets a long viewstate value public void SetVSValue(string strKey, long lValue) { ViewState[strKey] = Convert.ToString(lValue); } //gets a string value from viewstate public string GetVSStringValue(string strKey) { string strValue = ""; if (ViewState[strKey] != null) { strValue = Convert.ToString(ViewState[strKey]); } return strValue; } //gets a bool value from view state public bool GetVSBoolValue(string strKey) { bool bValue = false; if (ViewState[strKey] != null) { bValue = Convert.ToBoolean(ViewState[strKey]); } return bValue; } //gets a long value from viewstate public long GetVSLongValue(string strKey) { long lValue = -1; if (ViewState[strKey] != null) { lValue = Convert.ToInt32(ViewState[strKey]); } return lValue; } //closes the patient public void ClosePatient() { //remove patient lock CPatientLock patlock = new CPatientLock(this); string strSelectedPatientID = (!String.IsNullOrEmpty(this.SelectedPatientID)) ? this.SelectedPatientID : "null"; patlock.DeletePatientLock(strSelectedPatientID); this.SelectedPatientID = ""; this.SelectedTreatmentID = -1; this.LookupSearchCase = -1; this.SelectedEncounterID = ""; this.SelectedProblemID = -1; Session["PATIENT_LOCKED"] = null; Session["PAT_LOCK_PROVIDER"] = null; Session["PAT_LOCK_EMAIL"] = null; this.IsPatientLocked = false; //remove session variables associated to the patient Session["PATIENTNAME"] = null; Session["PAT_DEMOGRAPHICS_DS"] = null; Session["PAT_SPONSOR_DS"] = null; Session["TREATMENTS_LIST_DS"] = null; Session["ENCOUNTERS_LIST_DS"] = null; Session["ASSESSMENTS_LIST_DS"] = null; Session["PROBLEMS_LIST_DS"] = null; } //this is the currently looked up patient id... 
public string SelectedPatientID { get { CSec sec = new CSec(); string strValue = ""; //more efficient to just use a session var //no db hit this way //GetSessionValue("SELECTED_PATIENT_ID", out strValue); if(Session["SELECTED_PATIENT_ID"] != null) { strValue = Session["SELECTED_PATIENT_ID"].ToString(); } return sec.dec(strValue, ""); } //set { SetSessionValue("SELECTED_PATIENT_ID", Convert.ToString(value)); } set { CSec sec = new CSec(); Session["SELECTED_PATIENT_ID"] = sec.Enc(Convert.ToString(value), ""); } } public bool IsPatientLocked { get { bool bLocked = false; if (Session["PATIENT_LOCKED_BM"] != null) { bLocked = Convert.ToBoolean(Session["PATIENT_LOCKED_BM"]); } return bLocked; } set { Session["PATIENT_LOCKED_BM"] = value; } } public long GraphicOption { get { string strValue = "0"; if (Session["GRAPHIC_OPTION"] != null) { strValue = Session["GRAPHIC_OPTION"].ToString(); } if (strValue.Length > 0) { return Convert.ToInt32(strValue); } else { return 0; } } set { Session["GRAPHIC_OPTION"] = Convert.ToString(value); } } // 03/11/2011 - Selected treatment id public long SelectedTreatmentID { get { string strValue = "-1"; //GetSessionValue("SELECTED_TREATMENT_ID", out strValue); if (Session["SELECTED_TREATMENT_ID"] != null) { strValue = Session["SELECTED_TREATMENT_ID"].ToString(); } if (strValue.Length > 0) { return Convert.ToInt32(strValue); } else { return -1; } } // set { SetSessionValue("SELECTED_TREATMENT_ID", Convert.ToString(value)); } set { Session["SELECTED_TREATMENT_ID"] = Convert.ToString(value); } } //this is the currently looked up patient id... public string SelectedEncounterID { get { string strValue = ""; //more efficient to just use a session var //no db hit this way //GetSessionValue("SELECTED_ENCOUNTER_ID", out strValue); // if(Session["SELECTED_ENCOUNTER_ID"] != null) { strValue = Session["SELECTED_ENCOUNTER_ID"].ToString(); } return strValue; } //set { SetSessionValue("SELECTED_ENCOUNTER_ID", Convert.ToString(value)); } set { Session["SELECTED_ENCOUNTER_ID"] = Convert.ToString(value); } } //this is the currently looked up provider id... 
public string SelectedProviderID { get { string strValue = ""; //more efficient to just use a session var //no db hit this way //GetSessionValue("SELECTED_PROVIDER_ID", out strValue); if(Session["SELECTED_PROVIDER_ID"] != null) { strValue = Session["SELECTED_PROVIDER_ID"].ToString(); } return strValue; } //set { SetSessionValue("SELECTED_PROVIDER_ID", Convert.ToString(value)); } set { Session["SELECTED_PROVIDER_ID"] = Convert.ToString(value); } } // 03/11/2011 - this is the type of lookup:: 1- all cases, 2- open cases, 3- closed cases public long LookupSearchCase { get { string strValue = "-1"; GetSessionValue("LOOKUP_SEARCH_CASE", out strValue); if (strValue.Length > 0) { return Convert.ToInt32(strValue); } else { return -1; } } set { SetSessionValue("LOOKUP_SEARCH_CASE", Convert.ToString(value)); } } // 11/17/2011 - Selected problem id public long SelectedProblemID { get { string strValue = "-1"; //GetSessionValue("SELECTED_PROBLEM_ID", out strValue); if (Session["SELECTED_PROBLEM_ID"] != null) { strValue = Session["SELECTED_PROBLEM_ID"].ToString(); } if (strValue.Length > 0) { return Convert.ToInt32(strValue); } else { return -1; } } // set { SetSessionValue("SELECTED_PROBLEM_ID", Convert.ToString(value)); } set { Session["SELECTED_PROBLEM_ID"] = Convert.ToString(value); } } /// <summary> /// constructor /// </summary> public BaseMaster() { //create a new dataconnection object m_DBConnection = new CDataConnection(); //clear status m_strStatusComment = string.Empty; m_lStatusCode = -1; FXUserID = 0; } /// <summary> /// this is the proper place to do initialization in a master page /// </summary> /// <param name="sender"></param> /// <param name="e"></param> protected void Page_Init(object sender, EventArgs e) { //app specific stuff outside the base controller APPMaster = new AppMaster(); APPMaster.SetBaseMaster(this); //Returns a string that can be used in a client //event to cause postback to the server. Page.ClientScript.GetPostBackEventReference(this, String.Empty); //set the character set, since all pages derive from basemaster //this will set the encoding for all pages... Response.ContentEncoding = Encoding.UTF8; //init status info and objects m_strStatusComment = string.Empty; m_lStatusCode = -1;//-1 = success no show //04/16/2012 - Security Updates //set the timeout Timeout = (Session.Timeout < 15) ? Timeout = 15 : Timeout = Session.Timeout; //connect to the data source if (!ConnectToDataSource()) { //redirect to an error page Response.Redirect("error_database.aspx"); Response.End(); } //sec helper CSec sec = new CSec(); //DBSessionID gets set in the database when the user //logs in. this is used to cache values in the db and to determine if the //user is logged in // //reset FXUserID, only gets set in the call below FXUserID = 0; if (!String.IsNullOrEmpty(DBSessionID)) { //get actual user id from the database session created when the //user logs in string strUID = ""; if (GetSessionValue("FX_USER_ID", out strUID)) { if (strUID != "") { FXUserID = Convert.ToInt32(strUID); } //load the app specific user details //needed for the application APPMaster.LoadUserDetails(); } else { //log off if we cannot retrieve a valid session, //user timed out LogOff(); } } //user does not have access to this page //so logoff. 
if (!sec.AuditPageAccess(this)) { LogOff(); } string strPageName = this.GetPageName(); if (strPageName.IndexOf("event_management.aspx") != -1) { Response.AddHeader("X-UA-Compatible", "IE=8,chrome=1"); } else { Response.AddHeader("X-UA-Compatible", "IE=9,chrome=1"); } } /// <summary> /// caches a session value in the database. /// stored encrypted and more secure then an asp.net session var /// </summary> /// <param name="strKey"></param> /// <param name="strKeyValue"></param> /// <returns></returns> public bool SetSessionValue( string strKey, string strKeyValue) { //status info long lStatusCode = -1; string strStatusComment = ""; if (!IsLoggedIn()) { return true; } //create a new parameter list CDataParameterList pList = new CDataParameterList(); //add params for the DB call // //in paramaters //these will always be passed in to all sp calls pList.AddParameter("pi_vSessionID", DBSessionID, ParameterDirection.Input); pList.AddParameter("pi_vSessionClientIP", ClientIP, ParameterDirection.Input); pList.AddParameter("pi_nUserID", FXUserID, ParameterDirection.Input); // pList.AddParameter("pi_vKey", strKey, ParameterDirection.Input); pList.AddParameter("pi_vKeyValue", strKeyValue, ParameterDirection.Input); // //execute the stored procedure DBConn.ExecuteOracleSP("PCK_FX_SEC.SetSessionValue", pList, out lStatusCode, out strStatusComment); // 0 = success if strStatus is populated it will show on the screen // 1 to n are errors and we always show errors if (lStatusCode == 0) { return true; } return false; } /// <summary> /// gets a cached session value from the database. /// stored encrypted and more secure then an asp.net session var /// </summary> /// <param name="strKey"></param> /// <param name="strKeyValue"></param> /// <returns></returns> public bool GetSessionValue(string strKey, out string strKeyValue) { strKeyValue = ""; //status info long lStatusCode = -1; string strStatusComment = ""; //create a new parameter list CDataParameterList pList = new CDataParameterList(); //in paramaters //these will always be passed in to all sp calls pList.AddParameter("pi_vDBSessionID", DBSessionID, ParameterDirection.Input); pList.AddParameter("pi_vWebSessionID", ASPSessionID, ParameterDirection.Input); pList.AddParameter("pi_vSessionClientIP", ClientIP, ParameterDirection.Input); pList.AddParameter("pi_nUserID", FXUserID, ParameterDirection.Input); // pList.AddParameter("pi_vKey", strKey, ParameterDirection.Input); pList.AddParameter("po_vKeyValue", strKeyValue, ParameterDirection.Output); // //execute the stored procedure DBConn.ExecuteOracleSP("PCK_FX_SEC.GetSessionValue", pList, out lStatusCode, out strStatusComment); // 0 = success if strStatus is populated it will show on the screen // 1 to n are errors and we always show errors if (lStatusCode == 0) { CDataParameter paramValue = pList.GetItemByName("po_vKeyValue"); strKeyValue = paramValue.StringParameterValue; return true; } strKeyValue = ""; return false; } /// <summary> /// deletes a cached session value in the database. 
/// </summary> /// <param name="strKey"></param> /// <returns></returns> public bool DeleteSessionValue(string strKey) { //status info long lStatusCode = -1; string strStatusComment = ""; //create a new parameter list CDataParameterList pList = new CDataParameterList(); //add params for the DB call // //in paramaters //these will always be passed in to all sp calls pList.AddParameter("pi_vSessionID", DBSessionID, ParameterDirection.Input); pList.AddParameter("pi_vSessionClientIP", ClientIP, ParameterDirection.Input); pList.AddParameter("pi_nUserID", FXUserID, ParameterDirection.Input); pList.AddParameter("pi_vKey", strKey, ParameterDirection.Input); // //execute the stored procedure DBConn.ExecuteOracleSP("PCK_FX_SEC.DeleteSessionValue", pList, out lStatusCode, out strStatusComment); // 0 = success if strStatus is populated it will show on the screen // 1 to n are errors and we always show errors if (lStatusCode == 0) { return true; } return false; } /// <summary> /// deletes all cached session values in the database. /// </summary> /// <returns></returns> public bool DeleteAllSessionValues() { //status info long lStatusCode = -1; string strStatusComment = ""; //create a new parameter list CDataParameterList pList = new CDataParameterList(); //add params for the DB call // //in paramaters //these will always be passed in to all sp calls pList.AddParameter("pi_vSessionID", DBSessionID, ParameterDirection.Input); pList.AddParameter("pi_vSessionClientIP", ClientIP, ParameterDirection.Input); pList.AddParameter("pi_nUserID", FXUserID, ParameterDirection.Input); //execute the stored procedure DBConn.ExecuteOracleSP("PCK_FX_SEC.DeleteAllSessionValues", pList, out lStatusCode, out strStatusComment); // 0 = success if strStatus is populated it will show on the screen // 1 to n are errors and we always show errors if (lStatusCode == 0) { return true; } return false; } /// <summary> /// helper to get the current page name /// </summary> /// <returns></returns> public string GetPageName() { string strPath = System.Web.HttpContext.Current.Request.Url.AbsolutePath; System.IO.FileInfo oInfo = new System.IO.FileInfo(strPath); return oInfo.Name.ToLower(); } /// <summary> /// good place to close connections etc... /// </summary> public override void Dispose() { //close the database connection if (m_DBConnection != null) { m_DBConnection.Close(); } base.Dispose(); } /// <summary> /// connect to datasource /// </summary> private bool ConnectToDataSource() { //get the connection string from the web.config file //connection string is encrypted in the file using MS recommended procedures // //cd\ //cd windows //cd microsoft.net //cd framework //cd v2.0.50727 //aspnet_regiis -pe "connectionStrings" -app "/PrimeCarePlus" -prov "RsaProtectedConfigurationProvider" // //look for connection strings in connection strings and app settings string strConnectionString = string.Empty; try { strConnectionString = ConfigurationManager.ConnectionStrings["DBConnString"].ConnectionString; Key = ConfigurationManager.ConnectionStrings["Key"].ConnectionString; } catch (Exception e) { string strStatus = e.Message; } bool bAudit = (ConfigurationManager.AppSettings["AUDIT"] == "1") ? true : false; //Connect to the database, connection is housed in the master page //so that all pages that use the master have access to it. 
if (!m_DBConnection.Connect( strConnectionString, (int)DataConnectionType.Oracle, bAudit)) { Session["DB_ERROR_CODE"] = string.Empty; Session["DB_ERROR"] = string.Empty; m_strStatusComment = "Error Connecting to Data Source"; m_lStatusCode = 1; return false; } return true; } /// <summary> /// called to logoff the user /// </summary> public void LogOff() { //clear the patient this.ClosePatient(); //clear FX_USER session var Session["FX_USER"] = null; //clear Graphic Option session var Session["GRAPHIC_OPTION"] = null; //clear account details session var Session["ACC_DETAILS"] = null; //do any clean up necessary to logoff CSec sec = new CSec(); sec.LogOff(this); //is an extra step for timeouts etc... if (!String.IsNullOrEmpty(DBSessionID)) { DeleteAllSessionValues(); } //clear the dbsessionid DBSessionID = String.Empty; //clear the session Session.Clear(); //abandon the session Session.Abandon(); //redirect; Response.Redirect("default.aspx"); } public void LogOff(bool bRedirect) { //clear the patient this.ClosePatient(); //clear FX_USER session var Session["FX_USER"] = null; //clear account details session var Session["ACC_DETAILS"] = null; //do any clean up necessary to logoff CSec sec = new CSec(); sec.LogOff(this); //is an extra step for timeouts etc... if (!String.IsNullOrEmpty(DBSessionID)) { DeleteAllSessionValues(); } //clear the dbsessionid DBSessionID = String.Empty; //clear the session Session.Clear(); //abandon the session Session.Abandon(); //redirect; if (bRedirect) { Response.Redirect("default.aspx"); } } /// <summary> /// Get a random chars, good for forcing the browser to refresh a page /// also used to help generate our session id /// </summary> /// <returns></returns> public string GenerateRandomChars() { string strRand = ""; Random r = new Random(); strRand = Convert.ToString(r.NextDouble()); strRand = strRand.Replace(".", ""); string strRandChars = ""; for (int i = 0; i < strRand.Length; i++) { string strC = ""; strC = strRand.Substring(i, 1); if (strC == "0") { strRandChars += "a"; } else if (strC == "1") { strRandChars += "b"; } else if (strC == "2") { strRandChars += "c"; } else if (strC == "3") { strRandChars += "d"; } else if (strC == "4") { strRandChars += "e"; } else if (strC == "5") { strRandChars += "f"; } else if (strC == "6") { strRandChars += "g"; } else if (strC == "7") { strRandChars += "h"; } else if (strC == "8") { strRandChars += "i"; } else if (strC == "9") { strRandChars += "j"; } else { strRandChars += "z"; } } return strRandChars; } // 2011-07-21 D.S. 
// Get "Last Updated" info public bool getLastUpdated(DataSet ds) { if (ds != null) { DateTime dtLastUpdated; DateTime.TryParse("1900-01-01", out dtLastUpdated); long lLastUpdatedBy = -1; foreach (DataTable dt in ds.Tables) { foreach (DataRow dr in dt.Rows) { if (!dr.IsNull("last_updated") && !dr.IsNull("last_updated_by")) { DateTime dtRecUpdated = Convert.ToDateTime(dr["last_updated"]); if (dtRecUpdated > dtLastUpdated) { dtLastUpdated = dtRecUpdated; lLastUpdatedBy = Convert.ToInt32(dr["last_updated_by"]); } } } } if (lLastUpdatedBy > -1) { string strLastUpdatedBy = ""; string strLastUpdated = dtLastUpdated.ToString(); CUser user = new CUser(); DataSet dsUser = user.GetLoginUserDS(this, lLastUpdatedBy); if (dsUser != null) { CDataUtils utils = new CDataUtils(); strLastUpdatedBy = utils.GetStringValueFromDS(dsUser, "name"); string strUpdated = "Last updated on " + strLastUpdated + " by " + strLastUpdatedBy + "."; this.SetVSValue("LAST_UPDATED", strUpdated); return true; } } } return false; } //specify a different VS key public bool getLastUpdated(DataSet ds, string strVSKey) { if (ds != null) { DateTime dtLastUpdated; DateTime.TryParse("1900-01-01", out dtLastUpdated); long lLastUpdatedBy = -1; foreach (DataTable dt in ds.Tables) { foreach (DataRow dr in dt.Rows) { if (!dr.IsNull("last_updated") && !dr.IsNull("last_updated_by")) { DateTime dtRecUpdated = Convert.ToDateTime(dr["last_updated"]); if (dtRecUpdated > dtLastUpdated) { dtLastUpdated = dtRecUpdated; lLastUpdatedBy = Convert.ToInt32(dr["last_updated_by"]); } } } } if (lLastUpdatedBy > -1) { string strLastUpdatedBy = ""; string strLastUpdated = dtLastUpdated.ToString(); CUser user = new CUser(); DataSet dsUser = user.GetLoginUserDS(this, lLastUpdatedBy); if (dsUser != null) { CDataUtils utils = new CDataUtils(); strLastUpdatedBy = utils.GetStringValueFromDS(dsUser, "name"); string strUpdated = "Last updated on " + strLastUpdated + " by " + strLastUpdatedBy + "."; this.SetVSValue(strVSKey, strUpdated); return true; } } } return false; } }
using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Reflection; using AutoFixture.Kernel; using Xunit; namespace AutoFixtureUnitTest.Kernel { public class TemplateMethodQueryTests { [Fact] public void SutIsIMethodQuery() { Action dummy = () => { }; var sut = new TemplateMethodQuery(dummy.GetMethodInfo()); Assert.IsAssignableFrom<IMethodQuery>(sut); } [Fact] public void InitializeWithNullTemplateThrows() { Assert.Throws<ArgumentNullException>(() => new TemplateMethodQuery(null)); } [Fact] public void InitializeSecondConstructorWithNullTemplateThrows() { var anonymousOwner = new object(); Assert.Throws<ArgumentNullException>(() => new TemplateMethodQuery(null, anonymousOwner)); } [Fact] public void InitializeWithNullOwnerThrows() { Action dummy = () => { }; Assert.Throws<ArgumentNullException>(() => new TemplateMethodQuery(dummy.GetMethodInfo(), null)); } [Fact] public void TemplateIsCorrect() { Action dummy = () => { }; var sut = new TemplateMethodQuery(dummy.GetMethodInfo()); Assert.Equal(dummy.GetMethodInfo(), sut.Template); } [Fact] public void OwnerIsCorrect() { Action dummy = () => { }; var owner = new object(); var sut = new TemplateMethodQuery(dummy.GetMethodInfo(), owner); Assert.Equal(owner, sut.Owner); } [Fact] public void SelectMethodsWithNullThrows() { Action dummy = () => { }; var sut = new TemplateMethodQuery(dummy.GetMethodInfo()); Assert.Throws<ArgumentNullException>(() => sut.SelectMethods(null)); } [Theory] [InlineData(typeof(TypeWithTemplateMethods), "VoidMethodWithString", typeof(TypeWithMethods), new[] { "s" }, new object[] { null })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithString", typeof(TypeWithMethods), new[] { "s" }, new object[] { "s" })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithStringArray", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" }, new[] { "a", "b" }, new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithGenerics", typeof(TypeWithMethods), new[] { "s" }, new object[] { "s" })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithGenerics", typeof(TypeWithMethods), new object[] { 1 }, new object[] { 1 })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithEnumerable", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithArray", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" }, new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithOptionalParameter", typeof(TypeWithMethods), new object[] { 1 }, new object[] { new object[] { 1, "s" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithOptionalParameter", typeof(TypeWithMethods), new object[] { 1, "a" }, new object[] { new object[] { 1, "a" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithParamsParameter", typeof(TypeWithMethods), new object[] { 1 }, new object[] { new object[] { 1 } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithParamsParameter", typeof(TypeWithMethods), new object[] { 1, new[] { "a", "b" } }, new object[] { new object[] { 1, "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithSameName", typeof(TypeWithMethods), new object[] { "s" }, new object[] { "s", "s" })] public void SelectMethodsReturnsCorrectResult(Type templateType, string methodName, Type targetType, object[] arguments, object[] expected) { var 
templateMethod = templateType.GetMethod(methodName); var owner = Activator.CreateInstance(targetType); var sut = new TemplateMethodQuery(templateMethod, owner); var results = sut.SelectMethods(targetType) .Select(m => m.Invoke(arguments)) .ToArray(); Assert.Equal(expected, results); } [Theory] [InlineData(typeof(TypeWithTemplateMethods), "VoidMethodWithString", typeof(TypeWithMethods), new[] { "s" }, new object[] { null })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithString", typeof(TypeWithMethods), new[] { "s" }, new object[] { "s" })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithStringArray", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithGenerics", typeof(TypeWithMethods), new[] { "s" }, new object[] { "s" })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithGenerics", typeof(TypeWithMethods), new object[] { 1 }, new object[] { 1 })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithEnumerable", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithArray", typeof(TypeWithMethods), new object[] { new[] { "a", "b" } }, new object[] { new[] { "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithOptionalParameter", typeof(TypeWithMethods), new object[] { 1 }, new object[] { new object[] { 1, "s" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithOptionalParameter", typeof(TypeWithMethods), new object[] { 1, "a" }, new object[] { new object[] { 1, "a" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithParamsParameter", typeof(TypeWithMethods), new object[] { 1 }, new object[] { new object[] { 1 } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithParamsParameter", typeof(TypeWithMethods), new object[] { 1, new[] { "a", "b" } }, new object[] { new object[] { 1, "a", "b" } })] [InlineData(typeof(TypeWithTemplateMethods), "MethodWithSameName", typeof(TypeWithMethods), new object[] { "s" }, new object[] { "s" })] public void SelectMethodsReturnsCorrectResultWhenInitializedWithNullOwner(Type templateType, string methodName, Type targetType, object[] arguments, object[] expected) { var templateMethod = templateType.GetMethod(methodName); var sut = new TemplateMethodQuery(templateMethod); var results = sut.SelectMethods(targetType) .Select(m => m.Invoke(arguments)) .ToArray(); Assert.Equal(expected, results); } [Fact] public void SelectMethodsDoesNotReturnIncorrectResult() { var templateMethod = typeof(TypeWithTemplateMethods).GetMethod("MethodWithFunc"); var sut = new TemplateMethodQuery(templateMethod); var results = sut.SelectMethods(typeof(TypeWithMethods)) .Select(m => m.Invoke(new[] { new Func<string>(() => string.Empty) })); var result = Assert.Single(results); Assert.True((bool)result); } [Theory] [InlineData(typeof(TypeWithTemplateMethods), typeof(TypeWithMethods), "MethodInOrder", new object[] { new[] { "a" } })] [InlineData(typeof(TypeWithTemplateMethods), typeof(TypeWithMethods), "MethodInOrderWithTwoParameters", new object[] { new[] { "a" }, new[] { "a" } })] public void SelectMethodsReturnsInCorrectOrderWhenInitializedWithNullOwner(Type templateType, Type targetType, string methodName, object[] arguments) { var templateMethod = templateType.GetMethod(methodName); var owner = Activator.CreateInstance(targetType); var sut = new TemplateMethodQuery(templateMethod, owner); var results = 
sut.SelectMethods(targetType) .Select(m => m.Invoke(arguments)) .ToArray(); var expected = Enumerable.Range(0, results.Length).Cast<object>().ToArray(); Assert.Equal(expected, results); } } public class TypeWithTemplateMethods { public static void VoidMethodWithString(string arg) { } public static void MethodWithString(string arg) { } public static void MethodWithStringArray(string[] arg) { } public static void MethodWithGenerics<T>(T arg) { } public static void MethodWithEnumerable<T>(IEnumerable<T> arg) { } public static void MethodWithArray<T>(T[] arg) { } public static void MethodWithOptionalParameter<T>(T arg) { } public static void MethodWithParamsParameter<T>(T arg) { } public static void MethodWithSameName(string arg) { } public static void MethodInOrder(string[] arg) { } public static void MethodInOrderWithTwoParameters(string[] arg1, IEnumerable<string> arg2) { } public static void MethodWithFunc<T>(Func<T> arg) { } } public class TypeWithMethods { public static void VoidMethodWithString(string arg) { } public static string MethodWithString(string arg) { return arg; } public static int MethodWithString(int arg) { return arg; } public static int[] MethodWithStringArray(int[] arg) { return arg; } public static object[] MethodWithStringArray(object[] arg) { return arg; } public string[] MethodWithStringArray(string[] arg) { return arg; } public static IEnumerable<int> MethodWithStringArray(IEnumerable<int> arg) { return arg; } public IEnumerable<string> MethodWithStringArray(IEnumerable<string> arg) { return arg; } public static T MethodWithGenerics<T>(T arg) { return arg; } public static IEnumerable<T> MethodWithEnumerable<T>(IEnumerable<T> arg) { return arg; } public static IEnumerable<T> MethodWithArray<T>(T[] arg) { return arg; } public IEnumerable<T> MethodWithArray<T>(IEnumerable<T> arg) { return arg; } public static object[] MethodWithOptionalParameter<T>(T arg, string s = "s") { return new object[] { arg, s }; } public static object[] MethodWithParamsParameter<T>(T arg, params string[] s) { return new object[] { arg }.Concat(s).ToArray(); } public static object MethodWithSameName(object arg) { return arg; } public string MethodWithSameName(string arg) { return arg; } public static int MethodInOrder(object arg) { return 6; } public int MethodInOrder(string[] arg1, object arg2 = null) { return 1; } public static int MethodInOrder(string[] arg) { return 0; } public int MethodInOrder(IEnumerable<object> arg) { return 5; } public static int MethodInOrder(IEnumerable<string> arg) { return 3; } public int MethodInOrder(object[] arg) { return 4; } public static int MethodInOrder(string[] arg1, object arg2 = null, params object[] arg3) { return 2; } public static int MethodInOrderWithTwoParameters(IEnumerable<string> arg1, object arg2) { return 5; } public int MethodInOrderWithTwoParameters(string[] arg1, IEnumerable arg2) { return 3; } public static int MethodInOrderWithTwoParameters(IEnumerable<string> arg1, IEnumerable arg2) { return 4; } public int MethodInOrderWithTwoParameters(string[] arg1, IEnumerable<string> arg2, object arg3 = null) { return 1; } public static int MethodInOrderWithTwoParameters(IEnumerable<string> arg1, IEnumerable<string> arg2) { return 2; } public int MethodInOrderWithTwoParameters(string[] arg1, IEnumerable<string> arg2) { return 0; } public static int MethodWithFunc<T>(Func<T, T> arg) { return -1; } public static bool MethodWithFunc<T>(Func<T> arg) { return true; } } }
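// --- Minimal usage sketch (assumes the TypeWithTemplateMethods/TypeWithMethods
// fixtures defined above and a reference to AutoFixture.Kernel) ---
// Shows the API shape the tests exercise: a template method's signature selects
// the compatible overloads on another type, and the returned methods can be
// invoked with the template's arguments.
using System.Linq;
using System.Reflection;
using AutoFixture.Kernel;
using AutoFixtureUnitTest.Kernel;

public static class TemplateMethodQueryUsageSketch
{
    public static object InvokeBestMatch()
    {
        MethodInfo template = typeof(TypeWithTemplateMethods).GetMethod("MethodWithString");
        var query = new TemplateMethodQuery(template, new TypeWithMethods());

        // SelectMethods yields the matching methods on TypeWithMethods, ordered
        // by how closely they fit the template's parameters.
        var bestMatch = query.SelectMethods(typeof(TypeWithMethods)).First();

        // Per the test data above this returns "s".
        return bestMatch.Invoke(new object[] { "s" });
    }
}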
// -------------------------------------------------------------------------------------------------------------------- // <copyright file="Activity.cs" company="Catel development team"> // Copyright (c) 2008 - 2015 Catel development team. All rights reserved. // </copyright> // -------------------------------------------------------------------------------------------------------------------- namespace Catel.Android.App { using System; using System.ComponentModel; using MVVM; using MVVM.Providers; using MVVM.Views; /// <summary> /// View implementation that automatically takes care of view models. /// </summary> public class Activity : global::Android.App.Activity, IPage { #region Fields private readonly PageLogic _logic; private object _dataContext; private BindingContext _bindingContext; #endregion #region Constructors /// <summary> /// Initializes a new instance of the <see cref="Activity"/> class. /// </summary> public Activity() { if (CatelEnvironment.IsInDesignMode) { return; } _logic = new PageLogic(this); _logic.TargetViewPropertyChanged += (sender, e) => { OnPropertyChanged(e); PropertyChanged.SafeInvoke(this, e); }; _logic.ViewModelChanged += (sender, e) => RaiseViewModelChanged(); _logic.ViewModelPropertyChanged += (sender, e) => { OnViewModelPropertyChanged(e); ViewModelPropertyChanged.SafeInvoke(this, e); }; } #endregion #region Properties /// <summary> /// Gets or sets the data context. /// </summary> /// <value>The data context.</value> public object DataContext { get { return _dataContext; } set { var oldValue = _dataContext; var newValue = value; _dataContext = value; DataContextChanged.SafeInvoke(this, new DataContextChangedEventArgs(oldValue, newValue)); } } /// <summary> /// Gets or sets the tag. /// </summary> /// <value>The tag.</value> public object Tag { get; set; } /// <summary> /// Gets the type of the view model that this user control uses. /// </summary> public Type ViewModelType { get { return _logic.GetValue<PageLogic, Type>(x => x.ViewModelType); } } /// <summary> /// Gets or sets a value indicating whether the view model container should prevent the /// creation of a view model. /// <para /> /// This property is very useful when using views in transitions where the view model is no longer required. /// </summary> /// <value><c>true</c> if the view model container should prevent view model creation; otherwise, <c>false</c>.</value> public bool PreventViewModelCreation { get { return _logic.GetValue<PageLogic, bool>(x => x.PreventViewModelCreation); } set { _logic.SetValue<PageLogic>(x => x.PreventViewModelCreation = value); } } /// <summary> /// Gets the view model that is contained by the container. /// </summary> /// <value>The view model.</value> public IViewModel ViewModel { get { return _logic.GetValue<PageLogic, IViewModel>(x => x.ViewModel); } } /// <summary> /// Gets or sets a value indicating whether the view is enabled. /// </summary> /// <value><c>true</c> if the view is enabled; otherwise, <c>false</c>.</value> public bool IsEnabled { get; set; } #endregion #region Events /// <summary> /// Occurs when a property on the container has changed. /// </summary> /// <remarks> /// This event makes it possible to externally subscribe to property changes of a view /// (mostly the container of a view model) because the .NET Framework does not allows us to. /// </remarks> public event PropertyChangedEventHandler PropertyChanged; /// <summary> /// Occurs when the <see cref="ViewModel"/> property has changed. 
/// </summary> public event EventHandler<EventArgs> ViewModelChanged; /// <summary> /// Occurs when a property on the <see cref="ViewModel"/> has changed. /// </summary> public event EventHandler<PropertyChangedEventArgs> ViewModelPropertyChanged; /// <summary> /// Occurs when the view is loaded. /// </summary> public event EventHandler<EventArgs> Loaded; /// <summary> /// Occurs when the view is unloaded. /// </summary> public event EventHandler<EventArgs> Unloaded; /// <summary> /// Occurs when the data context has changed. /// </summary> public event EventHandler<DataContextChangedEventArgs> DataContextChanged; #endregion #region Methods private void RaiseViewModelChanged() { OnViewModelChanged(); ViewModelChanged.SafeInvoke(this); PropertyChanged.SafeInvoke(this, new PropertyChangedEventArgs("ViewModel")); if (_bindingContext != null) { _bindingContext.DetermineIfBindingsAreRequired(ViewModel); } } /// <summary> /// Called when the bindings must be added. This can happen /// <para /> /// Normally the binding system would take care of this. /// </summary> /// <param name="bindingContext">The binding context.</param> /// <param name="viewModel">The view model.</param> /// <returns><c>true</c> if the bindings were successfully added.</returns> protected virtual void AddBindings(BindingContext bindingContext, IViewModel viewModel) { } /// <summary> /// Called when the view is loaded. /// </summary> protected override void OnResume() { base.OnResume(); RaiseViewModelChanged(); Loaded.SafeInvoke(this); InitializeBindingContext(); } /// <summary> /// Called as part of the activity lifecycle when an activity is going into /// the background, but has not (yet) been killed. /// </summary> protected override void OnPause() { base.OnPause(); // Note: call *after* base so NavigationAdapter always gets called Unloaded.SafeInvoke(this); UninitializeBindingContext(); } private void InitializeBindingContext() { if (_bindingContext != null) { UninitializeBindingContext(); } _bindingContext = new BindingContext(); _bindingContext.BindingUpdateRequired += OnBindingUpdateRequired; _bindingContext.DetermineIfBindingsAreRequired(ViewModel); } private void UninitializeBindingContext() { if (_bindingContext == null) { return; } _bindingContext.BindingUpdateRequired -= OnBindingUpdateRequired; _bindingContext.Clear(); _bindingContext = null; } private void OnBindingUpdateRequired(object sender, EventArgs e) { AddBindings(_bindingContext, ViewModel); } /// <summary> /// Called when a dependency property on this control has changed. /// </summary> /// <param name="e">The <see cref="PropertyChangedEventArgs"/> instance containing the event data.</param> protected virtual void OnPropertyChanged(PropertyChangedEventArgs e) { } /// <summary> /// Called when a property on the current <see cref="ViewModel"/> has changed. /// </summary> /// <param name="e">The <see cref="System.ComponentModel.PropertyChangedEventArgs"/> instance containing the event data.</param> protected virtual void OnViewModelPropertyChanged(PropertyChangedEventArgs e) { } /// <summary> /// Called when the <see cref="ViewModel"/> has changed. /// </summary> /// <remarks> /// This method does not implement any logic and saves a developer from subscribing/unsubscribing /// to the <see cref="ViewModelChanged"/> event inside the same user control. /// </remarks> protected virtual void OnViewModelChanged() { } #endregion } }
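// --- Illustrative sketch only (not part of Catel itself) ---
// Shows how an app-level activity might plug into the lifecycle exposed by
// Catel.Android.App.Activity above by overriding its virtual members.
// Assumptions: "MainActivitySketch" is a made-up name, the using directives
// mirror the namespaces imported by the file above (BindingContext and
// IViewModel are assumed to resolve from Catel.MVVM), and the usual Android
// [Activity] attribute/manifest plumbing is omitted.
using System.ComponentModel;
using Catel.MVVM;

public class MainActivitySketch : Catel.Android.App.Activity
{
    protected override void AddBindings(BindingContext bindingContext, IViewModel viewModel)
    {
        // Called when the binding context reports that bindings are required
        // (see OnBindingUpdateRequired above); register view <-> view model
        // bindings for the resolved view model here.
    }

    protected override void OnViewModelChanged()
    {
        // Raised from RaiseViewModelChanged (e.g. in OnResume); react to a new
        // view model here without subscribing to ViewModelChanged manually.
    }

    protected override void OnViewModelPropertyChanged(PropertyChangedEventArgs e)
    {
        // Observe individual view model property changes without manual event wiring.
    }
}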
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Text; /// <summary> /// Insert(System.Int32,System.String,System.Int32) /// </summary> public class StringBuilderInsert4 { #region Private Fields private const int c_LENGTH_OF_STRING = 256; #endregion #region Public Methods public bool RunTests() { bool retVal = true; TestLibrary.TestFramework.LogInformation("[Positive]"); retVal = PosTest1() && retVal; retVal = PosTest2() && retVal; retVal = PosTest3() && retVal; retVal = PosTest4() && retVal; retVal = PosTest5() && retVal; TestLibrary.TestFramework.LogInformation("[Negative]"); retVal = NegTest1() && retVal; retVal = NegTest2() && retVal; retVal = NegTest3() && retVal; retVal = NegTest4() && retVal; return retVal; } #region Positive Test Cases public bool PosTest1() { bool retVal = true; string randString = null; TestLibrary.TestFramework.BeginScenario("PosTest1: Call insert on an empty string builder instance"); try { randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); StringBuilder newBuilder = builder.Insert(0, randString, 1); string actualString = newBuilder.ToString(); if (!actualString.Equals(randString)) { TestLibrary.TestFramework.LogError("001.1", "Calling insert on an empty string builder instance returns wrong string builder instance"); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString + ", actualString = " + actualString); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("001.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool PosTest2() { bool retVal = true; string randString = null; int randIndex = 0; int randCount = 0; TestLibrary.TestFramework.BeginScenario("PosTest2: Call insert on a non empty string builder instance"); try { randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); randIndex = TestLibrary.Generator.GetByte(-55); randCount = TestLibrary.Generator.GetByte(-55); StringBuilder builder = new StringBuilder(randString); StringBuilder newBuilder = builder.Insert(randIndex, randString, randCount); char[] characters = new char[randString.Length + randCount * randString.Length]; int index = 0; for (int i = 0; i < randIndex; ++i) { characters[index++] = randString[i]; } for (int c = 0; c < randCount; ++c) { for (int i = 0; i < randString.Length; ++i) { characters[index++] = randString[i]; } } for (int i = randIndex; i < randString.Length; ++i) { characters[index++] = randString[i]; } string desiredString = new string(characters); string actualString = newBuilder.ToString(); if (!desiredString.Equals(actualString)) { TestLibrary.TestFramework.LogError("002.1", "Calling insert on a non empty string builder instance returns wrong string builder instance"); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString + ", actualString = " + actualString + ", desiredString = " + desiredString + ", randIndex = " + randIndex + ", randCount = " + randCount); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("002.0", "Unexpected exception: " + e); 
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString + ", randIndex = " + randIndex + ", randCount = " + randCount); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool PosTest3() { bool retVal = true; string randString = null; TestLibrary.TestFramework.BeginScenario("PosTest3: Call insert on an empty string builder instance and with count is 0"); try { randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); StringBuilder newBuilder = builder.Insert(0, randString, 0); string actualString = newBuilder.ToString(); if (!actualString.Equals(String.Empty)) { TestLibrary.TestFramework.LogError("003.1", "Calling insert on an empty string builder instance and with count is 0 returns wrong string builder instance"); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString + ", actualString = " + actualString); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("003.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] randString = " + randString); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool PosTest4() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("PosTest4: Call insert on an empty string builder instance and with value is null reference"); try { StringBuilder builder = new StringBuilder(); StringBuilder newBuilder = builder.Insert(0, null, 1); string actualString = newBuilder.ToString(); if (!actualString.Equals(String.Empty)) { TestLibrary.TestFramework.LogError("004.1", "Calling insert on an empty string builder instance and with value is null reference returns wrong string builder instance"); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] actualString = " + actualString); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("004.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool PosTest5() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("PosTest5: Call insert on an empty string builder instance and with value is String.Empty"); try { StringBuilder builder = new StringBuilder(); StringBuilder newBuilder = builder.Insert(0, String.Empty, 1); string actualString = newBuilder.ToString(); if (!actualString.Equals(String.Empty)) { TestLibrary.TestFramework.LogError("005.1", "Calling insert on an empty string builder instance and with value is String.Empty returns wrong string builder instance"); TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLES] actualString = " + actualString); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("005.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } #endregion #region Nagetive Test Cases public bool NegTest1() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest1: ArgumentOutOfRangeException should be thrown when index is less than zero"); try { string randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); builder.Insert(-1, randString, 1); TestLibrary.TestFramework.LogError("101.1", "ArgumentOutOfRangeException is not thrown when index is 
less than zero"); retVal = false; } catch (ArgumentOutOfRangeException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("101.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool NegTest2() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest2: ArgumentOutOfRangeException should be thrown when index is greater than the current length of this instance"); try { string randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); builder.Insert(1, randString, 1); TestLibrary.TestFramework.LogError("102.1", "ArgumentOutOfRangeException is not thrown when index is greater than the current length of this instance"); retVal = false; } catch (ArgumentOutOfRangeException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("102.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool NegTest3() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest3: ArgumentOutOfRangeException should be thrown when count is less than zero. "); try { string randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); builder.Insert(0, randString, -1); TestLibrary.TestFramework.LogError("103.1", "ArgumentOutOfRangeException is not thrown when count is less than zero. "); retVal = false; } catch (ArgumentOutOfRangeException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("103.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool NegTest4() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest4: OutOfMemoryException should be thrown when The current length of this StringBuilder object plus the length of value times count exceeds MaxCapacity."); try { string randString = TestLibrary.Generator.GetString(-55, false, c_LENGTH_OF_STRING, c_LENGTH_OF_STRING); StringBuilder builder = new StringBuilder(); builder.Insert(0, randString, Int32.MaxValue); TestLibrary.TestFramework.LogError("104.1", "OutOfMemoryException is not thrown when The current length of this StringBuilder object plus the length of value times count exceeds MaxCapacity."); retVal = false; } catch (OutOfMemoryException) // StringBuilder new implementation is now throwing OutOfMemoryException { } catch (Exception e) { TestLibrary.TestFramework.LogError("104.0", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } #endregion #endregion public static int Main() { StringBuilderInsert4 test = new StringBuilderInsert4(); TestLibrary.TestFramework.BeginTestCase("StringBuilderInsert4"); if (test.RunTests()) { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("PASS"); return 100; } else { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("FAIL"); return 0; } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.Runtime.Serialization { using System; using System.Collections; using System.Diagnostics; using System.Globalization; using System.IO; using System.Reflection; using System.Text; using System.Xml; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.CompilerServices; using DataContractDictionary = System.Collections.Generic.Dictionary<System.Xml.XmlQualifiedName, DataContract>; public sealed class DataContractSerializer : XmlObjectSerializer { private Type _rootType; private DataContract _rootContract; // post-surrogate private bool _needsContractNsAtRoot; private XmlDictionaryString _rootName; private XmlDictionaryString _rootNamespace; private int _maxItemsInObjectGraph; private bool _ignoreExtensionDataObject; private bool _preserveObjectReferences; private ReadOnlyCollection<Type> _knownTypeCollection; internal IList<Type> knownTypeList; internal DataContractDictionary knownDataContracts; private DataContractResolver _dataContractResolver; private ISerializationSurrogateProvider _serializationSurrogateProvider; private bool _serializeReadOnlyTypes; public DataContractSerializer(Type type) : this(type, (IEnumerable<Type>)null) { } public DataContractSerializer(Type type, IEnumerable<Type> knownTypes) { Initialize(type, knownTypes, int.MaxValue, false, false, null, false); } public DataContractSerializer(Type type, string rootName, string rootNamespace) : this(type, rootName, rootNamespace, null) { } public DataContractSerializer(Type type, string rootName, string rootNamespace, IEnumerable<Type> knownTypes) { XmlDictionary dictionary = new XmlDictionary(2); Initialize(type, dictionary.Add(rootName), dictionary.Add(DataContract.GetNamespace(rootNamespace)), knownTypes, int.MaxValue, false, false, null, false); } public DataContractSerializer(Type type, XmlDictionaryString rootName, XmlDictionaryString rootNamespace) : this(type, rootName, rootNamespace, null) { } public DataContractSerializer(Type type, XmlDictionaryString rootName, XmlDictionaryString rootNamespace, IEnumerable<Type> knownTypes) { Initialize(type, rootName, rootNamespace, knownTypes, int.MaxValue, false, false, null, false); } #if NET_NATIVE public DataContractSerializer(Type type, IEnumerable<Type> knownTypes, int maxItemsInObjectGraph, bool ignoreExtensionDataObject, bool preserveObjectReferences) #else internal DataContractSerializer(Type type, IEnumerable<Type> knownTypes, int maxItemsInObjectGraph, bool ignoreExtensionDataObject, bool preserveObjectReferences) #endif { Initialize(type, knownTypes, maxItemsInObjectGraph, ignoreExtensionDataObject, preserveObjectReferences, null, false); } public DataContractSerializer(Type type, XmlDictionaryString rootName, XmlDictionaryString rootNamespace, IEnumerable<Type> knownTypes, int maxItemsInObjectGraph, bool ignoreExtensionDataObject, bool preserveObjectReferences, DataContractResolver dataContractResolver) { Initialize(type, rootName, rootNamespace, knownTypes, maxItemsInObjectGraph, ignoreExtensionDataObject, preserveObjectReferences, /*dataContractSurrogate,*/ dataContractResolver, false); } public DataContractSerializer(Type type, DataContractSerializerSettings settings) { if (settings == null) { settings = new DataContractSerializerSettings(); } Initialize(type, settings.RootName, settings.RootNamespace, 
settings.KnownTypes, settings.MaxItemsInObjectGraph, false, settings.PreserveObjectReferences, settings.DataContractResolver, settings.SerializeReadOnlyTypes); } private void Initialize(Type type, IEnumerable<Type> knownTypes, int maxItemsInObjectGraph, bool ignoreExtensionDataObject, bool preserveObjectReferences, DataContractResolver dataContractResolver, bool serializeReadOnlyTypes) { CheckNull(type, "type"); _rootType = type; if (knownTypes != null) { this.knownTypeList = new List<Type>(); foreach (Type knownType in knownTypes) { this.knownTypeList.Add(knownType); } } if (maxItemsInObjectGraph < 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("maxItemsInObjectGraph", SR.Format(SR.ValueMustBeNonNegative))); _maxItemsInObjectGraph = maxItemsInObjectGraph; _ignoreExtensionDataObject = ignoreExtensionDataObject; _preserveObjectReferences = preserveObjectReferences; _dataContractResolver = dataContractResolver; _serializeReadOnlyTypes = serializeReadOnlyTypes; } private void Initialize(Type type, XmlDictionaryString rootName, XmlDictionaryString rootNamespace, IEnumerable<Type> knownTypes, int maxItemsInObjectGraph, bool ignoreExtensionDataObject, bool preserveObjectReferences, DataContractResolver dataContractResolver, bool serializeReadOnlyTypes) { Initialize(type, knownTypes, maxItemsInObjectGraph, ignoreExtensionDataObject, preserveObjectReferences, dataContractResolver, serializeReadOnlyTypes); // validate root name and namespace are both non-null _rootName = rootName; _rootNamespace = rootNamespace; } public ReadOnlyCollection<Type> KnownTypes { get { if (_knownTypeCollection == null) { if (knownTypeList != null) { _knownTypeCollection = new ReadOnlyCollection<Type>(knownTypeList); } else { _knownTypeCollection = new ReadOnlyCollection<Type>(Array.Empty<Type>()); } } return _knownTypeCollection; } } internal override DataContractDictionary KnownDataContracts { get { if (this.knownDataContracts == null && this.knownTypeList != null) { // This assignment may be performed concurrently and thus is a race condition. // It's safe, however, because at worse a new (and identical) dictionary of // data contracts will be created and re-assigned to this field. Introduction // of a lock here could lead to deadlocks. this.knownDataContracts = XmlObjectSerializerContext.GetDataContractsForKnownTypes(this.knownTypeList); } return this.knownDataContracts; } } public int MaxItemsInObjectGraph { get { return _maxItemsInObjectGraph; } } internal ISerializationSurrogateProvider SerializationSurrogateProvider { get { return _serializationSurrogateProvider; } set { _serializationSurrogateProvider = value; } } public bool PreserveObjectReferences { get { return _preserveObjectReferences; } } public bool IgnoreExtensionDataObject { get { return _ignoreExtensionDataObject; } } public DataContractResolver DataContractResolver { get { return _dataContractResolver; } } public bool SerializeReadOnlyTypes { get { return _serializeReadOnlyTypes; } } private DataContract RootContract { get { if (_rootContract == null) { _rootContract = DataContract.GetDataContract((_serializationSurrogateProvider == null) ? 
_rootType : GetSurrogatedType(_serializationSurrogateProvider, _rootType)); _needsContractNsAtRoot = CheckIfNeedsContractNsAtRoot(_rootName, _rootNamespace, _rootContract); } return _rootContract; } } internal override void InternalWriteObject(XmlWriterDelegator writer, object graph) { InternalWriteObject(writer, graph, null); } internal override void InternalWriteObject(XmlWriterDelegator writer, object graph, DataContractResolver dataContractResolver) { InternalWriteStartObject(writer, graph); InternalWriteObjectContent(writer, graph, dataContractResolver); InternalWriteEndObject(writer); } public override void WriteObject(XmlWriter writer, object graph) { WriteObjectHandleExceptions(new XmlWriterDelegator(writer), graph); } public override void WriteStartObject(XmlWriter writer, object graph) { WriteStartObjectHandleExceptions(new XmlWriterDelegator(writer), graph); } public override void WriteObjectContent(XmlWriter writer, object graph) { WriteObjectContentHandleExceptions(new XmlWriterDelegator(writer), graph); } public override void WriteEndObject(XmlWriter writer) { WriteEndObjectHandleExceptions(new XmlWriterDelegator(writer)); } public override void WriteStartObject(XmlDictionaryWriter writer, object graph) { WriteStartObjectHandleExceptions(new XmlWriterDelegator(writer), graph); } public override void WriteObjectContent(XmlDictionaryWriter writer, object graph) { WriteObjectContentHandleExceptions(new XmlWriterDelegator(writer), graph); } public override void WriteEndObject(XmlDictionaryWriter writer) { WriteEndObjectHandleExceptions(new XmlWriterDelegator(writer)); } public override object ReadObject(XmlReader reader) { return ReadObjectHandleExceptions(new XmlReaderDelegator(reader), true /*verifyObjectName*/); } public override object ReadObject(XmlReader reader, bool verifyObjectName) { return ReadObjectHandleExceptions(new XmlReaderDelegator(reader), verifyObjectName); } public override bool IsStartObject(XmlReader reader) { return IsStartObjectHandleExceptions(new XmlReaderDelegator(reader)); } public override object ReadObject(XmlDictionaryReader reader, bool verifyObjectName) { return ReadObjectHandleExceptions(new XmlReaderDelegator(reader), verifyObjectName); } public override bool IsStartObject(XmlDictionaryReader reader) { return IsStartObjectHandleExceptions(new XmlReaderDelegator(reader)); } internal override void InternalWriteStartObject(XmlWriterDelegator writer, object graph) { WriteRootElement(writer, RootContract, _rootName, _rootNamespace, _needsContractNsAtRoot); } internal override void InternalWriteObjectContent(XmlWriterDelegator writer, object graph) { InternalWriteObjectContent(writer, graph, null); } internal void InternalWriteObjectContent(XmlWriterDelegator writer, object graph, DataContractResolver dataContractResolver) { if (MaxItemsInObjectGraph == 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ExceededMaxItemsQuota, MaxItemsInObjectGraph))); DataContract contract = RootContract; Type declaredType = contract.UnderlyingType; Type graphType = (graph == null) ? 
declaredType : graph.GetType(); if (_serializationSurrogateProvider != null) { graph = SurrogateToDataContractType(_serializationSurrogateProvider, graph, declaredType, ref graphType); } if (dataContractResolver == null) dataContractResolver = this.DataContractResolver; if (graph == null) { if (IsRootXmlAny(_rootName, contract)) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.IsAnyCannotBeNull, declaredType))); WriteNull(writer); } else { if (declaredType == graphType) { if (contract.CanContainReferences) { XmlObjectSerializerWriteContext context = XmlObjectSerializerWriteContext.CreateContext(this, contract , dataContractResolver ); context.HandleGraphAtTopLevel(writer, graph, contract); context.SerializeWithoutXsiType(contract, writer, graph, declaredType.TypeHandle); } else { contract.WriteXmlValue(writer, graph, null); } } else { XmlObjectSerializerWriteContext context = null; if (IsRootXmlAny(_rootName, contract)) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.IsAnyCannotBeSerializedAsDerivedType, graphType, contract.UnderlyingType))); contract = GetDataContract(contract, declaredType, graphType); context = XmlObjectSerializerWriteContext.CreateContext(this, RootContract , dataContractResolver ); if (contract.CanContainReferences) { context.HandleGraphAtTopLevel(writer, graph, contract); } context.OnHandleIsReference(writer, contract, graph); context.SerializeWithXsiTypeAtTopLevel(contract, writer, graph, declaredType.TypeHandle, graphType); } } } internal static DataContract GetDataContract(DataContract declaredTypeContract, Type declaredType, Type objectType) { if (declaredType.GetTypeInfo().IsInterface && CollectionDataContract.IsCollectionInterface(declaredType)) { return declaredTypeContract; } else if (declaredType.IsArray)//Array covariance is not supported in XSD { return declaredTypeContract; } else { return DataContract.GetDataContract(objectType.TypeHandle, objectType, SerializationMode.SharedContract); } } internal override void InternalWriteEndObject(XmlWriterDelegator writer) { if (!IsRootXmlAny(_rootName, RootContract)) { writer.WriteEndElement(); } } internal override object InternalReadObject(XmlReaderDelegator xmlReader, bool verifyObjectName) { return InternalReadObject(xmlReader, verifyObjectName, null); } internal override object InternalReadObject(XmlReaderDelegator xmlReader, bool verifyObjectName, DataContractResolver dataContractResolver) { if (MaxItemsInObjectGraph == 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ExceededMaxItemsQuota, MaxItemsInObjectGraph))); if (dataContractResolver == null) dataContractResolver = this.DataContractResolver; #if NET_NATIVE // Give the root contract a chance to initialize or pre-verify the read RootContract.PrepareToRead(xmlReader); #endif if (verifyObjectName) { if (!InternalIsStartObject(xmlReader)) { XmlDictionaryString expectedName; XmlDictionaryString expectedNs; if (_rootName == null) { expectedName = RootContract.TopLevelElementName; expectedNs = RootContract.TopLevelElementNamespace; } else { expectedName = _rootName; expectedNs = _rootNamespace; } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationExceptionWithReaderDetails(SR.Format(SR.ExpectingElement, expectedNs, expectedName), xmlReader)); } } else if (!IsStartElement(xmlReader)) { throw 
DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationExceptionWithReaderDetails(SR.Format(SR.ExpectingElementAtDeserialize, XmlNodeType.Element), xmlReader)); } DataContract contract = RootContract; if (contract.IsPrimitive && object.ReferenceEquals(contract.UnderlyingType, _rootType) /*handle Nullable<T> differently*/) { return contract.ReadXmlValue(xmlReader, null); } if (IsRootXmlAny(_rootName, contract)) { return XmlObjectSerializerReadContext.ReadRootIXmlSerializable(xmlReader, contract as XmlDataContract, false /*isMemberType*/); } XmlObjectSerializerReadContext context = XmlObjectSerializerReadContext.CreateContext(this, contract, dataContractResolver); return context.InternalDeserialize(xmlReader, _rootType, contract, null, null); } internal override bool InternalIsStartObject(XmlReaderDelegator reader) { return IsRootElement(reader, RootContract, _rootName, _rootNamespace); } internal override Type GetSerializeType(object graph) { return (graph == null) ? _rootType : graph.GetType(); } internal override Type GetDeserializeType() { return _rootType; } internal static object SurrogateToDataContractType(ISerializationSurrogateProvider serializationSurrogateProvider, object oldObj, Type surrogatedDeclaredType, ref Type objType) { object obj = DataContractSurrogateCaller.GetObjectToSerialize(serializationSurrogateProvider, oldObj, objType, surrogatedDeclaredType); if (obj != oldObj) { objType = obj != null ? obj.GetType() : Globals.TypeOfObject; } return obj; } internal static Type GetSurrogatedType(ISerializationSurrogateProvider serializationSurrogateProvider, Type type) { return DataContractSurrogateCaller.GetDataContractType(serializationSurrogateProvider, DataContract.UnwrapNullableType(type)); } } }
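// Illustrative usage sketch (separate from the serializer implementation above; the Person type,
// member names, and values are hypothetical). It shows the public surface exercised by that code:
// WriteObject emits start element + content + end element, and ReadObject reverses the process.
using System;
using System.IO;
using System.Runtime.Serialization;

[DataContract]
internal class Person
{
    [DataMember] public string Name { get; set; }
    [DataMember] public int Age { get; set; }
}

internal static class DataContractSerializerUsageSketch
{
    private static void Main()
    {
        var serializer = new DataContractSerializer(typeof(Person));
        var original = new Person { Name = "Ada", Age = 36 };

        using (var stream = new MemoryStream())
        {
            serializer.WriteObject(stream, original);   // start object + content + end object
            stream.Position = 0;

            var copy = (Person)serializer.ReadObject(stream);
            Console.WriteLine(copy.Name + ", " + copy.Age); // Ada, 36
        }
    }
}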
#region Copyright and license information // Copyright 2001-2009 Stephen Colebourne // Copyright 2009-2011 Jon Skeet // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #endregion using System; namespace NodaTime.TimeZones { /// <summary> /// Provides a <see cref="DateTimeZone"/> wrapper class that implements a simple cache to /// speed up the lookup of transitions. /// </summary> /// <remarks> /// <para> /// The cache supports multiple caching strategies which are implemented in nested subclasses of /// this one. Until we have a better sense of what the usage behavior is, we cannot tune the /// cache. It is possible that we may support multiple strategies selectable at runtime so the /// user can tune the performance based on their knowledge of how they are using the system. /// </para> /// <para> /// In fact, only one cache type is currently implemented: an MRU cache existed before /// the GetZoneIntervals call was created in DateTimeZone, but as it wasn't being used, it /// was more effort than it was worth to update. The mechanism is still available for future /// expansion though. /// </para> /// </remarks> [Serializable] internal abstract class CachedDateTimeZone : DateTimeZone { #region CacheType enum internal enum CacheType { Hashtable } #endregion private readonly DateTimeZone timeZone; /// <summary> /// Initializes a new instance of the <see cref="CachedDateTimeZone"/> class. /// </summary> /// <param name="timeZone">The time zone to cache.</param> private CachedDateTimeZone(DateTimeZone timeZone) : base(timeZone.Id, false, timeZone.MinOffset, timeZone.MaxOffset) { this.timeZone = timeZone; } /// <summary> /// Gets the cached time zone. /// </summary> /// <value>The time zone.</value> internal DateTimeZone TimeZone { get { return timeZone; } } /// <summary> /// Returns a cached time zone for the given time zone. /// </summary> /// <remarks> /// If the time zone is already cached or it is fixed then it is returned unchanged. /// </remarks> /// <param name="timeZone">The time zone to cache.</param> /// <returns>The cached time zone.</returns> internal static DateTimeZone ForZone(DateTimeZone timeZone) { return ForZone(timeZone, CacheType.Hashtable); } /// <summary> /// Returns a cached time zone for the given time zone. /// </summary> /// <remarks> /// If the time zone is already cached or it is fixed then it is returned unchanged. 
/// </remarks> /// <param name="timeZone">The time zone to cache.</param> /// <param name="type">The type of cache to store the zone in.</param> /// <returns>The cached time zone.</returns> private static DateTimeZone ForZone(DateTimeZone timeZone, CacheType type) { if (timeZone == null) { throw new ArgumentNullException("timeZone"); } if (timeZone is CachedDateTimeZone || timeZone.IsFixed) { return timeZone; } switch (type) { case CacheType.Hashtable: return new HashArrayCache(timeZone); default: throw new ArgumentException("The type parameter is invalid", "type"); } } #region Overrides of DateTimeZone /// <summary> /// Writes the time zone to the specified writer. /// </summary> /// <param name="writer">The writer to write to.</param> internal override void Write(DateTimeZoneWriter writer) { if (writer == null) { throw new ArgumentNullException("writer"); } writer.WriteTimeZone(timeZone); } /// <summary> /// Reads the zone from the specified reader. /// </summary> /// <param name="reader">The reader.</param> /// <param name="id">The id.</param> /// <returns></returns> internal static DateTimeZone Read(DateTimeZoneReader reader, string id) { if (reader == null) { throw new ArgumentNullException("reader"); } var timeZone = reader.ReadTimeZone(id); return ForZone(timeZone); } #endregion #region Nested type: HashArrayCache /// <summary> /// This provides a simple cache based on two hash tables (one for local instants, another /// for instants). /// </summary> /// <remarks> /// Each hash table entry is either entry or contains a node with enough /// information for a particular "period" of about 40 days - so multiple calls for time /// zone information within the same few years are likely to hit the cache. Note that /// a single "period" may include a daylight saving change (or conceivably more than one); /// a node therefore has to contain enough intervals to completely represent that period. /// /// If another call is made which maps to the same cache entry number but is for a different /// period, the existing hash entry is simply overridden. /// </remarks> private class HashArrayCache : CachedDateTimeZone { // Currently we have no need or way to create hash cache zones with // different cache sizes. But the cache size should always be a power of 2 to get the // "period to cache entry" conversion simply as a bitmask operation. private const int CacheSize = 512; // Mask to AND the period number with in order to get the cache entry index. The // result will always be in the range [0, CacheSize). private const int CachePeriodMask = CacheSize - 1; /// <summary> /// Defines the number of bits to shift an instant value to get the period. This /// converts a number of ticks to a number of 40.6 days periods. /// </summary> private const int PeriodShift = 45; private readonly HashCacheNode[] instantCache; /// <summary> /// Initializes a new instance of the <see cref="CachedDateTimeZone"/> class. /// </summary> /// <param name="timeZone">The time zone to cache.</param> internal HashArrayCache(DateTimeZone timeZone) : base(timeZone) { if (timeZone == null) { throw new ArgumentNullException("timeZone"); } instantCache = new HashCacheNode[CacheSize]; } /// <summary> /// Gets the zone offset period for the given instant. Null is returned if no period is /// defined by the time zone for the given instant. 
/// </summary> /// <param name="instant">The Instant to test.</param> /// <returns>The defined ZoneOffsetPeriod or <c>null</c>.</returns> public override ZoneInterval GetZoneInterval(Instant instant) { int period = (int)(instant.Ticks >> PeriodShift); int index = period & CachePeriodMask; var node = instantCache[index]; if (node == null || node.Period != period) { node = HashCacheNode.CreateNode(period, TimeZone); instantCache[index] = node; } // Note: moving this code into an instance method in HashCacheNode makes a surprisingly // large performance difference. while (node.Interval.Start > instant) { node = node.Previous; } return node.Interval; } #region Nested type: HashCacheNode // Note: I (Jon) have tried optimizing this as a struct containing two ZoneIntervals // and a list of zone intervals (normally null) for the rare case where there are more // than two zone intervals in a period. It halved the performance... private class HashCacheNode { private readonly ZoneInterval interval; internal ZoneInterval Interval { get { return interval; } } private readonly int period; internal int Period { get { return period; } } private readonly HashCacheNode previous; internal HashCacheNode Previous { get { return previous; } } /// <summary> /// Creates a hash table node with all the information for this period. /// We start off by finding the interval for the start of the period, and /// then repeatedly check whether that interval ends after the end of the /// period - at which point we're done. If not, find the next interval, create /// a new node referring to that interval and the previous interval, and keep going. /// </summary> internal static HashCacheNode CreateNode(int period, DateTimeZone zone) { var periodStart = new Instant((long)period << PeriodShift); var periodEnd = new Instant((long)(period + 1) << PeriodShift); var interval = zone.GetZoneInterval(periodStart); var node = new HashCacheNode(interval, period, null); // Keep going while the current interval ends before the period. while (interval.End < periodEnd) { interval = zone.GetZoneInterval(interval.End); node = new HashCacheNode(interval, period, node); } return node; } /// <summary> /// Initializes a new instance of the <see cref="HashCacheNode"/> class. /// </summary> /// <param name="interval">The zone interval.</param> /// <param name="period"></param> /// <param name="previous">The previous <see cref="HashCacheNode"/> node.</param> private HashCacheNode(ZoneInterval interval, int period, HashCacheNode previous) { this.period = period; this.interval = interval; this.previous = previous; } internal ZoneInterval FindInterval(Instant instant) { HashCacheNode node = this; while (node.Interval.Start > instant) { node = node.Previous; } return node.Interval; } } #endregion } #endregion } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Runtime.InteropServices; using Internal.TypeSystem; using Internal.TypeSystem.Interop; using Debug = System.Diagnostics.Debug; namespace Internal.IL.Stubs { public enum StructMarshallingThunkType : byte { ManagedToNative = 1, NativeToManage = 2, Cleanup = 4 } public struct InlineArrayCandidate { public readonly MetadataType ElementType; public readonly uint Length; public InlineArrayCandidate(MetadataType type, uint length) { ElementType = type; Length = length; } } public partial class StructMarshallingThunk : ILStubMethod { internal readonly MetadataType ManagedType; internal readonly NativeStructType NativeType; internal readonly StructMarshallingThunkType ThunkType; private InteropStateManager _interopStateManager; private TypeDesc _owningType; private Marshaller[] _marshallers; public StructMarshallingThunk(TypeDesc owningType, MetadataType managedType, StructMarshallingThunkType thunkType, InteropStateManager interopStateManager) { _owningType = owningType; ManagedType = managedType; _interopStateManager = interopStateManager; NativeType = _interopStateManager.GetStructMarshallingNativeType(managedType); ThunkType = thunkType; _marshallers = InitializeMarshallers(); } public override TypeSystemContext Context { get { return ManagedType.Context; } } public override TypeDesc OwningType { get { return _owningType; } } private MethodSignature _signature; public override MethodSignature Signature { get { if (_signature == null) { TypeDesc[] parameters; if (ThunkType == StructMarshallingThunkType.Cleanup) { parameters = new TypeDesc[] { NativeType.MakeByRefType() }; } else { parameters = new TypeDesc[] { ManagedType.MakeByRefType(), NativeType.MakeByRefType() }; } _signature = new MethodSignature(MethodSignatureFlags.Static, 0, Context.GetWellKnownType(WellKnownType.Void), parameters); } return _signature; } } private string NamePrefix { get { switch (ThunkType) { case StructMarshallingThunkType.ManagedToNative: return "ManagedToNative"; case StructMarshallingThunkType.NativeToManage: return "NativeToManaged"; case StructMarshallingThunkType.Cleanup: return "Cleanup"; default: System.Diagnostics.Debug.Assert(false, "Unexpected Struct marshalling thunk type"); return string.Empty; } } } public override string Name { get { return NamePrefix + "__" + ((MetadataType)ManagedType).Name; } } public IEnumerable<InlineArrayCandidate> GetInlineArrayCandidates() { int index = 0; MarshalAsDescriptor[] marshalAsDescriptors = ((MetadataType)ManagedType).GetFieldMarshalAsDescriptors(); foreach (FieldDesc field in ManagedType.GetFields()) { if (field.IsStatic) { continue; } Marshaller marshaller = _marshallers[index]; if (marshaller.MarshallerKind == MarshallerKind.ByValAnsiString || marshaller.MarshallerKind == MarshallerKind.ByValUnicodeString) { yield return MarshalHelpers.GetInlineArrayCandidate(marshaller.ManagedType.Context.GetWellKnownType(WellKnownType.Char), marshaller.ElementMarshallerKind, _interopStateManager, marshalAsDescriptors[index]); } else if (marshaller.MarshallerKind == MarshallerKind.ByValArray || marshaller.MarshallerKind == MarshallerKind.ByValAnsiCharArray) { var arrayType = marshaller.ManagedType as ArrayType; Debug.Assert(arrayType != null); yield return MarshalHelpers.GetInlineArrayCandidate(arrayType.ElementType, 
marshaller.ElementMarshallerKind, _interopStateManager, marshalAsDescriptors[index]); } index++; } } private Marshaller[] InitializeMarshallers() { Debug.Assert(_interopStateManager != null); MarshalAsDescriptor[] marshalAsDescriptors = ((MetadataType)ManagedType).GetFieldMarshalAsDescriptors(); Marshaller[] marshallers = new Marshaller[marshalAsDescriptors.Length]; PInvokeFlags flags = new PInvokeFlags(); if (ManagedType.PInvokeStringFormat == PInvokeStringFormat.UnicodeClass || ManagedType.PInvokeStringFormat == PInvokeStringFormat.AutoClass) { flags.CharSet = CharSet.Unicode; } else { flags.CharSet = CharSet.Ansi; } int index = 0; foreach (FieldDesc field in ManagedType.GetFields()) { if (field.IsStatic) { continue; } marshallers[index] = Marshaller.CreateMarshaller(field.FieldType, MarshallerType.Field, marshalAsDescriptors[index], (ThunkType == StructMarshallingThunkType.NativeToManage) ? MarshalDirection.Reverse : MarshalDirection.Forward, marshallers, _interopStateManager, index, flags, isIn: true, /* Struct fields are considered as IN within the helper*/ isOut: false, isReturn: false); index++; } return marshallers; } private MethodIL EmitMarshallingIL(PInvokeILCodeStreams pInvokeILCodeStreams) { ILEmitter emitter = pInvokeILCodeStreams.Emitter; IEnumerator<FieldDesc> nativeEnumerator = NativeType.GetFields().GetEnumerator(); int index = 0; foreach (var managedField in ManagedType.GetFields()) { if (managedField.IsStatic) { continue; } bool notEmpty = nativeEnumerator.MoveNext(); Debug.Assert(notEmpty == true); var nativeField = nativeEnumerator.Current; Debug.Assert(nativeField != null); bool isInlineArray = nativeField.FieldType is InlineArrayType; // // Field marshallers expects the value of the fields to be // loaded on the stack. We load the value on the stack // before calling the marshallers. // Only exception is ByValArray marshallers. Since they can // only be used for field marshalling, they load/store values // directly from arguments. 
// if (isInlineArray) { var byValMarshaller = _marshallers[index++] as ByValArrayMarshaller; Debug.Assert(byValMarshaller != null); byValMarshaller.EmitMarshallingIL(pInvokeILCodeStreams, managedField, nativeField); } else { if (ThunkType == StructMarshallingThunkType.ManagedToNative) { LoadFieldValueFromArg(0, managedField, pInvokeILCodeStreams); } else if (ThunkType == StructMarshallingThunkType.NativeToManage) { LoadFieldValueFromArg(1, nativeField, pInvokeILCodeStreams); } _marshallers[index++].EmitMarshallingIL(pInvokeILCodeStreams); if (ThunkType == StructMarshallingThunkType.ManagedToNative) { StoreFieldValueFromArg(1, nativeField, pInvokeILCodeStreams); } else if (ThunkType == StructMarshallingThunkType.NativeToManage) { StoreFieldValueFromArg(0, managedField, pInvokeILCodeStreams); } } } Debug.Assert(!nativeEnumerator.MoveNext()); pInvokeILCodeStreams.UnmarshallingCodestream.Emit(ILOpcode.ret); return emitter.Link(this); } private MethodIL EmitCleanupIL(PInvokeILCodeStreams pInvokeILCodeStreams) { ILEmitter emitter = pInvokeILCodeStreams.Emitter; ILCodeStream codeStream = pInvokeILCodeStreams.MarshallingCodeStream; IEnumerator<FieldDesc> nativeEnumerator = NativeType.GetFields().GetEnumerator(); int index = 0; foreach (var managedField in ManagedType.GetFields()) { if (managedField.IsStatic) { continue; } bool notEmpty = nativeEnumerator.MoveNext(); Debug.Assert(notEmpty == true); var nativeField = nativeEnumerator.Current; Debug.Assert(nativeField != null); if (_marshallers[index].CleanupRequired) { LoadFieldValueFromArg(0, nativeField, pInvokeILCodeStreams); _marshallers[index].EmitElementCleanup(codeStream, emitter); } index++; } pInvokeILCodeStreams.UnmarshallingCodestream.Emit(ILOpcode.ret); return emitter.Link(this); } public override MethodIL EmitIL() { try { PInvokeILCodeStreams pInvokeILCodeStreams = new PInvokeILCodeStreams(); if (ThunkType == StructMarshallingThunkType.Cleanup) { return EmitCleanupIL(pInvokeILCodeStreams); } else { return EmitMarshallingIL(pInvokeILCodeStreams); } } catch (NotSupportedException) { string message = "Struct '" + ((MetadataType)ManagedType).Name + "' requires non-trivial marshalling that is not yet supported by this compiler."; return MarshalHelpers.EmitExceptionBody(message, this); } catch (InvalidProgramException ex) { Debug.Assert(!String.IsNullOrEmpty(ex.Message)); return MarshalHelpers.EmitExceptionBody(ex.Message, this); } } /// <summary> /// Loads the value of field of a struct at argument index argIndex to stack /// </summary> private void LoadFieldValueFromArg(int argIndex, FieldDesc field, PInvokeILCodeStreams pInvokeILCodeStreams) { ILCodeStream stream = pInvokeILCodeStreams.MarshallingCodeStream; ILEmitter emitter = pInvokeILCodeStreams.Emitter; stream.EmitLdArg(argIndex); stream.Emit(ILOpcode.ldfld, emitter.NewToken(field)); } private void StoreFieldValueFromArg(int argIndex, FieldDesc field, PInvokeILCodeStreams pInvokeILCodeStreams) { ILCodeStream stream = pInvokeILCodeStreams.MarshallingCodeStream; ILEmitter emitter = pInvokeILCodeStreams.Emitter; Internal.IL.Stubs.ILLocalVariable var = emitter.NewLocal(field.FieldType); stream.EmitStLoc(var); stream.EmitLdArg(argIndex); stream.EmitLdLoc(var); stream.Emit(ILOpcode.stfld, emitter.NewToken(field)); } } }
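// Illustrative sketch (assumed type names; not part of the compiler code above): the kind of struct
// whose fields require the non-trivial marshalling the thunks handle - a fixed-size string
// (ByValTStr, cf. ByValAnsiString/ByValUnicodeString) and a fixed-size array (ByValArray) - shown
// here round-tripping through the runtime marshaller for comparison.
using System;
using System.Runtime.InteropServices;

[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
internal struct NativeRecord
{
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 16)]
    public string Name;                 // marshalled as a fixed 16-byte ANSI buffer

    [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
    public int[] Values;                // marshalled as 4 ints laid out inline in the struct
}

internal static class StructMarshallingSketch
{
    private static void Main()
    {
        var managed = new NativeRecord { Name = "demo", Values = new[] { 1, 2, 3, 4 } };

        int size = Marshal.SizeOf<NativeRecord>();
        IntPtr buffer = Marshal.AllocHGlobal(size);
        try
        {
            // Managed -> native: the direction handled by a ManagedToNative thunk.
            Marshal.StructureToPtr(managed, buffer, false /* fDeleteOld */);

            // Native -> managed: the direction handled by a NativeToManaged thunk.
            var roundTripped = Marshal.PtrToStructure<NativeRecord>(buffer);
            Console.WriteLine(roundTripped.Name + ": " + string.Join(",", roundTripped.Values));
        }
        finally
        {
            Marshal.FreeHGlobal(buffer);
        }
    }
}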
/* Project Orleans Cloud Service SDK ver. 1.0 Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using System; using System.Collections.Generic; using System.Globalization; using System.Reflection; using System.Threading.Tasks; using Orleans.Core; using Orleans.Streams; using Orleans.Runtime; namespace Orleans.Providers { internal class ClientProviderRuntime : IStreamProviderRuntime { private IStreamPubSub pubSub; private StreamDirectory streamDirectory; private readonly Dictionary<Type, Tuple<IGrainExtension, IAddressable>> caoTable; private readonly AsyncLock lockable; private ClientProviderRuntime(IGrainFactory grainFactory) { caoTable = new Dictionary<Type, Tuple<IGrainExtension, IAddressable>>(); lockable = new AsyncLock(); GrainFactory = grainFactory; } public IGrainFactory GrainFactory { get; private set; } public static ClientProviderRuntime Instance { get; private set; } public static void InitializeSingleton(IGrainFactory grainFactory) { if (Instance != null) { UninitializeSingleton(); } Instance = new ClientProviderRuntime(grainFactory); } public static void StreamingInitialize(IGrainFactory grainFactory, ImplicitStreamSubscriberTable implicitStreamSubscriberTable) { if (null == implicitStreamSubscriberTable) { throw new ArgumentNullException("implicitStreamSubscriberTable"); } Instance.pubSub = new StreamPubSubImpl(new GrainBasedPubSubRuntime(grainFactory), implicitStreamSubscriberTable); Instance.streamDirectory = new StreamDirectory(); } public StreamDirectory GetStreamDirectory() { return streamDirectory; } public static void UninitializeSingleton() { Instance = null; } public Logger GetLogger(string loggerName) { return TraceLogger.GetLogger(loggerName, TraceLogger.LoggerType.Provider); } public Guid ServiceId { get { // Note: In theory nobody should be requesting ServcieId from client so might want to throw exception in this case, // but several PersistenceProvider_Azure_* test cases in PersistenceProviderTests.cs // are testing Azure provider in standalone mode which currently looks like access from "client", // so we return default value here instead of throw exception. 
// return Guid.Empty; } } public string SiloIdentity { get { throw new InvalidOperationException("Cannot access SiloIdentity from client."); } } public string ExecutingEntityIdentity() { return RuntimeClient.Current.Identity; } public SiloAddress ExecutingSiloAddress { get { throw new NotImplementedException(); } } public void RegisterSystemTarget(ISystemTarget target) { throw new NotImplementedException(); } public void UnRegisterSystemTarget(ISystemTarget target) { throw new NotImplementedException(); } public IDisposable RegisterTimer(Func<object, Task> asyncCallback, object state, TimeSpan dueTime, TimeSpan period) { return new AsyncTaskSafeTimer(asyncCallback, state, dueTime, period); } public async Task<Tuple<TExtension, TExtensionInterface>> BindExtension<TExtension, TExtensionInterface>(Func<TExtension> newExtensionFunc) where TExtension : IGrainExtension where TExtensionInterface : IGrainExtension { IAddressable addressable; TExtension extension; using (await lockable.LockAsync()) { Tuple<IGrainExtension, IAddressable> entry; if (caoTable.TryGetValue(typeof(TExtensionInterface), out entry)) { extension = (TExtension)entry.Item1; addressable = entry.Item2; } else { extension = newExtensionFunc(); var obj = ((Orleans.GrainFactory)this.GrainFactory).CreateObjectReference<TExtensionInterface>(extension); addressable = (IAddressable) await (Task<TExtensionInterface>) obj; if (null == addressable) { throw new NullReferenceException("addressable"); } entry = Tuple.Create((IGrainExtension)extension, addressable); caoTable.Add(typeof(TExtensionInterface), entry); } } var typedAddressable = addressable.Cast<TExtensionInterface>(); // we have to return the extension as well as the IAddressable because the caller needs to root the extension // to prevent it from being collected (the IAddressable uses a weak reference). return Tuple.Create(extension, typedAddressable); } public IStreamPubSub PubSub(StreamPubSubType pubSubType) { return pubSubType == StreamPubSubType.GrainBased ? pubSub : null; } public IConsistentRingProviderForGrains GetConsistentRingProvider(int mySubRangeIndex, int numSubRanges) { throw new NotImplementedException("GetConsistentRingProvider"); } public bool InSilo { get { return false; } } public Task InvokeWithinSchedulingContextAsync(Func<Task> asyncFunc, object context) { if (context != null) throw new ArgumentException("The grain client only supports a null scheduling context."); return Task.Run(asyncFunc); } public object GetCurrentSchedulingContext() { return null; } public Task StartPullingAgents( string streamProviderName, StreamQueueBalancerType balancerType, IQueueAdapterFactory adapterFactory, IQueueAdapter queueAdapter, TimeSpan getQueueMsgsTimerPeriod, TimeSpan initQueueTimeout, TimeSpan maxEventDeliveryTime) { return TaskDone.Done; } } }
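// Minimal standalone sketch (hypothetical names, standard BCL only; not the Orleans API) of the
// caching pattern used by BindExtension above: a dictionary guarded by an async lock, where the
// factory runs only for the first caller of a given key and later callers receive the cached value.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

internal sealed class AsyncBindingCache<TKey, TValue>
{
    private readonly Dictionary<TKey, TValue> _entries = new Dictionary<TKey, TValue>();
    private readonly SemaphoreSlim _gate = new SemaphoreSlim(1, 1);

    public async Task<TValue> GetOrAddAsync(TKey key, Func<Task<TValue>> factory)
    {
        await _gate.WaitAsync();
        try
        {
            TValue value;
            if (!_entries.TryGetValue(key, out value))
            {
                // Only the first caller for this key pays the cost of creating the binding; the
                // result is cached for everyone else, mirroring the role of caoTable above.
                value = await factory();
                _entries.Add(key, value);
            }
            return value;
        }
        finally
        {
            _gate.Release();
        }
    }
}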
// <copyright file="IncompleteLU.cs" company="Math.NET"> // Math.NET Numerics, part of the Math.NET Project // http://numerics.mathdotnet.com // http://github.com/mathnet/mathnet-numerics // http://mathnetnumerics.codeplex.com // // Copyright (c) 2009-2010 Math.NET // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. // </copyright> using System; using MathNet.Numerics.LinearAlgebra.Solvers; using MathNet.Numerics.Properties; namespace MathNet.Numerics.LinearAlgebra.Complex.Solvers { #if NOSYSNUMERICS using Complex = Numerics.Complex; #else using Complex = System.Numerics.Complex; #endif /// <summary> /// An incomplete, level 0, LU factorization preconditioner. /// </summary> /// <remarks> /// The ILU(0) algorithm was taken from: <br/> /// Iterative methods for sparse linear systems <br/> /// Yousef Saad <br/> /// Algorithm is described in Chapter 10, section 10.3.2, page 275 <br/> /// </remarks> public sealed class ILU0Preconditioner : IPreconditioner<Complex> { /// <summary> /// The matrix holding the lower (L) and upper (U) matrices. The /// decomposition matrices are combined to reduce storage. /// </summary> SparseMatrix _decompositionLU; /// <summary> /// Returns the upper triagonal matrix that was created during the LU decomposition. /// </summary> /// <returns>A new matrix containing the upper triagonal elements.</returns> internal Matrix<Complex> UpperTriangle() { var result = new SparseMatrix(_decompositionLU.RowCount); for (var i = 0; i < _decompositionLU.RowCount; i++) { for (var j = i; j < _decompositionLU.ColumnCount; j++) { result[i, j] = _decompositionLU[i, j]; } } return result; } /// <summary> /// Returns the lower triagonal matrix that was created during the LU decomposition. /// </summary> /// <returns>A new matrix containing the lower triagonal elements.</returns> internal Matrix<Complex> LowerTriangle() { var result = new SparseMatrix(_decompositionLU.RowCount); for (var i = 0; i < _decompositionLU.RowCount; i++) { for (var j = 0; j <= i; j++) { if (i == j) { result[i, j] = 1.0; } else { result[i, j] = _decompositionLU[i, j]; } } } return result; } /// <summary> /// Initializes the preconditioner and loads the internal data structures. /// </summary> /// <param name="matrix">The matrix upon which the preconditioner is based. 
</param> /// <exception cref="ArgumentNullException">If <paramref name="matrix"/> is <see langword="null" />.</exception> /// <exception cref="ArgumentException">If <paramref name="matrix"/> is not a square matrix.</exception> public void Initialize(Matrix<Complex> matrix) { if (matrix == null) { throw new ArgumentNullException("matrix"); } if (matrix.RowCount != matrix.ColumnCount) { throw new ArgumentException(Resource.ArgumentMatrixSquare, "matrix"); } _decompositionLU = SparseMatrix.OfMatrix(matrix); // M == A // for i = 2, ... , n do // for k = 1, .... , i - 1 do // if (i,k) == NZ(Z) then // compute z(i,k) = z(i,k) / z(k,k); // for j = k + 1, ...., n do // if (i,j) == NZ(Z) then // compute z(i,j) = z(i,j) - z(i,k) * z(k,j) // end // end // end // end // end for (var i = 0; i < _decompositionLU.RowCount; i++) { for (var k = 0; k < i; k++) { if (_decompositionLU[i, k] != 0.0) { var t = _decompositionLU[i, k]/_decompositionLU[k, k]; _decompositionLU[i, k] = t; if (_decompositionLU[k, i] != 0.0) { _decompositionLU[i, i] = _decompositionLU[i, i] - (t*_decompositionLU[k, i]); } for (var j = k + 1; j < _decompositionLU.RowCount; j++) { if (j == i) { continue; } if (_decompositionLU[i, j] != 0.0) { _decompositionLU[i, j] = _decompositionLU[i, j] - (t*_decompositionLU[k, j]); } } } } } } /// <summary> /// Approximates the solution to the matrix equation <b>Ax = b</b>. /// </summary> /// <param name="rhs">The right hand side vector.</param> /// <param name="lhs">The left hand side vector. Also known as the result vector.</param> public void Approximate(Vector<Complex> rhs, Vector<Complex> lhs) { if (_decompositionLU == null) { throw new ArgumentException(Resource.ArgumentMatrixDoesNotExist); } if ((lhs.Count != rhs.Count) || (lhs.Count != _decompositionLU.RowCount)) { throw new ArgumentException(Resource.ArgumentVectorsSameLength); } // Solve: // Lz = y // Which gives // for (int i = 1; i < matrix.RowLength; i++) // { // z_i = l_ii^-1 * (y_i - SUM_(j<i) l_ij * z_j) // } // NOTE: l_ii should be 1 because u_ii has to be the value var rowValues = new DenseVector(_decompositionLU.RowCount); for (var i = 0; i < _decompositionLU.RowCount; i++) { // Clear the rowValues rowValues.Clear(); _decompositionLU.Row(i, rowValues); var sum = Complex.Zero; for (var j = 0; j < i; j++) { sum += rowValues[j]*lhs[j]; } lhs[i] = rhs[i] - sum; } // Solve: // Ux = z // Which gives // for (int i = matrix.RowLength - 1; i > -1; i--) // { // x_i = u_ii^-1 * (z_i - SUM_(j > i) u_ij * x_j) // } for (var i = _decompositionLU.RowCount - 1; i > -1; i--) { _decompositionLU.Row(i, rowValues); var sum = Complex.Zero; for (var j = _decompositionLU.RowCount - 1; j > i; j--) { sum += rowValues[j]*lhs[j]; } lhs[i] = 1/rowValues[i]*(lhs[i] - sum); } } } }
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using System.Collections.Generic; using System.Linq; using JetBrains.Annotations; using Newtonsoft.Json; using osu.Framework.Bindables; using osu.Framework.Lists; using osu.Framework.Utils; using osu.Game.Screens.Edit; using osu.Game.Utils; namespace osu.Game.Beatmaps.ControlPoints { [Serializable] public class ControlPointInfo : IDeepCloneable<ControlPointInfo> { /// <summary> /// All control points grouped by time. /// </summary> [JsonProperty] public IBindableList<ControlPointGroup> Groups => groups; private readonly BindableList<ControlPointGroup> groups = new BindableList<ControlPointGroup>(); /// <summary> /// All timing points. /// </summary> [JsonProperty] public IReadOnlyList<TimingControlPoint> TimingPoints => timingPoints; private readonly SortedList<TimingControlPoint> timingPoints = new SortedList<TimingControlPoint>(Comparer<TimingControlPoint>.Default); /// <summary> /// All effect points. /// </summary> [JsonProperty] public IReadOnlyList<EffectControlPoint> EffectPoints => effectPoints; private readonly SortedList<EffectControlPoint> effectPoints = new SortedList<EffectControlPoint>(Comparer<EffectControlPoint>.Default); /// <summary> /// All control points, of all types. /// </summary> [JsonIgnore] public IEnumerable<ControlPoint> AllControlPoints => Groups.SelectMany(g => g.ControlPoints).ToArray(); /// <summary> /// Finds the effect control point that is active at <paramref name="time"/>. /// </summary> /// <param name="time">The time to find the effect control point at.</param> /// <returns>The effect control point.</returns> [NotNull] public EffectControlPoint EffectPointAt(double time) => BinarySearchWithFallback(EffectPoints, time, EffectControlPoint.DEFAULT); /// <summary> /// Finds the timing control point that is active at <paramref name="time"/>. /// </summary> /// <param name="time">The time to find the timing control point at.</param> /// <returns>The timing control point.</returns> [NotNull] public TimingControlPoint TimingPointAt(double time) => BinarySearchWithFallback(TimingPoints, time, TimingPoints.Count > 0 ? TimingPoints[0] : TimingControlPoint.DEFAULT); /// <summary> /// Finds the maximum BPM represented by any timing control point. /// </summary> [JsonIgnore] public double BPMMaximum => 60000 / (TimingPoints.OrderBy(c => c.BeatLength).FirstOrDefault() ?? TimingControlPoint.DEFAULT).BeatLength; /// <summary> /// Finds the minimum BPM represented by any timing control point. /// </summary> [JsonIgnore] public double BPMMinimum => 60000 / (TimingPoints.OrderByDescending(c => c.BeatLength).FirstOrDefault() ?? TimingControlPoint.DEFAULT).BeatLength; /// <summary> /// Remove all <see cref="ControlPointGroup"/>s and return to a pristine state. /// </summary> public virtual void Clear() { groups.Clear(); timingPoints.Clear(); effectPoints.Clear(); } /// <summary> /// Add a new <see cref="ControlPoint"/>. Note that the provided control point may not be added if the correct state is already present at the provided time. 
/// </summary> /// <param name="time">The time at which the control point should be added.</param> /// <param name="controlPoint">The control point to add.</param> /// <returns>Whether the control point was added.</returns> public bool Add(double time, ControlPoint controlPoint) { if (CheckAlreadyExisting(time, controlPoint)) return false; GroupAt(time, true).Add(controlPoint); return true; } public ControlPointGroup GroupAt(double time, bool addIfNotExisting = false) { var newGroup = new ControlPointGroup(time); int i = groups.BinarySearch(newGroup); if (i >= 0) return groups[i]; if (addIfNotExisting) { newGroup.ItemAdded += GroupItemAdded; newGroup.ItemRemoved += GroupItemRemoved; groups.Insert(~i, newGroup); return newGroup; } return null; } public void RemoveGroup(ControlPointGroup group) { foreach (var item in group.ControlPoints.ToArray()) group.Remove(item); group.ItemAdded -= GroupItemAdded; group.ItemRemoved -= GroupItemRemoved; groups.Remove(group); } /// <summary> /// Returns the time on the given beat divisor closest to the given time. /// </summary> /// <param name="time">The time to find the closest snapped time to.</param> /// <param name="beatDivisor">The beat divisor to snap to.</param> /// <param name="referenceTime">An optional reference point to use for timing point lookup.</param> public double GetClosestSnappedTime(double time, int beatDivisor, double? referenceTime = null) { var timingPoint = TimingPointAt(referenceTime ?? time); return getClosestSnappedTime(timingPoint, time, beatDivisor); } /// <summary> /// Returns the time on *ANY* valid beat divisor, favouring the divisor closest to the given time. /// </summary> /// <param name="time">The time to find the closest snapped time to.</param> public double GetClosestSnappedTime(double time) => GetClosestSnappedTime(time, GetClosestBeatDivisor(time)); /// <summary> /// Returns the beat snap divisor closest to the given time. If two are equally close, the smallest divisor is returned. /// </summary> /// <param name="time">The time to find the closest beat snap divisor to.</param> /// <param name="referenceTime">An optional reference point to use for timing point lookup.</param> public int GetClosestBeatDivisor(double time, double? referenceTime = null) { TimingControlPoint timingPoint = TimingPointAt(referenceTime ?? time); int closestDivisor = 0; double closestTime = double.MaxValue; foreach (int divisor in BindableBeatDivisor.VALID_DIVISORS) { double distanceFromSnap = Math.Abs(time - getClosestSnappedTime(timingPoint, time, divisor)); if (Precision.DefinitelyBigger(closestTime, distanceFromSnap)) { closestDivisor = divisor; closestTime = distanceFromSnap; } } return closestDivisor; } private static double getClosestSnappedTime(TimingControlPoint timingPoint, double time, int beatDivisor) { var beatLength = timingPoint.BeatLength / beatDivisor; var beatLengths = (int)Math.Round((time - timingPoint.Time) / beatLength, MidpointRounding.AwayFromZero); return timingPoint.Time + beatLengths * beatLength; } /// <summary> /// Binary searches one of the control point lists to find the active control point at <paramref name="time"/>. /// Includes logic for returning a specific point when no matching point is found. 
/// </summary> /// <param name="list">The list to search.</param> /// <param name="time">The time to find the control point at.</param> /// <param name="fallback">The control point to use when <paramref name="time"/> is before any control points.</param> /// <returns>The active control point at <paramref name="time"/>, or a fallback <see cref="ControlPoint"/> if none found.</returns> protected T BinarySearchWithFallback<T>(IReadOnlyList<T> list, double time, T fallback) where T : ControlPoint { return BinarySearch(list, time) ?? fallback; } /// <summary> /// Binary searches one of the control point lists to find the active control point at <paramref name="time"/>. /// </summary> /// <param name="list">The list to search.</param> /// <param name="time">The time to find the control point at.</param> /// <returns>The active control point at <paramref name="time"/>.</returns> protected virtual T BinarySearch<T>(IReadOnlyList<T> list, double time) where T : ControlPoint { if (list == null) throw new ArgumentNullException(nameof(list)); if (list.Count == 0) return null; if (time < list[0].Time) return null; if (time >= list[^1].Time) return list[^1]; int l = 0; int r = list.Count - 2; while (l <= r) { int pivot = l + ((r - l) >> 1); if (list[pivot].Time < time) l = pivot + 1; else if (list[pivot].Time > time) r = pivot - 1; else return list[pivot]; } // l will be the first control point with Time > time, but we want the one before it return list[l - 1]; } /// <summary> /// Check whether <paramref name="newPoint"/> should be added. /// </summary> /// <param name="time">The time to find the timing control point at.</param> /// <param name="newPoint">A point to be added.</param> /// <returns>Whether the new point should be added.</returns> protected virtual bool CheckAlreadyExisting(double time, ControlPoint newPoint) { ControlPoint existing = null; switch (newPoint) { case TimingControlPoint _: // Timing points are a special case and need to be added regardless of fallback availability. existing = BinarySearch(TimingPoints, time); break; case EffectControlPoint _: existing = EffectPointAt(time); break; } return newPoint?.IsRedundant(existing) == true; } protected virtual void GroupItemAdded(ControlPoint controlPoint) { switch (controlPoint) { case TimingControlPoint typed: timingPoints.Add(typed); break; case EffectControlPoint typed: effectPoints.Add(typed); break; default: throw new ArgumentException($"A control point of unexpected type {controlPoint.GetType()} was added to this {nameof(ControlPointInfo)}"); } } protected virtual void GroupItemRemoved(ControlPoint controlPoint) { switch (controlPoint) { case TimingControlPoint typed: timingPoints.Remove(typed); break; case EffectControlPoint typed: effectPoints.Remove(typed); break; } } public ControlPointInfo DeepClone() { var controlPointInfo = (ControlPointInfo)Activator.CreateInstance(GetType()); foreach (var point in AllControlPoints) controlPointInfo.Add(point.Time, point.DeepClone()); return controlPointInfo; } } }
using System; using System.Reflection; using System.Runtime.CompilerServices; using System.Collections; /* * Regression tests for the GC support in the JIT */ public partial class Tests { public static int test_36_simple() { // Overflow the registers object o1 = (1); object o2 = (2); object o3 = (3); object o4 = (4); object o5 = (5); object o6 = (6); object o7 = (7); object o8 = (8); /* Prevent the variables from being local to a bb */ bool b = o1 != null; GC.Collect(0); if (b) return (int)o1 + (int)o2 + (int)o3 + (int)o4 + (int)o5 + (int)o6 + (int)o7 + (int)o8; else return 0; } public static int test_36_liveness() { object o = 5; object o1, o2, o3, o4, o5, o6, o7, o8; bool b = o != null; GC.Collect(1); o1 = (1); o2 = (2); o3 = (3); o4 = (4); o5 = (5); o6 = (6); o7 = (7); o8 = (8); if (b) return (int)o1 + (int)o2 + (int)o3 + (int)o4 + (int)o5 + (int)o6 + (int)o7 + (int)o8; else return 0; } struct GcFooStruct { public object o1; public int i; public object o2; public GcFooStruct(int i1, int i, int i2) { this.o1 = i1; this.i = i; this.o2 = i2; } } public static int test_4_vtype() { GcFooStruct s = new GcFooStruct(1, 2, 3); GC.Collect(1); return (int)s.o1 + (int)s.o2; } class BigClass { public object o1, o2, o3, o4, o5, o6, o7, o8, o9, o10; public object o11, o12, o13, o14, o15, o16, o17, o18, o19, o20; public object o21, o22, o23, o24, o25, o26, o27, o28, o29, o30; public object o31, o32; } static void set_fields(BigClass b) { b.o31 = 31; b.o32 = 32; b.o1 = 1; b.o2 = 2; b.o3 = 3; b.o4 = 4; b.o5 = 5; b.o6 = 6; b.o7 = 7; b.o8 = 8; b.o9 = 9; b.o10 = 10; b.o11 = 11; b.o12 = 12; b.o13 = 13; b.o14 = 14; b.o15 = 15; b.o16 = 16; b.o17 = 17; b.o18 = 18; b.o19 = 19; b.o20 = 20; b.o21 = 21; b.o22 = 22; b.o23 = 23; b.o24 = 24; b.o25 = 25; b.o26 = 26; b.o27 = 27; b.o28 = 28; b.o29 = 29; b.o30 = 30; } // Test marking of objects with > 32 fields public static int test_528_mark_runlength_large() { BigClass b = new BigClass(); /* * Do the initialization in a separate method so no object refs remain in * spill slots. */ set_fields(b); GC.Collect(1); return (int)b.o1 + (int)b.o2 + (int)b.o3 + (int)b.o4 + (int)b.o5 + (int)b.o6 + (int)b.o7 + (int)b.o8 + (int)b.o9 + (int)b.o10 + (int)b.o11 + (int)b.o12 + (int)b.o13 + (int)b.o14 + (int)b.o15 + (int)b.o16 + (int)b.o17 + (int)b.o18 + (int)b.o19 + (int)b.o20 + (int)b.o21 + (int)b.o22 + (int)b.o23 + (int)b.o24 + (int)b.o25 + (int)b.o26 + (int)b.o27 + (int)b.o28 + (int)b.o29 + (int)b.o30 + (int)b.o31 + (int)b.o32; } /* * Test liveness and loops. */ public static int test_0_liveness_2() { object o = new object(); for (int n = 0; n < 10; ++n) { /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; if (o != null) o.ToString(); GC.Collect(1); if (o != null) o.ToString(); sum += i + j + k; GC.Collect(1); } return 0; } /* * Test liveness and stack slot sharing * This doesn't work yet, its hard to make the JIT share the stack slots of the * two 'o' variables. 
*/ public static int test_0_liveness_3() { bool b = false; bool b2 = true; /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; if (b) { object o = new object(); /* Make sure o is global */ if (b2) Console.WriteLine(); o.ToString(); } GC.Collect(1); if (b) { object o = new object(); /* Make sure o is global */ if (b2) Console.WriteLine(); o.ToString(); } sum += i + j + k + l + m + n + s; return 0; } /* * Test liveness of variables used to handle items on the IL stack. */ [MethodImplAttribute(MethodImplOptions.NoInlining)] static string call1() { return "A"; } [MethodImplAttribute(MethodImplOptions.NoInlining)] static string call2() { GC.Collect(1); return "A"; } public static int test_0_liveness_4() { bool b = false; bool b2 = true; /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; string o = b ? call1() : call2(); GC.Collect(1); sum += i + j + k + l + m + n + s; return 0; } /* * Test liveness of volatile variables */ [MethodImplAttribute(MethodImplOptions.NoInlining)] static void liveness_5_1(out object o) { o = new object(); } public static int test_0_liveness_5() { bool b = false; bool b2 = true; /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; object o; liveness_5_1(out o); for (int x = 0; x < 10; ++x) { o.ToString(); GC.Collect(1); } sum += i + j + k + l + m + n + s; return 0; } /* * Test the case when a stack slot becomes dead, then live again due to a backward * branch. 
*/ [MethodImplAttribute(MethodImplOptions.NoInlining)] static object alloc_obj() { return new object(); } [MethodImplAttribute(MethodImplOptions.NoInlining)] static bool return_true() { return true; } [MethodImplAttribute(MethodImplOptions.NoInlining)] static bool return_false() { return false; } public static int test_0_liveness_6() { bool b = false; bool b2 = true; /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; for (int x = 0; x < 10; ++x) { GC.Collect(1); object o = alloc_obj(); o.ToString(); GC.Collect(1); } sum += i + j + k + l + m + n + s; return 0; } public static int test_0_multi_dim_ref_array_wbarrier() { string[,] arr = new string[256, 256]; for (int i = 0; i < 256; ++i) { for (int j = 0; j < 100; ++j) arr[i, j] = "" + i + " " + j; } GC.Collect(); return 0; } /* * Liveness + out of line bblocks */ public static int test_0_liveness_7() { /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; // o is dead here GC.Collect(1); if (return_false()) { // This bblock is in-line object o = alloc_obj(); // o is live here if (return_false()) { // This bblock is out-of-line, and o is live here throw new Exception(o.ToString()); } } // o is dead here too GC.Collect(1); return 0; } // Liveness + finally clauses public static int test_0_liveness_8() { /* Exhaust all registers so 'o' is stack allocated */ int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; object o = null; try { o = alloc_obj(); } finally { GC.Collect(1); } o.GetHashCode(); return 0; } [MethodImplAttribute(MethodImplOptions.NoInlining)] static object alloc_string() { return "A"; } [MethodImplAttribute(MethodImplOptions.NoInlining)] static object alloc_obj_and_gc() { GC.Collect(1); return new object(); } [MethodImplAttribute(MethodImplOptions.NoInlining)] static void clobber_regs_and_gc() { int sum = 0, i, j, k, l, m, n, s; for (i = 0; i < 100; ++i) sum++; for (j = 0; j < 100; ++j) sum++; for (k = 0; k < 100; ++k) sum++; for (l = 0; l < 100; ++l) sum++; for (m = 0; m < 100; ++m) sum++; for (n = 0; n < 100; ++n) sum++; for (s = 0; s < 100; ++s) sum++; GC.Collect(1); } [MethodImplAttribute(MethodImplOptions.NoInlining)] static void liveness_9_call1(object o1, object o2, object o3) { o1.GetHashCode(); o2.GetHashCode(); o3.GetHashCode(); } // Liveness + JIT temporaries public static int test_0_liveness_9() { // the result of alloc_obj () goes into a vreg, which gets converted to a // JIT temporary because of the branching introduced by the cast // FIXME: This doesn't crash if MONO_TYPE_I is not treated as a GC ref liveness_9_call1(alloc_obj(), (string)alloc_string(), alloc_obj_and_gc()); return 0; } // Liveness for registers public static int test_0_liveness_10() { // Make sure this goes into a register object o = alloc_obj(); o.GetHashCode(); o.GetHashCode(); o.GetHashCode(); o.GetHashCode(); 
o.GetHashCode(); o.GetHashCode(); // Break the bblock so o doesn't become a local vreg if (return_true()) // Clobber it with a call and run a GC clobber_regs_and_gc(); // Access it again o.GetHashCode(); return 0; } // Liveness for spill slots holding managed pointers public static int test_0_liveness_11() { Tests[] arr = new Tests[10]; // This uses an ldelema internally // FIXME: This doesn't crash if mp-s are not correctly tracked, just writes to // an old object. arr[0] >>= 1; return 0; } public static Tests operator >>(Tests bi1, int shiftVal) { clobber_regs_and_gc(); return bi1; } [MethodImplAttribute(MethodImplOptions.NoInlining)] public static void liveness_12_inner(int a, int b, int c, int d, int e, int f, object o) { GC.Collect(1); o.GetHashCode(); } // Liveness for param area public static int test_0_liveness_12() { // The ref argument should be passed on the stack liveness_12_inner(1, 2, 3, 4, 5, 6, new object()); return 0; } public static void liveness_13_inner(ref ArrayList arr) { // The value of arr will be stored in a spill slot arr.Add(alloc_obj_and_gc()); } // Liveness for byref arguments in spill slots public static int test_0_liveness_13() { var arr = new ArrayList(); liveness_13_inner(ref arr); return 0; } }
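// The test methods above follow the naming convention visible in their names and return values:
// a method named test_<expected>_<name> is expected to return <expected> (for example,
// test_36_simple returns 1 + 2 + ... + 8 = 36, and test_528_mark_runlength_large returns
// 1 + 2 + ... + 32 = 528). The reflection-based driver below is an illustrative sketch of that
// convention only; it is not Mono's actual TestDriver.
using System;
using System.Reflection;

public static class RegressionDriverSketch
{
    // Runs every public static "test_<expected>_<name>" method on the given type and
    // reports the number of failures. Usage: RegressionDriverSketch.RunAll(typeof(Tests)).
    public static int RunAll(Type suite)
    {
        int failures = 0;
        foreach (var method in suite.GetMethods(BindingFlags.Public | BindingFlags.Static))
        {
            var parts = method.Name.Split('_');
            if (parts.Length < 3 || parts[0] != "test" || !int.TryParse(parts[1], out int expected))
                continue;

            int actual = (int)method.Invoke(null, null);
            if (actual != expected)
            {
                Console.WriteLine($"FAIL {method.Name}: expected {expected}, got {actual}");
                failures++;
            }
        }
        return failures;
    }
}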
using OpenKh.Common; using OpenKh.Imaging; using System; using System.Collections.Generic; using System.Drawing; using System.IO; using System.IO.Compression; using System.Text; using Xe.BinaryMapper; namespace OpenKh.Imaging { public class PngImage : IImageRead { private byte[] _data; private byte[] _clut; internal enum ColorType { TrueColor = 2, Indexed = 3, AlphaTrueColor = 6, } internal enum ComprMethod { Deflate = 0, } internal enum InterlaceMethod { None = 0, Adam7 = 1, } internal class Signature { [Data] public ulong Magic { get; set; } public const ulong Valid = 727905341920923785; } internal class Chunk { [Data] public int Length { get; set; } [Data] public int CType { get; set; } public byte[] RawData { get; set; } public int Crc { get; set; } public const int IHDR = 0x49484452; public const int IDAT = 0x49444154; public const int IEND = 0x49454E44; public const int PLTE = 0x504C5445; public const int tRNS = 0x74524E53; } internal class IHdr { [Data] public int Width { get; set; } [Data] public int Height { get; set; } [Data] public byte Bits { get; set; } [Data] public byte ColorType { get; set; } [Data] public byte ComprMethod { get; set; } [Data] public byte FilterMethod { get; set; } [Data] public byte InterlaceMethod { get; set; } } public PngImage(Stream stream) { stream.SetPosition(0); var header = BinaryMapping.ReadObject<Signature>(stream); if (header.Magic != Signature.Valid) { throw new InvalidDataException("Bad signature code!"); } var fullData = new MemoryStream(); IHdr ihdr = null; byte[] PLTE = null; byte[] tRNS = null; while (stream.Position < stream.Length) { var chunk = BinaryMapping.ReadObject<Chunk>(stream); chunk.CType = Turn(chunk.CType); chunk.Length = Turn(chunk.Length); chunk.RawData = stream.ReadBytes(chunk.Length); chunk.Crc = Turn(stream.ReadInt32()); if (chunk.CType == Chunk.IEND) { break; } switch (chunk.CType) { case Chunk.IHDR: ihdr = BinaryMapping.ReadObject<IHdr>(new MemoryStream(chunk.RawData)); ihdr.Width = Turn(ihdr.Width); ihdr.Height = Turn(ihdr.Height); break; case Chunk.PLTE: PLTE = chunk.RawData; break; case Chunk.tRNS: tRNS = chunk.RawData; break; case Chunk.IDAT: fullData.Write(chunk.RawData); break; } } if (ihdr == null) { throw new InvalidDataException("No IHDR!"); } var interlaceMethod = (InterlaceMethod)ihdr.InterlaceMethod; if (interlaceMethod != InterlaceMethod.None) { throw new NotSupportedException($"interlaceMethod {interlaceMethod} not supported!"); } var comprMethod = (ComprMethod)ihdr.ComprMethod; if (comprMethod != ComprMethod.Deflate) { throw new NotSupportedException($"comprMethod {comprMethod} not supported!"); } fullData.Position = 2; var deflater = new DeflateStream(fullData, CompressionMode.Decompress); Size = new Size(ihdr.Width, ihdr.Height); var bits = ihdr.Bits; var colorType = (ColorType)ihdr.ColorType; if (bits == 4 && colorType == ColorType.Indexed) { PixelFormat = PixelFormat.Indexed4; var stride = (1 + Size.Width) / 2; _data = new byte[stride * Size.Height]; for (int y = 0; y < Size.Height; y++) { var filter = deflater.ReadByte(); deflater.Read(_data, y * stride, stride); ApplyFilter(_data, y * stride, 1, stride, filter); } _clut = PrepareClut(PLTE, tRNS, 16); } else if (bits == 8 && colorType == ColorType.Indexed) { PixelFormat = PixelFormat.Indexed8; var stride = Size.Width; _data = new byte[stride * Size.Height]; for (int y = 0; y < Size.Height; y++) { var filter = deflater.ReadByte(); deflater.Read(_data, y * stride, stride); ApplyFilter(_data, y * stride, 1, stride, filter); } _clut = PrepareClut(PLTE, 
tRNS, 256); } else if (bits == 8 && colorType == ColorType.TrueColor) { PixelFormat = PixelFormat.Rgb888; var stride = 3 * Size.Width; _data = new byte[stride * Size.Height]; for (int y = 0; y < Size.Height; y++) { var filter = deflater.ReadByte(); deflater.Read(_data, y * stride, stride); ApplyFilter(_data, y * stride, 3, stride, filter); } } else if (bits == 8 && colorType == ColorType.AlphaTrueColor) { PixelFormat = PixelFormat.Rgba8888; var stride = 4 * Size.Width; _data = new byte[stride * Size.Height]; for (int y = 0; y < Size.Height; y++) { var filter = deflater.ReadByte(); deflater.Read(_data, y * stride, stride); ApplyFilter(_data, y * stride, 4, stride, filter); } int localOfs = 0; for (int y = 0; y < Size.Height; y++) { for (int x = 0; x < Size.Width; x++, localOfs += 4) { var r = _data[localOfs + 0]; var g = _data[localOfs + 1]; var b = _data[localOfs + 2]; var a = _data[localOfs + 3]; _data[localOfs + 0] = b; _data[localOfs + 1] = g; _data[localOfs + 2] = r; _data[localOfs + 3] = a; } } } else { throw new NotSupportedException($"Not supported combination: bits = {bits} and colorType = {colorType}"); } } private void ApplyFilter(byte[] data, int ptr, int pixelSize, int stride, int filter) { // See: https://www.w3.org/TR/PNG-Filters.html // See: https://www.w3.org/TR/2003/REC-PNG-20031110/#9FtIntro if (filter == 0) { // nop } else if (filter == 1) { var endPtr = ptr + stride; ptr += pixelSize; for (; ptr < endPtr; ptr++) { data[ptr] += data[ptr - pixelSize]; } } else if (filter == 2) { var endPtr = ptr + stride; for (; ptr < endPtr; ptr++) { data[ptr] += data[ptr - stride]; } } else if (filter == 3) { var endPtr = ptr + stride; var atNextPixel = ptr + pixelSize; for (; ptr < atNextPixel; ptr++) { data[ptr] += (byte)((0 + data[ptr - stride]) / 2); } for (; ptr < endPtr; ptr++) { data[ptr] += (byte)((data[ptr - pixelSize] + data[ptr - stride]) / 2); } } else if (filter == 4) { var endPtr = ptr + stride; var atNextPixel = ptr + pixelSize; for (; ptr < atNextPixel; ptr++) { data[ptr] += data[ptr - stride]; } for (; ptr < endPtr; ptr++) { data[ptr] += PaethPredictor( data[ptr - pixelSize], data[ptr - stride], data[ptr - pixelSize - stride] ); } } else { throw new NotSupportedException(); } } private byte PaethPredictor(byte a, byte b, byte c) { var p = a + b - c; var pa = Math.Abs(p - a); var pb = Math.Abs(p - b); var pc = Math.Abs(p - c); if (pa <= pb && pa <= pc) { return (byte)a; } else if (pb <= pc) { return (byte)b; } else { return (byte)c; } } private byte[] PrepareClut(byte[] PLTE, byte[] tRNS, int count) { var clut = new byte[4 * count]; for (int y = 0; y < count; y++) { if (3 * (y + 1) <= PLTE.Length) { clut[4 * y + 0] = PLTE[3 * y + 0]; clut[4 * y + 1] = PLTE[3 * y + 1]; clut[4 * y + 2] = PLTE[3 * y + 2]; } if (y + 1 <= tRNS.Length) { clut[4 * y + 3] = tRNS[y]; } } return clut; } private static int Turn(int val) { return (int)Turn((uint)val); } private static uint Turn(uint val) { return 0 | ((val << 24)) | ((val << 8) & 0x00FF0000) | ((val >> 8) & 0x0000FF00) | ((val >> 24) & 0x000000FF) ; } public static bool IsValid(Stream stream) { stream.SetPosition(0); return stream.ReadByte() == 0x89 && stream.ReadByte() == 0x50 && stream.ReadByte() == 0x4e && stream.ReadByte() == 0x47 && stream.ReadByte() == 0x0d && stream.ReadByte() == 0x0a && stream.ReadByte() == 0x1a && stream.ReadByte() == 0x0a; } public static PngImage Read(Stream stream) => new PngImage(stream); #region IImageRead public Size Size { get; internal set; } public PixelFormat PixelFormat { get; internal set; } 
public byte[] GetData() => _data; public byte[] GetClut() => _clut; #endregion } }
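// A small usage sketch for the PngImage reader above. The file path is a placeholder; the
// members used (IsValid, Read, Size, PixelFormat, GetData, GetClut) are the ones defined above.
using System;
using System.IO;
using OpenKh.Imaging;

public static class PngImageUsageSketch
{
    public static void Dump(string path)
    {
        using var stream = File.OpenRead(path);
        if (!PngImage.IsValid(stream))
            throw new InvalidDataException($"{path} is not a PNG file.");

        var image = PngImage.Read(stream);
        Console.WriteLine($"{image.Size.Width}x{image.Size.Height} {image.PixelFormat}");
        Console.WriteLine($"pixel bytes: {image.GetData().Length}");

        // GetClut() is only populated for the indexed formats handled above (4- and 8-bit).
        var clut = image.GetClut();
        Console.WriteLine($"clut bytes: {(clut == null ? 0 : clut.Length)}");
    }
}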
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; namespace JIT.HardwareIntrinsics.X86 { public static partial class Program { private static void CompareEqualUInt64() { var test = new SimpleBinaryOpTest__CompareEqualUInt64(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); // Validates basic functionality works, using LoadAligned test.RunBasicScenario_LoadAligned(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); // Validates calling via reflection works, using LoadAligned test.RunReflectionScenario_LoadAligned(); } // Validates passing a static member works test.RunClsVarScenario(); if (Sse2.IsSupported) { // Validates passing a static member works, using pinning and Load test.RunClsVarScenario_Load(); } // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (Sse2.IsSupported) { // Validates passing a local works, using Load test.RunLclVarScenario_Load(); // Validates passing a local works, using LoadAligned test.RunLclVarScenario_LoadAligned(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local class works, using pinning and Load test.RunClassLclFldScenario_Load(); } // Validates passing an instance member of a class works test.RunClassFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a class works, using pinning and Load test.RunClassFldScenario_Load(); } // Validates passing the field of a local struct works test.RunStructLclFldScenario(); if (Sse2.IsSupported) { // Validates passing the field of a local struct works, using pinning and Load test.RunStructLclFldScenario_Load(); } // Validates passing an instance member of a struct works test.RunStructFldScenario(); if (Sse2.IsSupported) { // Validates passing an instance member of a struct works, using pinning and Load test.RunStructFldScenario_Load(); } } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__CompareEqualUInt64 { private struct DataTable { private byte[] inArray1; private byte[] inArray2; private byte[] outArray; private GCHandle inHandle1; private GCHandle inHandle2; private GCHandle outHandle; private ulong alignment; public DataTable(UInt64[] inArray1, UInt64[] inArray2, UInt64[] outArray, int 
alignment) { int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<UInt64>(); int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<UInt64>(); int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt64>(); if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray) { throw new ArgumentException("Invalid value of alignment"); } this.inArray1 = new byte[alignment * 2]; this.inArray2 = new byte[alignment * 2]; this.outArray = new byte[alignment * 2]; this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned); this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned); this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned); this.alignment = (ulong)alignment; Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<UInt64, byte>(ref inArray1[0]), (uint)sizeOfinArray1); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<UInt64, byte>(ref inArray2[0]), (uint)sizeOfinArray2); } public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment); public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment); public void Dispose() { inHandle1.Free(); inHandle2.Free(); outHandle.Free(); } private static unsafe void* Align(byte* buffer, ulong expectedAlignment) { return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1)); } } private struct TestStruct { public Vector128<UInt64> _fld1; public Vector128<UInt64> _fld2; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); return testStruct; } public void RunStructFldScenario(SimpleBinaryOpTest__CompareEqualUInt64 testClass) { var result = Sse41.CompareEqual(_fld1, _fld2); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } public void RunStructFldScenario_Load(SimpleBinaryOpTest__CompareEqualUInt64 testClass) { fixed (Vector128<UInt64>* pFld1 = &_fld1) fixed (Vector128<UInt64>* pFld2 = &_fld2) { var result = Sse41.CompareEqual( Sse2.LoadVector128((UInt64*)(pFld1)), Sse2.LoadVector128((UInt64*)(pFld2)) ); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } } } private static readonly int LargestVectorSize = 16; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<UInt64>>() / sizeof(UInt64); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<UInt64>>() / sizeof(UInt64); private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt64>>() / sizeof(UInt64); private static UInt64[] _data1 = new UInt64[Op1ElementCount]; private static UInt64[] _data2 = new UInt64[Op2ElementCount]; private static Vector128<UInt64> _clsVar1; private static Vector128<UInt64> _clsVar2; 
private Vector128<UInt64> _fld1; private Vector128<UInt64> _fld2; private DataTable _dataTable; static SimpleBinaryOpTest__CompareEqualUInt64() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _clsVar1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _clsVar2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); } public SimpleBinaryOpTest__CompareEqualUInt64() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); } _dataTable = new DataTable(_data1, _data2, new UInt64[RetElementCount], LargestVectorSize); } public bool IsSupported => Sse41.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = Sse41.CompareEqual( Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = Sse41.CompareEqual( Sse2.LoadVector128((UInt64*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((UInt64*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned)); var result = Sse41.CompareEqual( Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(Sse41).GetMethod(nameof(Sse41.CompareEqual), new Type[] { typeof(Vector128<UInt64>), typeof(Vector128<UInt64>) }) .Invoke(null, new object[] { Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = 
typeof(Sse41).GetMethod(nameof(Sse41.CompareEqual), new Type[] { typeof(Vector128<UInt64>), typeof(Vector128<UInt64>) }) .Invoke(null, new object[] { Sse2.LoadVector128((UInt64*)(_dataTable.inArray1Ptr)), Sse2.LoadVector128((UInt64*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned)); var result = typeof(Sse41).GetMethod(nameof(Sse41.CompareEqual), new Type[] { typeof(Vector128<UInt64>), typeof(Vector128<UInt64>) }) .Invoke(null, new object[] { Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray1Ptr)), Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = Sse41.CompareEqual( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunClsVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load)); fixed (Vector128<UInt64>* pClsVar1 = &_clsVar1) fixed (Vector128<UInt64>* pClsVar2 = &_clsVar2) { var result = Sse41.CompareEqual( Sse2.LoadVector128((UInt64*)(pClsVar1)), Sse2.LoadVector128((UInt64*)(pClsVar2)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } } public void RunLclVarScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead)); var op1 = Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray1Ptr); var op2 = Unsafe.Read<Vector128<UInt64>>(_dataTable.inArray2Ptr); var result = Sse41.CompareEqual(op1, op2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load)); var op1 = Sse2.LoadVector128((UInt64*)(_dataTable.inArray1Ptr)); var op2 = Sse2.LoadVector128((UInt64*)(_dataTable.inArray2Ptr)); var result = Sse41.CompareEqual(op1, op2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, _dataTable.outArrayPtr); } public void RunLclVarScenario_LoadAligned() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned)); var op1 = Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray1Ptr)); var op2 = Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArray2Ptr)); var result = Sse41.CompareEqual(op1, op2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, _dataTable.outArrayPtr); } public void RunClassLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario)); var test = new SimpleBinaryOpTest__CompareEqualUInt64(); var result = Sse41.CompareEqual(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunClassLclFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load)); var test = new SimpleBinaryOpTest__CompareEqualUInt64(); fixed (Vector128<UInt64>* pFld1 = &test._fld1) fixed (Vector128<UInt64>* pFld2 = &test._fld2) { var result = Sse41.CompareEqual( 
Sse2.LoadVector128((UInt64*)(pFld1)), Sse2.LoadVector128((UInt64*)(pFld2)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } } public void RunClassFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario)); var result = Sse41.CompareEqual(_fld1, _fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } public void RunClassFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load)); fixed (Vector128<UInt64>* pFld1 = &_fld1) fixed (Vector128<UInt64>* pFld2 = &_fld2) { var result = Sse41.CompareEqual( Sse2.LoadVector128((UInt64*)(pFld1)), Sse2.LoadVector128((UInt64*)(pFld2)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr); } } public void RunStructLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario)); var test = TestStruct.Create(); var result = Sse41.CompareEqual(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunStructLclFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load)); var test = TestStruct.Create(); var result = Sse41.CompareEqual( Sse2.LoadVector128((UInt64*)(&test._fld1)), Sse2.LoadVector128((UInt64*)(&test._fld2)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void RunStructFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario)); var test = TestStruct.Create(); test.RunStructFldScenario(this); } public void RunStructFldScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load)); var test = TestStruct.Create(); test.RunStructFldScenario_Load(this); } public void RunUnsupportedScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario)); bool succeeded = false; try { RunBasicScenario_UnsafeRead(); } catch (PlatformNotSupportedException) { succeeded = true; } if (!succeeded) { Succeeded = false; } } private void ValidateResult(Vector128<UInt64> op1, Vector128<UInt64> op2, void* result, [CallerMemberName] string method = "") { UInt64[] inArray1 = new UInt64[Op1ElementCount]; UInt64[] inArray2 = new UInt64[Op2ElementCount]; UInt64[] outArray = new UInt64[RetElementCount]; Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray1[0]), op1); Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray2[0]), op2); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); ValidateResult(inArray1, inArray2, outArray, method); } private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "") { UInt64[] inArray1 = new UInt64[Op1ElementCount]; UInt64[] inArray2 = new UInt64[Op2ElementCount]; UInt64[] outArray = new UInt64[RetElementCount]; Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt64>>()); ValidateResult(inArray1, 
inArray2, outArray, method); } private void ValidateResult(UInt64[] left, UInt64[] right, UInt64[] result, [CallerMemberName] string method = "") { bool succeeded = true; if (result[0] != ((left[0] == right[0]) ? unchecked((ulong)(-1)) : 0)) { succeeded = false; } else { for (var i = 1; i < RetElementCount; i++) { if (result[i] != ((left[i] == right[i]) ? unchecked((ulong)(-1)) : 0)) { succeeded = false; break; } } } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"{nameof(Sse41)}.{nameof(Sse41.CompareEqual)}<UInt64>(Vector128<UInt64>, Vector128<UInt64>): {method} failed:"); TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})"); TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})"); TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})"); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } } }
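// A minimal standalone sketch of the operation exercised by the generated test above:
// Sse41.CompareEqual on Vector128<ulong> produces all-ones (0xFFFFFFFFFFFFFFFF) in each lane
// where the operands are equal and 0 where they differ. Guarded by IsSupported, as the test is.
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

public static class CompareEqualUInt64Sketch
{
    public static void Demo()
    {
        if (!Sse41.IsSupported)
        {
            Console.WriteLine("SSE4.1 not supported on this machine.");
            return;
        }

        var left = Vector128.Create(1UL, 2UL);
        var right = Vector128.Create(1UL, 3UL);
        Vector128<ulong> mask = Sse41.CompareEqual(left, right);

        // Lane 0 compares equal (all ones), lane 1 does not (zero).
        Console.WriteLine($"{mask.GetElement(0):X16} {mask.GetElement(1):X16}");
    }
}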
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Specialized; using System.Reflection; using System.IO; using System.Web; using Mono.Addins; using log4net; using Nini.Config; using OpenMetaverse; using OpenMetaverse.StructuredData; using OpenSim.Framework; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Interfaces; using Caps = OpenSim.Framework.Capabilities.Caps; using OpenSim.Framework.Capabilities; using PermissionMask = OpenSim.Framework.PermissionMask; namespace OpenSim.Region.ClientStack.Linden { [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "NewFileAgentInventoryVariablePriceModule")] public class NewFileAgentInventoryVariablePriceModule : INonSharedRegionModule { // private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private Scene m_scene; // private IAssetService m_assetService; private bool m_dumpAssetsToFile = false; private bool m_enabled = true; private int m_levelUpload = 0; #region Region Module interfaceBase Members public Type ReplaceableInterface { get { return null; } } public void Initialise(IConfigSource source) { IConfig meshConfig = source.Configs["Mesh"]; if (meshConfig == null) return; m_enabled = meshConfig.GetBoolean("AllowMeshUpload", true); m_levelUpload = meshConfig.GetInt("LevelUpload", 0); } public void AddRegion(Scene pScene) { m_scene = pScene; } public void RemoveRegion(Scene scene) { m_scene.EventManager.OnRegisterCaps -= RegisterCaps; m_scene = null; } public void RegionLoaded(Scene scene) { // m_assetService = m_scene.RequestModuleInterface<IAssetService>(); m_scene.EventManager.OnRegisterCaps += RegisterCaps; } #endregion #region Region Module interface public void Close() { } public string Name { get { return "NewFileAgentInventoryVariablePriceModule"; } } public void RegisterCaps(UUID agentID, Caps 
caps) { if(!m_enabled) return; UUID capID = UUID.Random(); // m_log.Debug("[NEW FILE AGENT INVENTORY VARIABLE PRICE]: /CAPS/" + capID); caps.RegisterHandler( "NewFileAgentInventoryVariablePrice", new LLSDStreamhandler<LLSDAssetUploadRequest, LLSDNewFileAngentInventoryVariablePriceReplyResponse>( "POST", "/CAPS/" + capID.ToString(), req => NewAgentInventoryRequest(req, agentID), "NewFileAgentInventoryVariablePrice", agentID.ToString())); } #endregion public LLSDNewFileAngentInventoryVariablePriceReplyResponse NewAgentInventoryRequest(LLSDAssetUploadRequest llsdRequest, UUID agentID) { //TODO: The Mesh uploader uploads many types of content. If you're going to implement a Money based limit // you need to be aware of this //if (llsdRequest.asset_type == "texture" || // llsdRequest.asset_type == "animation" || // llsdRequest.asset_type == "sound") // { // check user level ScenePresence avatar = null; IClientAPI client = null; m_scene.TryGetScenePresence(agentID, out avatar); if (avatar != null) { client = avatar.ControllingClient; if (avatar.UserLevel < m_levelUpload) { if (client != null) client.SendAgentAlertMessage("Unable to upload asset. Insufficient permissions.", false); LLSDNewFileAngentInventoryVariablePriceReplyResponse errorResponse = new LLSDNewFileAngentInventoryVariablePriceReplyResponse(); errorResponse.rsvp = ""; errorResponse.state = "error"; return errorResponse; } } // check funds IMoneyModule mm = m_scene.RequestModuleInterface<IMoneyModule>(); if (mm != null) { if (!mm.UploadCovered(agentID, mm.UploadCharge)) { if (client != null) client.SendAgentAlertMessage("Unable to upload asset. Insufficient funds.", false); LLSDNewFileAngentInventoryVariablePriceReplyResponse errorResponse = new LLSDNewFileAngentInventoryVariablePriceReplyResponse(); errorResponse.rsvp = ""; errorResponse.state = "error"; return errorResponse; } } // } string assetName = llsdRequest.name; string assetDes = llsdRequest.description; string capsBase = "/CAPS/NewFileAgentInventoryVariablePrice/"; UUID newAsset = UUID.Random(); UUID newInvItem = UUID.Random(); UUID parentFolder = llsdRequest.folder_id; string uploaderPath = Util.RandomClass.Next(5000, 8000).ToString("0000") + "/"; AssetUploader uploader = new AssetUploader(assetName, assetDes, newAsset, newInvItem, parentFolder, llsdRequest.inventory_type, llsdRequest.asset_type, capsBase + uploaderPath, MainServer.Instance, m_dumpAssetsToFile); MainServer.Instance.AddStreamHandler( new BinaryStreamHandler( "POST", capsBase + uploaderPath, uploader.uploaderCaps, "NewFileAgentInventoryVariablePrice", agentID.ToString())); string protocol = "http://"; if (MainServer.Instance.UseSSL) protocol = "https://"; string uploaderURL = protocol + m_scene.RegionInfo.ExternalHostName + ":" + MainServer.Instance.Port.ToString() + capsBase + uploaderPath; LLSDNewFileAngentInventoryVariablePriceReplyResponse uploadResponse = new LLSDNewFileAngentInventoryVariablePriceReplyResponse(); uploadResponse.rsvp = uploaderURL; uploadResponse.state = "upload"; uploadResponse.resource_cost = 0; uploadResponse.upload_price = 0; uploader.OnUpLoad += //UploadCompleteHandler; delegate( string passetName, string passetDescription, UUID passetID, UUID pinventoryItem, UUID pparentFolder, byte[] pdata, string pinventoryType, string passetType) { UploadCompleteHandler(passetName, passetDescription, passetID, pinventoryItem, pparentFolder, pdata, pinventoryType, passetType,agentID); }; return uploadResponse; } public void UploadCompleteHandler(string assetName, string assetDescription, UUID 
assetID, UUID inventoryItem, UUID parentFolder, byte[] data, string inventoryType, string assetType,UUID AgentID) { // m_log.DebugFormat( // "[NEW FILE AGENT INVENTORY VARIABLE PRICE MODULE]: Upload complete for {0}", inventoryItem); sbyte assType = 0; sbyte inType = 0; if (inventoryType == "sound") { inType = 1; assType = 1; } else if (inventoryType == "animation") { inType = 19; assType = 20; } else if (inventoryType == "wearable") { inType = 18; switch (assetType) { case "bodypart": assType = 13; break; case "clothing": assType = 5; break; } } else if (inventoryType == "mesh") { inType = (sbyte)InventoryType.Mesh; assType = (sbyte)AssetType.Mesh; } AssetBase asset; asset = new AssetBase(assetID, assetName, assType, AgentID.ToString()); asset.Data = data; if (m_scene.AssetService != null) m_scene.AssetService.Store(asset); InventoryItemBase item = new InventoryItemBase(); item.Owner = AgentID; item.CreatorId = AgentID.ToString(); item.ID = inventoryItem; item.AssetID = asset.FullID; item.Description = assetDescription; item.Name = assetName; item.AssetType = assType; item.InvType = inType; item.Folder = parentFolder; item.CurrentPermissions = (uint)(PermissionMask.Move | PermissionMask.Copy | PermissionMask.Modify | PermissionMask.Transfer); item.BasePermissions = (uint)PermissionMask.All; item.EveryOnePermissions = 0; item.NextPermissions = (uint)PermissionMask.All; item.CreationDate = Util.UnixTimeSinceEpoch(); m_scene.AddInventoryItem(item); } } }
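// A small sketch isolating the string-to-code mapping performed in UploadCompleteHandler above;
// the sbyte codes are the ones assigned there. The helper itself is illustrative and is not part
// of the OpenSim module.
using OpenMetaverse;

public static class UploadTypeMapSketch
{
    // Returns the (inventory type, asset type) codes stored on the InventoryItemBase for the
    // LLSD inventory/asset type strings handled by the upload path above.
    public static (sbyte InvType, sbyte AssetType) Map(string inventoryType, string assetType)
    {
        switch (inventoryType)
        {
            case "sound":
                return (1, 1);
            case "animation":
                return (19, 20);
            case "wearable":
                sbyte wearableAsset = assetType == "bodypart" ? (sbyte)13
                    : assetType == "clothing" ? (sbyte)5 : (sbyte)0;
                return (18, wearableAsset);
            case "mesh":
                return ((sbyte)InventoryType.Mesh, (sbyte)AssetType.Mesh);
            default:
                return (0, 0);
        }
    }
}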
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ // **NOTE** This file was generated by a tool and any changes will be overwritten. namespace Microsoft.Graph { using System; using System.Collections.Generic; using System.IO; using System.Net.Http; using System.Threading; using System.Linq.Expressions; /// <summary> /// The type WorkbookChartFillRequest. /// </summary> public partial class WorkbookChartFillRequest : BaseRequest, IWorkbookChartFillRequest { /// <summary> /// Constructs a new WorkbookChartFillRequest. /// </summary> /// <param name="requestUrl">The URL for the built request.</param> /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param> /// <param name="options">Query and header option name value pairs for the request.</param> public WorkbookChartFillRequest( string requestUrl, IBaseClient client, IEnumerable<Option> options) : base(requestUrl, client, options) { } /// <summary> /// Creates the specified WorkbookChartFill using POST. /// </summary> /// <param name="workbookChartFillToCreate">The WorkbookChartFill to create.</param> /// <returns>The created WorkbookChartFill.</returns> public System.Threading.Tasks.Task<WorkbookChartFill> CreateAsync(WorkbookChartFill workbookChartFillToCreate) { return this.CreateAsync(workbookChartFillToCreate, CancellationToken.None); } /// <summary> /// Creates the specified WorkbookChartFill using POST. /// </summary> /// <param name="workbookChartFillToCreate">The WorkbookChartFill to create.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The created WorkbookChartFill.</returns> public async System.Threading.Tasks.Task<WorkbookChartFill> CreateAsync(WorkbookChartFill workbookChartFillToCreate, CancellationToken cancellationToken) { this.ContentType = "application/json"; this.Method = "POST"; var newEntity = await this.SendAsync<WorkbookChartFill>(workbookChartFillToCreate, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(newEntity); return newEntity; } /// <summary> /// Deletes the specified WorkbookChartFill. /// </summary> /// <returns>The task to await.</returns> public System.Threading.Tasks.Task DeleteAsync() { return this.DeleteAsync(CancellationToken.None); } /// <summary> /// Deletes the specified WorkbookChartFill. /// </summary> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The task to await.</returns> public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken) { this.Method = "DELETE"; await this.SendAsync<WorkbookChartFill>(null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets the specified WorkbookChartFill. /// </summary> /// <returns>The WorkbookChartFill.</returns> public System.Threading.Tasks.Task<WorkbookChartFill> GetAsync() { return this.GetAsync(CancellationToken.None); } /// <summary> /// Gets the specified WorkbookChartFill. 
/// </summary> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The WorkbookChartFill.</returns> public async System.Threading.Tasks.Task<WorkbookChartFill> GetAsync(CancellationToken cancellationToken) { this.Method = "GET"; var retrievedEntity = await this.SendAsync<WorkbookChartFill>(null, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(retrievedEntity); return retrievedEntity; } /// <summary> /// Updates the specified WorkbookChartFill using PATCH. /// </summary> /// <param name="workbookChartFillToUpdate">The WorkbookChartFill to update.</param> /// <returns>The updated WorkbookChartFill.</returns> public System.Threading.Tasks.Task<WorkbookChartFill> UpdateAsync(WorkbookChartFill workbookChartFillToUpdate) { return this.UpdateAsync(workbookChartFillToUpdate, CancellationToken.None); } /// <summary> /// Updates the specified WorkbookChartFill using PATCH. /// </summary> /// <param name="workbookChartFillToUpdate">The WorkbookChartFill to update.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param> /// <returns>The updated WorkbookChartFill.</returns> public async System.Threading.Tasks.Task<WorkbookChartFill> UpdateAsync(WorkbookChartFill workbookChartFillToUpdate, CancellationToken cancellationToken) { this.ContentType = "application/json"; this.Method = "PATCH"; var updatedEntity = await this.SendAsync<WorkbookChartFill>(workbookChartFillToUpdate, cancellationToken).ConfigureAwait(false); this.InitializeCollectionProperties(updatedEntity); return updatedEntity; } /// <summary> /// Adds the specified expand value to the request. /// </summary> /// <param name="value">The expand value.</param> /// <returns>The request object to send.</returns> public IWorkbookChartFillRequest Expand(string value) { this.QueryOptions.Add(new QueryOption("$expand", value)); return this; } /// <summary> /// Adds the specified expand value to the request. /// </summary> /// <param name="expandExpression">The expression from which to calculate the expand value.</param> /// <returns>The request object to send.</returns> public IWorkbookChartFillRequest Expand(Expression<Func<WorkbookChartFill, object>> expandExpression) { if (expandExpression == null) { throw new ArgumentNullException(nameof(expandExpression)); } string error; string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error); if (value == null) { throw new ArgumentException(error, nameof(expandExpression)); } else { this.QueryOptions.Add(new QueryOption("$expand", value)); } return this; } /// <summary> /// Adds the specified select value to the request. /// </summary> /// <param name="value">The select value.</param> /// <returns>The request object to send.</returns> public IWorkbookChartFillRequest Select(string value) { this.QueryOptions.Add(new QueryOption("$select", value)); return this; } /// <summary> /// Adds the specified select value to the request. 
/// </summary> /// <param name="selectExpression">The expression from which to calculate the select value.</param> /// <returns>The request object to send.</returns> public IWorkbookChartFillRequest Select(Expression<Func<WorkbookChartFill, object>> selectExpression) { if (selectExpression == null) { throw new ArgumentNullException(nameof(selectExpression)); } string error; string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error); if (value == null) { throw new ArgumentException(error, nameof(selectExpression)); } else { this.QueryOptions.Add(new QueryOption("$select", value)); } return this; } /// <summary> /// Initializes any collection properties after deserialization, like next requests for paging. /// </summary> /// <param name="workbookChartFillToInitialize">The <see cref="WorkbookChartFill"/> with the collection properties to initialize.</param> private void InitializeCollectionProperties(WorkbookChartFill workbookChartFillToInitialize) { } } }
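// A hedged usage sketch for the generated request class above. In practice the request object
// comes from a request builder; here it is constructed directly, and both requestUrl (the fully
// built .../fill URL) and graphClient are placeholders supplied by the caller. Only the
// constructor and GetAsync() defined above are used.
using Microsoft.Graph;

public static class WorkbookChartFillRequestSketch
{
    public static System.Threading.Tasks.Task<WorkbookChartFill> GetFillAsync(
        IBaseClient graphClient, string requestUrl)
    {
        var request = new WorkbookChartFillRequest(requestUrl, graphClient, options: null);
        return request.GetAsync();
    }
}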
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Linq; using System.Net.Http; using System.Net.Http.Formatting; using System.Net.Http.Headers; using System.Web.Http.Description; using System.Xml.Linq; using Newtonsoft.Json; namespace PushDataVSMVCTutorial.Areas.HelpPage { /// <summary> /// This class will generate the samples for the help page. /// </summary> public class HelpPageSampleGenerator { /// <summary> /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class. /// </summary> public HelpPageSampleGenerator() { ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>(); ActionSamples = new Dictionary<HelpPageSampleKey, object>(); SampleObjects = new Dictionary<Type, object>(); } /// <summary> /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>. /// </summary> public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; } /// <summary> /// Gets the objects that are used directly as samples for certain actions. /// </summary> public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; } /// <summary> /// Gets the objects that are serialized as samples by the supported formatters. /// </summary> public IDictionary<Type, object> SampleObjects { get; internal set; } /// <summary> /// Gets the request body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api) { return GetSample(api, SampleDirection.Request); } /// <summary> /// Gets the response body samples for a given <see cref="ApiDescription"/>. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The samples keyed by media type.</returns> public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api) { return GetSample(api, SampleDirection.Response); } /// <summary> /// Gets the request or response body samples. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The samples keyed by media type.</returns> public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection) { if (api == null) { throw new ArgumentNullException("api"); } string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters); var samples = new Dictionary<MediaTypeHeaderValue, object>(); // Use the samples provided directly for actions var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection); foreach (var actionSample in actionSamples) { samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value)); } // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage. 
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters. if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type)) { object sampleObject = GetSampleObject(type); foreach (var formatter in formatters) { foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes) { if (!samples.ContainsKey(mediaType)) { object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection); // If no sample found, try generate sample using formatter and sample object if (sample == null && sampleObject != null) { sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType); } samples.Add(mediaType, WrapSampleIfString(sample)); } } } } return samples; } /// <summary> /// Search for samples that are provided directly through <see cref="ActionSamples"/>. /// </summary> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="type">The CLR type.</param> /// <param name="formatter">The formatter.</param> /// <param name="mediaType">The media type.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param> /// <returns>The sample that matches the parameters.</returns> public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection) { object sample; // First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames. // If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames // If still not found, try get the sample provided for a specific type and mediaType if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample)) { return sample; } return null; } /// <summary> /// Gets the sample object that will be serialized by the formatters. /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>The sample object.</returns> public virtual object GetSampleObject(Type type) { object sampleObject; if (!SampleObjects.TryGetValue(type, out sampleObject)) { // Try create a default sample object ObjectGenerator objectGenerator = new ObjectGenerator(); sampleObject = objectGenerator.GenerateObject(type); } return sampleObject; } /// <summary> /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param> /// <param name="formatters">The formatters.</param> [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")] public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters) { if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection)) { throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection)); } if (api == null) { throw new ArgumentNullException("api"); } Type type; if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) || ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type)) { // Re-compute the supported formatters based on type Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>(); foreach (var formatter in api.ActionDescriptor.Configuration.Formatters) { if (IsFormatSupported(sampleDirection, formatter, type)) { newFormatters.Add(formatter); } } formatters = newFormatters; } else { switch (sampleDirection) { case SampleDirection.Request: ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody); type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType; formatters = api.SupportedRequestBodyFormatters; break; case SampleDirection.Response: default: type = api.ActionDescriptor.ReturnType; formatters = api.SupportedResponseFormatters; break; } } return type; } /// <summary> /// Writes the sample object using formatter. 
/// </summary> /// <param name="formatter">The formatter.</param> /// <param name="value">The value.</param> /// <param name="type">The type.</param> /// <param name="mediaType">Type of the media.</param> /// <returns></returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")] public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType) { if (formatter == null) { throw new ArgumentNullException("formatter"); } if (mediaType == null) { throw new ArgumentNullException("mediaType"); } object sample = String.Empty; MemoryStream ms = null; HttpContent content = null; try { if (formatter.CanWriteType(type)) { ms = new MemoryStream(); content = new ObjectContent(type, value, formatter, mediaType); formatter.WriteToStreamAsync(type, value, ms, content, null).Wait(); ms.Position = 0; StreamReader reader = new StreamReader(ms); string serializedSampleString = reader.ReadToEnd(); if (mediaType.MediaType.ToUpperInvariant().Contains("XML")) { serializedSampleString = TryFormatXml(serializedSampleString); } else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON")) { serializedSampleString = TryFormatJson(serializedSampleString); } sample = new TextSample(serializedSampleString); } else { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.", mediaType, formatter.GetType().Name, type.Name)); } } catch (Exception e) { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. 
Exception message: {2}", formatter.GetType().Name, mediaType.MediaType, e.Message)); } finally { if (ms != null) { ms.Dispose(); } if (content != null) { content.Dispose(); } } return sample; } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatJson(string str) { try { object parsedJson = JsonConvert.DeserializeObject(str); return JsonConvert.SerializeObject(parsedJson, Formatting.Indented); } catch { // can't parse JSON, return the original string return str; } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatXml(string str) { try { XDocument xml = XDocument.Parse(str); return xml.ToString(); } catch { // can't parse XML, return the original string return str; } } private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type) { switch (sampleDirection) { case SampleDirection.Request: return formatter.CanReadType(type); case SampleDirection.Response: return formatter.CanWriteType(type); } return false; } private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection) { HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase); foreach (var sample in ActionSamples) { HelpPageSampleKey sampleKey = sample.Key; if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) && String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) && (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) && sampleDirection == sampleKey.SampleDirection) { yield return sample; } } } private static object WrapSampleIfString(object sample) { string stringSample = sample as string; if (stringSample != null) { return new TextSample(stringSample); } return sample; } } }
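// --- Illustrative usage sketch (not part of the file above) -----------------------------------
// Shows how the HelpPageSampleGenerator's two sample sources fit together: a directly registered
// ActionSamples entry keyed by (media type, CLR type), and a formatter-serialized sample built
// from GetSampleObject + WriteSampleObjectUsingFormatter. Assumptions: the generator lives in the
// HelpPage area namespace used elsewhere in this dump, and the standard HelpPageSampleKey /
// TextSample / InvalidSample helper types are available. OrderDto and the JSON literal are made up.
using System;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using YourFood.Services.Areas.HelpPage; // assumed namespace of HelpPageSampleGenerator

public static class HelpPageSampleGeneratorSketch
{
    // Made-up DTO used only to drive sample generation.
    public class OrderDto
    {
        public int Id { get; set; }
        public string Name { get; set; }
    }

    public static void Run()
    {
        var generator = new HelpPageSampleGenerator();
        var json = MediaTypeHeaderValue.Parse("application/json");

        // A sample registered for a (media type, type) pair is the last fallback that
        // GetActionSample tries, after the controller/action-specific keys.
        generator.ActionSamples.Add(
            new HelpPageSampleKey(json, typeof(OrderDto)),
            "{ \"Id\": 1, \"Name\": \"Coffee\" }");

        // Alternatively, serialize an ObjectGenerator-created instance through a formatter; the
        // result is a TextSample wrapping indented JSON, or an InvalidSample describing the failure.
        object sample = generator.WriteSampleObjectUsingFormatter(
            new JsonMediaTypeFormatter(),
            generator.GetSampleObject(typeof(OrderDto)),
            typeof(OrderDto),
            json);

        Console.WriteLine(sample.GetType().Name); // TextSample (or InvalidSample on failure)
    }
}
// ----------------------------------------------------------------------------------------------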
using System.Linq; using Content.Server.Light.Components; using Content.Server.Storage.Components; using Content.Shared.ActionBlocker; using Content.Shared.Interaction; using Content.Shared.Light; using Content.Shared.Popups; using JetBrains.Annotations; using Robust.Shared.Audio; using Robust.Shared.Containers; using Robust.Shared.GameObjects; using Robust.Shared.IoC; using Robust.Shared.Localization; using Robust.Shared.Player; namespace Content.Server.Light.EntitySystems { [UsedImplicitly] public sealed class LightReplacerSystem : EntitySystem { [Dependency] private readonly PoweredLightSystem _poweredLight = default!; [Dependency] private readonly SharedPopupSystem _popupSystem = default!; public override void Initialize() { base.Initialize(); SubscribeLocalEvent<LightReplacerComponent, ComponentInit>(OnInit); SubscribeLocalEvent<LightReplacerComponent, InteractUsingEvent>(HandleInteract); SubscribeLocalEvent<LightReplacerComponent, AfterInteractEvent>(HandleAfterInteract); } private void OnInit(EntityUid uid, LightReplacerComponent replacer, ComponentInit args) { replacer.InsertedBulbs = ContainerHelpers.EnsureContainer<Container>(replacer.Owner, "light_replacer_storage"); } private void HandleAfterInteract(EntityUid uid, LightReplacerComponent component, AfterInteractEvent eventArgs) { if (eventArgs.Handled) return; // standard interaction checks if (!eventArgs.CanReach) return; // behaviour will depends on target type if (eventArgs.Target != null) { var targetUid = (EntityUid) eventArgs.Target; // replace broken light in fixture? if (EntityManager.TryGetComponent(targetUid, out PoweredLightComponent? fixture)) eventArgs.Handled = TryReplaceBulb(uid, targetUid, eventArgs.User, component, fixture); // add new bulb to light replacer container? else if (EntityManager.TryGetComponent(targetUid, out LightBulbComponent? bulb)) eventArgs.Handled = TryInsertBulb(uid, targetUid, eventArgs.User, true, component, bulb); } } private void HandleInteract(EntityUid uid, LightReplacerComponent component, InteractUsingEvent eventArgs) { if (eventArgs.Handled) return; var usedUid = eventArgs.Used; // want to insert a new light bulb? if (EntityManager.TryGetComponent(usedUid, out LightBulbComponent? bulb)) eventArgs.Handled = TryInsertBulb(uid, usedUid, eventArgs.User, true, component, bulb); // add bulbs from storage? else if (EntityManager.TryGetComponent(usedUid, out ServerStorageComponent? storage)) eventArgs.Handled = TryInsertBulbsFromStorage(uid, usedUid, eventArgs.User, component, storage); } /// <summary> /// Try to replace a light bulb in <paramref name="fixtureUid"/> /// using light replacer. Light fixture should have <see cref="PoweredLightComponent"/>. /// </summary> /// <returns>True if successfully replaced light, false otherwise</returns> public bool TryReplaceBulb(EntityUid replacerUid, EntityUid fixtureUid, EntityUid? userUid = null, LightReplacerComponent? replacer = null, PoweredLightComponent? fixture = null) { if (!Resolve(replacerUid, ref replacer)) return false; if (!Resolve(fixtureUid, ref fixture)) return false; // check if light bulb is broken or missing var fixtureBulbUid = _poweredLight.GetBulb(fixture.Owner, fixture); if (fixtureBulbUid != null) { if (!EntityManager.TryGetComponent(fixtureBulbUid.Value, out LightBulbComponent? 
fixtureBulb)) return false; if (fixtureBulb.State == LightBulbState.Normal) return false; } // try to get the first inserted bulb of the same type as the targeted light fixture var bulb = replacer.InsertedBulbs.ContainedEntities.FirstOrDefault( (e) => EntityManager.GetComponentOrNull<LightBulbComponent>(e)?.Type == fixture.BulbType); // found a bulb in inserted storage if (bulb.Valid) // FirstOrDefault can return default/invalid uid. { // try to remove it var hasRemoved = replacer.InsertedBulbs.Remove(bulb); if (!hasRemoved) return false; } // try to create a new instance of the bulb from LightReplacerEntity else { var bulbEnt = replacer.Contents.FirstOrDefault((e) => e.Type == fixture.BulbType && e.Amount > 0); // found the right bulb, let's spawn it if (bulbEnt != null) { bulb = EntityManager.SpawnEntity(bulbEnt.PrototypeName, EntityManager.GetComponent<TransformComponent>(replacer.Owner).Coordinates); bulbEnt.Amount--; } // no suitable light bulbs found else { if (userUid != null) { var msg = Loc.GetString("comp-light-replacer-missing-light", ("light-replacer", replacer.Owner)); _popupSystem.PopupEntity(msg, replacerUid, Filter.Entities(userUid.Value)); } return false; } } // insert it into the fixture var wasReplaced = _poweredLight.ReplaceBulb(fixtureUid, bulb, fixture); if (wasReplaced) { SoundSystem.Play(Filter.Pvs(replacerUid), replacer.Sound.GetSound(), replacerUid, AudioParams.Default.WithVolume(-4f)); } return wasReplaced; } /// <summary> /// Try to insert a new bulb into the light replacer /// </summary> /// <returns>True if the light was successfully inserted, false otherwise</returns> public bool TryInsertBulb(EntityUid replacerUid, EntityUid bulbUid, EntityUid? userUid = null, bool showTooltip = false, LightReplacerComponent? replacer = null, LightBulbComponent? bulb = null) { if (!Resolve(replacerUid, ref replacer)) return false; if (!Resolve(bulbUid, ref bulb)) return false; // only normal (non-broken) bulbs can be inserted into the light replacer if (bulb.State != LightBulbState.Normal) { if (showTooltip && userUid != null) { var msg = Loc.GetString("comp-light-replacer-insert-broken-light"); _popupSystem.PopupEntity(msg, replacerUid, Filter.Entities(userUid.Value)); } return false; } // try to insert the light and show a message var hasInsert = replacer.InsertedBulbs.Insert(bulb.Owner); if (hasInsert && showTooltip && userUid != null) { var msg = Loc.GetString("comp-light-replacer-insert-light", ("light-replacer", replacer.Owner), ("bulb", bulb.Owner)); _popupSystem.PopupEntity(msg, replacerUid, Filter.Entities(userUid.Value)); } return hasInsert; } /// <summary> /// Try to insert all light bulbs from storage (for example, a box of light tubes) /// </summary> /// <returns> /// Returns true if the storage contained at least one light bulb /// that was successfully inserted into the light replacer /// </returns> public bool TryInsertBulbsFromStorage(EntityUid replacerUid, EntityUid storageUid, EntityUid? userUid = null, LightReplacerComponent? replacer = null, ServerStorageComponent? storage = null) { if (!Resolve(replacerUid, ref replacer)) return false; if (!Resolve(storageUid, ref storage)) return false; if (storage.StoredEntities == null) return false; var insertedBulbs = 0; var storagedEnts = storage.StoredEntities.ToArray(); foreach (var ent in storagedEnts) { if (EntityManager.TryGetComponent(ent, out LightBulbComponent?
bulb)) { if (TryInsertBulb(replacerUid, ent, userUid, false, replacer, bulb)) insertedBulbs++; } } // show some message if success if (insertedBulbs > 0 && userUid != null) { var msg = Loc.GetString("comp-light-replacer-refill-from-storage", ("light-replacer", storage.Owner)); _popupSystem.PopupEntity(msg, replacerUid, Filter.Entities(userUid.Value)); } return insertedBulbs > 0; } } }
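// --- Illustrative sketch (not part of the file above) -----------------------------------------
// Shows how another server-side system could call the public LightReplacerSystem API, using the
// same [Dependency] injection pattern as LightReplacerSystem itself. The system, namespace and
// method names here are made up; the component types and the TryReplaceBulb signature come from
// the file above.
using Content.Server.Light.Components;
using Content.Server.Light.EntitySystems;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;

namespace Content.Server.Light.Sketches // hypothetical namespace
{
    public sealed class LightMaintenanceSketchSystem : EntitySystem
    {
        [Dependency] private readonly LightReplacerSystem _lightReplacer = default!;

        /// <summary>
        ///     Tries to service a light fixture with the given replacer, mirroring the
        ///     AfterInteract path of LightReplacerSystem.
        /// </summary>
        public bool ServiceFixture(EntityUid replacerUid, EntityUid fixtureUid, EntityUid user)
        {
            // Resolve both components up front so the call fails cheaply when either is missing.
            if (!EntityManager.TryGetComponent(replacerUid, out LightReplacerComponent? replacer) ||
                !EntityManager.TryGetComponent(fixtureUid, out PoweredLightComponent? fixture))
                return false;

            // TryReplaceBulb only succeeds when the fixture's bulb is broken or missing and the
            // replacer holds (or can spawn) a bulb of the matching type.
            return _lightReplacer.TryReplaceBulb(replacerUid, fixtureUid, user, replacer, fixture);
        }
    }
}
// ----------------------------------------------------------------------------------------------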
/* * ObjectManager.cs - Implementation of the * "System.Runtime.Serialization.ObjectManager" class. * * Copyright (C) 2002, 2003 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Runtime.Serialization { #if CONFIG_SERIALIZATION using System; using System.Collections; using System.Reflection; using System.Security.Permissions; using System.Runtime.Serialization; public class ObjectManager { // Common information that is stored for a member fixup. private abstract class ObjectFixup { public ObjectManager manager; public ObjectInfo value; public ObjectFixup nextFixup; // Constructor. protected ObjectFixup(ObjectManager manager, ObjectInfo value, ObjectFixup nextFixup) { this.manager = manager; this.value = value; this.nextFixup = nextFixup; } // Apply this fixup to an object. public virtual void Apply(ObjectInfo objI) { throw new SerializationException (_("Serialize_BadFixup")); } }; // class ObjectFixup // Fixup that uses a MemberInfo. private class MemberInfoFixup : ObjectFixup { private MemberInfo member; // Constructor. public MemberInfoFixup(ObjectManager manager, ObjectInfo value, MemberInfo member, ObjectFixup nextFixup) : base(manager, value, nextFixup) { this.member = member; } // Apply this fixup to an object. public override void Apply(ObjectInfo objI) { if(member is FieldInfo) { ((FieldInfo)member).SetValue(objI.obj, value.obj); } else { throw new SerializationException (_("Serialize_BadFixup")); } } }; // class MemberInfoFixup // Fixup that uses a member name. private class MemberNameFixup : ObjectFixup { private String memberName; // Constructor. public MemberNameFixup(ObjectManager manager, ObjectInfo value, String memberName, ObjectFixup nextFixup) : base(manager, value, nextFixup) { this.memberName = memberName; } // Apply this fixup to an object. public override void Apply(ObjectInfo objI) { if(objI.sinfo != null) { objI.sinfo.AddValue(memberName, value.obj); if(nextFixup == null) { BindingFlags flags = BindingFlags.Public|BindingFlags.NonPublic|BindingFlags.Instance; Type[] pTypes = new Type[] {typeof(SerializationInfo), typeof(StreamingContext)}; Type type = objI.obj.GetType(); ConstructorInfo ctor = type.GetConstructor(flags, null , pTypes, null); if(ctor == null) { throw new SerializationException (_("Serialize_BadFixup")); } Object[] parms = new Object[] {objI.sinfo, manager.context}; ctor.InvokeOnEmpty(objI.obj, parms); } } else { MemberInfo[] member = objI.obj.GetType().GetMember (memberName); if(member == null || member.Length != 1) { throw new SerializationException (_("Serialize_BadFixup")); } if(member[0] is FieldInfo) { ((FieldInfo)(member[0])).SetValue(objI.obj, value.obj); } else { throw new SerializationException (_("Serialize_BadFixup")); } } } }; // class MemberNameFixup // Fixup that uses an array index. 
private class ArrayIndexFixup : ObjectFixup { private int[] indices; // Constructor. public ArrayIndexFixup(ObjectManager manager, ObjectInfo value, int[] indices, ObjectFixup nextFixup) : base(manager, value, nextFixup) { this.indices = indices; } // Apply this fixup to an object. public override void Apply(ObjectInfo objI) { ((Array)objI.obj).SetValue(value.obj, indices); } }; // class ArrayIndexFixup // Fixup that uses an index into a single-dimensional array. private class SingleArrayIndexFixup : ObjectFixup { private int index; // Constructor. public SingleArrayIndexFixup(ObjectManager manager, ObjectInfo value, int index, ObjectFixup nextFixup) : base(manager, value, nextFixup) { this.index = index; } // Apply this fixup to an object. public override void Apply(ObjectInfo objI) { ((Array)objI.obj).SetValue(value.obj, index); } }; // class SingleArrayIndexFixup // Information that is stored for an object identifier. private sealed class ObjectInfo { public Object obj; public SerializationInfo sinfo; public long idOfContainingObject; public MemberInfo member; public int[] arrayIndex; public ObjectInfo contains; public ObjectInfo nextContains; public ObjectFixup fixups; public bool done; }; // class ObjectInfo // Internal state. private ISurrogateSelector selector; protected StreamingContext context; private Hashtable objects; private ArrayList callbackList; // Constructor. public ObjectManager(ISurrogateSelector selector, StreamingContext context) { // Make sure that we have the correct permissions. SecurityPermission perm = new SecurityPermission (SecurityPermissionFlag.SerializationFormatter); perm.Demand(); // Initialize the object manager. this.selector = selector; this.context = context; this.objects = new Hashtable(16); // avoid expanding of hashtable this.callbackList = new ArrayList(); } // Apply a contained member fixup. private static void ApplyContained(ObjectInfo oinfo, ObjectInfo contain) { if(contain.member != null) { if(contain.member is FieldInfo) { ((FieldInfo)(contain.member)).SetValue (oinfo.obj, contain.obj); } else { throw new SerializationException (_("Serialize_BadFixup")); } } else if(contain.arrayIndex != null) { ((Array)(oinfo.obj)).SetValue (contain.obj, contain.arrayIndex); } else { throw new SerializationException (_("Serialize_BadFixup")); } } // Perform recorded fixups for contained objects. private static void DoFixupsForContained(ObjectInfo oinfo) { ObjectInfo contain = oinfo.contains; ObjectFixup fixup; do { if(!(contain.done)) { contain.done = true; if(contain.obj == null) { throw new SerializationException (_("Serialize_MissingFixup")); } if(contain.contains != null) { DoFixupsForContained(contain); } fixup = contain.fixups; while(fixup != null) { if(fixup.value.obj == null) { throw new SerializationException (_("Serialize_MissingFixup")); } fixup.Apply(contain); fixup = fixup.nextFixup; } ApplyContained(oinfo, contain); } contain = contain.nextContains; } while(contain != null); } // Perform recorded fixups. public virtual void DoFixups() { IDictionaryEnumerator e = objects.GetEnumerator(); ObjectInfo oinfo; ObjectFixup fixup; while(e.MoveNext()) { oinfo = (ObjectInfo)(e.Value); if(oinfo.obj == null) { throw new SerializationException (_("Serialize_MissingFixup")); } if(oinfo.done || oinfo.idOfContainingObject > 0) { // We already saw this object or the object is // contained within something at a higher level. continue; } oinfo.done = true; if(oinfo.contains != null) { // Handle value type members within this object. 
DoFixupsForContained(oinfo); } fixup = oinfo.fixups; while(fixup != null) { if(fixup.value.obj == null) { throw new SerializationException (_("Serialize_MissingFixup")); } fixup.Apply(oinfo); fixup = fixup.nextFixup; } } RaiseDeserializationEvent(); } // Return an object with a specific identifier. public virtual Object GetObject(long objectID) { if(objectID <= 0) { throw new ArgumentOutOfRangeException ("objectID", _("Serialize_BadObjectID")); } ObjectInfo info = (ObjectInfo)(objects[objectID]); if(info != null) { return info.obj; } else { return null; } } // Raise a deserialization event on all registered objects that want it. public virtual void RaiseDeserializationEvent() { IEnumerator e = callbackList.GetEnumerator(); while(e.MoveNext()) { IDeserializationCallback cb; cb = (e.Current as IDeserializationCallback); if(cb != null) { cb.OnDeserialization(null); } } } // Get the object information for an object, or add a new one. private ObjectInfo GetObjectInfo(long objectID) { if(objectID <= 0) { throw new ArgumentOutOfRangeException ("objectID", _("Serialize_BadObjectID")); } ObjectInfo oinfo = (ObjectInfo)(objects[objectID]); if(oinfo != null) { return oinfo; } else { oinfo = new ObjectInfo(); objects[objectID] = oinfo; return oinfo; } } // Record an array element fixup to be performed later. public virtual void RecordArrayElementFixup (long arrayToBeFixed, int index, long objectRequired) { ObjectInfo oinfo1 = GetObjectInfo(arrayToBeFixed); ObjectInfo oinfo2 = GetObjectInfo(objectRequired); oinfo1.fixups = new SingleArrayIndexFixup (this, oinfo2, index, oinfo1.fixups); } public virtual void RecordArrayElementFixup (long arrayToBeFixed, int[] indices, long objectRequired) { ObjectInfo oinfo1 = GetObjectInfo(arrayToBeFixed); ObjectInfo oinfo2 = GetObjectInfo(objectRequired); if(indices == null) { throw new ArgumentNullException("indices"); } oinfo1.fixups = new ArrayIndexFixup (this, oinfo2, indices, oinfo1.fixups); } // Record an object member fixup to be performed later. public virtual void RecordDelayedFixup (long objectToBeFixed, String memberName, long objectRequired) { ObjectInfo oinfo1 = GetObjectInfo(objectToBeFixed); ObjectInfo oinfo2 = GetObjectInfo(objectRequired); if(memberName == null) { throw new ArgumentNullException("memberName"); } oinfo1.fixups = new MemberNameFixup (this, oinfo2, memberName, oinfo1.fixups); } public virtual void RecordFixup (long objectToBeFixed, MemberInfo member, long objectRequired) { ObjectInfo oinfo1 = GetObjectInfo(objectToBeFixed); ObjectInfo oinfo2 = GetObjectInfo(objectRequired); if(member == null) { throw new ArgumentNullException("member"); } oinfo1.fixups = new MemberInfoFixup (this, oinfo2, member, oinfo1.fixups); } // Register an object with the object manager. 
public virtual void RegisterObject(Object obj, long objectID) { RegisterObject(obj, objectID, null, 0, null, null); } public void RegisterObject(Object obj, long objectID, SerializationInfo info) { RegisterObject(obj, objectID, info, 0, null, null); } public void RegisterObject(Object obj, long objectID, SerializationInfo info, long idOfContainingObj, MemberInfo member) { RegisterObject(obj, objectID, info, idOfContainingObj, member, null); } public void RegisterObject(Object obj, long objectID, SerializationInfo info, long idOfContainingObj, MemberInfo member, int[] arrayIndex) { if(obj == null) { throw new ArgumentNullException("obj"); } if(objectID <= 0) { throw new ArgumentOutOfRangeException ("objectID", _("Serialize_BadObjectID")); } ObjectInfo oinfo = (ObjectInfo)(objects[objectID]); if(oinfo != null && oinfo.obj != null && oinfo.obj != obj) { throw new SerializationException (_("Serialize_AlreadyRegistered")); } else if(oinfo != null) { // Update the information for an existing reference. oinfo.obj = obj; if(obj is IDeserializationCallback) { callbackList.Add(obj); } if(info != null) { oinfo.sinfo = info; } if(member != null) { oinfo.member = member; } if(arrayIndex != null) { oinfo.arrayIndex = arrayIndex; } if(idOfContainingObj != 0 && oinfo.idOfContainingObject == 0) { oinfo.idOfContainingObject = idOfContainingObj; RegisterWithContaining(oinfo); } } else { // Create a new object information block. oinfo = new ObjectInfo(); oinfo.obj = obj; oinfo.sinfo = info; oinfo.idOfContainingObject = idOfContainingObj; oinfo.member = member; oinfo.arrayIndex = arrayIndex; objects[objectID] = oinfo; // Register the object to be called later by // "RaiseDeserializationEvent". if(obj is IDeserializationCallback) { callbackList.Add(obj); } // Register the information block with the container. if(idOfContainingObj > 0) { RegisterWithContaining(oinfo); } } } // Register an object with its containing object. private void RegisterWithContaining(ObjectInfo oinfo) { ObjectInfo oinfo2 = (ObjectInfo)(objects[oinfo.idOfContainingObject]); if(oinfo2 == null) { oinfo2 = new ObjectInfo(); objects[oinfo.idOfContainingObject] = oinfo2; } oinfo.nextContains = oinfo2.contains; oinfo2.contains = oinfo; } }; // class ObjectManager #endif // CONFIG_SERIALIZATION }; // namespace System.Runtime.Serialization
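// --- Illustrative sketch (not part of the file above) -----------------------------------------
// Shows the fixup workflow the ObjectManager implements: a forward reference is recorded before
// the referenced object exists, and DoFixups patches it in once both objects are registered. Only
// the public surface implemented above (RegisterObject, RecordFixup, DoFixups) is called; the Node
// type and the object IDs 1 and 2 are made up.
using System;
using System.Reflection;
using System.Runtime.Serialization;

public static class ObjectManagerFixupSketch
{
    public class Node
    {
        public Node Next;
        public int Value;
    }

    public static void Run()
    {
        var context = new StreamingContext(StreamingContextStates.All);
        var manager = new ObjectManager(null, context);

        // Object #1 references object #2 before #2 has been deserialized, so record a member
        // fixup for the Next field instead of assigning it immediately.
        FieldInfo nextField = typeof(Node).GetField("Next");
        manager.RecordFixup(1, nextField, 2);

        var first = new Node { Value = 1 };
        var second = new Node { Value = 2 };
        manager.RegisterObject(first, 1);
        manager.RegisterObject(second, 2);

        // DoFixups applies the recorded fixups and then raises IDeserializationCallback events.
        manager.DoFixups();
        Console.WriteLine(first.Next.Value); // 2
    }
}
// ----------------------------------------------------------------------------------------------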
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ /* ================================================================ * About NPOI * Author: Tony Qu * Author's email: tonyqus (at) gmail.com * Author's Blog: tonyqus.wordpress.com.cn (wp.tonyqus.cn) * HomePage: http://www.codeplex.com/npoi * Contributors: * * ==============================================================*/ using System; using System.Text; using System.IO; using System.Collections; namespace NPOI.Util.Collections { /// <summary> /// This class comes from Java /// </summary> public class Properties { private Hashtable _col; private const string whiteSpaceChars = " \t\r\n\f"; private const string keyValueSeparators = "=: \t\r\n\f"; private const string strictKeyValueSeparators = "=:"; /// <summary> /// Initializes a new instance of the <see cref="Properties"/> class. /// </summary> public Properties() { _col = new Hashtable(); } /// <summary> /// Removes the specified key. /// </summary> /// <param name="key">The key.</param> /// <returns></returns> public string Remove(string key) { string retval = (string)_col[key]; _col.Remove(key); return retval; } /// <summary> /// Gets the enumerator. /// </summary> /// <returns></returns> public IEnumerator GetEnumerator() { return _col.GetEnumerator(); } /// <summary> /// Determines whether the specified key contains key. /// </summary> /// <param name="key">The key.</param> /// <returns> /// <c>true</c> if the specified key contains key; otherwise, <c>false</c>. /// </returns> public bool ContainsKey(string key) { return _col.ContainsKey(key); } /// <summary> /// Adds the specified key. /// </summary> /// <param name="key">The key.</param> /// <param name="value">The value.</param> public virtual void Add(string key, string value) { _col[key] = value; } public void AddAll(Properties col) { foreach (string itm in col.Keys) { _col[itm] = col[itm]; } } /// <summary> /// Gets the count. /// </summary> /// <value>The count.</value> public int Count { get { return _col.Count; } } /// <summary> /// Gets or sets the <see cref="System.String"/> with the specified key. /// </summary> /// <value></value> public virtual string this[string key] { get { return (string)_col[key]; } set { _col[key] = value; } } /// <summary> /// Gets the keys. /// </summary> /// <value>The keys.</value> public ICollection Keys { get { return _col.Keys; } } /// <summary> /// Clears this instance. /// </summary> public void Clear() { _col.Clear(); } /// <summary> /// Loads the specified in stream. 
/// </summary> /// <param name="inStream">The in stream.</param> public void Load(Stream inStream) { StreamReader inp = new StreamReader(inStream, Encoding.GetEncoding(1252)); while (true) { // Get next line String line = inp.ReadLine(); if (line == null) return; if (line.Length > 0) { // Find start of key int len = line.Length; int keyStart; for (keyStart=0; keyStart<len; keyStart++) if (whiteSpaceChars.IndexOf(line[keyStart]) == -1) break; // Blank lines are ignored if (keyStart == len) continue; // Continue lines that end in slashes if they are not comments char firstChar = line[keyStart]; if ((firstChar != '#') && (firstChar != '!')) { while (ContinueLine(line)) { String nextLine = inp.ReadLine(); if (nextLine == null) nextLine = ""; String loppedLine = line.Substring(0, len-1); // Advance beyond whitespace on new line int startIndex; for (startIndex=0; startIndex<nextLine.Length; startIndex++) if (whiteSpaceChars.IndexOf(nextLine[startIndex]) == -1) break; nextLine = nextLine.Substring(startIndex,nextLine.Length - startIndex); line = loppedLine+nextLine; len = line.Length; } // Find separation between key and value int separatorIndex; for (separatorIndex=keyStart; separatorIndex<len; separatorIndex++) { char currentChar = line[separatorIndex]; if (currentChar == '\\') separatorIndex++; else if (keyValueSeparators.IndexOf(currentChar) != -1) break; } // Skip over whitespace after key if any int valueIndex; for (valueIndex=separatorIndex; valueIndex<len; valueIndex++) if (whiteSpaceChars.IndexOf(line[valueIndex]) == -1) break; // Skip over one non whitespace key value separators if any if (valueIndex < len) if (strictKeyValueSeparators.IndexOf(line[valueIndex]) != -1) valueIndex++; // Skip over white space after other separators if any while (valueIndex < len) { if (whiteSpaceChars.IndexOf(line[valueIndex]) == -1) break; valueIndex++; } String key = line.Substring(keyStart, separatorIndex - keyStart); String value = (separatorIndex < len) ? line.Substring(valueIndex, len - valueIndex) : ""; // Convert then store key and value key = LoadConvert(key); value = LoadConvert(value); Add(key, value); } } } } /// <summary> /// Loads the convert. /// </summary> /// <param name="theString">The string.</param> /// <returns></returns> /// <remarks> /// Converts encoded &#92;uxxxx to unicode chars /// and changes special saved chars to their original forms /// </remarks> private String LoadConvert(String theString) { char aChar; int len = theString.Length; StringBuilder outBuffer = new StringBuilder(len); for (int x=0; x<len; ) { aChar = theString[x++]; if (aChar == '\\') { aChar = theString[x++]; if (aChar == 'u') { // Read the xxxx int value=0; for (int i=0; i<4; i++) { aChar = theString[x++]; switch (aChar) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': value = (value << 4) + aChar - '0'; break; case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': value = (value << 4) + 10 + aChar - 'a'; break; case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': value = (value << 4) + 10 + aChar - 'A'; break; default: throw new ArgumentException( "Malformed \\uxxxx encoding."); } } outBuffer.Append((char)value); } else { if (aChar == 't') aChar = '\t'; else if (aChar == 'r') aChar = '\r'; else if (aChar == 'n') aChar = '\n'; else if (aChar == 'f') aChar = '\f'; outBuffer.Append(aChar); } } else outBuffer.Append(aChar); } return outBuffer.ToString(); } /// <summary> /// Continues the line. 
/// </summary> /// <param name="line">The line.</param> /// <returns></returns> private bool ContinueLine(String line) { int slashCount = 0; int index = line.Length - 1; while ((index >= 0) && (line[index--] == '\\')) slashCount++; return (slashCount % 2 == 1); } } }
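// --- Illustrative usage sketch (not part of the file above) -----------------------------------
// Feeds a small Java-style .properties payload through Properties.Load and reads the parsed
// values back. The keys and values are made up; the behaviour shown (comments skipped, \uXXXX
// decoded, trailing-backslash continuation lines joined) follows the Load/LoadConvert code above.
using System;
using System.IO;
using System.Text;
using NPOI.Util.Collections;

public static class PropertiesSketch
{
    public static void Run()
    {
        const string payload =
            "# comment lines and blank lines are ignored\n" +
            "greeting=Hello\\u0021\n" +   // \u0021 decodes to '!'
            "wrapped=first \\\n" +        // trailing backslash continues onto the next line
            "    and second\n";

        var props = new Properties();
        using (var stream = new MemoryStream(Encoding.GetEncoding(1252).GetBytes(payload)))
        {
            props.Load(stream);
        }

        Console.WriteLine(props["greeting"]);          // Hello!
        Console.WriteLine(props["wrapped"]);           // first and second
        Console.WriteLine(props.ContainsKey("title")); // False
    }
}
// ----------------------------------------------------------------------------------------------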
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System.Diagnostics.CodeAnalysis; using System.Security; using System.Threading; using System.Threading.Tasks; namespace System.IO.Pipes { /// <summary> /// Named pipe server /// </summary> public sealed partial class NamedPipeServerStream : PipeStream { // Use the maximum number of server instances that the system resources allow public const int MaxAllowedServerInstances = -1; [SecuritySafeCritical] public NamedPipeServerStream(String pipeName) : this(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None) { } [SecuritySafeCritical] public NamedPipeServerStream(String pipeName, PipeDirection direction) : this(pipeName, direction, 1, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None) { } [SecuritySafeCritical] public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances) : this(pipeName, direction, maxNumberOfServerInstances, PipeTransmissionMode.Byte, PipeOptions.None, 0, 0, HandleInheritability.None) { } [SecuritySafeCritical] public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode) : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, PipeOptions.None, 0, 0, HandleInheritability.None) { } [SecuritySafeCritical] public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options) : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, 0, 0, HandleInheritability.None) { } [SecuritySafeCritical] public NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize) : this(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, inBufferSize, outBufferSize, HandleInheritability.None) { } /// <summary> /// Full named pipe server constructor /// </summary> /// <param name="pipeName">Pipe name</param> /// <param name="direction">Pipe direction: In, Out or InOut (duplex). /// Win32 note: this gets OR'd into dwOpenMode to CreateNamedPipe /// </param> /// <param name="maxNumberOfServerInstances">Maximum number of server instances. Specify a fixed value between /// 1 and 254 (Windows)/greater than 1 (Unix), or use NamedPipeServerStream.MaxAllowedServerInstances to use the /// maximum amount allowed by system resources.</param> /// <param name="transmissionMode">Byte mode or message mode. /// Win32 note: this gets used for dwPipeMode. CreateNamedPipe allows you to specify PIPE_TYPE_BYTE/MESSAGE /// and PIPE_READMODE_BYTE/MESSAGE independently, but this sets type and readmode to match. /// </param> /// <param name="options">PipeOption enum: None, Asynchronous, or Write-through /// Win32 note: this gets passed in with dwOpenMode to CreateNamedPipe. Asynchronous corresponds to /// FILE_FLAG_OVERLAPPED option. PipeOptions enum doesn't expose FIRST_PIPE_INSTANCE option because /// this sets that automatically based on the number of instances specified. /// </param> /// <param name="inBufferSize">Incoming buffer size, 0 or higher. 
/// Note: this size is always advisory; OS uses a suggestion. /// </param> /// <param name="outBufferSize">Outgoing buffer size, 0 or higher (see above)</param> /// <param name="pipeSecurity">PipeSecurity, or null for default security descriptor</param> /// <param name="inheritability">Whether handle is inheritable</param> /// <param name="additionalAccessRights">Combination (logical OR) of PipeAccessRights.TakeOwnership, /// PipeAccessRights.AccessSystemSecurity, and PipeAccessRights.ChangePermissions</param> [SecuritySafeCritical] private NamedPipeServerStream(String pipeName, PipeDirection direction, int maxNumberOfServerInstances, PipeTransmissionMode transmissionMode, PipeOptions options, int inBufferSize, int outBufferSize, HandleInheritability inheritability) : base(direction, transmissionMode, outBufferSize) { if (pipeName == null) { throw new ArgumentNullException(nameof(pipeName)); } if (pipeName.Length == 0) { throw new ArgumentException(SR.Argument_NeedNonemptyPipeName); } if ((options & ~(PipeOptions.WriteThrough | PipeOptions.Asynchronous)) != 0) { throw new ArgumentOutOfRangeException(nameof(options), SR.ArgumentOutOfRange_OptionsInvalid); } if (inBufferSize < 0) { throw new ArgumentOutOfRangeException(nameof(inBufferSize), SR.ArgumentOutOfRange_NeedNonNegNum); } if ((maxNumberOfServerInstances < 1 || maxNumberOfServerInstances > 254) && (maxNumberOfServerInstances != MaxAllowedServerInstances)) { // win32 allows fixed values of 1-254 or 255 to mean max allowed by system. We expose 255 as -1 (unlimited) // through the MaxAllowedServerInstances constant. This is consistent e.g. with -1 as infinite timeout, etc. // We do this check for consistency on Unix, even though maxNumberOfServerInstances is otherwise ignored. throw new ArgumentOutOfRangeException(nameof(maxNumberOfServerInstances), SR.ArgumentOutOfRange_MaxNumServerInstances); } // inheritability will always be None since this private constructor is only called from other constructors from which // inheritability is always set to None. Desktop has a public constructor to allow setting it to something else, but Core // doesn't. if (inheritability < HandleInheritability.None || inheritability > HandleInheritability.Inheritable) { throw new ArgumentOutOfRangeException(nameof(inheritability), SR.ArgumentOutOfRange_HandleInheritabilityNoneOrInheritable); } Create(pipeName, direction, maxNumberOfServerInstances, transmissionMode, options, inBufferSize, outBufferSize, inheritability); } // Create a NamedPipeServerStream from an existing server pipe handle. 
[SecuritySafeCritical] public NamedPipeServerStream(PipeDirection direction, bool isAsync, bool isConnected, SafePipeHandle safePipeHandle) : base(direction, PipeTransmissionMode.Byte, 0) { if (safePipeHandle == null) { throw new ArgumentNullException(nameof(safePipeHandle)); } if (safePipeHandle.IsInvalid) { throw new ArgumentException(SR.Argument_InvalidHandle, nameof(safePipeHandle)); } ValidateHandleIsPipe(safePipeHandle); InitializeHandle(safePipeHandle, true, isAsync); if (isConnected) { State = PipeState.Connected; } } ~NamedPipeServerStream() { Dispose(false); } public Task WaitForConnectionAsync() { return WaitForConnectionAsync(CancellationToken.None); } public System.IAsyncResult BeginWaitForConnection(AsyncCallback callback, object state) => TaskToApm.Begin(WaitForConnectionAsync(), callback, state); public void EndWaitForConnection(IAsyncResult asyncResult) => TaskToApm.End(asyncResult); // Server can only connect from Disconnected state [SecurityCritical] [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")] private void CheckConnectOperationsServer() { // we're not checking whether already connected; this allows us to throw IOException // "pipe is being closed" if other side is closing (as does win32) or no-op if // already connected if (State == PipeState.Closed) { throw Error.GetPipeNotOpen(); } if (InternalHandle != null && InternalHandle.IsClosed) // only check IsClosed if we have a handle { throw Error.GetPipeNotOpen(); } if (State == PipeState.Broken) { throw new IOException(SR.IO_PipeBroken); } } // Server is allowed to disconnect from connected and broken states [SecurityCritical] private void CheckDisconnectOperations() { if (State == PipeState.WaitingToConnect) { throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected); } if (State == PipeState.Disconnected) { throw new InvalidOperationException(SR.InvalidOperation_PipeAlreadyDisconnected); } if (InternalHandle == null && CheckOperationsRequiresSetHandle) { throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet); } if ((State == PipeState.Closed) || (InternalHandle != null && InternalHandle.IsClosed)) { throw Error.GetPipeNotOpen(); } } } // Users will use this delegate to specify a method to call while impersonating the client // (see NamedPipeServerStream.RunAsClient). public delegate void PipeStreamImpersonationWorker(); }
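// --- Illustrative usage sketch (not part of the file above) -----------------------------------
// Pairs the NamedPipeServerStream above with the standard NamedPipeClientStream from the same
// System.IO.Pipes namespace for a single round trip. The pipe name and message are made up.
using System;
using System.IO;
using System.IO.Pipes;
using System.Threading.Tasks;

public static class NamedPipeSketch
{
    public static async Task RunAsync()
    {
        using (var server = new NamedPipeServerStream("demo-pipe", PipeDirection.InOut))
        using (var client = new NamedPipeClientStream(".", "demo-pipe", PipeDirection.InOut))
        {
            // WaitForConnectionAsync completes once the client attaches.
            Task serverReady = server.WaitForConnectionAsync();
            await client.ConnectAsync();
            await serverReady;

            using (var reader = new StreamReader(server))
            using (var writer = new StreamWriter(client) { AutoFlush = true })
            {
                // Start the read first, then write from the client side.
                Task<string> readTask = reader.ReadLineAsync();
                await writer.WriteLineAsync("hello over the pipe");
                Console.WriteLine(await readTask); // hello over the pipe
            }
        }
    }
}
// ----------------------------------------------------------------------------------------------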
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using OpenMetaverse; using OpenMetaverse.StructuredData; using Aurora.Framework; namespace Aurora.Framework { [Flags] public enum IAgentFlags : uint { Foreign = 1, Temperary = 2, Minor = 4, Locked = 8, PermBan = 16, TempBan = 32, Blocked = 64, Local = 128, LocalOnly = 256, PastPrelude = 512 } public class IAgentInfo : IDataTransferable, BaseCacheAccount { /// <summary> /// Did this user accept the TOS? /// </summary> public bool AcceptTOS; /// <summary> /// AgentFlags /// </summary> public IAgentFlags Flags = 0; /// <summary> /// Current language /// </summary> public string Language = "en-us"; /// <summary> /// Is the users language public /// </summary> public bool LanguageIsPublic = true; /// <summary> /// Max maturity rating the user wishes to see /// </summary> public int MaturityRating = 2; /// <summary> /// Max maturity rating the user can ever to see /// </summary> public int MaxMaturity = 2; /// <summary> /// Other information can be stored in here. 
/// For ex, temperary ban info for this user /// </summary> public OSDMap OtherAgentInformation = new OSDMap(); /// <summary> /// The ID value for this user /// </summary> public UUID PrincipalID { get; set; } /// <summary> /// Unused, only exists for caching purposes /// </summary> public string Name { get; set; } public override Dictionary<string, object> ToKVP() { return Util.OSDToDictionary(ToOSD()); } public override OSDMap ToOSD() { OSDMap map = new OSDMap { {"PrincipalID", OSD.FromUUID(PrincipalID)}, {"Flags", OSD.FromInteger((int) Flags)}, {"MaxMaturity", OSD.FromInteger(MaxMaturity)}, {"MaturityRating", OSD.FromInteger(MaturityRating)}, {"Language", OSD.FromString(Language)}, {"AcceptTOS", OSD.FromBoolean(AcceptTOS)}, {"LanguageIsPublic", OSD.FromBoolean(LanguageIsPublic)}, { "OtherAgentInformation", OSD.FromString(OSDParser.SerializeLLSDXmlString(OtherAgentInformation)) } }; return map; } public override void FromOSD(OSDMap map) { PrincipalID = map["PrincipalID"].AsUUID(); Flags = (IAgentFlags) map["Flags"].AsInteger(); MaxMaturity = Convert.ToInt32(map["MaxMaturity"].AsInteger()); MaturityRating = Convert.ToInt32(map["MaturityRating"].AsInteger()); Language = map["Language"].AsString(); AcceptTOS = map["AcceptTOS"].AsBoolean(); LanguageIsPublic = map["LanguageIsPublic"].AsBoolean(); if (map.ContainsKey("OtherAgentInformation")) OtherAgentInformation = (OSDMap) OSDParser.DeserializeLLSDXml(map["OtherAgentInformation"].AsString()); } public override void FromKVP(Dictionary<string, object> RetVal) { FromOSD(Util.DictionaryToOSD(RetVal)); } } public class IUserProfileInfo : IDataTransferable { #region ProfileFlags enum public enum ProfileFlags { NoPaymentInfoOnFile = 2, PaymentInfoOnFile = 4, PaymentInfoInUse = 8, AgentOnline = 16 } #endregion /// <summary> /// The appearance archive to load for this user /// </summary> public string AArchiveName = String.Empty; /// <summary> /// The about text listed in a users profile. /// </summary> public string AboutText = String.Empty; /// <summary> /// Show in search /// </summary> public bool AllowPublish = true; /// <summary> /// A UNIX Timestamp (seconds since epoch) for the users creation /// </summary> public int Created = Util.UnixTimeSinceEpoch(); /// <summary> /// The type of the user /// </summary> public string CustomType = String.Empty; /// <summary> /// The display name of the avatar /// </summary> public string DisplayName = String.Empty; /// <summary> /// The first life about text listed in a users profile /// </summary> public string FirstLifeAboutText = String.Empty; /// <summary> /// The profile image for the users first life tab /// </summary> public UUID FirstLifeImage = UUID.Zero; /// <summary> /// Should IM's be sent to the user's email? /// </summary> public bool IMViaEmail; /// <summary> /// The profile image for an avatar stored on the asset server /// </summary> public UUID Image = UUID.Zero; /// <summary> /// The interests of the user /// </summary> public ProfileInterests Interests = new ProfileInterests(); /// <summary> /// Is the user a new user? 
/// </summary> public bool IsNewUser = true; /// <summary> /// Allow for mature publishing /// </summary> public bool MaturePublish; /// <summary> /// The group that the user is assigned to, ex: Premium /// </summary> public string MembershipGroup = String.Empty; /// <summary> /// All of the notes of the user /// </summary> /// UUID - target agent /// string - notes public OSDMap Notes = new OSDMap(); /// <summary> /// The partner of this user /// </summary> public UUID Partner = UUID.Zero; /// <summary> /// The ID value for this user /// </summary> public UUID PrincipalID = UUID.Zero; /// <summary> /// Is this user's online status visible to others? /// </summary> public bool Visible = true; /// <summary> /// the web address of the Profile URL /// </summary> public string WebURL = String.Empty; public override Dictionary<string, object> ToKVP() { return Util.OSDToDictionary(ToOSD()); } public override OSDMap ToOSD() { return ToOSD(true); } /// <summary> /// This method creates a smaller OSD that /// does not contain sensitive information /// if the trusted boolean is false /// </summary> /// <param name = "secure"></param> /// <returns></returns> public OSDMap ToOSD(bool trusted) { OSDMap map = new OSDMap { {"PrincipalID", OSD.FromUUID(PrincipalID)}, {"AllowPublish", OSD.FromBoolean(AllowPublish)}, {"MaturePublish", OSD.FromBoolean(MaturePublish)}, {"WantToMask", OSD.FromUInteger(Interests.WantToMask)}, {"WantToText", OSD.FromString(Interests.WantToText)}, {"CanDoMask", OSD.FromUInteger(Interests.CanDoMask)}, {"CanDoText", OSD.FromString(Interests.CanDoText)}, {"Languages", OSD.FromString(Interests.Languages)}, {"AboutText", OSD.FromString(AboutText)}, {"FirstLifeImage", OSD.FromUUID(FirstLifeImage)}, {"FirstLifeAboutText", OSD.FromString(FirstLifeAboutText)}, {"Image", OSD.FromUUID(Image)}, {"WebURL", OSD.FromString(WebURL)}, {"Created", OSD.FromInteger(Created)}, {"DisplayName", OSD.FromString(DisplayName)}, {"Partner", OSD.FromUUID(Partner)}, {"Visible", OSD.FromBoolean(Visible)}, {"CustomType", OSD.FromString(CustomType)} }; if (trusted) { map.Add("AArchiveName", OSD.FromString(AArchiveName)); map.Add("IMViaEmail", OSD.FromBoolean(IMViaEmail)); map.Add("IsNewUser", OSD.FromBoolean(IsNewUser)); map.Add("MembershipGroup", OSD.FromString(MembershipGroup)); } map.Add("Notes", OSD.FromString(OSDParser.SerializeJsonString(Notes))); return map; } public override void FromOSD(OSDMap map) { PrincipalID = map["PrincipalID"].AsUUID(); AllowPublish = map["AllowPublish"].AsBoolean(); MaturePublish = map["MaturePublish"].AsBoolean(); //Interests Interests = new ProfileInterests { WantToMask = map["WantToMask"].AsUInteger(), WantToText = map["WantToText"].AsString(), CanDoMask = map["CanDoMask"].AsUInteger(), CanDoText = map["CanDoText"].AsString(), Languages = map["Languages"].AsString() }; //End interests try { if (map.ContainsKey("Notes")) Notes = (OSDMap) OSDParser.DeserializeJson(map["Notes"].AsString()); } catch { } AboutText = map["AboutText"].AsString(); FirstLifeImage = map["FirstLifeImage"].AsUUID(); FirstLifeAboutText = map["FirstLifeAboutText"].AsString(); Image = map["Image"].AsUUID(); WebURL = map["WebURL"].AsString(); Created = map["Created"].AsInteger(); DisplayName = map["DisplayName"].AsString(); Partner = map["Partner"].AsUUID(); Visible = map["Visible"].AsBoolean(); AArchiveName = map["AArchiveName"].AsString(); CustomType = map["CustomType"].AsString(); IMViaEmail = map["IMViaEmail"].AsBoolean(); IsNewUser = map["IsNewUser"].AsBoolean(); MembershipGroup = 
map["MembershipGroup"].AsString(); } public override void FromKVP(Dictionary<string, object> RetVal) { FromOSD(Util.DictionaryToOSD(RetVal)); } } public class ProfileInterests { public uint CanDoMask; public string CanDoText = ""; public string Languages = ""; public uint WantToMask; public string WantToText = ""; } public class Classified : IDataTransferable { public uint Category; public byte ClassifiedFlags; public UUID ClassifiedUUID; public uint CreationDate; public UUID CreatorUUID; public string Description; public uint ExpirationDate; public Vector3 GlobalPos; public string Name; public string ParcelName; public UUID ParcelUUID; public uint ParentEstate; public int PriceForListing; public UUID ScopeID; public string SimName; public UUID SnapshotUUID; public override OSDMap ToOSD() { OSDMap Classified = new OSDMap { {"ClassifiedUUID", OSD.FromUUID(ClassifiedUUID)}, {"CreatorUUID", OSD.FromUUID(CreatorUUID)}, {"CreationDate", OSD.FromUInteger(CreationDate)}, {"ExpirationDate", OSD.FromUInteger(ExpirationDate)}, {"Category", OSD.FromUInteger(Category)}, {"Name", OSD.FromString(Name)}, {"Description", OSD.FromString(Description)}, {"ParcelUUID", OSD.FromUUID(ParcelUUID)}, {"ParentEstate", OSD.FromUInteger(ParentEstate)}, {"SnapshotUUID", OSD.FromUUID(SnapshotUUID)}, {"ScopeID", OSD.FromUUID(ScopeID)}, {"SimName", OSD.FromString(SimName)}, {"GlobalPos", OSD.FromVector3(GlobalPos)}, {"ParcelName", OSD.FromString(ParcelName)}, {"ClassifiedFlags", OSD.FromInteger(ClassifiedFlags)}, {"PriceForListing", OSD.FromInteger(PriceForListing)} }; return Classified; } public override void FromOSD(OSDMap map) { ClassifiedUUID = map["ClassifiedUUID"].AsUUID(); CreatorUUID = map["CreatorUUID"].AsUUID(); CreationDate = map["CreationDate"].AsUInteger(); ExpirationDate = map["ExpirationDate"].AsUInteger(); Category = map["Category"].AsUInteger(); Name = map["Name"].AsString(); Description = map["Description"].AsString(); ParcelUUID = map["ParcelUUID"].AsUUID(); ParentEstate = map["ParentEstate"].AsUInteger(); SnapshotUUID = map["SnapshotUUID"].AsUUID(); ScopeID = map["ScopeID"].AsUUID(); SimName = map["SimName"].AsString(); GlobalPos = map["GlobalPos"].AsVector3(); ParcelName = map["ParcelName"].AsString(); ClassifiedFlags = (byte) map["ClassifiedFlags"].AsInteger(); PriceForListing = map["PriceForListing"].AsInteger(); } public override void FromKVP(Dictionary<string, object> KVP) { FromOSD(Util.DictionaryToOSD(KVP)); } public override Dictionary<string, object> ToKVP() { return Util.OSDToDictionary(ToOSD()); } } public class ProfilePickInfo : IDataTransferable { public UUID CreatorUUID; public string Description; public int Enabled; public Vector3 GlobalPos; public string Name; public string OriginalName; public UUID ParcelUUID; public UUID PickUUID; public string SimName; public UUID SnapshotUUID; public int SortOrder; public int TopPick; public string User; public override OSDMap ToOSD() { OSDMap Pick = new OSDMap { {"PickUUID", OSD.FromUUID(PickUUID)}, {"CreatorUUID", OSD.FromUUID(CreatorUUID)}, {"TopPick", OSD.FromInteger(TopPick)}, {"ParcelUUID", OSD.FromUUID(ParcelUUID)}, {"Name", OSD.FromString(Name)}, {"Description", OSD.FromString(Description)}, {"SnapshotUUID", OSD.FromUUID(SnapshotUUID)}, {"User", OSD.FromString(User)}, {"OriginalName", OSD.FromString(OriginalName)}, {"SimName", OSD.FromString(SimName)}, {"GlobalPos", OSD.FromVector3(GlobalPos)}, {"SortOrder", OSD.FromInteger(SortOrder)}, {"Enabled", OSD.FromInteger(Enabled)} }; return Pick; } public override void FromOSD(OSDMap map) { 
PickUUID = map["PickUUID"].AsUUID(); CreatorUUID = map["CreatorUUID"].AsUUID(); TopPick = map["TopPick"].AsInteger(); ParcelUUID = map["ParcelUUID"].AsUUID(); Name = map["Name"].AsString(); Description = map["Description"].AsString(); SnapshotUUID = map["SnapshotUUID"].AsUUID(); User = map["User"].AsString(); OriginalName = map["OriginalName"].AsString(); SimName = map["SimName"].AsString(); GlobalPos = map["GlobalPos"].AsVector3(); SortOrder = map["SortOrder"].AsInteger(); Enabled = map["Enabled"].AsInteger(); } public override void FromKVP(Dictionary<string, object> KVP) { FromOSD(Util.DictionaryToOSD(KVP)); } public override Dictionary<string, object> ToKVP() { return Util.OSDToDictionary(ToOSD()); } } }
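// --- Illustrative sketch (not part of the file above) -----------------------------------------
// Round-trips one of the IDataTransferable classes above through its OSD form, which is the same
// path the ToKVP/FromKVP helpers use internally. The field values are made up.
using System;
using Aurora.Framework;
using OpenMetaverse;
using OpenMetaverse.StructuredData;

public static class ProfileSerializationSketch
{
    public static void Run()
    {
        var classified = new Classified
        {
            ClassifiedUUID = UUID.Random(),
            CreatorUUID = UUID.Random(),
            Name = "Beach house for rent",
            Description = "Weekly rental near the sim's north shore.",
            SimName = "Sandbox",
            ParcelName = "North Shore",
            GlobalPos = new Vector3(128f, 128f, 21f),
            PriceForListing = 50
        };

        // Serialize to an OSDMap and hydrate a fresh instance from it.
        OSDMap map = classified.ToOSD();
        var restored = new Classified();
        restored.FromOSD(map);

        Console.WriteLine(restored.Name);            // Beach house for rent
        Console.WriteLine(restored.PriceForListing); // 50
    }
}
// ----------------------------------------------------------------------------------------------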
using System; using System.Collections; using System.Collections.Generic; using System.Collections.Specialized; using System.ComponentModel.DataAnnotations; using System.Globalization; using System.Reflection; using System.Runtime.Serialization; using System.Web.Http; using System.Web.Http.Description; using System.Xml.Serialization; using Newtonsoft.Json; namespace YourFood.Services.Areas.HelpPage.ModelDescriptions { /// <summary> /// Generates model descriptions for given types. /// </summary> public class ModelDescriptionGenerator { // Modify this to support more data annotation attributes. private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>> { { typeof(RequiredAttribute), a => "Required" }, { typeof(RangeAttribute), a => { RangeAttribute range = (RangeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum); } }, { typeof(MaxLengthAttribute), a => { MaxLengthAttribute maxLength = (MaxLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length); } }, { typeof(MinLengthAttribute), a => { MinLengthAttribute minLength = (MinLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length); } }, { typeof(StringLengthAttribute), a => { StringLengthAttribute strLength = (StringLengthAttribute)a; return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength); } }, { typeof(DataTypeAttribute), a => { DataTypeAttribute dataType = (DataTypeAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString()); } }, { typeof(RegularExpressionAttribute), a => { RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a; return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern); } }, }; // Modify this to add more default documentations. 
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string> { { typeof(Int16), "integer" }, { typeof(Int32), "integer" }, { typeof(Int64), "integer" }, { typeof(UInt16), "unsigned integer" }, { typeof(UInt32), "unsigned integer" }, { typeof(UInt64), "unsigned integer" }, { typeof(Byte), "byte" }, { typeof(Char), "character" }, { typeof(SByte), "signed byte" }, { typeof(Uri), "URI" }, { typeof(Single), "decimal number" }, { typeof(Double), "decimal number" }, { typeof(Decimal), "decimal number" }, { typeof(String), "string" }, { typeof(Guid), "globally unique identifier" }, { typeof(TimeSpan), "time interval" }, { typeof(DateTime), "date" }, { typeof(DateTimeOffset), "date" }, { typeof(Boolean), "boolean" }, }; private Lazy<IModelDocumentationProvider> _documentationProvider; public ModelDescriptionGenerator(HttpConfiguration config) { if (config == null) { throw new ArgumentNullException("config"); } _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider); GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase); } public Dictionary<string, ModelDescription> GeneratedModels { get; private set; } private IModelDocumentationProvider DocumentationProvider { get { return _documentationProvider.Value; } } public ModelDescription GetOrCreateModelDescription(Type modelType) { if (modelType == null) { throw new ArgumentNullException("modelType"); } Type underlyingType = Nullable.GetUnderlyingType(modelType); if (underlyingType != null) { modelType = underlyingType; } ModelDescription modelDescription; string modelName = ModelNameHelper.GetModelName(modelType); if (GeneratedModels.TryGetValue(modelName, out modelDescription)) { if (modelType != modelDescription.ModelType) { throw new InvalidOperationException( String.Format( CultureInfo.CurrentCulture, "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. 
" + "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.", modelName, modelDescription.ModelType.FullName, modelType.FullName)); } return modelDescription; } if (DefaultTypeDocumentation.ContainsKey(modelType)) { return GenerateSimpleTypeModelDescription(modelType); } if (modelType.IsEnum) { return GenerateEnumTypeModelDescription(modelType); } if (modelType.IsGenericType) { Type[] genericArguments = modelType.GetGenericArguments(); if (genericArguments.Length == 1) { Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments); if (enumerableType.IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, genericArguments[0]); } } if (genericArguments.Length == 2) { Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments); if (dictionaryType.IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]); } Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments); if (keyValuePairType.IsAssignableFrom(modelType)) { return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]); } } } if (modelType.IsArray) { Type elementType = modelType.GetElementType(); return GenerateCollectionModelDescription(modelType, elementType); } if (modelType == typeof(NameValueCollection)) { return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string)); } if (typeof(IDictionary).IsAssignableFrom(modelType)) { return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object)); } if (typeof(IEnumerable).IsAssignableFrom(modelType)) { return GenerateCollectionModelDescription(modelType, typeof(object)); } return GenerateComplexTypeModelDescription(modelType); } // Change this to provide different name for the member. private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute) { JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>(); if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName)) { return jsonProperty.PropertyName; } if (hasDataContractAttribute) { DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>(); if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name)) { return dataMember.Name; } } return member.Name; } private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute) { JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>(); XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>(); IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>(); NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>(); ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>(); bool hasMemberAttribute = member.DeclaringType.IsEnum ? 
member.GetCustomAttribute<EnumMemberAttribute>() != null : member.GetCustomAttribute<DataMemberAttribute>() != null; // Display member only if all the followings are true: // no JsonIgnoreAttribute // no XmlIgnoreAttribute // no IgnoreDataMemberAttribute // no NonSerializedAttribute // no ApiExplorerSettingsAttribute with IgnoreApi set to true // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute return jsonIgnore == null && xmlIgnore == null && ignoreDataMember == null && nonSerialized == null && (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) && (!hasDataContractAttribute || hasMemberAttribute); } private string CreateDefaultDocumentation(Type type) { string documentation; if (DefaultTypeDocumentation.TryGetValue(type, out documentation)) { return documentation; } if (DocumentationProvider != null) { documentation = DocumentationProvider.GetDocumentation(type); } return documentation; } private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel) { List<ParameterAnnotation> annotations = new List<ParameterAnnotation>(); IEnumerable<Attribute> attributes = property.GetCustomAttributes(); foreach (Attribute attribute in attributes) { Func<object, string> textGenerator; if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator)) { annotations.Add( new ParameterAnnotation { AnnotationAttribute = attribute, Documentation = textGenerator(attribute) }); } } // Rearrange the annotations annotations.Sort((x, y) => { // Special-case RequiredAttribute so that it shows up on top if (x.AnnotationAttribute is RequiredAttribute) { return -1; } if (y.AnnotationAttribute is RequiredAttribute) { return 1; } // Sort the rest based on alphabetic order of the documentation return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase); }); foreach (ParameterAnnotation annotation in annotations) { propertyModel.Annotations.Add(annotation); } } private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType) { ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType); if (collectionModelDescription != null) { return new CollectionModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, ElementDescription = collectionModelDescription }; } return null; } private ModelDescription GenerateComplexTypeModelDescription(Type modelType) { ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(complexModelDescription.Name, complexModelDescription); bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance); foreach (PropertyInfo property in properties) { if (ShouldDisplayMember(property, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(property, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(property); } GenerateAnnotations(property, propertyModel); complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType); } } FieldInfo[] fields = 
modelType.GetFields(BindingFlags.Public | BindingFlags.Instance); foreach (FieldInfo field in fields) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { ParameterDescription propertyModel = new ParameterDescription { Name = GetMemberName(field, hasDataContractAttribute) }; if (DocumentationProvider != null) { propertyModel.Documentation = DocumentationProvider.GetDocumentation(field); } complexModelDescription.Properties.Add(propertyModel); propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType); } } return complexModelDescription; } private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new DictionaryModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType) { EnumTypeModelDescription enumDescription = new EnumTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null; foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static)) { if (ShouldDisplayMember(field, hasDataContractAttribute)) { EnumValueDescription enumValue = new EnumValueDescription { Name = field.Name, Value = field.GetRawConstantValue().ToString() }; if (DocumentationProvider != null) { enumValue.Documentation = DocumentationProvider.GetDocumentation(field); } enumDescription.Values.Add(enumValue); } } GeneratedModels.Add(enumDescription.Name, enumDescription); return enumDescription; } private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType) { ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType); ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType); return new KeyValuePairModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, KeyModelDescription = keyModelDescription, ValueModelDescription = valueModelDescription }; } private ModelDescription GenerateSimpleTypeModelDescription(Type modelType) { SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription { Name = ModelNameHelper.GetModelName(modelType), ModelType = modelType, Documentation = CreateDefaultDocumentation(modelType) }; GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription); return simpleModelDescription; } } }
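// Illustrative sketch (not part of the original source): one possible way to drive the
// ModelDescriptionGenerator above from help-page code. "SampleDto" and the namespace below are
// hypothetical; a using for the project's HelpPage ModelDescriptions namespace (name varies per
// project) is assumed, and the HttpConfiguration comes from the hosting Web API application.
using System;
using System.Web.Http;

namespace HelpPageUsageSketch
{
    public class SampleDto
    {
        public int Id { get; set; }
        public string Name { get; set; }
    }

    public static class ModelDescriptionExample
    {
        public static void Describe(HttpConfiguration config)
        {
            var generator = new ModelDescriptionGenerator(config);

            // Complex types are cached by name in GeneratedModels; simple types get a
            // SimpleTypeModelDescription carrying the default documentation text.
            ModelDescription dto = generator.GetOrCreateModelDescription(typeof(SampleDto));
            ModelDescription guid = generator.GetOrCreateModelDescription(typeof(Guid));

            Console.WriteLine("{0}: {1} generated model(s)", dto.Name, generator.GeneratedModels.Count);
            Console.WriteLine("{0} documented as '{1}'", guid.Name, guid.Documentation);
        }
    }
}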
using Content.Shared.ActionBlocker; using Content.Shared.Buckle.Components; using Content.Shared.Physics.Pull; using Content.Shared.Pulling.Components; using Content.Shared.Pulling.Events; using Robust.Shared.Containers; using Robust.Shared.GameObjects; using Robust.Shared.Map; using Robust.Shared.IoC; using Robust.Shared.Physics; using Robust.Shared.Log; namespace Content.Shared.Pulling { public abstract partial class SharedPullingSystem : EntitySystem { [Dependency] private readonly ActionBlockerSystem _blocker = default!; [Dependency] private readonly SharedContainerSystem _containerSystem = default!; public bool CanPull(EntityUid puller, EntityUid pulled) { if (!EntityManager.HasComponent<SharedPullerComponent>(puller)) { return false; } if (!_blocker.CanInteract(puller, pulled)) { return false; } if (!EntityManager.TryGetComponent<IPhysBody?>(pulled, out var _physics)) { return false; } if (_physics.BodyType == BodyType.Static) { return false; } if (puller == pulled) { return false; } if (!_containerSystem.IsInSameOrNoContainer(puller, pulled)) { return false; } if (EntityManager.TryGetComponent<SharedBuckleComponent?>(puller, out var buckle)) { // Prevent people pulling the chair they're on, etc. if (buckle.Buckled && (buckle.LastEntityBuckledTo == pulled)) { return false; } } var startPull = new StartPullAttemptEvent(puller, pulled); RaiseLocalEvent(puller, startPull); return !startPull.Cancelled; } public bool TogglePull(EntityUid puller, SharedPullableComponent pullable) { if (pullable.Puller == puller) { return TryStopPull(pullable); } return TryStartPull(puller, pullable.Owner); } // -- Core attempted actions -- public bool TryStopPull(SharedPullableComponent pullable, EntityUid? user = null) { if (!pullable.BeingPulled) { return false; } var msg = new StopPullingEvent(user); RaiseLocalEvent(pullable.Owner, msg); if (msg.Cancelled) return false; _pullSm.ForceRelationship(null, pullable); return true; } public bool TryStartPull(EntityUid puller, EntityUid pullable) { if (!EntityManager.TryGetComponent<SharedPullerComponent?>(puller, out var pullerComp)) { return false; } if (!EntityManager.TryGetComponent<SharedPullableComponent?>(pullable, out var pullableComp)) { return false; } return TryStartPull(pullerComp, pullableComp); } // The main "start pulling" function. public bool TryStartPull(SharedPullerComponent puller, SharedPullableComponent pullable) { if (puller.Pulling == pullable.Owner) return true; // Pulling a new object : Perform sanity checks. if (!CanPull(puller.Owner, pullable.Owner)) { return false; } if (!EntityManager.TryGetComponent<PhysicsComponent?>(puller.Owner, out var pullerPhysics)) { return false; } if (!EntityManager.TryGetComponent<PhysicsComponent?>(pullable.Owner, out var pullablePhysics)) { return false; } // Ensure that the puller is not currently pulling anything. // If this isn't done, then it happens too late, and the start/stop messages go out of order, // and next thing you know it thinks it's not pulling anything even though it is! var oldPullable = puller.Pulling; if (oldPullable != null) { if (EntityManager.TryGetComponent<SharedPullableComponent?>(oldPullable.Value, out var oldPullableComp)) { if (!TryStopPull(oldPullableComp)) { return false; } } else { Logger.WarningS("c.go.c.pulling", "Well now you've done it, haven't you? 
Someone transferred pulling (onto {0}) while presently pulling something that has no Pullable component (on {1})!", pullable.Owner, oldPullable); return false; } } // Ensure that the pullable is not currently being pulled. // Same sort of reasons as before. var oldPuller = pullable.Puller; if (oldPuller != null) { if (!TryStopPull(pullable)) { return false; } } // Continue with pulling process. var pullAttempt = new PullAttemptMessage(pullerPhysics, pullablePhysics); RaiseLocalEvent(puller.Owner, pullAttempt, broadcast: false); if (pullAttempt.Cancelled) { return false; } RaiseLocalEvent(pullable.Owner, pullAttempt); if (pullAttempt.Cancelled) { return false; } _pullSm.ForceRelationship(puller, pullable); return true; } public bool TryMoveTo(SharedPullableComponent pullable, EntityCoordinates to) { if (pullable.Puller == null) { return false; } if (!EntityManager.HasComponent<PhysicsComponent>(pullable.Owner)) { return false; } _pullSm.ForceSetMovingTo(pullable, to); return true; } public void StopMoveTo(SharedPullableComponent pullable) { _pullSm.ForceSetMovingTo(pullable, null); } } }
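// Illustrative sketch (not part of the original source): how another content system might call
// into the SharedPullingSystem above. The class name "PullCommandSketchSystem", its namespace,
// and the dependency-resolution style are assumptions for demonstration; the actual wiring in
// the game code base may differ.
using Content.Shared.Pulling;
using Content.Shared.Pulling.Components;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;

namespace Content.Shared.Pulling.Sketch
{
    public sealed class PullCommandSketchSystem : EntitySystem
    {
        [Dependency] private readonly SharedPullingSystem _pulling = default!;

        /// <summary>
        /// Toggles pulling of <paramref name="target"/> by <paramref name="user"/>.
        /// The sanity checks (interaction blocking, containers, buckling, etc.) are
        /// performed inside SharedPullingSystem.CanPull.
        /// </summary>
        public bool TryTogglePull(EntityUid user, EntityUid target)
        {
            if (!EntityManager.TryGetComponent<SharedPullableComponent>(target, out var pullable))
                return false;

            // TogglePull stops an existing pull by the same user, otherwise starts a new one.
            return _pulling.TogglePull(user, pullable);
        }
    }
}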
// --------------------------------------------------------------------------- // <copyright file="ConversationAction.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // --------------------------------------------------------------------------- //----------------------------------------------------------------------- // <summary>Defines the ConversationAction class.</summary> //----------------------------------------------------------------------- namespace Microsoft.Exchange.WebServices.Data { using System; using System.Collections.Generic; using System.Text; /// <summary> /// ConversationAction class that represents ConversationActionType in the request XML. /// This class really is meant for representing single ConversationAction that needs to /// be taken on a conversation. /// </summary> internal class ConversationAction : IJsonSerializable { /// <summary> /// Gets or sets conversation action /// </summary> internal ConversationActionType Action { get; set; } /// <summary> /// Gets or sets conversation id /// </summary> internal ConversationId ConversationId { get; set; } /// <summary> /// Gets or sets ProcessRightAway /// </summary> internal bool ProcessRightAway { get; set; } /// <summary> /// Gets or set conversation categories for Always Categorize action /// </summary> internal StringList Categories { get; set; } /// <summary> /// Gets or sets Enable Always Delete value for Always Delete action /// </summary> internal bool EnableAlwaysDelete { get; set; } /// <summary> /// Gets or sets the IsRead state. /// </summary> internal bool? IsRead { get; set; } /// <summary> /// Gets or sets the SuppressReadReceipts flag. /// </summary> internal bool? SuppressReadReceipts { get; set; } /// <summary> /// Gets or sets the Deletion mode. /// </summary> internal DeleteMode? DeleteType { get; set; } /// <summary> /// Gets or sets the flag. /// </summary> internal Flag Flag { get; set; } /// <summary> /// ConversationLastSyncTime is used in one time action to determine the items /// on which to take the action. /// </summary> internal DateTime? ConversationLastSyncTime { get; set; } /// <summary> /// Gets or sets folder id ContextFolder /// </summary> internal FolderIdWrapper ContextFolderId { get; set; } /// <summary> /// Gets or sets folder id for Move action /// </summary> internal FolderIdWrapper DestinationFolderId { get; set; } /// <summary> /// Gets or sets the retention policy type. /// </summary> internal RetentionType? RetentionPolicyType { get; set; } /// <summary> /// Gets or sets the retention policy tag id. /// </summary> internal Guid? RetentionPolicyTagId { get; set; } /// <summary> /// Gets the name of the XML element. /// </summary> /// <returns>XML element name.</returns> internal string GetXmlElementName() { return XmlElementNames.ApplyConversationAction; } /// <summary> /// Validate request. /// </summary> internal void Validate() { EwsUtilities.ValidateParam(this.ConversationId, "conversationId"); } /// <summary> /// Writes XML elements. 
/// </summary> /// <param name="writer">The writer.</param> internal void WriteElementsToXml(EwsServiceXmlWriter writer) { writer.WriteStartElement( XmlNamespace.Types, XmlElementNames.ConversationAction); try { string actionValue = String.Empty; switch (this.Action) { case ConversationActionType.AlwaysCategorize: actionValue = XmlElementNames.AlwaysCategorize; break; case ConversationActionType.AlwaysDelete: actionValue = XmlElementNames.AlwaysDelete; break; case ConversationActionType.AlwaysMove: actionValue = XmlElementNames.AlwaysMove; break; case ConversationActionType.Delete: actionValue = XmlElementNames.Delete; break; case ConversationActionType.Copy: actionValue = XmlElementNames.Copy; break; case ConversationActionType.Move: actionValue = XmlElementNames.Move; break; case ConversationActionType.SetReadState: actionValue = XmlElementNames.SetReadState; break; case ConversationActionType.SetRetentionPolicy: actionValue = XmlElementNames.SetRetentionPolicy; break; case ConversationActionType.Flag: actionValue = XmlElementNames.Flag; break; default: throw new ArgumentException("ConversationAction"); } // Emit the action element writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.Action, actionValue); // Emit the conversation id element this.ConversationId.WriteToXml( writer, XmlNamespace.Types, XmlElementNames.ConversationId); if (this.Action == ConversationActionType.AlwaysCategorize || this.Action == ConversationActionType.AlwaysDelete || this.Action == ConversationActionType.AlwaysMove) { // Emit the ProcessRightAway element writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.ProcessRightAway, EwsUtilities.BoolToXSBool(this.ProcessRightAway)); } if (this.Action == ConversationActionType.AlwaysCategorize) { // Emit the categories element if (this.Categories != null && this.Categories.Count > 0) { this.Categories.WriteToXml( writer, XmlNamespace.Types, XmlElementNames.Categories); } } else if (this.Action == ConversationActionType.AlwaysDelete) { // Emit the EnableAlwaysDelete element writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.EnableAlwaysDelete, EwsUtilities.BoolToXSBool(this.EnableAlwaysDelete)); } else if (this.Action == ConversationActionType.AlwaysMove) { // Emit the Move Folder Id if (this.DestinationFolderId != null) { writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.DestinationFolderId); this.DestinationFolderId.WriteToXml(writer); writer.WriteEndElement(); } } else { if (this.ContextFolderId != null) { writer.WriteStartElement( XmlNamespace.Types, XmlElementNames.ContextFolderId); this.ContextFolderId.WriteToXml(writer); writer.WriteEndElement(); } if (this.ConversationLastSyncTime.HasValue) { writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.ConversationLastSyncTime, this.ConversationLastSyncTime.Value); } if (this.Action == ConversationActionType.Copy) { EwsUtilities.Assert( this.DestinationFolderId != null, "ApplyconversationActionRequest", "DestinationFolderId should be set when performing copy action"); writer.WriteStartElement( XmlNamespace.Types, XmlElementNames.DestinationFolderId); this.DestinationFolderId.WriteToXml(writer); writer.WriteEndElement(); } else if (this.Action == ConversationActionType.Move) { EwsUtilities.Assert( this.DestinationFolderId != null, "ApplyconversationActionRequest", "DestinationFolderId should be set when performing move action"); writer.WriteStartElement( XmlNamespace.Types, XmlElementNames.DestinationFolderId); this.DestinationFolderId.WriteToXml(writer); 
writer.WriteEndElement(); } else if (this.Action == ConversationActionType.Delete) { EwsUtilities.Assert( this.DeleteType.HasValue, "ApplyconversationActionRequest", "DeleteType should be specified when deleting a conversation."); writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.DeleteType, this.DeleteType.Value); } else if (this.Action == ConversationActionType.SetReadState) { EwsUtilities.Assert( this.IsRead.HasValue, "ApplyconversationActionRequest", "IsRead should be specified when marking/unmarking a conversation as read."); writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.IsRead, this.IsRead.Value); if (this.SuppressReadReceipts.HasValue) { writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.SuppressReadReceipts, this.SuppressReadReceipts.Value); } } else if (this.Action == ConversationActionType.SetRetentionPolicy) { EwsUtilities.Assert( this.RetentionPolicyType.HasValue, "ApplyconversationActionRequest", "RetentionPolicyType should be specified when setting a retention policy on a conversation."); writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.RetentionPolicyType, this.RetentionPolicyType.Value); if (this.RetentionPolicyTagId.HasValue) { writer.WriteElementValue( XmlNamespace.Types, XmlElementNames.RetentionPolicyTagId, this.RetentionPolicyTagId.Value); } } else if (this.Action == ConversationActionType.Flag) { EwsUtilities.Assert( this.Flag != null, "ApplyconversationActionRequest", "Flag should be specified when flagging conversation items."); writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.Flag); this.Flag.WriteElementsToXml(writer); writer.WriteEndElement(); } } } finally { writer.WriteEndElement(); } } /// <summary> /// Creates a JSON representation of this object. /// </summary> /// <param name="service">The service.</param> /// <returns> /// A Json value (either a JsonObject, an array of Json values, or a Json primitive) /// </returns> public object ToJson(ExchangeService service) { JsonObject jsonProperty = new JsonObject(); // Emit the action element jsonProperty.Add(XmlElementNames.Action, this.Action); // Emit the conversation id element jsonProperty.Add(XmlElementNames.ConversationId, this.ConversationId.InternalToJson(service)); if (this.Action == ConversationActionType.AlwaysCategorize || this.Action == ConversationActionType.AlwaysDelete || this.Action == ConversationActionType.AlwaysMove) { // Emit the ProcessRightAway element jsonProperty.Add(XmlElementNames.ProcessRightAway, this.ProcessRightAway); } if (this.Action == ConversationActionType.AlwaysCategorize) { // Emit the categories element if (this.Categories != null && this.Categories.Count > 0) { jsonProperty.Add(XmlElementNames.Categories, this.Categories.InternalToJson(service)); } } else if (this.Action == ConversationActionType.AlwaysDelete) { // Emit the EnableAlwaysDelete element jsonProperty.Add(XmlElementNames.EnableAlwaysDelete, this.EnableAlwaysDelete); } else if (this.Action == ConversationActionType.AlwaysMove) { // Emit the Move Folder Id if (this.DestinationFolderId != null) { JsonObject jsonTargetFolderId = new JsonObject(); jsonTargetFolderId.Add(XmlElementNames.BaseFolderId, this.DestinationFolderId.InternalToJson(service)); jsonProperty.Add(XmlElementNames.DestinationFolderId, jsonTargetFolderId); } } else { if (this.ContextFolderId != null) { JsonObject jsonTargetFolderId = new JsonObject(); jsonTargetFolderId.Add(XmlElementNames.BaseFolderId, this.ContextFolderId.InternalToJson(service)); 
jsonProperty.Add(XmlElementNames.ContextFolderId, jsonTargetFolderId); } if (this.ConversationLastSyncTime.HasValue) { jsonProperty.Add( XmlElementNames.ConversationLastSyncTime, this.ConversationLastSyncTime.Value); } if (this.Action == ConversationActionType.Copy) { EwsUtilities.Assert( this.DestinationFolderId != null, "ApplyconversationActionRequest", "DestinationFolderId should be set when performing copy action"); JsonObject jsonTargetFolderId = new JsonObject(); jsonTargetFolderId.Add(XmlElementNames.BaseFolderId, this.DestinationFolderId.InternalToJson(service)); jsonProperty.Add(XmlElementNames.DestinationFolderId, jsonTargetFolderId); } else if (this.Action == ConversationActionType.Move) { EwsUtilities.Assert( this.DestinationFolderId != null, "ApplyconversationActionRequest", "DestinationFolderId should be set when performing move action"); JsonObject jsonTargetFolderId = new JsonObject(); jsonTargetFolderId.Add(XmlElementNames.BaseFolderId, this.DestinationFolderId.InternalToJson(service)); jsonProperty.Add(XmlElementNames.DestinationFolderId, jsonTargetFolderId); } else if (this.Action == ConversationActionType.Delete) { EwsUtilities.Assert( this.DeleteType.HasValue, "ApplyconversationActionRequest", "DeleteType should be specified when deleting a conversation."); jsonProperty.Add(XmlElementNames.DeleteType, this.DeleteType.Value); } else if (this.Action == ConversationActionType.SetReadState) { EwsUtilities.Assert( this.IsRead.HasValue, "ApplyconversationActionRequest", "IsRead should be specified when marking/unmarking a conversation as read."); jsonProperty.Add(XmlElementNames.IsRead, this.IsRead.Value); if (this.SuppressReadReceipts.HasValue) { jsonProperty.Add(XmlElementNames.SuppressReadReceipts, this.SuppressReadReceipts.Value); } } else if (this.Action == ConversationActionType.SetRetentionPolicy) { EwsUtilities.Assert( this.RetentionPolicyType.HasValue, "ApplyconversationActionRequest", "RetentionPolicyType should be specified when setting a retention policy on a conversation."); jsonProperty.Add(XmlElementNames.RetentionPolicyType, this.RetentionPolicyType.Value); if (this.RetentionPolicyTagId.HasValue) { jsonProperty.Add(XmlElementNames.RetentionPolicyTagId, this.RetentionPolicyTagId.Value); } } else if (this.Action == ConversationActionType.Flag) { EwsUtilities.Assert( this.Flag != null, "ApplyconversationActionRequest", "Flag should be specified when flagging items in a conversation."); jsonProperty.Add(XmlElementNames.Flag, this.Flag.InternalToJson(service)); } } return jsonProperty; } } }
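// Illustrative sketch (not part of the original source): how request code inside the same
// assembly might populate the internal ConversationAction above for a "mark conversation as
// read" operation. The helper class name is hypothetical; the caller supplies the ConversationId.
namespace Microsoft.Exchange.WebServices.Data
{
    internal static class ConversationActionSketch
    {
        internal static ConversationAction CreateSetReadStateAction(ConversationId conversationId)
        {
            ConversationAction action = new ConversationAction
            {
                Action = ConversationActionType.SetReadState,
                ConversationId = conversationId,
                IsRead = true,
                SuppressReadReceipts = true
            };

            // Validate() enforces that ConversationId is present before the request is emitted.
            action.Validate();
            return action;
        }
    }
}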
// $Id$ // // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // using System; using NUnit.Framework; using org.apache.etch.tests; using org.apache.etch.tests.types.Test1; namespace etch.tests { [TestFixture] public class TestTest1DotCsharp { [TestFixtureSetUp] public void First() { Console.WriteLine(); Console.Write( "TestTest" ); } [Test] public void const_booleans() { Assert.IsFalse( ConstsTest1.BOOL1 ); Assert.IsTrue( ConstsTest1.BOOL2 ); } [Test] public void const_bytes() { Assert.AreEqual( sbyte.MinValue, ConstsTest1.BYTE1 ); Assert.AreEqual( (sbyte) -1, ConstsTest1.BYTE2 ); Assert.AreEqual( (sbyte) 0, ConstsTest1.BYTE3 ); Assert.AreEqual( (sbyte) 1, ConstsTest1.BYTE4 ); Assert.AreEqual( sbyte.MaxValue, ConstsTest1.BYTE5 ); } [Test] public void const_shorts() { Assert.AreEqual( short.MinValue, ConstsTest1.SHORT1 ); Assert.AreEqual( (short) -1, ConstsTest1.SHORT2 ); Assert.AreEqual( (short) 0, ConstsTest1.SHORT3 ); Assert.AreEqual( (short) 1, ConstsTest1.SHORT4 ); Assert.AreEqual( short.MaxValue, ConstsTest1.SHORT5 ); } [Test] public void const_ints() { Assert.AreEqual(int.MinValue, ConstsTest1.INT1); Assert.AreEqual(-1, ConstsTest1.INT2); Assert.AreEqual(0, ConstsTest1.INT3); Assert.AreEqual(1, ConstsTest1.INT4); Assert.AreEqual(int.MaxValue, ConstsTest1.INT5); } [Test] public void const_longs() { Assert.AreEqual(long.MinValue, ConstsTest1.LONG1); Assert.AreEqual((long)-1, ConstsTest1.LONG2); Assert.AreEqual((long)0, ConstsTest1.LONG3); Assert.AreEqual((long)1, ConstsTest1.LONG4); Assert.AreEqual(long.MaxValue, ConstsTest1.LONG5); } [Test] public void const_floats() { Assert.AreEqual(1.4e-45f, ConstsTest1.FLOAT1); Assert.AreEqual((float)-1, ConstsTest1.FLOAT2); Assert.AreEqual((float)0, ConstsTest1.FLOAT3); Assert.AreEqual((float)1, ConstsTest1.FLOAT4); Assert.AreEqual(3.4028235e38f, ConstsTest1.FLOAT5); } [Test] public void const_doubles() { Assert.AreEqual(4.9e-324, ConstsTest1.DOUBLE1); Assert.AreEqual((double)-1, ConstsTest1.DOUBLE2); Assert.AreEqual((double)0, ConstsTest1.DOUBLE3); Assert.AreEqual((double)1, ConstsTest1.DOUBLE4); Assert.AreEqual(1.7976931348623157e308, ConstsTest1.DOUBLE5); } [Test] public void const_strings() { Assert.AreEqual("", ConstsTest1.STRING1); Assert.AreEqual("a", ConstsTest1.STRING2); Assert.AreEqual("abc", ConstsTest1.STRING3); Assert.AreEqual("a\tb\tc\r\n", ConstsTest1.STRING4); Assert.AreEqual("x x", ConstsTest1.STRING5); Assert.AreEqual("x\u00bfx", ConstsTest1.STRING6); Assert.AreEqual("x\u03a9x", ConstsTest1.STRING7); Assert.AreEqual("x\\x", ConstsTest1.STRING8); Assert.AreEqual("x'x", ConstsTest1.STRING9); Assert.AreEqual("x\"x", ConstsTest1.STRING10); } [Test] public void enum_E1() { //Assert.AreEqual( 3, E1.values().Length ); Assert.IsNotNull( E1.A ); Assert.IsNotNull( E1.B ); Assert.IsNotNull( E1.C ); 
//Assert.AreEqual( E1.A, E1.valueOf( "A" ) ); //Assert.AreEqual( E1.B, E1.valueOf( "B" ) ); //Assert.AreEqual( E1.C, E1.valueOf( "C" ) ); //Assert.AreEqual( 0, E1.A.ordinal() ); //Assert.AreEqual( 1, E1.B.ordinal() ); //Assert.AreEqual( 2, E1.C.ordinal() ); } [Test] public void struct_S1() { S1 s = new S1( 1, 2, 3 ); Assert.AreEqual( 1, s.x ); Assert.AreEqual( 2, s.y ); Assert.AreEqual( 3, s.z ); s = new S1( null, null, null ); Assert.IsNull( s.x ); Assert.IsNull( s.y ); Assert.IsNull( s.z ); s = new S1(); Assert.IsNull( s.x ); Assert.IsNull( s.y ); Assert.IsNull( s.z ); s.x = 4; s.y = 5; s.z = 6; Assert.AreEqual( 4, s.x ); Assert.AreEqual( 5, s.y ); Assert.AreEqual( 6, s.z ); s.x = null; Assert.IsNull( s.x ); s.x = 7; Assert.AreEqual( 7, s.x ); } [Test] public void except_Excp1() { Excp1 e = new Excp1( "foo", 23 ); Assert.AreEqual( "foo", e.msg ); Assert.AreEqual( 23, e.code ); e = new Excp1( null, null ); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); e = new Excp1(); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); e.msg = "bar"; e.code = 24; Assert.AreEqual( "bar", e.msg ); Assert.AreEqual( 24, e.code ); e.msg = null; e.code = null; Assert.IsNull( e.msg ); Assert.IsNull( e.code ); //Throwable t = e; //Assert.IsTrue( t is Exception ); //Assert.IsFalse( t is RuntimeException ); } [Test] public void except_Excp2() { Excp2 e = new Excp2(); Exception t = e; Assert.IsTrue( t is Exception ); //Assert.IsTrue( t is RuntimeException ); } [Test] public void except_Excp3() { Excp3 e = new Excp3(); Exception t = e; Assert.IsTrue( t is Exception ); //Assert.IsTrue( t is RuntimeException ); } [Test] public void except_Excp4() { Excp4 e = new Excp4(); Exception t = e; Assert.IsTrue( t is Exception ); //Assert.IsFalse( t is Exception ); } [Test] public void method_nothing() { test.nothing(); } [Test] public void method_incr1() { Assert.AreEqual( 2, test.incr( 1 ) ); } [Test] public void method_incr2() { Assert.AreEqual( 3, test.incr( 2 ) ); } [Test] public void method_incr3() { Assert.AreEqual( -1, test.incr( -2 ) ); } [Test] public void method_sub1() { Assert.AreEqual( 5, test.sub( 7, 2 ) ); } [Test] public void method_sub2() { Assert.AreEqual( 8, test.sub( 23, 15 ) ); } [Test] public void method_sub3() { Assert.AreEqual( -5, test.sub( 2, 7 ) ); } [Test] public void method_sum() { Assert.AreEqual( 24, test.sum( new int[] { 1, 2, 3, 7, 11 } ) ); } [Test] public void method_trans1() { Assert.AreEqual( 2, test.trans( E1.A, 5 ) ); // divide by 2 } [Test] public void method_trans2() { Assert.AreEqual( 10, test.trans( E1.B, 5 ) ); // mul by 2 } [Test] public void method_trans3() { Assert.AreEqual( 12, test.trans( E1.C, 5 ) ); // add 7 } [Test] public void method_dist1() { Assert.AreEqual( Math.Sqrt( 3 ), test.dist( new S1( 1, 1, 1 ), new S1( 0, 0, 0 ) ) ); } [Test] public void method_dist2() { Assert.AreEqual( Math.Sqrt( 35 ), test.dist( new S1( 1, 2, 3 ), new S1( 6, 5, 4 ) ) ); } [Test] public void method_dist3() { Assert.AreEqual( Math.Sqrt( 56 ), test.dist( new S1( 1, 2, 3 ), new S1( -1, -2, -3 ) ) ); } [Test] public void method_fill1() { int[] x = test.fill( 0, 1 ); Assert.AreEqual( 0, x.Length ); foreach (int i in x) Assert.AreEqual( 1, i ); } [Test] public void method_fill2() { int[] x = test.fill( 1, 2 ); Assert.AreEqual( 1, x.Length ); foreach (int i in x) Assert.AreEqual( 2, i ); } [Test] public void method_fill3() { int[] x = test.fill( 2, 3 ); Assert.AreEqual( 2, x.Length ); foreach (int i in x) Assert.AreEqual( 3, i ); } [Test] public void method_blow1() { try { test.blow( "foo", 2 ); 
Assert.IsTrue( false, "blow did not throw" ); } catch( Excp1 e ) { Assert.AreEqual( "foo", e.msg ); Assert.AreEqual( 2, e.code ); } } [Test] public void method_blow2() { try { test.blow( "bar", 3 ); Assert.IsTrue( false, "blow did not throw" ); } catch( Excp1 e ) { Assert.AreEqual( "bar", e.msg ); Assert.AreEqual( 3, e.code ); } } [Test] public void method_beets1() { Assert.AreEqual( 5, test.beets( E1.A ) ); } [Test] public void method_beets2() { try { test.beets( E1.B ); Assert.IsTrue( false, "beets did not throw" ); } catch ( Excp3 ) { Assert.IsTrue( true ); } } [Test] public void method_beets3() { try { test.beets( E1.C ); Assert.IsTrue(false, "beets did not throw" ); } catch ( Excp4 ) { Assert.IsTrue( true ); } } //[Test] //public void method_beets4() //{ // Assert.IsNull( test.beets( null ) ); //} // //Verify that an object can be passed as a parameter in a stuct. // [Test] public void struct_S3() { testS3Helper( "bool", ConstsTest1.BOOL2 ); testS3Helper( "byte", ConstsTest1.BYTE5 ); testS3Helper( "short", ConstsTest1.SHORT5 ); testS3Helper("int", ConstsTest1.INT5); testS3Helper("long", ConstsTest1.LONG5); testS3Helper("float", ConstsTest1.FLOAT5); testS3Helper("double", ConstsTest1.DOUBLE5); testS3Helper("string", ConstsTest1.STRING3); } /* * Verify that an array of objects can be passed as a parameter * in a stuct. */ [Test] public void struct_S4() { int sz = 10; testS4Helper(sz, "boolean", ConstsTest1.BOOL2 ); testS4Helper(sz, "byte", ConstsTest1.BYTE5 ); testS4Helper(sz, "short", ConstsTest1.SHORT5 ); testS4Helper(sz, "int", ConstsTest1.INT5); testS4Helper(sz, "long", ConstsTest1.LONG5); testS4Helper(sz, "float", ConstsTest1.FLOAT5); testS4Helper(sz, "double", ConstsTest1.DOUBLE5); testS4Helper(sz, "string", ConstsTest1.STRING3); } // Verify that an object can be passed as a parameter in an // exception [Test] public void except_Excp5() { int code = 500; String msg = "Excp5"; testExcp5Helper(msg, code, ConstsTest1.BOOL1); testExcp5Helper (msg, code, ConstsTest1.BYTE5); testExcp5Helper (msg, code, ConstsTest1.SHORT5); testExcp5Helper(msg, code, ConstsTest1.INT5); testExcp5Helper(msg, code, ConstsTest1.LONG5); testExcp5Helper(msg, code, ConstsTest1.FLOAT5); testExcp5Helper(msg, code, ConstsTest1.DOUBLE5); testExcp5Helper(msg, code, ConstsTest1.STRING3); } [Test] public void except_Excp6() { int sz = 10; int code = 500; String msg = "Excp6"; testExcp6Helper(sz, msg, code, ConstsTest1.BOOL1); testExcp6Helper( sz, msg, code, ConstsTest1.BYTE5 ); testExcp6Helper( sz, msg, code, ConstsTest1.SHORT5 ); testExcp6Helper(sz, msg, code, ConstsTest1.INT5); testExcp6Helper(sz, msg, code, ConstsTest1.LONG5); testExcp6Helper(sz, msg, code, ConstsTest1.FLOAT5); testExcp6Helper(sz, msg, code, ConstsTest1.DOUBLE5); testExcp6Helper(sz, msg, code, ConstsTest1.STRING3); } [Test] public void method_throwExcp5() { String msg = "throwExcp5"; int code = 500; method_throwExcp5Helper(msg, code, ConstsTest1.BOOL1); method_throwExcp5Helper( msg, code, ConstsTest1.BYTE5 ); method_throwExcp5Helper( msg, code, ConstsTest1.SHORT5 ); method_throwExcp5Helper(msg, code, ConstsTest1.INT5); method_throwExcp5Helper(msg, code, ConstsTest1.LONG5); method_throwExcp5Helper(msg, code, ConstsTest1.FLOAT5); method_throwExcp5Helper(msg, code, ConstsTest1.DOUBLE5); method_throwExcp5Helper(msg, code, ConstsTest1.STRING3); } [Test] public void method_throwExcp6() { String msg = "throwExcp6"; int code = 500; int sz = 10; method_throwExcp6Helper(sz, msg, code, ConstsTest1.BOOL1); method_throwExcp6Helper( sz, msg, code, 
ConstsTest1.BYTE5 ); method_throwExcp6Helper( sz, msg, code, ConstsTest1.SHORT5 ); method_throwExcp6Helper(sz, msg, code, ConstsTest1.INT5); method_throwExcp6Helper(sz, msg, code, ConstsTest1.LONG5); method_throwExcp6Helper(sz, msg, code, ConstsTest1.FLOAT5); method_throwExcp6Helper(sz, msg, code, ConstsTest1.DOUBLE5); method_throwExcp6Helper(sz, msg, code, ConstsTest1.STRING3); } #region Helper methods private void testS3Helper( string type, object value ) { S3 s = new S3( type, value ); Assert.AreEqual( type, s.tipe ); Assert.AreEqual( value, s.x ); s = new S3( null, null ); Assert.IsNull(s.tipe); Assert.IsNull( s.x ); s = new S3(); Assert.IsNull( s.tipe ); Assert.IsNull( s.x ); s.tipe = type; s.x = value; Assert.AreEqual( type, s.tipe ); Assert.AreEqual( value, s.x ); s.x = null; Assert.IsNull( s.x ); s.x = value; Assert.AreEqual( value, s.x ); } public void testS4Helper( int sz, string type, object value ) { object[] myObject = test.fillObject( sz, value ); S4 s = new S4( type, myObject ); Assert.AreEqual( type, s.tipe ); Assert.AreEqual( sz, myObject.Length ); for ( int i = 0; i < myObject.Length; i++ ) Assert.AreEqual( value, myObject[ i ] ); } private void testExcp5Helper( String msg, int code, Object value ) { Excp5 e = new Excp5( msg, code, value ); Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); Assert.AreEqual( value, e.x); e = new Excp5( null, null, null ); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x); e = new Excp5(); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x ); e.msg = msg; e.code = code; e.x = value; Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); Assert.AreEqual( value, e.x ); e.msg = null; e.code = null; e.x = null; Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x ); //Throwable t = e; Assert.IsTrue( e is Exception ); //assertFalse( t instanceof RuntimeException ); } private void testExcp6Helper( int sz, String msg, int code, Object value ) { Object[] myObject = test.fillObject( sz, value ); Excp6 e = new Excp6( msg, code, myObject ); Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); for ( int i = 0; i < myObject.Length; i++ ) { Assert.AreEqual( value, myObject[i] ); } e = new Excp6( null, null, null ); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x ); e = new Excp6(); Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x ); e.msg = msg; e.code = code; e.x = myObject; Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); for ( int i = 0; i < myObject.Length ; i++ ) { Assert.AreEqual( value, myObject[i] ); } e.msg = null; e.code = null; e.x = null; Assert.IsNull( e.msg ); Assert.IsNull( e.code ); Assert.IsNull( e.x ); //Exception t = e; Assert.IsTrue( e is Exception ); // assertFalse( t is RuntimeException ); } private void method_throwExcp5Helper( String msg, int code, Object value ) { try { test.throwExcp5( msg, code, value ); Assert.Fail( "throwExcp5 did not throw" ); } catch ( Excp5 e ) { Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); Assert.AreEqual( value, e.x ); } } private void method_throwExcp6Helper( int sz, String msg, int code, Object value ) { Object[] myObject = test.fillObject( sz, value ); try { test.throwExcp6( msg, code, myObject ); Assert.Fail( "throwExcp6 did not throw" ); } catch ( Excp6 e ) { Assert.AreEqual( msg, e.msg ); Assert.AreEqual( code, e.code ); for ( int i = 0; i < myObject.Length; i++ ) { Assert.AreEqual( value, e.x[ i ] ); } } } private void method_fillObjectHelper(
int sz, Object value ) { Object[] myObject = test.fillObject( sz, value ); for ( int i = 0; i < myObject.Length; i++ ) { Assert.AreEqual( value, myObject[ i ] ); } } #endregion private Test1 test = new FakeTest1(); } }
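// Illustrative sketch (not part of the original source): the method_blow*/method_beets*/
// method_throwExcp* tests above all repeat a try { ... Assert.Fail(...) } catch pattern.
// A small generic helper like the one below could factor that out; it is shown only as a
// demonstration and is not used by the fixture.
using System;
using NUnit.Framework;

namespace etch.tests
{
    public static class ExpectThrows
    {
        /// <summary>Runs <paramref name="action"/> and asserts that it throws a T, returning the caught exception.</summary>
        public static T Run<T>(Action action) where T : Exception
        {
            try
            {
                action();
            }
            catch (T e)
            {
                return e;
            }

            Assert.Fail(typeof(T).Name + " was not thrown");
            return null; // unreachable; Assert.Fail throws
        }
    }

    // Example usage against the fixture's FakeTest1 instance:
    //   Excp1 e = ExpectThrows.Run<Excp1>(() => test.blow("foo", 2));
    //   Assert.AreEqual(2, e.code);
}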
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using osu.Framework; using osu.Framework.Allocation; using osu.Framework.Bindables; using osu.Framework.Extensions.Color4Extensions; using osu.Framework.Graphics; using osu.Framework.Graphics.Colour; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Shapes; using osu.Framework.Input.Events; using osu.Game.Scoring; using osu.Game.Screens.Ranking.Contracted; using osu.Game.Screens.Ranking.Expanded; using osu.Game.Users; using osuTK; using osuTK.Graphics; namespace osu.Game.Screens.Ranking { public class ScorePanel : CompositeDrawable, IStateful<PanelState> { /// <summary> /// Width of the panel when contracted. /// </summary> public const float CONTRACTED_WIDTH = 130; /// <summary> /// Height of the panel when contracted. /// </summary> private const float contracted_height = 385; /// <summary> /// Width of the panel when expanded. /// </summary> public const float EXPANDED_WIDTH = 360; /// <summary> /// Height of the panel when expanded. /// </summary> private const float expanded_height = 586; /// <summary> /// Height of the top layer when the panel is expanded. /// </summary> private const float expanded_top_layer_height = 53; /// <summary> /// Height of the top layer when the panel is contracted. /// </summary> private const float contracted_top_layer_height = 30; /// <summary> /// Duration for the panel to resize into its expanded/contracted size. /// </summary> public const double RESIZE_DURATION = 200; /// <summary> /// Delay after <see cref="RESIZE_DURATION"/> before the top layer is expanded. /// </summary> public const double TOP_LAYER_EXPAND_DELAY = 100; /// <summary> /// Duration for the top layer expansion. /// </summary> private const double top_layer_expand_duration = 200; /// <summary> /// Duration for the panel contents to fade in. /// </summary> private const double content_fade_duration = 50; private static readonly ColourInfo expanded_top_layer_colour = ColourInfo.GradientVertical(Color4Extensions.FromHex("#444"), Color4Extensions.FromHex("#333")); private static readonly ColourInfo expanded_middle_layer_colour = ColourInfo.GradientVertical(Color4Extensions.FromHex("#555"), Color4Extensions.FromHex("#333")); private static readonly Color4 contracted_top_layer_colour = Color4Extensions.FromHex("#353535"); private static readonly Color4 contracted_middle_layer_colour = Color4Extensions.FromHex("#353535"); public event Action<PanelState> StateChanged; /// <summary> /// The position of the score in the rankings. /// </summary> public readonly Bindable<int?> ScorePosition = new Bindable<int?>(); /// <summary> /// An action to be invoked if this <see cref="ScorePanel"/> is clicked while in an expanded state. 
/// </summary> public Action PostExpandAction; public readonly ScoreInfo Score; private bool displayWithFlair; private Container content; private Container topLayerContainer; private Drawable topLayerBackground; private Container topLayerContentContainer; private Drawable topLayerContent; private Container middleLayerContainer; private Drawable middleLayerBackground; private Container middleLayerContentContainer; private Drawable middleLayerContent; public ScorePanel(ScoreInfo score, bool isNewLocalScore = false) { Score = score; displayWithFlair = isNewLocalScore; ScorePosition.Value = score.Position; } [BackgroundDependencyLoader] private void load() { // ScorePanel doesn't include the top extruding area in its own size. // Adding a manual offset here allows the expanded version to take on an "acceptable" vertical centre when at 100% UI scale. const float vertical_fudge = 20; InternalChild = content = new Container { Anchor = Anchor.Centre, Origin = Anchor.Centre, Size = new Vector2(40), Y = vertical_fudge, Children = new Drawable[] { topLayerContainer = new Container { Name = "Top layer", RelativeSizeAxes = Axes.X, Alpha = 0, Height = 120, Children = new Drawable[] { new Container { RelativeSizeAxes = Axes.Both, CornerRadius = 20, CornerExponent = 2.5f, Masking = true, Child = topLayerBackground = new Box { RelativeSizeAxes = Axes.Both } }, topLayerContentContainer = new Container { RelativeSizeAxes = Axes.Both } } }, middleLayerContainer = new Container { Name = "Middle layer", RelativeSizeAxes = Axes.Both, Children = new Drawable[] { new Container { RelativeSizeAxes = Axes.Both, CornerRadius = 20, CornerExponent = 2.5f, Masking = true, Children = new[] { middleLayerBackground = new Box { RelativeSizeAxes = Axes.Both }, new UserCoverBackground { RelativeSizeAxes = Axes.Both, User = Score.User, Colour = ColourInfo.GradientVertical(Color4.White.Opacity(0.5f), Color4Extensions.FromHex("#444").Opacity(0)) } } }, middleLayerContentContainer = new Container { RelativeSizeAxes = Axes.Both } } } } }; } protected override void LoadComplete() { base.LoadComplete(); updateState(); topLayerBackground.FinishTransforms(false, nameof(Colour)); middleLayerBackground.FinishTransforms(false, nameof(Colour)); } private PanelState state = PanelState.Contracted; public PanelState State { get => state; set { if (state == value) return; state = value; if (IsLoaded) updateState(); StateChanged?.Invoke(value); } } private void updateState() { topLayerContent?.FadeOut(content_fade_duration).Expire(); middleLayerContent?.FadeOut(content_fade_duration).Expire(); switch (state) { case PanelState.Expanded: Size = new Vector2(EXPANDED_WIDTH, expanded_height); topLayerBackground.FadeColour(expanded_top_layer_colour, RESIZE_DURATION, Easing.OutQuint); middleLayerBackground.FadeColour(expanded_middle_layer_colour, RESIZE_DURATION, Easing.OutQuint); topLayerContentContainer.Add(topLayerContent = new ExpandedPanelTopContent(Score.User) { Alpha = 0 }); middleLayerContentContainer.Add(middleLayerContent = new ExpandedPanelMiddleContent(Score, displayWithFlair) { Alpha = 0 }); // only the first expanded display should happen with flair. 
displayWithFlair = false; break; case PanelState.Contracted: Size = new Vector2(CONTRACTED_WIDTH, contracted_height); topLayerBackground.FadeColour(contracted_top_layer_colour, RESIZE_DURATION, Easing.OutQuint); middleLayerBackground.FadeColour(contracted_middle_layer_colour, RESIZE_DURATION, Easing.OutQuint); topLayerContentContainer.Add(topLayerContent = new ContractedPanelTopContent { ScorePosition = { BindTarget = ScorePosition }, Alpha = 0 }); middleLayerContentContainer.Add(middleLayerContent = new ContractedPanelMiddleContent(Score) { Alpha = 0 }); break; } content.ResizeTo(Size, RESIZE_DURATION, Easing.OutQuint); bool topLayerExpanded = topLayerContainer.Y < 0; // If the top layer was already expanded, then we don't need to wait for the resize and can instead transform immediately. This looks better when changing the panel state. using (BeginDelayedSequence(topLayerExpanded ? 0 : RESIZE_DURATION + TOP_LAYER_EXPAND_DELAY)) { topLayerContainer.FadeIn(); switch (state) { case PanelState.Expanded: topLayerContainer.MoveToY(-expanded_top_layer_height / 2, top_layer_expand_duration, Easing.OutQuint); middleLayerContainer.MoveToY(expanded_top_layer_height / 2, top_layer_expand_duration, Easing.OutQuint); break; case PanelState.Contracted: topLayerContainer.MoveToY(-contracted_top_layer_height / 2, top_layer_expand_duration, Easing.OutQuint); middleLayerContainer.MoveToY(contracted_top_layer_height / 2, top_layer_expand_duration, Easing.OutQuint); break; } topLayerContent?.FadeIn(content_fade_duration); middleLayerContent?.FadeIn(content_fade_duration); } } public override Vector2 Size { get => base.Size; set { base.Size = value; // Auto-size isn't used to avoid 1-frame issues and because the score panel is removed/re-added to the container. if (trackingContainer != null) trackingContainer.Size = value; } } protected override bool OnClick(ClickEvent e) { if (State == PanelState.Contracted) { State = PanelState.Expanded; return true; } PostExpandAction?.Invoke(); return true; } public override bool ReceivePositionalInputAt(Vector2 screenSpacePos) => base.ReceivePositionalInputAt(screenSpacePos) || topLayerContainer.ReceivePositionalInputAt(screenSpacePos) || middleLayerContainer.ReceivePositionalInputAt(screenSpacePos); private ScorePanelTrackingContainer trackingContainer; /// <summary> /// Creates a <see cref="ScorePanelTrackingContainer"/> which this <see cref="ScorePanel"/> can reside inside. /// The <see cref="ScorePanelTrackingContainer"/> will track the size of this <see cref="ScorePanel"/>. /// </summary> /// <remarks> /// This <see cref="ScorePanel"/> is immediately added as a child of the <see cref="ScorePanelTrackingContainer"/>. /// </remarks> /// <returns>The <see cref="ScorePanelTrackingContainer"/>.</returns> /// <exception cref="InvalidOperationException">If a <see cref="ScorePanelTrackingContainer"/> already exists.</exception> public ScorePanelTrackingContainer CreateTrackingContainer() { if (trackingContainer != null) throw new InvalidOperationException("A score panel container has already been created."); return trackingContainer = new ScorePanelTrackingContainer(this); } } }
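// Illustrative sketch (not part of the original source): one way results-screen code could host
// the ScorePanel above. How the ScoreInfo is obtained is outside the scope of this sketch, and
// the class and namespace names here are assumptions for demonstration.
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Game.Scoring;

namespace osu.Game.Screens.Ranking.Sketch
{
    public class ScorePanelHostSketch : CompositeDrawable
    {
        private readonly ScorePanel panel;

        public ScorePanelHostSketch(ScoreInfo score)
        {
            panel = new ScorePanel(score, isNewLocalScore: true)
            {
                Anchor = Anchor.Centre,
                Origin = Anchor.Centre,
            };

            // CreateTrackingContainer() adds the panel as its child and keeps the container
            // sized to the panel through the expand/contract animations defined above.
            InternalChild = panel.CreateTrackingContainer();
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();

            // Expanding after load plays the resize and top-layer transitions from updateState().
            panel.State = PanelState.Expanded;
        }
    }
}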
// This file was created automatically, do not modify the contents of this file. // ReSharper disable InvalidXmlDocComment // ReSharper disable InconsistentNaming // ReSharper disable CheckNamespace // ReSharper disable MemberCanBePrivate.Global using System; using System.Runtime.InteropServices; // Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\Components\PawnNoiseEmitterComponent.h:19 namespace UnrealEngine { [ManageType("ManagePawnNoiseEmitterComponent")] public partial class ManagePawnNoiseEmitterComponent : UPawnNoiseEmitterComponent, IManageWrapper { public ManagePawnNoiseEmitterComponent(IntPtr adress) : base(adress) { } #region DLLInmport [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_Activate(IntPtr self, bool bReset); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_BeginPlay(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_CreateRenderState_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_Deactivate(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_DestroyComponent(IntPtr self, bool bPromoteChildren); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_DestroyRenderState_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_InitializeComponent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_InvalidateLightingCacheDetailed(IntPtr self, bool bInvalidateBuildEnqueuedLighting, bool bTranslationOnly); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnActorEnableCollisionChanged(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnComponentCreated(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnComponentDestroyed(IntPtr self, bool bDestroyingHierarchy); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnCreatePhysicsState(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnDestroyPhysicsState(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnRegister(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void 
E__Supper__UPawnNoiseEmitterComponent_OnRep_IsActive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnUnregister(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_RegisterComponentTickFunctions(IntPtr self, bool bRegister); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SendRenderDynamicData_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SendRenderTransform_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SetActive(IntPtr self, bool bNewActive, bool bReset); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SetAutoActivate(IntPtr self, bool bNewAutoActivate); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SetComponentTickEnabled(IntPtr self, bool bEnabled); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_SetComponentTickEnabledAsync(IntPtr self, bool bEnabled); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_ToggleActive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_UninitializeComponent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_BeginDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_FinishDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_MarkAsEditorOnlySubobject(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostCDOContruct(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostEditImport(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostInitProperties(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostLoad(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostNetReceive(IntPtr self); 
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostRepNotifies(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PostSaveRoot(IntPtr self, bool bCleanupIsRequired); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PreDestroyFromReplication(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_PreNetReceive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_ShutdownAfterError(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_CreateCluster(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__UPawnNoiseEmitterComponent_OnClusterMarkedAsPendingKill(IntPtr self); #endregion #region Methods /// <summary> /// Activates the SceneComponent, should be overridden by native child classes. /// </summary> /// <param name="bReset">Whether the activation should happen even if ShouldActivate returns false.</param> public override void Activate(bool bReset) => E__Supper__UPawnNoiseEmitterComponent_Activate(this, bReset); /// <summary> /// BeginsPlay for the component. Occurs at level startup or actor spawn. This is before BeginPlay (Actor or Component). /// <para>All Components (that want initialization) in the level will be Initialized on load before any </para> /// Actor/Component gets BeginPlay. /// <para>Requires component to be registered and initialized. </para> /// </summary> public override void BeginPlay() => E__Supper__UPawnNoiseEmitterComponent_BeginPlay(this); /// <summary> /// Used to create any rendering thread information for this component /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void CreateRenderState_Concurrent() => E__Supper__UPawnNoiseEmitterComponent_CreateRenderState_Concurrent(this); /// <summary> /// Deactivates the SceneComponent. /// </summary> public override void Deactivate() => E__Supper__UPawnNoiseEmitterComponent_Deactivate(this); /// <summary> /// Unregister the component, remove it from its outer Actor's Components array and mark for pending kill. /// </summary> public override void DestroyComponent(bool bPromoteChildren) => E__Supper__UPawnNoiseEmitterComponent_DestroyComponent(this, bPromoteChildren); /// <summary> /// Used to shut down any rendering thread structure for this component /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void DestroyRenderState_Concurrent() => E__Supper__UPawnNoiseEmitterComponent_DestroyRenderState_Concurrent(this); /// <summary> /// Initializes the component. Occurs at level startup or actor spawn. This is before BeginPlay (Actor or Component). 
/// <para>All Components in the level will be Initialized on load before any Actor/Component gets BeginPlay </para> /// Requires component to be registered, and bWantsInitializeComponent to be true. /// </summary> public override void InitializeComponent() => E__Supper__UPawnNoiseEmitterComponent_InitializeComponent(this); /// <summary> /// Called when this actor component has moved, allowing it to discard statically cached lighting information. /// </summary> public override void InvalidateLightingCacheDetailed(bool bInvalidateBuildEnqueuedLighting, bool bTranslationOnly) => E__Supper__UPawnNoiseEmitterComponent_InvalidateLightingCacheDetailed(this, bInvalidateBuildEnqueuedLighting, bTranslationOnly); /// <summary> /// Called on each component when the Actor's bEnableCollisionChanged flag changes /// </summary> public override void OnActorEnableCollisionChanged() => E__Supper__UPawnNoiseEmitterComponent_OnActorEnableCollisionChanged(this); /// <summary> /// Called when a component is created (not loaded). This can happen in the editor or during gameplay /// </summary> public override void OnComponentCreated() => E__Supper__UPawnNoiseEmitterComponent_OnComponentCreated(this); /// <summary> /// Called when a component is destroyed /// </summary> /// <param name="bDestroyingHierarchy">True if the entire component hierarchy is being torn down, allows avoiding expensive operations</param> public override void OnComponentDestroyed(bool bDestroyingHierarchy) => E__Supper__UPawnNoiseEmitterComponent_OnComponentDestroyed(this, bDestroyingHierarchy); /// <summary> /// Used to create any physics engine information for this component /// </summary> protected override void OnCreatePhysicsState() => E__Supper__UPawnNoiseEmitterComponent_OnCreatePhysicsState(this); /// <summary> /// Used to shut down any physics engine structure for this component /// </summary> protected override void OnDestroyPhysicsState() => E__Supper__UPawnNoiseEmitterComponent_OnDestroyPhysicsState(this); /// <summary> /// Called when a component is registered, after Scene is set, but before CreateRenderState_Concurrent or OnCreatePhysicsState are called. /// </summary> protected override void OnRegister() => E__Supper__UPawnNoiseEmitterComponent_OnRegister(this); public override void OnRep_IsActive() => E__Supper__UPawnNoiseEmitterComponent_OnRep_IsActive(this); /// <summary> /// Called when a component is unregistered. Called after DestroyRenderState_Concurrent and OnDestroyPhysicsState are called.
/// </summary> protected override void OnUnregister() => E__Supper__UPawnNoiseEmitterComponent_OnUnregister(this); /// <summary> /// Virtual call chain to register all tick functions /// </summary> /// <param name="bRegister">true to register, false, to unregister</param> protected override void RegisterComponentTickFunctions(bool bRegister) => E__Supper__UPawnNoiseEmitterComponent_RegisterComponentTickFunctions(this, bRegister); /// <summary> /// Called to send dynamic data for this component to the rendering thread /// </summary> protected override void SendRenderDynamicData_Concurrent() => E__Supper__UPawnNoiseEmitterComponent_SendRenderDynamicData_Concurrent(this); /// <summary> /// Called to send a transform update for this component to the rendering thread /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void SendRenderTransform_Concurrent() => E__Supper__UPawnNoiseEmitterComponent_SendRenderTransform_Concurrent(this); /// <summary> /// Sets whether the component is active or not /// </summary> /// <param name="bNewActive">The new active state of the component</param> /// <param name="bReset">Whether the activation should happen even if ShouldActivate returns false.</param> public override void SetActive(bool bNewActive, bool bReset) => E__Supper__UPawnNoiseEmitterComponent_SetActive(this, bNewActive, bReset); /// <summary> /// Sets whether the component should be auto activate or not. Only safe during construction scripts. /// </summary> /// <param name="bNewAutoActivate">The new auto activate state of the component</param> public override void SetAutoActivate(bool bNewAutoActivate) => E__Supper__UPawnNoiseEmitterComponent_SetAutoActivate(this, bNewAutoActivate); /// <summary> /// Set this component's tick functions to be enabled or disabled. Only has an effect if the function is registered /// </summary> /// <param name="bEnabled">Whether it should be enabled or not</param> public override void SetComponentTickEnabled(bool bEnabled) => E__Supper__UPawnNoiseEmitterComponent_SetComponentTickEnabled(this, bEnabled); /// <summary> /// Spawns a task on GameThread that will call SetComponentTickEnabled /// </summary> /// <param name="bEnabled">Whether it should be enabled or not</param> public override void SetComponentTickEnabledAsync(bool bEnabled) => E__Supper__UPawnNoiseEmitterComponent_SetComponentTickEnabledAsync(this, bEnabled); /// <summary> /// Toggles the active state of the component /// </summary> public override void ToggleActive() => E__Supper__UPawnNoiseEmitterComponent_ToggleActive(this); /// <summary> /// Handle this component being Uninitialized. /// <para>Called from AActor::EndPlay only if bHasBeenInitialized is true </para> /// </summary> public override void UninitializeComponent() => E__Supper__UPawnNoiseEmitterComponent_UninitializeComponent(this); /// <summary> /// Called before destroying the object. This is called immediately upon deciding to destroy the object, to allow the object to begin an /// <para>asynchronous cleanup process. </para> /// </summary> public override void BeginDestroy() => E__Supper__UPawnNoiseEmitterComponent_BeginDestroy(this); /// <summary> /// Called to finish destroying the object. After UObject::FinishDestroy is called, the object's memory should no longer be accessed. 
/// <para>@warning Because properties are destroyed here, Super::FinishDestroy() should always be called at the end of your child class's FinishDestroy() method, rather than at the beginning. </para> /// </summary> public override void FinishDestroy() => E__Supper__UPawnNoiseEmitterComponent_FinishDestroy(this); /// <summary> /// Called during subobject creation to mark this component as editor only, which causes it to get stripped in packaged builds /// </summary> public override void MarkAsEditorOnlySubobject() => E__Supper__UPawnNoiseEmitterComponent_MarkAsEditorOnlySubobject(this); /// <summary> /// Called after the C++ constructor has run on the CDO for a class. This is an obscure routine used to deal with the recursion /// <para>in the construction of the default materials </para> /// </summary> public override void PostCDOContruct() => E__Supper__UPawnNoiseEmitterComponent_PostCDOContruct(this); /// <summary> /// Called after importing property values for this object (paste, duplicate or .t3d import) /// <para>Allow the object to perform any cleanup for properties which shouldn't be duplicated or </para> /// are unsupported by the script serialization /// </summary> public override void PostEditImport() => E__Supper__UPawnNoiseEmitterComponent_PostEditImport(this); /// <summary> /// Called after the C++ constructor and after the properties have been initialized, including those loaded from config. /// <para>This is called before any serialization or other setup has happened. </para> /// </summary> public override void PostInitProperties() => E__Supper__UPawnNoiseEmitterComponent_PostInitProperties(this); /// <summary> /// Do any object-specific cleanup required immediately after loading an object. /// <para>This is not called for newly-created objects, and by default will always execute on the game thread. </para> /// </summary> public override void PostLoad() => E__Supper__UPawnNoiseEmitterComponent_PostLoad(this); /// <summary> /// Called right after receiving a bunch /// </summary> public override void PostNetReceive() => E__Supper__UPawnNoiseEmitterComponent_PostNetReceive(this); /// <summary> /// Called right after calling all OnRep notifies (called even when there are no notifies) /// </summary> public override void PostRepNotifies() => E__Supper__UPawnNoiseEmitterComponent_PostRepNotifies(this); /// <summary> /// Called from within SavePackage on the passed in base/root object. /// <para>This function is called after the package has been saved and can perform cleanup. </para> /// </summary> /// <param name="bCleanupIsRequired">Whether PreSaveRoot dirtied state that needs to be cleaned up</param> public override void PostSaveRoot(bool bCleanupIsRequired) => E__Supper__UPawnNoiseEmitterComponent_PostSaveRoot(this, bCleanupIsRequired); /// <summary> /// Called right before being marked for destruction due to network replication /// </summary> public override void PreDestroyFromReplication() => E__Supper__UPawnNoiseEmitterComponent_PreDestroyFromReplication(this); /// <summary> /// Called right before receiving a bunch /// </summary> public override void PreNetReceive() => E__Supper__UPawnNoiseEmitterComponent_PreNetReceive(this); /// <summary> /// After a critical error, perform any mission-critical cleanup, such as restoring the video mode or releasing hardware resources.
/// </summary> public override void ShutdownAfterError() => E__Supper__UPawnNoiseEmitterComponent_ShutdownAfterError(this); /// <summary> /// Called after PostLoad to create UObject cluster /// </summary> public override void CreateCluster() => E__Supper__UPawnNoiseEmitterComponent_CreateCluster(this); /// <summary> /// Called during Garbage Collection to perform additional cleanup when the cluster is about to be destroyed due to PendingKill flag being set on it. /// </summary> public override void OnClusterMarkedAsPendingKill() => E__Supper__UPawnNoiseEmitterComponent_OnClusterMarkedAsPendingKill(this); #endregion public static implicit operator IntPtr(ManagePawnNoiseEmitterComponent self) { return self?.NativePointer ?? IntPtr.Zero; } public static implicit operator ManagePawnNoiseEmitterComponent(ObjectPointerDescription PtrDesc) { return NativeManager.GetWrapper<ManagePawnNoiseEmitterComponent>(PtrDesc); } } }
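// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated bindings above).
// It shows how game code might drive the managed wrapper through the lifecycle
// methods exposed above, assuming the generated namespace that declares
// ManagePawnNoiseEmitterComponent is in scope and an instance has already been
// obtained from the engine (e.g. via the ObjectPointerDescription conversion).
// The class and method names below are hypothetical examples, not library API.
// ---------------------------------------------------------------------------
using System;

public static class NoiseEmitterUsageSketch
{
    // Restart a pawn noise emitter component that game code already holds.
    public static void Restart(ManagePawnNoiseEmitterComponent emitter)
    {
        // Deactivate, then reactivate even if ShouldActivate would return
        // false (bReset = true), matching the SetActive overload above.
        emitter.Deactivate();
        emitter.SetActive(bNewActive: true, bReset: true);

        // Tick enabling only has an effect once the component is registered.
        emitter.SetComponentTickEnabled(true);

        // The implicit conversion declared above exposes the native pointer;
        // IntPtr.Zero means no native object is attached to this wrapper.
        IntPtr native = emitter;
        if (native == IntPtr.Zero)
            throw new InvalidOperationException("Component has no native counterpart.");
    }
}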
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; #if !netstandard using Internal.Runtime.CompilerServices; #else using System.Runtime.CompilerServices; #endif namespace System { internal static partial class SpanHelpers { public static int IndexOf<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. T valueHead = value; ref T valueTail = ref Unsafe.Add(ref value, 1); int valueTailLength = valueLength - 1; int index = 0; for (; ; ) { Debug.Assert(0 <= index && index <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength". int remainingSearchSpaceLength = searchSpaceLength - index - valueTailLength; if (remainingSearchSpaceLength <= 0) break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there. // Do a quick search for the first element of "value". int relativeIndex = IndexOf(ref Unsafe.Add(ref searchSpace, index), valueHead, remainingSearchSpaceLength); if (relativeIndex == -1) break; index += relativeIndex; // Found the first element of "value". See if the tail matches. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, index + 1), ref valueTail, valueTailLength)) return index; // The tail matched. Return a successful find. index++; } return -1; } public static unsafe int IndexOf<T>(ref T searchSpace, T value, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); IntPtr index = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations while (length >= 8) { length -= 8; if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; if (value.Equals(Unsafe.Add(ref searchSpace, index + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, index + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, index + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, index + 4))) goto Found4; if (value.Equals(Unsafe.Add(ref searchSpace, index + 5))) goto Found5; if (value.Equals(Unsafe.Add(ref searchSpace, index + 6))) goto Found6; if (value.Equals(Unsafe.Add(ref searchSpace, index + 7))) goto Found7; index += 8; } if (length >= 4) { length -= 4; if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; if (value.Equals(Unsafe.Add(ref searchSpace, index + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, index + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, index + 3))) goto Found3; index += 4; } while (length > 0) { if (value.Equals(Unsafe.Add(ref searchSpace, index))) goto Found; index += 1; length--; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return (int)(byte*)index; Found1: return (int)(byte*)(index + 1); Found2: return (int)(byte*)(index + 2); Found3: return (int)(byte*)(index + 3); Found4: return (int)(byte*)(index + 4); Found5: return (int)(byte*)(index + 5); Found6: return (int)(byte*)(index + 6); Found7: return (int)(byte*)(index + 7); } public static int IndexOfAny<T>(ref T searchSpace, T value0, T value1, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; int index = 0; while ((length - index) >= 8) { lookUp = 
Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, index + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, index + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, index + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, index + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found7; index += 8; } if ((length - index) >= 4) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; index += 4; } while (index < length) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; index++; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return index; Found1: return index + 1; Found2: return index + 2; Found3: return index + 3; Found4: return index + 4; Found5: return index + 5; Found6: return index + 6; Found7: return index + 7; } public static int IndexOfAny<T>(ref T searchSpace, T value0, T value1, T value2, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; int index = 0; while ((length - index) >= 8) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, index + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, index + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, index + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, index + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found7; index += 8; } if ((length - index) >= 4) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; lookUp = Unsafe.Add(ref searchSpace, index + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, index + 2); if (value0.Equals(lookUp) || 
value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, index + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; index += 4; } while (index < length) { lookUp = Unsafe.Add(ref searchSpace, index); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; index++; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return index; Found1: return index + 1; Found2: return index + 2; Found3: return index + 3; Found4: return index + 4; Found5: return index + 5; Found6: return index + 6; Found7: return index + 7; } public static int IndexOfAny<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. int index = -1; for (int i = 0; i < valueLength; i++) { var tempIndex = IndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength); if ((uint)tempIndex < (uint)index) { index = tempIndex; // Reduce space for search, cause we don't care if we find the search value after the index of a previously found value searchSpaceLength = tempIndex; if (index == 0) break; } } return index; } public static int LastIndexOf<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. T valueHead = value; ref T valueTail = ref Unsafe.Add(ref value, 1); int valueTailLength = valueLength - 1; int index = 0; for (; ; ) { Debug.Assert(0 <= index && index <= searchSpaceLength); // Ensures no deceptive underflows in the computation of "remainingSearchSpaceLength". int remainingSearchSpaceLength = searchSpaceLength - index - valueTailLength; if (remainingSearchSpaceLength <= 0) break; // The unsearched portion is now shorter than the sequence we're looking for. So it can't be there. // Do a quick search for the first element of "value". int relativeIndex = LastIndexOf(ref searchSpace, valueHead, remainingSearchSpaceLength); if (relativeIndex == -1) break; // Found the first element of "value". See if the tail matches. if (SequenceEqual(ref Unsafe.Add(ref searchSpace, relativeIndex + 1), ref valueTail, valueTailLength)) return relativeIndex; // The tail matched. Return a successful find. 
index += remainingSearchSpaceLength - relativeIndex; } return -1; } public static int LastIndexOf<T>(ref T searchSpace, T value, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); while (length >= 8) { length -= 8; if (value.Equals(Unsafe.Add(ref searchSpace, length + 7))) goto Found7; if (value.Equals(Unsafe.Add(ref searchSpace, length + 6))) goto Found6; if (value.Equals(Unsafe.Add(ref searchSpace, length + 5))) goto Found5; if (value.Equals(Unsafe.Add(ref searchSpace, length + 4))) goto Found4; if (value.Equals(Unsafe.Add(ref searchSpace, length + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, length + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, length + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } if (length >= 4) { length -= 4; if (value.Equals(Unsafe.Add(ref searchSpace, length + 3))) goto Found3; if (value.Equals(Unsafe.Add(ref searchSpace, length + 2))) goto Found2; if (value.Equals(Unsafe.Add(ref searchSpace, length + 1))) goto Found1; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } while (length > 0) { length--; if (value.Equals(Unsafe.Add(ref searchSpace, length))) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int LastIndexOfAny<T>(ref T searchSpace, T value0, T value1, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; while (length >= 8) { length -= 8; lookUp = Unsafe.Add(ref searchSpace, length + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found7; lookUp = Unsafe.Add(ref searchSpace, length + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, length + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, length + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } if (length >= 4) { length -= 4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } while (length > 0) { length--; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp)) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int 
LastIndexOfAny<T>(ref T searchSpace, T value0, T value1, T value2, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); T lookUp; while (length >= 8) { length -= 8; lookUp = Unsafe.Add(ref searchSpace, length + 7); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found7; lookUp = Unsafe.Add(ref searchSpace, length + 6); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found6; lookUp = Unsafe.Add(ref searchSpace, length + 5); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found5; lookUp = Unsafe.Add(ref searchSpace, length + 4); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } if (length >= 4) { length -= 4; lookUp = Unsafe.Add(ref searchSpace, length + 3); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found3; lookUp = Unsafe.Add(ref searchSpace, length + 2); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found2; lookUp = Unsafe.Add(ref searchSpace, length + 1); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found1; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } while (length > 0) { length--; lookUp = Unsafe.Add(ref searchSpace, length); if (value0.Equals(lookUp) || value1.Equals(lookUp) || value2.Equals(lookUp)) goto Found; } return -1; Found: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return length; Found1: return length + 1; Found2: return length + 2; Found3: return length + 3; Found4: return length + 4; Found5: return length + 5; Found6: return length + 6; Found7: return length + 7; } public static int LastIndexOfAny<T>(ref T searchSpace, int searchSpaceLength, ref T value, int valueLength) where T : IEquatable<T> { Debug.Assert(searchSpaceLength >= 0); Debug.Assert(valueLength >= 0); if (valueLength == 0) return 0; // A zero-length sequence is always treated as "found" at the start of the search space. 
int index = -1; for (int i = 0; i < valueLength; i++) { var tempIndex = LastIndexOf(ref searchSpace, Unsafe.Add(ref value, i), searchSpaceLength); if (tempIndex > index) index = tempIndex; } return index; } public static bool SequenceEqual<T>(ref T first, ref T second, int length) where T : IEquatable<T> { Debug.Assert(length >= 0); if (Unsafe.AreSame(ref first, ref second)) goto Equal; IntPtr index = (IntPtr)0; // Use IntPtr for arithmetic to avoid unnecessary 64->32->64 truncations while (length >= 8) { length -= 8; if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; if (!Unsafe.Add(ref first, index + 1).Equals(Unsafe.Add(ref second, index + 1))) goto NotEqual; if (!Unsafe.Add(ref first, index + 2).Equals(Unsafe.Add(ref second, index + 2))) goto NotEqual; if (!Unsafe.Add(ref first, index + 3).Equals(Unsafe.Add(ref second, index + 3))) goto NotEqual; if (!Unsafe.Add(ref first, index + 4).Equals(Unsafe.Add(ref second, index + 4))) goto NotEqual; if (!Unsafe.Add(ref first, index + 5).Equals(Unsafe.Add(ref second, index + 5))) goto NotEqual; if (!Unsafe.Add(ref first, index + 6).Equals(Unsafe.Add(ref second, index + 6))) goto NotEqual; if (!Unsafe.Add(ref first, index + 7).Equals(Unsafe.Add(ref second, index + 7))) goto NotEqual; index += 8; } if (length >= 4) { length -= 4; if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; if (!Unsafe.Add(ref first, index + 1).Equals(Unsafe.Add(ref second, index + 1))) goto NotEqual; if (!Unsafe.Add(ref first, index + 2).Equals(Unsafe.Add(ref second, index + 2))) goto NotEqual; if (!Unsafe.Add(ref first, index + 3).Equals(Unsafe.Add(ref second, index + 3))) goto NotEqual; index += 4; } while (length > 0) { if (!Unsafe.Add(ref first, index).Equals(Unsafe.Add(ref second, index))) goto NotEqual; index += 1; length--; } Equal: return true; NotEqual: // Workaround for https://github.com/dotnet/coreclr/issues/13549 return false; } public static int SequenceCompareTo<T>(ref T first, int firstLength, ref T second, int secondLength) where T : IComparable<T> { Debug.Assert(firstLength >= 0); Debug.Assert(secondLength >= 0); var minLength = firstLength; if (minLength > secondLength) minLength = secondLength; for (int i = 0; i < minLength; i++) { int result = Unsafe.Add(ref first, i).CompareTo(Unsafe.Add(ref second, i)); if (result != 0) return result; } return firstLength.CompareTo(secondLength); } } }
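// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the file above). SpanHelpers is internal,
// so this demo exercises the same behaviour through the public span extension
// methods that are backed by these helpers; note that some element types
// (such as byte) may take specialized paths rather than the generic overloads
// shown here. Values are examples only.
// ---------------------------------------------------------------------------
using System;

internal static class SpanHelpersDemo
{
    private static void Main()
    {
        ReadOnlySpan<int> haystack = new[] { 1, 2, 3, 4, 2, 3, 5 };
        ReadOnlySpan<int> needle = new[] { 2, 3, 5 };

        // Sequence search: match the head element, then compare the tail,
        // mirroring IndexOf(ref T, int, ref T, int) above.
        Console.WriteLine(haystack.IndexOf(needle));        // 4

        // Single-value and multi-value forward searches.
        Console.WriteLine(haystack.IndexOf(3));             // 2
        Console.WriteLine(haystack.IndexOfAny(5, 4));       // 3

        // Searches from the end.
        Console.WriteLine(haystack.LastIndexOf(3));         // 5
        Console.WriteLine(haystack.LastIndexOfAny(1, 4));   // 3

        // Element-wise equality and ordering.
        Console.WriteLine(haystack.Slice(4, 3).SequenceEqual(needle)); // True
        Console.WriteLine(needle.SequenceCompareTo(haystack) > 0);     // True (2 > 1)
    }
}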
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/servicecontrol/v1/operation.proto #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Api.Servicecontrol.V1 { /// <summary>Holder for reflection information generated from google/api/servicecontrol/v1/operation.proto</summary> public static partial class OperationReflection { #region Descriptor /// <summary>File descriptor for google/api/servicecontrol/v1/operation.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static OperationReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Cixnb29nbGUvYXBpL3NlcnZpY2Vjb250cm9sL3YxL29wZXJhdGlvbi5wcm90", "bxIcZ29vZ2xlLmFwaS5zZXJ2aWNlY29udHJvbC52MRocZ29vZ2xlL2FwaS9h", "bm5vdGF0aW9ucy5wcm90bxosZ29vZ2xlL2FwaS9zZXJ2aWNlY29udHJvbC92", "MS9sb2dfZW50cnkucHJvdG8aL2dvb2dsZS9hcGkvc2VydmljZWNvbnRyb2wv", "djEvbWV0cmljX3ZhbHVlLnByb3RvGh9nb29nbGUvcHJvdG9idWYvdGltZXN0", "YW1wLnByb3RvIo8ECglPcGVyYXRpb24SFAoMb3BlcmF0aW9uX2lkGAEgASgJ", "EhYKDm9wZXJhdGlvbl9uYW1lGAIgASgJEhMKC2NvbnN1bWVyX2lkGAMgASgJ", "Ei4KCnN0YXJ0X3RpbWUYBCABKAsyGi5nb29nbGUucHJvdG9idWYuVGltZXN0", "YW1wEiwKCGVuZF90aW1lGAUgASgLMhouZ29vZ2xlLnByb3RvYnVmLlRpbWVz", "dGFtcBJDCgZsYWJlbHMYBiADKAsyMy5nb29nbGUuYXBpLnNlcnZpY2Vjb250", "cm9sLnYxLk9wZXJhdGlvbi5MYWJlbHNFbnRyeRJHChFtZXRyaWNfdmFsdWVf", "c2V0cxgHIAMoCzIsLmdvb2dsZS5hcGkuc2VydmljZWNvbnRyb2wudjEuTWV0", "cmljVmFsdWVTZXQSOwoLbG9nX2VudHJpZXMYCCADKAsyJi5nb29nbGUuYXBp", "LnNlcnZpY2Vjb250cm9sLnYxLkxvZ0VudHJ5EkYKCmltcG9ydGFuY2UYCyAB", "KA4yMi5nb29nbGUuYXBpLnNlcnZpY2Vjb250cm9sLnYxLk9wZXJhdGlvbi5J", "bXBvcnRhbmNlGi0KC0xhYmVsc0VudHJ5EgsKA2tleRgBIAEoCRINCgV2YWx1", "ZRgCIAEoCToCOAEiHwoKSW1wb3J0YW5jZRIHCgNMT1cQABIICgRISUdIEAFC", "gwEKIGNvbS5nb29nbGUuYXBpLnNlcnZpY2Vjb250cm9sLnYxQg5PcGVyYXRp", "b25Qcm90b1ABWkpnb29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9nb29nbGVh", "cGlzL2FwaS9zZXJ2aWNlY29udHJvbC92MTtzZXJ2aWNlY29udHJvbPgBAWIG", "cHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, global::Google.Api.Servicecontrol.V1.LogEntryReflection.Descriptor, global::Google.Api.Servicecontrol.V1.MetricValueReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.Servicecontrol.V1.Operation), global::Google.Api.Servicecontrol.V1.Operation.Parser, new[]{ "OperationId", "OperationName", "ConsumerId", "StartTime", "EndTime", "Labels", "MetricValueSets", "LogEntries", "Importance" }, null, new[]{ typeof(global::Google.Api.Servicecontrol.V1.Operation.Types.Importance) }, new pbr::GeneratedClrTypeInfo[] { null, }) })); } #endregion } #region Messages /// <summary> /// Represents information regarding an operation. 
/// </summary> public sealed partial class Operation : pb::IMessage<Operation> { private static readonly pb::MessageParser<Operation> _parser = new pb::MessageParser<Operation>(() => new Operation()); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<Operation> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Api.Servicecontrol.V1.OperationReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Operation() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Operation(Operation other) : this() { operationId_ = other.operationId_; operationName_ = other.operationName_; consumerId_ = other.consumerId_; StartTime = other.startTime_ != null ? other.StartTime.Clone() : null; EndTime = other.endTime_ != null ? other.EndTime.Clone() : null; labels_ = other.labels_.Clone(); metricValueSets_ = other.metricValueSets_.Clone(); logEntries_ = other.logEntries_.Clone(); importance_ = other.importance_; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Operation Clone() { return new Operation(this); } /// <summary>Field number for the "operation_id" field.</summary> public const int OperationIdFieldNumber = 1; private string operationId_ = ""; /// <summary> /// Identity of the operation. This must be unique within the scope of the /// service that generated the operation. If the service calls /// Check() and Report() on the same operation, the two calls should carry /// the same id. /// /// UUID version 4 is recommended, though not required. /// In scenarios where an operation is computed from existing information /// and an idempotent id is desirable for deduplication purpose, UUID version 5 /// is recommended. See RFC 4122 for details. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string OperationId { get { return operationId_; } set { operationId_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "operation_name" field.</summary> public const int OperationNameFieldNumber = 2; private string operationName_ = ""; /// <summary> /// Fully qualified name of the operation. Reserved for future use. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string OperationName { get { return operationName_; } set { operationName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "consumer_id" field.</summary> public const int ConsumerIdFieldNumber = 3; private string consumerId_ = ""; /// <summary> /// Identity of the consumer who is using the service. /// This field should be filled in for the operations initiated by a /// consumer, but not for service-initiated operations that are /// not related to a specific consumer. /// /// This can be in one of the following formats: /// project:&lt;project_id>, /// project_number:&lt;project_number>, /// api_key:&lt;api_key>. 
/// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string ConsumerId { get { return consumerId_; } set { consumerId_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "start_time" field.</summary> public const int StartTimeFieldNumber = 4; private global::Google.Protobuf.WellKnownTypes.Timestamp startTime_; /// <summary> /// Required. Start time of the operation. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::Google.Protobuf.WellKnownTypes.Timestamp StartTime { get { return startTime_; } set { startTime_ = value; } } /// <summary>Field number for the "end_time" field.</summary> public const int EndTimeFieldNumber = 5; private global::Google.Protobuf.WellKnownTypes.Timestamp endTime_; /// <summary> /// End time of the operation. /// Required when the operation is used in [ServiceController.Report][google.api.servicecontrol.v1.ServiceController.Report], /// but optional when the operation is used in [ServiceController.Check][google.api.servicecontrol.v1.ServiceController.Check]. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::Google.Protobuf.WellKnownTypes.Timestamp EndTime { get { return endTime_; } set { endTime_ = value; } } /// <summary>Field number for the "labels" field.</summary> public const int LabelsFieldNumber = 6; private static readonly pbc::MapField<string, string>.Codec _map_labels_codec = new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 50); private readonly pbc::MapField<string, string> labels_ = new pbc::MapField<string, string>(); /// <summary> /// Labels describing the operation. Only the following labels are allowed: /// /// - Labels describing monitored resources as defined in /// the service configuration. /// - Default labels of metric values. When specified, labels defined in the /// metric value override these default. /// - The following labels defined by Google Cloud Platform: /// - `cloud.googleapis.com/location` describing the location where the /// operation happened, /// - `servicecontrol.googleapis.com/user_agent` describing the user agent /// of the API request, /// - `servicecontrol.googleapis.com/service_agent` describing the service /// used to handle the API request (e.g. ESP), /// - `servicecontrol.googleapis.com/platform` describing the platform /// where the API is served (e.g. GAE, GCE, GKE). /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, string> Labels { get { return labels_; } } /// <summary>Field number for the "metric_value_sets" field.</summary> public const int MetricValueSetsFieldNumber = 7; private static readonly pb::FieldCodec<global::Google.Api.Servicecontrol.V1.MetricValueSet> _repeated_metricValueSets_codec = pb::FieldCodec.ForMessage(58, global::Google.Api.Servicecontrol.V1.MetricValueSet.Parser); private readonly pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.MetricValueSet> metricValueSets_ = new pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.MetricValueSet>(); /// <summary> /// Represents information about this operation. Each MetricValueSet /// corresponds to a metric defined in the service configuration. /// The data type used in the MetricValueSet must agree with /// the data type specified in the metric definition. 
/// /// Within a single operation, it is not allowed to have more than one /// MetricValue instances that have the same metric names and identical /// label value combinations. If a request has such duplicated MetricValue /// instances, the entire request is rejected with /// an invalid argument error. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.MetricValueSet> MetricValueSets { get { return metricValueSets_; } } /// <summary>Field number for the "log_entries" field.</summary> public const int LogEntriesFieldNumber = 8; private static readonly pb::FieldCodec<global::Google.Api.Servicecontrol.V1.LogEntry> _repeated_logEntries_codec = pb::FieldCodec.ForMessage(66, global::Google.Api.Servicecontrol.V1.LogEntry.Parser); private readonly pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.LogEntry> logEntries_ = new pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.LogEntry>(); /// <summary> /// Represents information to be logged. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Google.Api.Servicecontrol.V1.LogEntry> LogEntries { get { return logEntries_; } } /// <summary>Field number for the "importance" field.</summary> public const int ImportanceFieldNumber = 11; private global::Google.Api.Servicecontrol.V1.Operation.Types.Importance importance_ = 0; /// <summary> /// DO NOT USE. This is an experimental field. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::Google.Api.Servicecontrol.V1.Operation.Types.Importance Importance { get { return importance_; } set { importance_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as Operation); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(Operation other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (OperationId != other.OperationId) return false; if (OperationName != other.OperationName) return false; if (ConsumerId != other.ConsumerId) return false; if (!object.Equals(StartTime, other.StartTime)) return false; if (!object.Equals(EndTime, other.EndTime)) return false; if (!Labels.Equals(other.Labels)) return false; if(!metricValueSets_.Equals(other.metricValueSets_)) return false; if(!logEntries_.Equals(other.logEntries_)) return false; if (Importance != other.Importance) return false; return true; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (OperationId.Length != 0) hash ^= OperationId.GetHashCode(); if (OperationName.Length != 0) hash ^= OperationName.GetHashCode(); if (ConsumerId.Length != 0) hash ^= ConsumerId.GetHashCode(); if (startTime_ != null) hash ^= StartTime.GetHashCode(); if (endTime_ != null) hash ^= EndTime.GetHashCode(); hash ^= Labels.GetHashCode(); hash ^= metricValueSets_.GetHashCode(); hash ^= logEntries_.GetHashCode(); if (Importance != 0) hash ^= Importance.GetHashCode(); return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (OperationId.Length != 0) { output.WriteRawTag(10); output.WriteString(OperationId); } if (OperationName.Length != 0) { output.WriteRawTag(18); 
output.WriteString(OperationName); } if (ConsumerId.Length != 0) { output.WriteRawTag(26); output.WriteString(ConsumerId); } if (startTime_ != null) { output.WriteRawTag(34); output.WriteMessage(StartTime); } if (endTime_ != null) { output.WriteRawTag(42); output.WriteMessage(EndTime); } labels_.WriteTo(output, _map_labels_codec); metricValueSets_.WriteTo(output, _repeated_metricValueSets_codec); logEntries_.WriteTo(output, _repeated_logEntries_codec); if (Importance != 0) { output.WriteRawTag(88); output.WriteEnum((int) Importance); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (OperationId.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(OperationId); } if (OperationName.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(OperationName); } if (ConsumerId.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(ConsumerId); } if (startTime_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(StartTime); } if (endTime_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(EndTime); } size += labels_.CalculateSize(_map_labels_codec); size += metricValueSets_.CalculateSize(_repeated_metricValueSets_codec); size += logEntries_.CalculateSize(_repeated_logEntries_codec); if (Importance != 0) { size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Importance); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(Operation other) { if (other == null) { return; } if (other.OperationId.Length != 0) { OperationId = other.OperationId; } if (other.OperationName.Length != 0) { OperationName = other.OperationName; } if (other.ConsumerId.Length != 0) { ConsumerId = other.ConsumerId; } if (other.startTime_ != null) { if (startTime_ == null) { startTime_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); } StartTime.MergeFrom(other.StartTime); } if (other.endTime_ != null) { if (endTime_ == null) { endTime_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); } EndTime.MergeFrom(other.EndTime); } labels_.Add(other.labels_); metricValueSets_.Add(other.metricValueSets_); logEntries_.Add(other.logEntries_); if (other.Importance != 0) { Importance = other.Importance; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: input.SkipLastField(); break; case 10: { OperationId = input.ReadString(); break; } case 18: { OperationName = input.ReadString(); break; } case 26: { ConsumerId = input.ReadString(); break; } case 34: { if (startTime_ == null) { startTime_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); } input.ReadMessage(startTime_); break; } case 42: { if (endTime_ == null) { endTime_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); } input.ReadMessage(endTime_); break; } case 50: { labels_.AddEntriesFrom(input, _map_labels_codec); break; } case 58: { metricValueSets_.AddEntriesFrom(input, _repeated_metricValueSets_codec); break; } case 66: { logEntries_.AddEntriesFrom(input, _repeated_logEntries_codec); break; } case 88: { importance_ = (global::Google.Api.Servicecontrol.V1.Operation.Types.Importance) input.ReadEnum(); break; } } } } #region Nested types /// <summary>Container for nested types declared in the Operation message type.</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { /// <summary> /// Defines the importance 
of the data contained in the operation. /// </summary> public enum Importance { /// <summary> /// The API implementation may cache and aggregate the data. /// The data may be lost when rare and unexpected system failures occur. /// </summary> [pbr::OriginalName("LOW")] Low = 0, /// <summary> /// The API implementation doesn't cache and aggregate the data. /// If the method returns successfully, it's guaranteed that the data has /// been persisted in durable storage. /// </summary> [pbr::OriginalName("HIGH")] High = 1, } } #endregion } #endregion } #endregion Designer generated code
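// ---------------------------------------------------------------------------
// Illustrative round-trip sketch (not part of the generated code above).
// It builds an Operation, serializes it, and parses it back, assuming the
// Google.Protobuf runtime package is referenced; all field values used here
// are examples only.
// ---------------------------------------------------------------------------
using System;
using Google.Api.Servicecontrol.V1;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;

internal static class OperationExample
{
    private static void Main()
    {
        var operation = new Operation
        {
            OperationId = Guid.NewGuid().ToString(),             // UUID v4, as recommended above
            ConsumerId = "project:example-project",              // example consumer id format
            StartTime = Timestamp.FromDateTime(DateTime.UtcNow), // required start time
            Importance = Operation.Types.Importance.Low,
        };
        operation.Labels.Add("cloud.googleapis.com/location", "us-central1");

        // Wire round trip: ToByteArray/ParseFrom drive WriteTo/MergeFrom above.
        byte[] bytes = operation.ToByteArray();
        Operation parsed = Operation.Parser.ParseFrom(bytes);

        Console.WriteLine(parsed.Equals(operation)); // True: all fields survive the round trip
        Console.WriteLine(parsed);                   // JSON-style diagnostic string via ToString()
    }
}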