using System; using System.Text; using Lucene.Net.Documents; namespace Lucene.Net.Search { using Lucene.Net.Randomized.Generators; using NUnit.Framework; using System.Collections.Generic; using System.IO; using Directory = Lucene.Net.Store.Directory; using DirectoryReader = Lucene.Net.Index.DirectoryReader; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using Document = Documents.Document; using Field = Field; using IndexReader = Lucene.Net.Index.IndexReader; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using NumericDocValuesField = NumericDocValuesField; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; using Term = Lucene.Net.Index.Term; using TestUtil = Lucene.Net.Util.TestUtil; [SuppressCodecs("Lucene3x")] [TestFixture] public class TestSortRescorer : LuceneTestCase { internal IndexSearcher Searcher; internal DirectoryReader Reader; internal Directory Dir; [SetUp] public override void SetUp() { base.SetUp(); Dir = NewDirectory(); RandomIndexWriter iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone); Document doc = new Document(); doc.Add(NewStringField("id", "1", Field.Store.YES)); doc.Add(NewTextField("body", "some contents and more contents", Field.Store.NO)); doc.Add(new NumericDocValuesField("popularity", 5)); iw.AddDocument(doc); doc = new Document(); doc.Add(NewStringField("id", "2", Field.Store.YES)); doc.Add(NewTextField("body", "another document with different contents", Field.Store.NO)); doc.Add(new NumericDocValuesField("popularity", 20)); iw.AddDocument(doc); doc = new Document(); doc.Add(NewStringField("id", "3", Field.Store.YES)); doc.Add(NewTextField("body", "crappy contents", Field.Store.NO)); doc.Add(new NumericDocValuesField("popularity", 2)); iw.AddDocument(doc); Reader = iw.Reader; Searcher = new IndexSearcher(Reader); iw.Dispose(); } [TearDown] public override void TearDown() { Reader.Dispose(); Dir.Dispose(); base.TearDown(); } [Test] public virtual void TestBasic() { // create a sort field and sort by it (reverse order) Query query = new TermQuery(new Term("body", "contents")); IndexReader r = Searcher.IndexReader; // Just first pass query TopDocs hits = Searcher.Search(query, 10); Assert.AreEqual(3, hits.TotalHits); Assert.AreEqual("3", r.Document(hits.ScoreDocs[0].Doc).Get("id")); Assert.AreEqual("1", r.Document(hits.ScoreDocs[1].Doc).Get("id")); Assert.AreEqual("2", r.Document(hits.ScoreDocs[2].Doc).Get("id")); // Now, rescore: Sort sort = new Sort(new SortField("popularity", SortFieldType.INT32, true)); Rescorer rescorer = new SortRescorer(sort); hits = rescorer.Rescore(Searcher, hits, 10); Assert.AreEqual(3, hits.TotalHits); Assert.AreEqual("2", r.Document(hits.ScoreDocs[0].Doc).Get("id")); Assert.AreEqual("1", r.Document(hits.ScoreDocs[1].Doc).Get("id")); Assert.AreEqual("3", 
r.Document(hits.ScoreDocs[2].Doc).Get("id")); string expl = rescorer.Explain(Searcher, Searcher.Explain(query, hits.ScoreDocs[0].Doc), hits.ScoreDocs[0].Doc).ToString(); // Confirm the explanation breaks out the individual // sort fields: Assert.IsTrue(expl.Contains("= sort field <int: \"popularity\">! value=20")); // Confirm the explanation includes first pass details: Assert.IsTrue(expl.Contains("= first pass score")); Assert.IsTrue(expl.Contains("body:contents in")); } [Test] public virtual void TestRandom() { Directory dir = NewDirectory(); int numDocs = AtLeast(1000); RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone); int[] idToNum = new int[numDocs]; int maxValue = TestUtil.NextInt(Random(), 10, 1000000); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); doc.Add(NewStringField("id", "" + i, Field.Store.YES)); int numTokens = TestUtil.NextInt(Random(), 1, 10); StringBuilder b = new StringBuilder(); for (int j = 0; j < numTokens; j++) { b.Append("a "); } doc.Add(NewTextField("field", b.ToString(), Field.Store.NO)); idToNum[i] = Random().Next(maxValue); doc.Add(new NumericDocValuesField("num", idToNum[i])); w.AddDocument(doc); } IndexReader r = w.Reader; w.Dispose(); IndexSearcher s = NewSearcher(r); int numHits = TestUtil.NextInt(Random(), 1, numDocs); bool reverse = Random().NextBoolean(); TopDocs hits = s.Search(new TermQuery(new Term("field", "a")), numHits); Rescorer rescorer = new SortRescorer(new Sort(new SortField("num", SortFieldType.INT32, reverse))); TopDocs hits2 = rescorer.Rescore(s, hits, numHits); int[] expected = new int[numHits]; for (int i = 0; i < numHits; i++) { expected[i] = hits.ScoreDocs[i].Doc; } int reverseInt = reverse ? -1 : 1; Array.Sort(expected, new ComparerAnonymousInnerClassHelper(this, idToNum, r, reverseInt)); bool fail = false; for (int i = 0; i < numHits; i++) { fail |= (int)expected[i] != hits2.ScoreDocs[i].Doc; } Assert.IsFalse(fail); r.Dispose(); dir.Dispose(); } private class ComparerAnonymousInnerClassHelper : IComparer<int> { private readonly TestSortRescorer OuterInstance; private int[] IdToNum; private IndexReader r; private int ReverseInt; public ComparerAnonymousInnerClassHelper(TestSortRescorer outerInstance, int[] idToNum, IndexReader r, int reverseInt) { this.OuterInstance = outerInstance; this.IdToNum = idToNum; this.r = r; this.ReverseInt = reverseInt; } public virtual int Compare(int a, int b) { try { int av = IdToNum[Convert.ToInt32(r.Document(a).Get("id"))]; int bv = IdToNum[Convert.ToInt32(r.Document(b).Get("id"))]; if (av < bv) { return -ReverseInt; } else if (bv < av) { return ReverseInt; } else { // Tie break by docID return a - b; } } catch (IOException ioe) { throw new Exception(ioe.ToString(), ioe); } } } } }
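TestBasic above exercises the two-pass pattern SortRescorer is built for: run a cheap relevance-only query first, then re-rank only those hits by a doc-values field. A minimal sketch of that pattern outside the test fixture, assuming an already-open IndexSearcher named searcher over an index with a "body" text field and a "popularity" NumericDocValuesField:

// First pass: plain relevance scoring.
Query query = new TermQuery(new Term("body", "contents"));
TopDocs firstPass = searcher.Search(query, 10);

// Second pass: re-sort the same hits by the "popularity" doc values,
// descending (reverse = true), as TestBasic does.
Sort sort = new Sort(new SortField("popularity", SortFieldType.INT32, true));
Rescorer rescorer = new SortRescorer(sort);
TopDocs reranked = rescorer.Rescore(searcher, firstPass, 10);

// Explain() folds the first-pass explanation into the sort-field breakdown
// that TestBasic asserts on ("= sort field ..." / "= first pass score").
Explanation firstPassExpl = searcher.Explain(query, reranked.ScoreDocs[0].Doc);
Explanation combined = rescorer.Explain(searcher, firstPassExpl, reranked.ScoreDocs[0].Doc);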
using Lucene.Net.Analysis.TokenAttributes; using Lucene.Net.Support; using System; using System.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using ByteBlockPool = Lucene.Net.Util.ByteBlockPool; using BytesRef = Lucene.Net.Util.BytesRef; using OffsetAttribute = Lucene.Net.Analysis.TokenAttributes.OffsetAttribute; using PayloadAttribute = Lucene.Net.Analysis.TokenAttributes.PayloadAttribute; using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator; using TermVectorsWriter = Lucene.Net.Codecs.TermVectorsWriter; internal sealed class TermVectorsConsumerPerField : TermsHashConsumerPerField { internal readonly TermsHashPerField termsHashPerField; internal readonly TermVectorsConsumer termsWriter; internal readonly FieldInfo fieldInfo; internal readonly DocumentsWriterPerThread.DocState docState; internal readonly FieldInvertState fieldState; internal bool doVectors; internal bool doVectorPositions; internal bool doVectorOffsets; internal bool doVectorPayloads; internal int maxNumPostings; internal IOffsetAttribute offsetAttribute; internal IPayloadAttribute payloadAttribute; internal bool hasPayloads; // if enabled, and we actually saw any for this field public TermVectorsConsumerPerField(TermsHashPerField termsHashPerField, TermVectorsConsumer termsWriter, FieldInfo fieldInfo) { this.termsHashPerField = termsHashPerField; this.termsWriter = termsWriter; this.fieldInfo = fieldInfo; docState = termsHashPerField.docState; fieldState = termsHashPerField.fieldState; } internal override int StreamCount => 2; internal override bool Start(IIndexableField[] fields, int count) { doVectors = false; doVectorPositions = false; doVectorOffsets = false; doVectorPayloads = false; hasPayloads = false; for (int i = 0; i < count; i++) { IIndexableField field = fields[i]; if (field.IndexableFieldType.IsIndexed) { if (field.IndexableFieldType.StoreTermVectors) { doVectors = true; doVectorPositions |= field.IndexableFieldType.StoreTermVectorPositions; doVectorOffsets |= field.IndexableFieldType.StoreTermVectorOffsets; if (doVectorPositions) { doVectorPayloads |= field.IndexableFieldType.StoreTermVectorPayloads; } else if (field.IndexableFieldType.StoreTermVectorPayloads) { // TODO: move this check somewhere else, and impl the other missing ones throw new ArgumentException("cannot index term vector payloads without term vector positions (field=\"" + field.Name + "\")"); } } else { if (field.IndexableFieldType.StoreTermVectorOffsets) { throw new ArgumentException("cannot index term vector offsets when term vectors are not indexed (field=\"" + field.Name + "\")"); } if (field.IndexableFieldType.StoreTermVectorPositions) { throw new ArgumentException("cannot index term vector positions when term vectors are not 
indexed (field=\"" + field.Name + "\")"); } if (field.IndexableFieldType.StoreTermVectorPayloads) { throw new ArgumentException("cannot index term vector payloads when term vectors are not indexed (field=\"" + field.Name + "\")"); } } } else { if (field.IndexableFieldType.StoreTermVectors) { throw new ArgumentException("cannot index term vectors when field is not indexed (field=\"" + field.Name + "\")"); } if (field.IndexableFieldType.StoreTermVectorOffsets) { throw new ArgumentException("cannot index term vector offsets when field is not indexed (field=\"" + field.Name + "\")"); } if (field.IndexableFieldType.StoreTermVectorPositions) { throw new ArgumentException("cannot index term vector positions when field is not indexed (field=\"" + field.Name + "\")"); } if (field.IndexableFieldType.StoreTermVectorPayloads) { throw new ArgumentException("cannot index term vector payloads when field is not indexed (field=\"" + field.Name + "\")"); } } } if (doVectors) { termsWriter.hasVectors = true; if (termsHashPerField.bytesHash.Count != 0) { // Only necessary if previous doc hit a // non-aborting exception while writing vectors in // this field: termsHashPerField.Reset(); } } // TODO: only if needed for performance //perThread.postingsCount = 0; return doVectors; } [MethodImpl(MethodImplOptions.NoInlining)] public void Abort() { } /// <summary> /// Called once per field per document if term vectors /// are enabled, to write the vectors to /// RAMOutputStream, which is then quickly flushed to /// the real term vectors files in the Directory. /// </summary> internal override void Finish() { if (!doVectors || termsHashPerField.bytesHash.Count == 0) { return; } termsWriter.AddFieldToFlush(this); } [MethodImpl(MethodImplOptions.NoInlining)] internal void FinishDocument() { // LUCENENET: .NET doesn't support asserts in release mode if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.finish start"); int numPostings = termsHashPerField.bytesHash.Count; BytesRef flushTerm = termsWriter.flushTerm; Debug.Assert(numPostings >= 0); if (numPostings > maxNumPostings) { maxNumPostings = numPostings; } // this is called once, after inverting all occurrences // of a given field in the doc. At this point we flush // our hash into the DocWriter. Debug.Assert(termsWriter.VectorFieldsInOrder(fieldInfo)); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; TermVectorsWriter tv = termsWriter.writer; int[] termIDs = termsHashPerField.SortPostings(tv.Comparer); tv.StartField(fieldInfo, numPostings, doVectorPositions, doVectorOffsets, hasPayloads); ByteSliceReader posReader = doVectorPositions ? termsWriter.vectorSliceReaderPos : null; ByteSliceReader offReader = doVectorOffsets ? 
termsWriter.vectorSliceReaderOff : null; ByteBlockPool termBytePool = termsHashPerField.termBytePool; for (int j = 0; j < numPostings; j++) { int termID = termIDs[j]; int freq = postings.freqs[termID]; // Get BytesRef termBytePool.SetBytesRef(flushTerm, postings.textStarts[termID]); tv.StartTerm(flushTerm, freq); if (doVectorPositions || doVectorOffsets) { if (posReader != null) { termsHashPerField.InitReader(posReader, termID, 0); } if (offReader != null) { termsHashPerField.InitReader(offReader, termID, 1); } tv.AddProx(freq, posReader, offReader); } tv.FinishTerm(); } tv.FinishField(); termsHashPerField.Reset(); fieldInfo.SetStoreTermVectors(); } internal void ShrinkHash() { termsHashPerField.ShrinkHash(maxNumPostings); maxNumPostings = 0; } internal override void Start(IIndexableField f) { if (doVectorOffsets) { offsetAttribute = fieldState.AttributeSource.AddAttribute<IOffsetAttribute>(); } else { offsetAttribute = null; } if (doVectorPayloads && fieldState.AttributeSource.HasAttribute<IPayloadAttribute>()) { payloadAttribute = fieldState.AttributeSource.GetAttribute<IPayloadAttribute>(); } else { payloadAttribute = null; } } internal void WriteProx(TermVectorsPostingsArray postings, int termID) { if (doVectorOffsets) { int startOffset = fieldState.Offset + offsetAttribute.StartOffset; int endOffset = fieldState.Offset + offsetAttribute.EndOffset; termsHashPerField.WriteVInt32(1, startOffset - postings.lastOffsets[termID]); termsHashPerField.WriteVInt32(1, endOffset - startOffset); postings.lastOffsets[termID] = endOffset; } if (doVectorPositions) { BytesRef payload; if (payloadAttribute == null) { payload = null; } else { payload = payloadAttribute.Payload; } int pos = fieldState.Position - postings.lastPositions[termID]; if (payload != null && payload.Length > 0) { termsHashPerField.WriteVInt32(0, (pos << 1) | 1); termsHashPerField.WriteVInt32(0, payload.Length); termsHashPerField.WriteBytes(0, payload.Bytes, payload.Offset, payload.Length); hasPayloads = true; } else { termsHashPerField.WriteVInt32(0, pos << 1); } postings.lastPositions[termID] = fieldState.Position; } } internal override void NewTerm(int termID) { // LUCENENET: .NET doesn't support asserts in release mode if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.newTerm start"); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID] = 1; postings.lastOffsets[termID] = 0; postings.lastPositions[termID] = 0; WriteProx(postings, termID); } internal override void AddTerm(int termID) { // LUCENENET: .NET doesn't support asserts in release mode if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled) docState.TestPoint("TermVectorsTermsWriterPerField.addTerm start"); TermVectorsPostingsArray postings = (TermVectorsPostingsArray)termsHashPerField.postingsArray; postings.freqs[termID]++; WriteProx(postings, termID); } [ExceptionToNetNumericConvention] internal override void SkippingLongTerm() { } internal override ParallelPostingsArray CreatePostingsArray(int size) { return new TermVectorsPostingsArray(size); } internal sealed class TermVectorsPostingsArray : ParallelPostingsArray { public TermVectorsPostingsArray(int size) : base(size) { freqs = new int[size]; lastOffsets = new int[size]; lastPositions = new int[size]; } internal int[] freqs; // How many times this term occurred in the current doc internal int[] lastOffsets; // Last offset we saw internal int[] lastPositions; // Last position where this term occurred 
internal override ParallelPostingsArray NewInstance(int size) { return new TermVectorsPostingsArray(size); } internal override void CopyTo(ParallelPostingsArray toArray, int numToCopy) { Debug.Assert(toArray is TermVectorsPostingsArray); TermVectorsPostingsArray to = (TermVectorsPostingsArray)toArray; base.CopyTo(toArray, numToCopy); Array.Copy(freqs, 0, to.freqs, 0, size); Array.Copy(lastOffsets, 0, to.lastOffsets, 0, size); Array.Copy(lastPositions, 0, to.lastPositions, 0, size); } internal override int BytesPerPosting() { return base.BytesPerPosting() + 3 * RamUsageEstimator.NUM_BYTES_INT32; } } } }
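TermVectorsConsumerPerField.Start above encodes the legal combinations of term-vector options: offsets, positions and payloads all require StoreTermVectors, payloads additionally require positions, and none of them may be set on a non-indexed field. A minimal sketch of a field configuration that passes those checks, assuming the Lucene.NET 4.8 FieldType setters mirror the IIndexableFieldType properties read in Start:

using Lucene.Net.Documents;

// Assumed field setup for illustration; property names follow IIndexableFieldType.
var vectorType = new FieldType(TextField.TYPE_NOT_STORED)
{
    StoreTermVectors = true,          // prerequisite for every other vector option
    StoreTermVectorPositions = true,  // prerequisite for payloads
    StoreTermVectorOffsets = true,
    StoreTermVectorPayloads = true    // legal only because positions are enabled
};
vectorType.Freeze();

var doc = new Document();
doc.Add(new Field("body", "some contents and more contents", vectorType));
// Dropping StoreTermVectorPositions while keeping StoreTermVectorPayloads would
// trigger the ArgumentException thrown in Start ("cannot index term vector
// payloads without term vector positions").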
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.IO; using System.Globalization; namespace System.Net { internal class StreamFramer { private Stream _transport; // TODO (Issue #3114): Implement using TPL instead of APM. private StreamAsyncHelper _transportAPM; private bool _eof; private FrameHeader _writeHeader = new FrameHeader(); private FrameHeader _curReadHeader = new FrameHeader(); private FrameHeader _readVerifier = new FrameHeader( FrameHeader.IgnoreValue, FrameHeader.IgnoreValue, FrameHeader.IgnoreValue); private byte[] _readHeaderBuffer; private byte[] _writeHeaderBuffer; private readonly AsyncCallback _readFrameCallback; private readonly AsyncCallback _beginWriteCallback; public StreamFramer(Stream Transport) { if (Transport == null || Transport == Stream.Null) { throw new ArgumentNullException("Transport"); } _transport = Transport; _readHeaderBuffer = new byte[_curReadHeader.Size]; _writeHeaderBuffer = new byte[_writeHeader.Size]; _readFrameCallback = new AsyncCallback(ReadFrameCallback); _beginWriteCallback = new AsyncCallback(BeginWriteCallback); _transportAPM = new StreamAsyncHelper(_transport); } public FrameHeader ReadHeader { get { return _curReadHeader; } } public FrameHeader WriteHeader { get { return _writeHeader; } } public Stream Transport { get { return _transport; } } public byte[] ReadMessage() { if (_eof) { return null; } int offset = 0; byte[] buffer = _readHeaderBuffer; int bytesRead; while (offset < buffer.Length) { bytesRead = Transport.Read(buffer, offset, buffer.Length - offset); if (bytesRead == 0) { if (offset == 0) { // m_Eof, return null _eof = true; return null; } else { throw new IOException(SR.Format(SR.net_io_readfailure, SR.Format(SR.net_io_connectionclosed))); } } offset += bytesRead; } _curReadHeader.CopyFrom(buffer, 0, _readVerifier); if (_curReadHeader.PayloadSize > _curReadHeader.MaxMessageSize) { throw new InvalidOperationException(SR.Format(SR.net_frame_size, _curReadHeader.MaxMessageSize.ToString(NumberFormatInfo.InvariantInfo), _curReadHeader.PayloadSize.ToString(NumberFormatInfo.InvariantInfo))); } buffer = new byte[_curReadHeader.PayloadSize]; offset = 0; while (offset < buffer.Length) { bytesRead = Transport.Read(buffer, offset, buffer.Length - offset); if (bytesRead == 0) { throw new IOException(SR.Format(SR.net_io_readfailure, SR.Format(SR.net_io_connectionclosed))); } offset += bytesRead; } return buffer; } public IAsyncResult BeginReadMessage(AsyncCallback asyncCallback, object stateObject) { WorkerAsyncResult workerResult; if (_eof) { workerResult = new WorkerAsyncResult(this, stateObject, asyncCallback, null, 0, 0); workerResult.InvokeCallback(-1); return workerResult; } workerResult = new WorkerAsyncResult(this, stateObject, asyncCallback, _readHeaderBuffer, 0, _readHeaderBuffer.Length); IAsyncResult result = _transportAPM.BeginRead(_readHeaderBuffer, 0, _readHeaderBuffer.Length, _readFrameCallback, workerResult); if (result.CompletedSynchronously) { ReadFrameComplete(result); } return workerResult; } private void ReadFrameCallback(IAsyncResult transportResult) { if (!(transportResult.AsyncState is WorkerAsyncResult)) { if (GlobalLog.IsEnabled) { GlobalLog.Assert("StreamFramer::ReadFrameCallback|The state expected to be WorkerAsyncResult, received:{0}.", transportResult.GetType().FullName); } Debug.Fail("StreamFramer::ReadFrameCallback|The 
state expected to be WorkerAsyncResult, received:" + transportResult.GetType().FullName + "."); } if (transportResult.CompletedSynchronously) { return; } WorkerAsyncResult workerResult = (WorkerAsyncResult)transportResult.AsyncState; try { ReadFrameComplete(transportResult); } catch (Exception e) { if (e is OutOfMemoryException) { throw; } if (!(e is IOException)) { e = new System.IO.IOException(SR.Format(SR.net_io_readfailure, e.Message), e); } workerResult.InvokeCallback(e); } } // IO COMPLETION CALLBACK // // This callback is responsible for getting the complete protocol frame. // 1. it reads the header. // 2. it determines the frame size. // 3. loops while not all frame received or an error. // private void ReadFrameComplete(IAsyncResult transportResult) { do { if (!(transportResult.AsyncState is WorkerAsyncResult)) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("StreamFramer::ReadFrameComplete|The state expected to be WorkerAsyncResult, received:{0}.", transportResult.GetType().FullName); } Debug.Fail("StreamFramer::ReadFrameComplete|The state expected to be WorkerAsyncResult, received:" + transportResult.GetType().FullName + "."); } WorkerAsyncResult workerResult = (WorkerAsyncResult)transportResult.AsyncState; int bytesRead = _transportAPM.EndRead(transportResult); workerResult.Offset += bytesRead; if (!(workerResult.Offset <= workerResult.End)) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("StreamFramer::ReadFrameCallback|WRONG: offset - end = {0}", workerResult.Offset - workerResult.End); } Debug.Fail("StreamFramer::ReadFrameCallback|WRONG: offset - end = " + (workerResult.Offset - workerResult.End)); } if (bytesRead <= 0) { // (by design) This indicates the stream has receives EOF // If we are in the middle of a Frame - fail, otherwise - produce EOF object result = null; if (!workerResult.HeaderDone && workerResult.Offset == 0) { result = (object)-1; } else { result = new System.IO.IOException(SR.net_frame_read_io); } workerResult.InvokeCallback(result); return; } if (workerResult.Offset >= workerResult.End) { if (!workerResult.HeaderDone) { workerResult.HeaderDone = true; // This indicates the header has been read succesfully _curReadHeader.CopyFrom(workerResult.Buffer, 0, _readVerifier); int payloadSize = _curReadHeader.PayloadSize; if (payloadSize < 0) { // Let's call user callback and he call us back and we will throw workerResult.InvokeCallback(new System.IO.IOException(SR.Format(SR.net_frame_read_size))); } if (payloadSize == 0) { // report emtpy frame (NOT eof!) to the caller, he might be interested in workerResult.InvokeCallback(0); return; } if (payloadSize > _curReadHeader.MaxMessageSize) { throw new InvalidOperationException(SR.Format(SR.net_frame_size, _curReadHeader.MaxMessageSize.ToString(NumberFormatInfo.InvariantInfo), payloadSize.ToString(NumberFormatInfo.InvariantInfo))); } // Start reading the remaining frame data (note header does not count). byte[] frame = new byte[payloadSize]; // Save the ref of the data block workerResult.Buffer = frame; workerResult.End = frame.Length; workerResult.Offset = 0; // Transport.BeginRead below will pickup those changes. } else { workerResult.HeaderDone = false; // Reset for optional object reuse. workerResult.InvokeCallback(workerResult.End); return; } } // This means we need more data to complete the data block. 
transportResult = _transportAPM.BeginRead(workerResult.Buffer, workerResult.Offset, workerResult.End - workerResult.Offset, _readFrameCallback, workerResult); } while (transportResult.CompletedSynchronously); } // // User code will call this when workerResult gets signaled. // // On BeginRead, the user always gets back our WorkerAsyncResult. // The Result property represents either a number of bytes read or an // exception put by our async state machine. // public byte[] EndReadMessage(IAsyncResult asyncResult) { if (asyncResult == null) { throw new ArgumentNullException("asyncResult"); } WorkerAsyncResult workerResult = asyncResult as WorkerAsyncResult; if (workerResult == null) { throw new ArgumentException(SR.Format(SR.net_io_async_result, typeof(WorkerAsyncResult).FullName), "asyncResult"); } if (!workerResult.InternalPeekCompleted) { workerResult.InternalWaitForCompletion(); } if (workerResult.Result is Exception) { throw (Exception)(workerResult.Result); } int size = (int)workerResult.Result; if (size == -1) { _eof = true; return null; } else if (size == 0) { // Empty frame. return new byte[0]; } return workerResult.Buffer; } public void WriteMessage(byte[] message) { if (message == null) { throw new ArgumentNullException("message"); } _writeHeader.PayloadSize = message.Length; _writeHeader.CopyTo(_writeHeaderBuffer, 0); Transport.Write(_writeHeaderBuffer, 0, _writeHeaderBuffer.Length); if (message.Length == 0) { return; } Transport.Write(message, 0, message.Length); } public IAsyncResult BeginWriteMessage(byte[] message, AsyncCallback asyncCallback, object stateObject) { if (message == null) { throw new ArgumentNullException("message"); } _writeHeader.PayloadSize = message.Length; _writeHeader.CopyTo(_writeHeaderBuffer, 0); if (message.Length == 0) { return _transportAPM.BeginWrite(_writeHeaderBuffer, 0, _writeHeaderBuffer.Length, asyncCallback, stateObject); } // Will need two async writes. Prepare the second: WorkerAsyncResult workerResult = new WorkerAsyncResult(this, stateObject, asyncCallback, message, 0, message.Length); // Charge the first: IAsyncResult result = _transportAPM.BeginWrite(_writeHeaderBuffer, 0, _writeHeaderBuffer.Length, _beginWriteCallback, workerResult); if (result.CompletedSynchronously) { BeginWriteComplete(result); } return workerResult; } private void BeginWriteCallback(IAsyncResult transportResult) { if (!(transportResult.AsyncState is WorkerAsyncResult)) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("StreamFramer::BeginWriteCallback|The state expected to be WorkerAsyncResult, received:{0}.", transportResult.AsyncState.GetType().FullName); } Debug.Fail("StreamFramer::BeginWriteCallback|The state expected to be WorkerAsyncResult, received:" + transportResult.AsyncState.GetType().FullName + "."); } if (transportResult.CompletedSynchronously) { return; } var workerResult = (WorkerAsyncResult)transportResult.AsyncState; try { BeginWriteComplete(transportResult); } catch (Exception e) { if (e is OutOfMemoryException) { throw; } workerResult.InvokeCallback(e); } } // IO COMPLETION CALLBACK // // Called when user IO request was wrapped to do several underlined IO. // private void BeginWriteComplete(IAsyncResult transportResult) { do { WorkerAsyncResult workerResult = (WorkerAsyncResult)transportResult.AsyncState; // First, complete the previous portion write. _transportAPM.EndWrite(transportResult); // Check on exit criterion. if (workerResult.Offset == workerResult.End) { workerResult.InvokeCallback(); return; } // Setup exit criterion. 
workerResult.Offset = workerResult.End; // Write next portion (frame body) using Async IO. transportResult = _transportAPM.BeginWrite(workerResult.Buffer, 0, workerResult.End, _beginWriteCallback, workerResult); } while (transportResult.CompletedSynchronously); } public void EndWriteMessage(IAsyncResult asyncResult) { if (asyncResult == null) { throw new ArgumentNullException("asyncResult"); } WorkerAsyncResult workerResult = asyncResult as WorkerAsyncResult; if (workerResult != null) { if (!workerResult.InternalPeekCompleted) { workerResult.InternalWaitForCompletion(); } if (workerResult.Result is Exception) { throw (Exception)(workerResult.Result); } } else { _transportAPM.EndWrite(asyncResult); } } } // // This class wraps an Async IO request. It is based on our internal LazyAsyncResult helper. // - If ParentResult is not null then the base class (LazyAsyncResult) methods must not be used. // - If ParentResult == null, then real user IO request is wrapped. // internal class WorkerAsyncResult : LazyAsyncResult { public byte[] Buffer; public int Offset; public int End; public bool HeaderDone; // This might be reworked so we read both header and frame in one chunk. public WorkerAsyncResult(object asyncObject, object asyncState, AsyncCallback savedAsyncCallback, byte[] buffer, int offset, int end) : base(asyncObject, asyncState, savedAsyncCallback) { Buffer = buffer; Offset = offset; End = end; } } // Describes the header used in framing of the stream data. internal class FrameHeader { public const int IgnoreValue = -1; public const int HandshakeDoneId = 20; public const int HandshakeErrId = 21; public const int HandshakeId = 22; public const int DefaultMajorV = 1; public const int DefaultMinorV = 0; private int _MessageId; private int _MajorV; private int _MinorV; private int _PayloadSize; public FrameHeader() { _MessageId = HandshakeId; _MajorV = DefaultMajorV; _MinorV = DefaultMinorV; _PayloadSize = -1; } public FrameHeader(int messageId, int majorV, int minorV) { _MessageId = messageId; _MajorV = majorV; _MinorV = minorV; _PayloadSize = -1; } public int Size { get { return 5; } } public int MaxMessageSize { get { return 0xFFFF; } } public int MessageId { get { return _MessageId; } set { _MessageId = value; } } public int MajorV { get { return _MajorV; } } public int MinorV { get { return _MinorV; } } public int PayloadSize { get { return _PayloadSize; } set { if (value > MaxMessageSize) { throw new ArgumentException(SR.Format(SR.net_frame_max_size, MaxMessageSize.ToString(NumberFormatInfo.InvariantInfo), value.ToString(NumberFormatInfo.InvariantInfo)), "PayloadSize"); } _PayloadSize = value; } } public void CopyTo(byte[] dest, int start) { dest[start++] = (byte)_MessageId; dest[start++] = (byte)_MajorV; dest[start++] = (byte)_MinorV; dest[start++] = (byte)((_PayloadSize >> 8) & 0xFF); dest[start] = (byte)(_PayloadSize & 0xFF); } public void CopyFrom(byte[] bytes, int start, FrameHeader verifier) { _MessageId = bytes[start++]; _MajorV = bytes[start++]; _MinorV = bytes[start++]; _PayloadSize = (int)((bytes[start++] << 8) | bytes[start]); if (verifier.MessageId != FrameHeader.IgnoreValue && MessageId != verifier.MessageId) { throw new InvalidOperationException(SR.Format(SR.net_io_header_id, "MessageId", MessageId, verifier.MessageId)); } if (verifier.MajorV != FrameHeader.IgnoreValue && MajorV != verifier.MajorV) { throw new InvalidOperationException(SR.Format(SR.net_io_header_id, "MajorV", MajorV, verifier.MajorV)); } if (verifier.MinorV != FrameHeader.IgnoreValue && MinorV != 
verifier.MinorV) { throw new InvalidOperationException(SR.Format(SR.net_io_header_id, "MinorV", MinorV, verifier.MinorV)); } } } }
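FrameHeader.CopyTo and CopyFrom above define the five-byte wire format StreamFramer puts in front of every message: one byte each for MessageId, MajorV and MinorV, then the payload size as a big-endian 16-bit value (which is why MaxMessageSize is 0xFFFF). A standalone sketch of that encoding, independent of the SR resources and the APM plumbing:

using System;

internal static class FrameHeaderDemo
{
    private static void Main()
    {
        // Encode a header for a 300-byte handshake payload; the constants come
        // from FrameHeader above (HandshakeId = 22, default version 1.0).
        int payloadSize = 300;
        byte[] header = new byte[5];
        header[0] = 22;                                 // MessageId
        header[1] = 1;                                  // MajorV
        header[2] = 0;                                  // MinorV
        header[3] = (byte)((payloadSize >> 8) & 0xFF);  // payload size, high byte first
        header[4] = (byte)(payloadSize & 0xFF);         // payload size, low byte

        // Decode it back, mirroring FrameHeader.CopyFrom.
        int decodedSize = (header[3] << 8) | header[4];
        Console.WriteLine($"id={header[0]} v{header[1]}.{header[2]} payload={decodedSize}");
    }
}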
using System.Data.SqlClient; namespace EduHub.Data.Entities { partial class ARDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the FAIR_MARKET column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[AR]') AND name = 'FAIR_MARKET'"; return new SqlCommand(sql, SqlConnection); } } partial class CRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the ARN column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[CR]') AND name = 'ARN'"; return new SqlCommand(sql, SqlConnection); } } partial class CRFDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the PO_PRINTED, APPROVED_BY, STAFF_ORDER_BY, CRPRTID columns. New to C21v59. const string sql = @"SELECT 4 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[CRF]') AND name IN ('PO_PRINTED', 'APPROVED_BY', 'STAFF_ORDER_BY', 'CRPRTID')"; return new SqlCommand(sql, SqlConnection); } } partial class DF_TFRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the EMERG_CONTACT columns have the right type. Modified in C21v60. const string sql = @"SELECT 4 - COUNT(*) FROM sys.columns c JOIN sys.types t ON c.system_type_id=t.system_type_id WHERE object_id = OBJECT_ID(N'[dbo].[DF_TFR]') AND c.name LIKE 'EMERG_CONTACT0%' AND t.name = 'varchar'"; return new SqlCommand(sql, SqlConnection); } } partial class DFDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the WWCC_NUMBER_A column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[DF]') AND name = 'WWCC_NUMBER_A'"; return new SqlCommand(sql, SqlConnection); } } partial class DRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the KNOTE_FLAG column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[DR]') AND name = 'KNOTE_FLAG'"; return new SqlCommand(sql, SqlConnection); } } partial class DRFDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the TRDEL_MONTHS column. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[DRF]') AND name = 'TRDEL_MONTHS'"; return new SqlCommand(sql, SqlConnection); } } partial class FDT_IMPDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the NOTES columns have the right type. Modified in C21v60. const string sql = @"SELECT 4 - COUNT(*) FROM sys.columns c JOIN sys.types t ON c.system_type_id=t.system_type_id WHERE object_id = OBJECT_ID(N'[dbo].[FDT_IMP]') AND c.name LIKE 'NOTES0%' AND t.name = 'varchar'"; return new SqlCommand(sql, SqlConnection); } } partial class GLBANKDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the COMMENTS_COMMIT column. New to C21v60. 
const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[GLBANK]') AND name = 'COMMENTS_COMMIT'"; return new SqlCommand(sql, SqlConnection); } } partial class GLDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the BATCHABLE column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[GL]') AND name = 'BATCHABLE'"; return new SqlCommand(sql, SqlConnection); } } partial class GLFBANKDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the SUBPROGRAM column. New to C21v60. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[GLFBANK]') AND name = 'SUBPROGRAM'"; return new SqlCommand(sql, SqlConnection); } } partial class GLFDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the FEE_CODE column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[GLF]') AND name = 'FEE_CODE'"; return new SqlCommand(sql, SqlConnection); } } partial class KCM_TFRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the DISABILITY column. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KCM_TFR]') AND name = 'DISABILITY'"; return new SqlCommand(sql, SqlConnection); } } partial class KCMDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the DISABILITY column. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KCM]') AND name = 'DISABILITY'"; return new SqlCommand(sql, SqlConnection); } } partial class KCYDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the BIRTHDATE_FROM and BIRTHDATE_TO columns. New to C21v57. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KCY]') AND name IN ('BIRTHDATE_FROM', 'BIRTHDATE_TO')"; return new SqlCommand(sql, SqlConnection); } } partial class KDIDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the MIN_SCORE and MAX_SCORE columns have the right type. Modified to C21v58. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KDI]') AND name IN ('MIN_SCORE', 'MAX_SCORE') AND max_length = 6"; return new SqlCommand(sql, SqlConnection); } } partial class KDODataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the DATA_ENTRY column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KDO]') AND name = 'DATA_ENTRY'"; return new SqlCommand(sql, SqlConnection); } } partial class KGCDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the SELF_DESCRIBED column. New to C21v64. 
const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KGC]') AND name = 'SELF_DESCRIBED'"; return new SqlCommand(sql, SqlConnection); } } partial class KGHDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the SELF_DESCRIBED column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KGH]') AND name = 'SELF_DESCRIBED'"; return new SqlCommand(sql, SqlConnection); } } partial class KGLSUBDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the BATCHABLE column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[KGLSUB]') AND name = 'BATCHABLE'"; return new SqlCommand(sql, SqlConnection); } } partial class QBDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the EDUPAY column. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[QB]') AND name = 'EDUPAY'"; return new SqlCommand(sql, SqlConnection); } } partial class SCEN_STDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the CNSE, CSE and FSE columns. New to C21v59. const string sql = @"SELECT 3 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SCEN_ST]') AND name IN ('CNSE', 'CSE', 'FSE')"; return new SqlCommand(sql, SqlConnection); } } partial class SCIDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the NCCD_SUMMARY_SAVED column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SCI]') AND name = 'NCCD_SUMMARY_SAVED'"; return new SqlCommand(sql, SqlConnection); } } partial class SFDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the EMERG_CONTACT_MEMO01 column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SF]') AND name = 'EMERG_CONTACT_MEMO01'"; return new SqlCommand(sql, SqlConnection); } } partial class SGDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the VET column. New to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SG]') AND name = 'VET'"; return new SqlCommand(sql, SqlConnection); } } partial class SKGSDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the CONCURRENT_ENTROL column. New to C21v60. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SKGS]') AND name = 'CONCURRENT_ENROL'"; return new SqlCommand(sql, SqlConnection); } } partial class SMDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the COMMENTA column has the right type. Modified to C21v58. 
const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SM]') AND name = 'COMMENTA' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class SMCDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the EXP_ASREQ_MED column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SMC]') AND name = 'EXP_ASREQ_MED'"; return new SqlCommand(sql, SqlConnection); } } partial class ST_TFRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the EMERG_CONTACT columns have the right type. Modified in C21v60. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns c JOIN sys.types t ON c.system_type_id=t.system_type_id WHERE object_id = OBJECT_ID(N'[dbo].[ST_TFR]') AND c.name LIKE 'EMERG_CONTACT0%' AND t.name = 'varchar'"; return new SqlCommand(sql, SqlConnection); } } partial class STDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the MYLNS_NUMERACY column. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[ST]') AND name = 'MYLNS_NUMERACY'"; return new SqlCommand(sql, SqlConnection); } } partial class STARDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the RESTRICTION column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STAR]') AND name = 'RESTRICTION' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class STMADataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the COMMENTA and COMMENTB columns have the right type. Modified to C21v58. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STMA]') AND name IN ('COMMENTA', 'COMMENTB') AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class STMBDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the CREATOR column. New to C21v59. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STMB]') AND name = 'CREATOR'"; return new SqlCommand(sql, SqlConnection); } } partial class STNATDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the REMARK and ATTACHMENT columns have the right type. Modified to C21v58. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STNAT]') AND name IN ('REMARK', 'ATTACHMENT') AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class STPODataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the PERFORMANCE and OTHER columns have the right type. Modified to C21v58. 
const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STPO]') AND name IN ('PERFORMANCE', 'OTHER') AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class STPSDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the REASON_LEFT column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[STPS]') AND name = 'REASON_LEFT' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class STVDIDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the Index_SKEY_VDIMENSION_VDOMAIN_YEAR_SEMESTER_SCORE index. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.sysindexes WHERE id = OBJECT_ID(N'[dbo].[STVDI]') AND name = 'STVDI_Index_SKEY_VDIMENSION_VDOMAIN_YEAR_SEMESTER_SCORE'"; return new SqlCommand(sql, SqlConnection); } } partial class STVDODataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the Index_SKEY_VDIMENSION_YEAR_SEMESTER_VDOMAIN_SCORE index. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.sysindexes WHERE id = OBJECT_ID(N'[dbo].[STVDO]') AND name = 'STVDO_Index_SKEY_VDIMENSION_YEAR_SEMESTER_VDOMAIN_SCORE'"; return new SqlCommand(sql, SqlConnection); } } partial class SUDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the OVERVIEW column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SU]') AND name = 'OVERVIEW' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class SVAGDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the VDOMAIN column. New to C21v59. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[SVAG]') AND name = 'VDOMAIN'"; return new SqlCommand(sql, SqlConnection); } } partial class SXABDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the Index_STKEY_AM_ACT_TYPE_PM_ACT_TYPE_TXHG_TID_ABSENCE_DATE_LW_DATE index. New to C21v64. const string sql = @"SELECT 1 - COUNT(*) FROM sys.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SXAB]') AND name = 'SXAB_Index_STKEY_AM_ACT_TYPE_PM_ACT_TYPE_TXHG_TID_ABSENCE_DATE_LW_DATE'"; return new SqlCommand(sql, SqlConnection); } } partial class TCDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the DAY_COMMENT column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[TC]') AND name = 'DAY_COMMENT' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class TCTBDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the COMMENT_A column has the right type. Modified to C21v58. 
const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[TCTB]') AND name = 'COMMENT_A' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class TCTDDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the TCTD_Index_TCTDKEY_TIME_TYPE_QKEY index. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.indexes WHERE object_id = OBJECT_ID(N'[dbo].[TCTD]') AND name = 'TCTD_Index_TCTDKEY_TIME_TYPE_QKEY'"; return new SqlCommand(sql, SqlConnection); } } partial class TCTQDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the TCTQ_Index_TCTQKEY_QROW_QKEY index. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.indexes WHERE object_id = OBJECT_ID(N'[dbo].[TCTQ]') AND name = 'TCTQ_Index_TCTQKEY_QROW_QKEY'"; return new SqlCommand(sql, SqlConnection); } } partial class TCTRDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the COMMENT_R column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[TCTR]') AND name = 'COMMENT_R' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class TEDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the COMMENTS column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[TE]') AND name = 'COMMENTS' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class TETNDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the TETN_Index_TETELINK_ATTENDEE_TYPE index. New to C21v57. const string sql = @"SELECT 1 - COUNT(*) FROM sys.indexes WHERE object_id = OBJECT_ID(N'[dbo].[TETN]') AND name = 'TETN_Index_TETELINK_ATTENDEE_TYPE'"; return new SqlCommand(sql, SqlConnection); } } partial class TTDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check that the GSOLS column has the right type. Modified to C21v58. const string sql = @"SELECT 1 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[TT]') AND name = 'GSOLS' AND max_length = -1"; return new SqlCommand(sql, SqlConnection); } } partial class UMDataSet { /// <inheritdoc /> public override SqlCommand GetSqlTableIsValidCommand(SqlConnection SqlConnection) { // Check for the existence of the LATITUDE and LONGITUDE columns. New to C21v59. const string sql = @"SELECT 2 - COUNT(*) FROM sys.columns WHERE object_id = OBJECT_ID(N'[dbo].[UM]') AND name IN ('LATITUDE', 'LONGITUDE')"; return new SqlCommand(sql, SqlConnection); } } }
using System; using System.IO; using System.Collections; using Hydra.Framework.Mapping.Geometries; namespace Hydra.Framework.Mapping.Converters.WellKnownBinary { // // ********************************************************************** /// <summary> /// Converts a <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> instance to a Well-known Binary string representation. /// </summary> /// <remarks> /// <para>The Well-known Binary Representation for <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> (WKBGeometry) provides a portable /// representation of a <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> value as a contiguous stream of bytes. It permits <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> /// values to be exchanged between an ODBC client and an SQL database in binary form.</para> /// <para>The Well-known Binary Representation for <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> is obtained by serializing a <see cref="Hydra.Framework.Mapping.Geometries.AbstractGeometry"/> /// instance as a sequence of numeric types drawn from the set {Unsigned Integer, Double} and /// then serializing each numeric type as a sequence of bytes using one of two well defined, /// standard, binary representations for numeric types (NDR, XDR). The specific binary encoding /// (NDR or XDR) used for a geometry byte stream is described by a one byte tag that precedes /// the serialized bytes. The only difference between the two encodings of geometry is one of /// byte order, the XDR encoding is Big Endian, the NDR encoding is Little Endian.</para> /// </remarks> // ********************************************************************** // public class GeometryToWKB { #region Public Static Methods // // ********************************************************************** /// <summary> /// Writes a geometry to a byte array using little endian byte encoding /// </summary> /// <param name="g">The geometry to write</param> /// <returns>WKB representation of the geometry</returns> // ********************************************************************** // public static byte[] Write(AbstractGeometry g) { return Write(g, WkbByteOrder.Ndr); } // // ********************************************************************** /// <summary> /// Writes a geometry to a byte array using the specified encoding. /// </summary> /// <param name="g">The geometry to write</param> /// <param name="wkbByteOrder">Byte order</param> /// <returns>WKB representation of the geometry</returns> // ********************************************************************** // public static byte[] Write(AbstractGeometry g, WkbByteOrder wkbByteOrder) { MemoryStream ms = new MemoryStream(); BinaryWriter bw = new BinaryWriter(ms); // // Write the byteorder format. // bw.Write((byte)wkbByteOrder); // // Write the type of this geometry // WriteType(g, bw, wkbByteOrder); // // Write the geometry // WriteGeometry(g, bw, wkbByteOrder); return ms.ToArray(); } #endregion #region Private Static Methods // // ********************************************************************** /// <summary> /// Writes the type number for this geometry. 
/// </summary> /// <param name="geometry">The geometry to determine the type of.</param> /// <param name="bWriter">Binary Writer</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteType(AbstractGeometry geometry, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Determine the type of the geometry. // switch (geometry.GetType().FullName) { // // Points are type 1. // case "Hydra.Framework.Mapping.Geometries.Point": WriteUInt32((uint)WKBGeometryType.wkbPoint, bWriter, byteorder); break; // // Linestrings are type 2. // case "Hydra.Framework.Mapping.Geometries.LineString": WriteUInt32((uint)WKBGeometryType.wkbLineString, bWriter, byteorder); break; // // Polygons are type 3. // case "Hydra.Framework.Mapping.Geometries.Polygon": WriteUInt32((uint)WKBGeometryType.wkbPolygon, bWriter, byteorder); break; // // Mulitpoints are type 4. // case "Hydra.Framework.Mapping.Geometries.MultiPoint": WriteUInt32((uint)WKBGeometryType.wkbMultiPoint, bWriter, byteorder); break; // // Multilinestrings are type 5. // case "Hydra.Framework.Mapping.Geometries.MultiLineString": WriteUInt32((uint)WKBGeometryType.wkbMultiLineString, bWriter, byteorder); break; // // Multipolygons are type 6. // case "Hydra.Framework.Mapping.Geometries.MultiPolygon": WriteUInt32((uint)WKBGeometryType.wkbMultiPolygon, bWriter, byteorder); break; // // Geometrycollections are type 7. // case "Hydra.Framework.Mapping.Geometries.GeometryCollection": WriteUInt32((uint)WKBGeometryType.wkbGeometryCollection, bWriter, byteorder); break; // // If the type is not of the above 7 throw an exception. // default: throw new ArgumentException("Invalid AbstractGeometry Type"); } } // // ********************************************************************** /// <summary> /// Writes the geometry to the binary writer. /// </summary> /// <param name="geometry">The geometry to be written.</param> /// <param name="bWriter">The b writer.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteGeometry(AbstractGeometry geometry, BinaryWriter bWriter, WkbByteOrder byteorder) { switch (geometry.GetType().FullName) { // // Write the point. // case "Hydra.Framework.Mapping.Geometries.Point": WritePoint((Point)geometry, bWriter, byteorder); break; case "Hydra.Framework.Mapping.Geometries.LineString": LineString ls = (LineString)geometry; WriteLineString(ls, bWriter, byteorder); break; case "Hydra.Framework.Mapping.Geometries.Polygon": WritePolygon((Polygon)geometry, bWriter, byteorder); break; // // Write the Multipoint. // case "Hydra.Framework.Mapping.Geometries.MultiPoint": WriteMultiPoint((MultiPoint)geometry, bWriter, byteorder); break; // // Write the Multilinestring. // case "Hydra.Framework.Mapping.Geometries.MultiLineString": WriteMultiLineString((MultiLineString)geometry, bWriter, byteorder); break; // // Write the Multipolygon. // case "Hydra.Framework.Mapping.Geometries.MultiPolygon": WriteMultiPolygon((MultiPolygon)geometry, bWriter, byteorder); break; // // Write the Geometrycollection. // case "Hydra.Framework.Mapping.Geometries.GeometryCollection": WriteGeometryCollection((GeometryCollection)geometry, bWriter, byteorder); break; // // If the type is not of the above 7 throw an exception. 
// default: throw new ArgumentException("Invalid AbstractGeometry Type"); } } // // ********************************************************************** /// <summary> /// Writes a point. /// </summary> /// <param name="point">The point to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WritePoint(Point point, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Write the x coordinate. // WriteDouble(point.X, bWriter, byteorder); // // Write the y coordinate. // WriteDouble(point.Y, bWriter, byteorder); } // // ********************************************************************** /// <summary> /// Writes a linestring. /// </summary> /// <param name="ls">The linestring to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteLineString(LineString ls, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Write the number of points in this linestring. // WriteUInt32((uint)ls.Vertices.Count, bWriter, byteorder); // // Loop on each vertices. // foreach (Point p in ls.Vertices) WritePoint(p, bWriter, byteorder); } // // ********************************************************************** /// <summary> /// Writes a polygon. /// </summary> /// <param name="poly">The polygon to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WritePolygon(Polygon poly, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Get the number of rings in this polygon. // int numRings = poly.InteriorRings.Count + 1; // // Write the number of rings to the stream (add one for the shell) // WriteUInt32((uint)numRings, bWriter, byteorder); // // Write the exterior of this polygon. // WriteLineString((LineString)poly.ExteriorRing, bWriter, byteorder); // // Loop on the number of rings - 1 because we already wrote the shell. // foreach (LinearRing lr in poly.InteriorRings) { // // Write the (lineString)LinearRing. // WriteLineString((LineString)lr, bWriter, byteorder); } } // // ********************************************************************** /// <summary> /// Writes a multipoint. /// </summary> /// <param name="mp">The multipoint to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteMultiPoint(MultiPoint mp, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Write the number of points. // WriteUInt32((uint)mp.Points.Count, bWriter, byteorder); // // Loop on the number of points. // foreach (Point p in mp.Points) { // // Write Points Header // bWriter.Write((byte)byteorder); WriteUInt32((uint)WKBGeometryType.wkbPoint, bWriter, byteorder); // // Write each point. // WritePoint((Point)p, bWriter, byteorder); } } // // ********************************************************************** /// <summary> /// Writes a multilinestring. 
/// </summary> /// <param name="mls">The multilinestring to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteMultiLineString(MultiLineString mls, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Write the number of linestrings. // WriteUInt32((uint)mls.LineStrings.Count, bWriter, byteorder); // // Loop on the number of linestrings. // foreach (LineString ls in mls.LineStrings) { // // Write LineString Header // bWriter.Write((byte)byteorder); WriteUInt32((uint)WKBGeometryType.wkbLineString, bWriter, byteorder); // // Write each linestring. // WriteLineString(ls, bWriter, byteorder); } } // // ********************************************************************** /// <summary> /// Writes a multipolygon. /// </summary> /// <param name="mp">The mulitpolygon to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteMultiPolygon(MultiPolygon mp, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Write the number of polygons. // WriteUInt32((uint)mp.Polygons.Count, bWriter, byteorder); // // Loop on the number of polygons. // foreach (Polygon poly in mp.Polygons) { // // Write polygon header // bWriter.Write((byte)byteorder); WriteUInt32((uint)WKBGeometryType.wkbPolygon, bWriter, byteorder); // // Write each polygon. // WritePolygon(poly, bWriter, byteorder); } } // // ********************************************************************** /// <summary> /// Writes a geometrycollection. /// </summary> /// <param name="gc">The geometrycollection to be written.</param> /// <param name="bWriter">Stream to write to.</param> /// <param name="byteorder">Byte order</param> // ********************************************************************** // private static void WriteGeometryCollection(GeometryCollection gc, BinaryWriter bWriter, WkbByteOrder byteorder) { // // Get the number of geometries in this geometrycollection. // int numGeometries = gc.NumGeometries; // // Write the number of geometries. // WriteUInt32((uint)numGeometries, bWriter, byteorder); // // Loop on the number of geometries. // for (int i = 0; i < numGeometries; i++) { // // Write the byte-order format of the following geometry. // bWriter.Write((byte)byteorder); // // Write the type of each geometry. // WriteType(gc[i], bWriter, byteorder); // // Write each geometry. 
//
WriteGeometry(gc[i], bWriter, byteorder);
            }
        }

        //
        // **********************************************************************
        /// <summary>
        /// Writes an unsigned integer to the BinaryWriter using the specified encoding.
        /// </summary>
        /// <param name="value">Value to write</param>
        /// <param name="writer">Binary Writer</param>
        /// <param name="byteOrder">Byte order</param>
        // **********************************************************************
        //
        private static void WriteUInt32(UInt32 value, BinaryWriter writer, WkbByteOrder byteOrder)
        {
            if (byteOrder == WkbByteOrder.Xdr)
            {
                //
                // Reverse the bytes so the value ends up on the stream in
                // big-endian (XDR) order, even though BinaryWriter writes
                // the resulting value little-endian.
                //
                byte[] bytes = BitConverter.GetBytes(value);
                Array.Reverse(bytes);
                writer.Write(BitConverter.ToUInt32(bytes, 0));
            }
            else
                writer.Write(value);
        }

        //
        // **********************************************************************
        /// <summary>
        /// Writes a double to the BinaryWriter using the specified encoding.
        /// </summary>
        /// <param name="value">Value to write</param>
        /// <param name="writer">Binary Writer</param>
        /// <param name="byteOrder">Byte order</param>
        // **********************************************************************
        //
        private static void WriteDouble(double value, BinaryWriter writer, WkbByteOrder byteOrder)
        {
            if (byteOrder == WkbByteOrder.Xdr)
            {
                //
                // Reverse the bytes so the value ends up on the stream in
                // big-endian (XDR) order, even though BinaryWriter writes
                // the resulting value little-endian.
                //
                byte[] bytes = BitConverter.GetBytes(value);
                Array.Reverse(bytes);
                writer.Write(BitConverter.ToDouble(bytes, 0));
            }
            else
                writer.Write(value);
        }

        #endregion
    }
}
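//
// **********************************************************************
// Illustrative usage sketch (not part of the original source).  It shows
// the byte layout GeometryToWKB.Write produces for a single point: one
// byte-order marker, a four byte geometry type, then two eight byte
// doubles (X, Y).  The Point(x, y) constructor used below is an assumption
// made for brevity; construct the point however the geometry library
// actually requires.
// **********************************************************************
//
using System;
using Hydra.Framework.Mapping.Converters.WellKnownBinary;
using Hydra.Framework.Mapping.Geometries;

namespace Hydra.Framework.Mapping.Samples
{
    internal static class GeometryToWKBSample
    {
        internal static void Main()
        {
            AbstractGeometry point = new Point(30.0, 10.0);   // assumed constructor

            // NDR (little endian) is the default byte order.
            byte[] wkb = GeometryToWKB.Write(point);

            // Expected length: 1 (byte order) + 4 (type) + 2 * 8 (X, Y) = 21 bytes.
            Console.WriteLine("WKB length: {0}", wkb.Length);

            // Per the WKB specification the marker byte is 1 for NDR and 0 for XDR.
            Console.WriteLine("Byte order marker: {0}", wkb[0]);

            // On a little-endian machine this reads back wkbPoint (= 1).
            Console.WriteLine("Geometry type: {0}", BitConverter.ToUInt32(wkb, 1));

            // The same geometry encoded as XDR (big endian) differs only in byte ordering.
            byte[] wkbXdr = GeometryToWKB.Write(point, WkbByteOrder.Xdr);
            Console.WriteLine("XDR length: {0}", wkbXdr.Length);
        }
    }
}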
using System.Threading; using Solenoid.Expressions.Parser.antlr.collections.impl; namespace Solenoid.Expressions.Parser.antlr.debug { public class LLkDebuggingParser : LLkParser, DebuggingParser { private void InitBlock() { parserEventSupport = new ParserEventSupport(this); } public override void setDebugMode(bool mode) { _notDebugMode = !mode; } protected internal ParserEventSupport parserEventSupport; private bool _notDebugMode = false; protected internal string[] ruleNames; protected internal string[] semPredNames; public LLkDebuggingParser(int k_):base(k_) { InitBlock(); } public LLkDebuggingParser(ParserSharedInputState state, int k_):base(state, k_) { InitBlock(); } public LLkDebuggingParser(TokenBuffer tokenBuf, int k_):base(tokenBuf, k_) { InitBlock(); } public LLkDebuggingParser(TokenStream lexer, int k_):base(lexer, k_) { InitBlock(); } public override void addMessageListener(MessageListener l) { parserEventSupport.addMessageListener(l); } public override void addParserListener(ParserListener l) { parserEventSupport.addParserListener(l); } public override void addParserMatchListener(ParserMatchListener l) { parserEventSupport.addParserMatchListener(l); } public override void addParserTokenListener(ParserTokenListener l) { parserEventSupport.addParserTokenListener(l); } public override void addSemanticPredicateListener(SemanticPredicateListener l) { parserEventSupport.addSemanticPredicateListener(l); } public override void addSyntacticPredicateListener(SyntacticPredicateListener l) { parserEventSupport.addSyntacticPredicateListener(l); } public override void addTraceListener(TraceListener l) { parserEventSupport.addTraceListener(l); } /// <summary>Get another token object from the token stream /// </summary> public override void consume() { var la_1 = - 99; la_1 = LA(1); base.consume(); parserEventSupport.fireConsume(la_1); } protected internal virtual void fireEnterRule(int num, int data) { if (isDebugMode()) parserEventSupport.fireEnterRule(num, inputState.guessing, data); } protected internal virtual void fireExitRule(int num, int data) { if (isDebugMode()) parserEventSupport.fireExitRule(num, inputState.guessing, data); } protected internal virtual bool fireSemanticPredicateEvaluated(int type, int num, bool condition) { if (isDebugMode()) return parserEventSupport.fireSemanticPredicateEvaluated(type, num, condition, inputState.guessing); else return condition; } protected internal virtual void fireSyntacticPredicateFailed() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateFailed(inputState.guessing); } protected internal virtual void fireSyntacticPredicateStarted() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateStarted(inputState.guessing); } protected internal virtual void fireSyntacticPredicateSucceeded() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateSucceeded(inputState.guessing); } public virtual string getRuleName(int num) { return ruleNames[num]; } public virtual string getSemPredName(int num) { return semPredNames[num]; } public virtual void goToSleep() { lock(this) { try { Monitor.Wait(this); } catch (System.Threading.ThreadInterruptedException) { } } } public override bool isDebugMode() { return !_notDebugMode; } public virtual bool isGuessing() { return inputState.guessing > 0; } /// <summary>Return the token type of the ith token of lookahead where i=1 /// is the current token being examined by the parser (i.e., it /// has not been matched yet). 
/// </summary> public override int LA(int i) { var la = base.LA(i); parserEventSupport.fireLA(i, la); return la; } /// <summary>Make sure current lookahead symbol matches token type <tt>t</tt>. /// Throw an exception upon mismatch, which is catch by either the /// error handler or by the syntactic predicate. /// </summary> public override void match(int t) { var text = LT(1).getText(); var la_1 = LA(1); try { base.match(t); parserEventSupport.fireMatch(t, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatch(la_1, t, text, inputState.guessing); throw e; } } /// <summary>Make sure current lookahead symbol matches the given set /// Throw an exception upon mismatch, which is catch by either the /// error handler or by the syntactic predicate. /// </summary> public override void match(BitSet b) { var text = LT(1).getText(); var la_1 = LA(1); try { base.match(b); parserEventSupport.fireMatch(la_1, b, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatch(la_1, b, text, inputState.guessing); throw e; } } public override void matchNot(int t) { var text = LT(1).getText(); var la_1 = LA(1); try { base.matchNot(t); parserEventSupport.fireMatchNot(la_1, t, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatchNot(la_1, t, text, inputState.guessing); throw e; } } public override void removeMessageListener(MessageListener l) { parserEventSupport.removeMessageListener(l); } public override void removeParserListener(ParserListener l) { parserEventSupport.removeParserListener(l); } public override void removeParserMatchListener(ParserMatchListener l) { parserEventSupport.removeParserMatchListener(l); } public override void removeParserTokenListener(ParserTokenListener l) { parserEventSupport.removeParserTokenListener(l); } public override void removeSemanticPredicateListener(SemanticPredicateListener l) { parserEventSupport.removeSemanticPredicateListener(l); } public override void removeSyntacticPredicateListener(SyntacticPredicateListener l) { parserEventSupport.removeSyntacticPredicateListener(l); } public override void removeTraceListener(TraceListener l) { parserEventSupport.removeTraceListener(l); } /// <summary>Parser error-reporting function can be overridden in subclass /// </summary> public override void reportError(RecognitionException ex) { parserEventSupport.fireReportError(ex); base.reportError(ex); } /// <summary>Parser error-reporting function can be overridden in subclass /// </summary> public override void reportError(string s) { parserEventSupport.fireReportError(s); base.reportError(s); } /// <summary>Parser warning-reporting function can be overridden in subclass /// </summary> public override void reportWarning(string s) { parserEventSupport.fireReportWarning(s); base.reportWarning(s); } public virtual void setupDebugging(TokenBuffer tokenBuf) { setupDebugging(null, tokenBuf); } public virtual void setupDebugging(TokenStream lexer) { setupDebugging(lexer, null); } /// <summary>User can override to do their own debugging /// </summary> protected internal virtual void setupDebugging(TokenStream lexer, TokenBuffer tokenBuf) { setDebugMode(true); // default parser debug setup is ParseView try { // try // { // System.Type.GetType("javax.swing.JButton"); // } // catch (System.Exception) // { // System.Console.Error.WriteLine("Swing is required to use ParseView, but is 
not present in your CLASSPATH"); // System.Environment.Exit(1); // } var c = System.Type.GetType("antlr.parseview.ParseView"); var constructor = c.GetConstructor(new System.Type[]{typeof(LLkDebuggingParser), typeof(TokenStream), typeof(TokenBuffer)}); constructor.Invoke(new object[]{this, lexer, tokenBuf}); } catch (System.Exception e) { System.Console.Error.WriteLine("Error initializing ParseView: " + e); System.Console.Error.WriteLine("Please report this to Scott Stanchfield, [email protected]"); System.Environment.Exit(1); } } public virtual void wakeUp() { lock(this) { Monitor.Pulse(this); } } } }
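// --------------------------------------------------------------------------
// Standalone sketch (not part of the original source) of the notification
// pattern LLkDebuggingParser relies on: the parser delegates to an "event
// support" object that fans each fire* call out to every registered
// listener.  The types below are simplified stand-ins for the antlr
// listener interfaces and exist purely for illustration.
// --------------------------------------------------------------------------
using System;
using System.Collections.Generic;

namespace Solenoid.Expressions.Parser.antlr.debug.Samples
{
    // Simplified stand-in for a parser match listener.
    internal interface ISampleMatchListener
    {
        void OnMatch(int tokenType, string text);
    }

    // Simplified stand-in for ParserEventSupport: keeps listeners and fans out events.
    internal sealed class SampleEventSupport
    {
        private readonly List<ISampleMatchListener> _listeners = new List<ISampleMatchListener>();

        public void AddListener(ISampleMatchListener l) { _listeners.Add(l); }
        public void RemoveListener(ISampleMatchListener l) { _listeners.Remove(l); }

        public void FireMatch(int tokenType, string text)
        {
            // Every registered listener is notified, mirroring the fireMatch
            // calls made from match() above.
            foreach (var listener in _listeners)
                listener.OnMatch(tokenType, text);
        }
    }

    internal sealed class ConsoleMatchListener : ISampleMatchListener
    {
        public void OnMatch(int tokenType, string text)
        {
            Console.WriteLine("matched token {0}: '{1}'", tokenType, text);
        }
    }
}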
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Xunit; namespace System.Numerics.Tests { public static class Support { private static Random s_random; static Support() { s_random = new Random(-55); } public static Random Random { get { return s_random; } } // Valid values in double type public static Double[] doubleValidValues = new Double[] { double.MinValue, -1, 0, double.Epsilon, 1, double.MaxValue, }; // Invalid values in double type public static Double[] doubleInvalidValues = new Double[] { double.NegativeInfinity, double.PositiveInfinity, double.NaN }; // Typical phase values in double type public static Double[] phaseTypicalValues = new Double[] { -Math.PI/2, 0, Math.PI/2 }; public static String[] supportedStdNumericFormats = new String[] { "C", "E", "F", "G", "N", "P", "R" }; private static double GetRandomValue(double mult, bool fIsNegative) { double randomDouble = (mult * s_random.NextDouble()); randomDouble %= (Double)(mult); return fIsNegative ? -randomDouble : randomDouble; } public static double GetRandomDoubleValue(bool fIsNegative) { return GetRandomValue(double.MaxValue, fIsNegative); } public static double GetSmallRandomDoubleValue(bool fIsNegative) { return GetRandomValue(1.0, fIsNegative); } public static Int16 GetRandomInt16Value(bool fIsNegative) { if (fIsNegative) { return ((Int16)s_random.Next(Int16.MinValue, 0)); } else { return ((Int16)s_random.Next(1, Int16.MaxValue)); } } public static Int32 GetRandomInt32Value(bool fIsNegative) { return ((Int32)GetRandomValue(Int32.MaxValue, fIsNegative)); } public static Int64 GetRandomInt64Value(bool fIsNegative) { return ((Int64)GetRandomValue(Int64.MaxValue, fIsNegative)); } public static Byte GetRandomByteValue() { return ((Byte)s_random.Next(1, Byte.MaxValue)); } #if CLS_Compliant public static SByte GetRandomSByteValue(bool fIsNegative) { if (fIsNegative) { return ((SByte) random.Next(SByte.MinValue, 0)); } else { return ((SByte) random.Next(1, SByte.MaxValue)); } } public static UInt16 GetRandomUInt16Value() { return ((UInt16)random.Next(1, UInt16.MaxValue)); } public static UInt32 GetRandomUInt32Value() { return ((UInt32)GetRandomValue(UInt32.MaxValue, false)); } public static UInt64 GetRandomUInt64Value() { return ((UInt64)GetRandomValue(UInt64.MaxValue, false)); } #endif public static Single GetRandomSingleValue(bool fIsNegative) { return ((Single)GetRandomValue(Single.MaxValue, fIsNegative)); } public static BigInteger GetRandomBigIntegerValue(bool fIsNegative) { return ((BigInteger)GetRandomValue(double.MaxValue, fIsNegative)); } public static Decimal GetRandomDecimalValue(bool fIsNegative) { if (fIsNegative) { return ((Decimal)new Decimal( s_random.Next(Int32.MinValue, Int32.MaxValue), s_random.Next(Int32.MinValue, Int32.MaxValue), s_random.Next(Int32.MinValue, Int32.MaxValue), true, (byte)s_random.Next(0, 29))); } else { return ((Decimal)new Decimal( s_random.Next(Int32.MinValue, Int32.MaxValue), s_random.Next(Int32.MinValue, Int32.MaxValue), s_random.Next(Int32.MinValue, Int32.MaxValue), false, (byte)s_random.Next(0, 29))); } } public static double GetRandomPhaseValue(bool fIsNegative) { return GetRandomValue((Math.PI / 2), fIsNegative); } public static bool IsDiffTolerable(double d1, double d2) { if (double.IsInfinity(d1)) { return AreSameInfinity(d1, d2 * 10); } else if (double.IsInfinity(d2)) { return AreSameInfinity(d1 * 10, d2); } else { double 
diffRatio = (d1 - d2) / d1; diffRatio *= Math.Pow(10, 6); diffRatio = Math.Abs(diffRatio); return (diffRatio < 1); } } private static bool AreSameInfinity(double d1, double d2) { return double.IsNegativeInfinity(d1) == double.IsNegativeInfinity(d2) && double.IsPositiveInfinity(d1) == double.IsPositiveInfinity(d2); } public static void VerifyRealImaginaryProperties(Complex complex, double real, double imaginary, string message) { Assert.True(real.Equals((Double)complex.Real) || IsDiffTolerable(complex.Real, real), message); Assert.True(imaginary.Equals((Double)complex.Imaginary) || IsDiffTolerable(complex.Imaginary, imaginary), message); } public static void VerifyMagnitudePhaseProperties(Complex complex, double magnitude, double phase, string message) { // The magnitude (m) of a complex number (z = x + yi) is the absolute value - |z| = sqrt(x^2 + y^2) // Verification is done using the square of the magnitude since m^2 = x^2 + y^2 double expectedMagnitudeSqr = magnitude * magnitude; double actualMagnitudeSqr = complex.Magnitude * complex.Magnitude; Assert.True(expectedMagnitudeSqr.Equals((Double)(actualMagnitudeSqr)) || IsDiffTolerable(actualMagnitudeSqr, expectedMagnitudeSqr), message); if (double.IsNaN(magnitude)) { phase = double.NaN; } else if (magnitude == 0) { phase = 0; } else if (magnitude < 0) { phase += (phase < 0) ? Math.PI : -Math.PI; } Assert.True(phase.Equals((Double)complex.Phase) || IsDiffTolerable(complex.Phase, phase), message); } } }
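// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source) of the tolerance
// check used by Support.IsDiffTolerable for finite values: two doubles are
// considered close enough when their relative difference, scaled by 10^6,
// has an absolute value below 1 (i.e. the relative error is under 1e-6).
// --------------------------------------------------------------------------
using System;

namespace System.Numerics.Tests.Samples
{
    internal static class ToleranceSample
    {
        internal static void Main()
        {
            double expected = 1.0;
            double actual = 1.0 + 5e-7;   // relative error 5e-7 < 1e-6 -> tolerable

            double diffRatio = (expected - actual) / expected;
            diffRatio *= Math.Pow(10, 6);
            diffRatio = Math.Abs(diffRatio);

            Console.WriteLine("scaled relative difference: {0}", diffRatio); // ~0.5
            Console.WriteLine("tolerable: {0}", diffRatio < 1);              // True
        }
    }
}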
using System; using System.Diagnostics; using System.Drawing.Drawing2D; using System.Windows.Forms; namespace System.Drawing.PieChart { /// <summary> /// Summary description for PieChartControl. /// </summary> public class PieChartControl : System.Windows.Forms.Panel { /// <summary> /// Initializes the <c>PieChartControl</c>. /// </summary> public PieChartControl() : base() { this.SetStyle(ControlStyles.UserPaint, true); this.SetStyle(ControlStyles.AllPaintingInWmPaint, true); this.SetStyle(ControlStyles.DoubleBuffer, true); this.SetStyle(ControlStyles.ResizeRedraw, true); m_toolTip = new ToolTip(); } /// <summary> /// Sets the left margin for the chart. /// </summary> public float LeftMargin { set { Debug.Assert(value >= 0); m_leftMargin = value; Invalidate(); } } /// <summary> /// Sets the right margin for the chart. /// </summary> public float RightMargin { set { Debug.Assert(value >= 0); m_rightMargin = value; Invalidate(); } } /// <summary> /// Sets the top margin for the chart. /// </summary> public float TopMargin { set { Debug.Assert(value >= 0); m_topMargin = value; Invalidate(); } } /// <summary> /// Sets the bottom margin for the chart. /// </summary> public float BottomMargin { set { Debug.Assert(value >= 0); m_bottomMargin = value; Invalidate(); } } /// <summary> /// Sets the indicator if chart should fit the bounding rectangle /// exactly. /// </summary> public bool FitChart { set { m_fitChart = value; Invalidate(); } } /// <summary> /// Sets values to be represented by the chart. /// </summary> public decimal[] Values { set { m_values = value; Invalidate(); } } /// <summary> /// Sets colors to be used for rendering pie slices. /// </summary> public Color[] Colors { set { m_colors = value; Invalidate(); } } /// <summary> /// Sets values for slice displacements. /// </summary> public float[] SliceRelativeDisplacements { set { m_relativeSliceDisplacements = value; Invalidate(); } } /// <summary> /// Gets or sets tooltip texts. /// </summary> public string[] ToolTips { set { m_toolTipTexts = value; } get { return m_toolTipTexts; } } /// <summary> /// Sets texts appearing by each pie slice. /// </summary> public string[] Texts { set { m_texts = value; } } /// <summary> /// Sets pie slice reative height. /// </summary> public float SliceRelativeHeight { set { m_sliceRelativeHeight = value; Invalidate(); } } /// <summary> /// Sets the shadow style. /// </summary> public ShadowStyle ShadowStyle { set { m_shadowStyle = value; Invalidate(); } } /// <summary> /// Sets the edge color type. /// </summary> public EdgeColorType EdgeColorType { set { m_edgeColorType = value; Invalidate(); } } /// <summary> /// Sets the edge lines width. /// </summary> public float EdgeLineWidth { set { m_edgeLineWidth = value; Invalidate(); } } /// <summary> /// Sets the initial angle from which pies are drawn. /// </summary> public float InitialAngle { set { m_initialAngle = value; Invalidate(); } } /// <summary> /// Handles <c>OnPaint</c> event. /// </summary> /// <param name="args"> /// <c>PaintEventArgs</c> object. /// </param> protected override void OnPaint(PaintEventArgs args) { base.OnPaint(args); if (HasAnyValue) { DoDraw(args.Graphics); } } /// <summary> /// Sets values for the chart and draws them. /// </summary> /// <param name="graphics"> /// Graphics object used for drawing. 
/// </param> protected void DoDraw(Graphics graphics) { if (m_values != null && m_values.Length > 0) { graphics.SmoothingMode = SmoothingMode.AntiAlias; float width = ClientSize.Width - m_leftMargin - m_rightMargin; float height = ClientSize.Height - m_topMargin - m_bottomMargin; // if the width or height if <=0 an exception would be thrown -> exit method.. if (width <= 0 || height <= 0) return; if (m_pieChart != null) m_pieChart.Dispose(); if (m_colors != null && m_colors.Length > 0) m_pieChart = new PieChart3D(m_leftMargin, m_topMargin, width, height, m_values, m_colors, m_sliceRelativeHeight, m_texts); else m_pieChart = new PieChart3D(m_leftMargin, m_topMargin, width, height, m_values, m_sliceRelativeHeight, m_texts); m_pieChart.FitToBoundingRectangle = m_fitChart; m_pieChart.InitialAngle = m_initialAngle; m_pieChart.SliceRelativeDisplacements = m_relativeSliceDisplacements; m_pieChart.EdgeColorType = m_edgeColorType; m_pieChart.EdgeLineWidth = m_edgeLineWidth; m_pieChart.ShadowStyle = m_shadowStyle; m_pieChart.HighlightedIndex = m_highlightedIndex; m_pieChart.Draw(graphics); m_pieChart.Font = this.Font; m_pieChart.ForeColor = this.ForeColor; m_pieChart.PlaceTexts(graphics); } } /// <summary> /// Handles <c>MouseEnter</c> event to activate the tooltip. /// </summary> /// <param name="e"></param> protected override void OnMouseEnter(System.EventArgs e) { base.OnMouseEnter(e); m_defaultToolTipAutoPopDelay = m_toolTip.AutoPopDelay; m_toolTip.AutoPopDelay = Int16.MaxValue; } /// <summary> /// Handles <c>MouseLeave</c> event to disable tooltip. /// </summary> /// <param name="e"></param> protected override void OnMouseLeave(System.EventArgs e) { base.OnMouseLeave(e); m_toolTip.RemoveAll(); m_toolTip.AutoPopDelay = m_defaultToolTipAutoPopDelay; m_highlightedIndex = -1; Refresh(); } /// <summary> /// Handles <c>MouseMove</c> event to display tooltip for the pie /// slice under pointer and to display slice in highlighted color. /// </summary> /// <param name="e"></param> protected override void OnMouseMove(System.Windows.Forms.MouseEventArgs e) { base.OnMouseMove(e); if (m_pieChart != null && m_values != null && m_values.Length > 0) { int index = m_pieChart.FindPieSliceUnderPoint(new PointF(e.X, e.Y)); if (index != m_highlightedIndex) { m_highlightedIndex = index; Refresh(); } if (m_highlightedIndex != -1) { if (m_toolTipTexts == null || m_toolTipTexts.Length <= m_highlightedIndex || m_toolTipTexts[m_highlightedIndex].Length == 0) m_toolTip.SetToolTip(this, m_values[m_highlightedIndex].ToString()); else m_toolTip.SetToolTip(this, m_toolTipTexts[m_highlightedIndex]); } else { m_toolTip.RemoveAll(); } } } /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose(bool disposing) { if (!m_disposed) { try { if (disposing) { if (m_pieChart != null) // 2012/10/3 m_pieChart.Dispose(); m_toolTip.Dispose(); } m_disposed = true; } finally { base.Dispose(disposing); } } } /// <summary> /// Gets a flag indicating if at least one value is nonzero. 
/// </summary> private bool HasAnyValue { get { if (m_values == null) return false; foreach (decimal angle in m_values) { if (angle != 0) { return true; } } return false; } } private PieChart3D m_pieChart = null; private float m_leftMargin; private float m_topMargin; private float m_rightMargin; private float m_bottomMargin; private bool m_fitChart = false; private decimal[] m_values = null; private Color[] m_colors = null; private float m_sliceRelativeHeight; private float[] m_relativeSliceDisplacements = new float[] { 0F }; private string[] m_texts = null; private string[] m_toolTipTexts = null; private ShadowStyle m_shadowStyle = ShadowStyle.GradualShadow; private EdgeColorType m_edgeColorType = EdgeColorType.SystemColor; private float m_edgeLineWidth = 1F; private float m_initialAngle; private int m_highlightedIndex = -1; private ToolTip m_toolTip = null; /// <summary> /// Default AutoPopDelay of the ToolTip control. /// </summary> private int m_defaultToolTipAutoPopDelay; /// <summary> /// Flag indicating that object has been disposed. /// </summary> private bool m_disposed = false; } }
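// --------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): hosting a
// PieChartControl on a form and feeding it data through the write-only
// properties defined above.  The sample values, colors and labels are
// arbitrary placeholders.
// --------------------------------------------------------------------------
using System;
using System.Drawing;
using System.Windows.Forms;

namespace System.Drawing.PieChart.Samples
{
    internal static class PieChartControlSample
    {
        [STAThread]
        internal static void Main()
        {
            var chart = new PieChartControl
            {
                Dock = DockStyle.Fill,
                Values = new decimal[] { 40m, 30m, 20m, 10m },
                Colors = new[] { Color.SteelBlue, Color.Orange, Color.SeaGreen, Color.IndianRed },
                Texts = new[] { "A", "B", "C", "D" },
                ToolTips = new[] { "Segment A", "Segment B", "Segment C", "Segment D" },
                SliceRelativeHeight = 0.15f,
                SliceRelativeDisplacements = new[] { 0.05f },
                InitialAngle = -90f,
                EdgeLineWidth = 1f,
                FitChart = true
            };

            var form = new Form { Text = "Pie chart sample", ClientSize = new Size(480, 360) };
            form.Controls.Add(chart);
            Application.Run(form);
        }
    }
}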
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. using System; using System.ComponentModel; using System.Runtime.Serialization; using System.Diagnostics.Contracts; namespace System.Windows.Forms { // Summary: // Represents a node of a System.Windows.Forms.TreeView. //[Serializable] //[DefaultProperty("Text")] //[TypeConverter(typeof(TreeNodeConverter))] public class TreeNode //: MarshalByRefObject, ICloneable, ISerializable { // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class. //public TreeNode(); // // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class with // the specified label text. // // Parameters: // text: // The label System.Windows.Forms.TreeNode.Text of the new tree node. //public TreeNode(string text); // // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class using // the specified serialization information and context. // // Parameters: // serializationInfo: // A System.Runtime.Serialization.SerializationInfo containing the data to deserialize // the class. // // context: // The System.Runtime.Serialization.StreamingContext containing the source and // destination of the serialized stream. //protected TreeNode(SerializationInfo serializationInfo, StreamingContext context); // // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class with // the specified label text and child tree nodes. // // Parameters: // text: // The label System.Windows.Forms.TreeNode.Text of the new tree node. // // children: // An array of child System.Windows.Forms.TreeNode objects. //public TreeNode(string text, TreeNode[] children); // // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class with // the specified label text and images to display when the tree node is in a // selected and unselected state. // // Parameters: // text: // The label System.Windows.Forms.TreeNode.Text of the new tree node. // // imageIndex: // The index value of System.Drawing.Image to display when the tree node is // unselected. // // selectedImageIndex: // The index value of System.Drawing.Image to display when the tree node is // selected. 
//public TreeNode(string text, int imageIndex, int selectedImageIndex); // // Summary: // Initializes a new instance of the System.Windows.Forms.TreeNode class with // the specified label text, child tree nodes, and images to display when the // tree node is in a selected and unselected state. // // Parameters: // text: // The label System.Windows.Forms.TreeNode.Text of the new tree node. // // imageIndex: // The index value of System.Drawing.Image to display when the tree node is // unselected. // // selectedImageIndex: // The index value of System.Drawing.Image to display when the tree node is // selected. // // children: // An array of child System.Windows.Forms.TreeNode objects. //public TreeNode(string text, int imageIndex, int selectedImageIndex, TreeNode//[] children); // Summary: // Gets or sets the background color of the tree node. // // Returns: // The background System.Drawing.Color of the tree node. The default is System.Drawing.Color.Empty. //public System.Drawing.Color BackColor { get; set; } // // Summary: // Gets the bounds of the tree node. // // Returns: // The System.Drawing.Rectangle that represents the bounds of the tree node. //[Browsable(false)] //public System.Drawing.Rectangle Bounds { get; } // // Summary: // Gets or sets a value indicating whether the tree node is in a checked state. // // Returns: // true if the tree node is in a checked state; otherwise, false. //[DefaultValue(false)] //public bool Checked { get; set; } // // Summary: // Gets the shortcut menu associated with this tree node. // // Returns: // The System.Windows.Forms.ContextMenu associated with the tree node. //[DefaultValue("")] //public virtual ContextMenu ContextMenu { get; set; } // // Summary: // Gets or sets the shortcut menu associated with this tree node. // // Returns: // The System.Windows.Forms.ContextMenuStrip associated with the tree node. //[DefaultValue("")] //public virtual ContextMenuStrip ContextMenuStrip { get; set; } // // Summary: // Gets the first child tree node in the tree node collection. // // Returns: // The first child System.Windows.Forms.TreeNode in the System.Windows.Forms.TreeNode.Nodes // collection. //[Browsable(false)] //public TreeNode FirstNode { get; } // // Summary: // Gets or sets the foreground color of the tree node. // // Returns: // The foreground System.Drawing.Color of the tree node. //public System.Drawing.Color ForeColor { get; set; } // // Summary: // Gets the path from the root tree node to the current tree node. // // Returns: // The path from the root tree node to the current tree node. // // Exceptions: // System.InvalidOperationException: // The node is not contained in a System.Windows.Forms.TreeView. //[Browsable(false)] //public string FullPath { get; } // // Summary: // Gets the handle of the tree node. // // Returns: // The tree node handle. //[Browsable(false)] //public IntPtr Handle { get; } // // Summary: // Gets or sets the image list index value of the image displayed when the tree // node is in the unselected state. // // Returns: // A zero-based index value that represents the image position in the assigned // System.Windows.Forms.ImageList. //[TypeConverter(typeof(TreeViewImageIndexConverter))] //[Localizable(true)] //[RefreshProperties(RefreshProperties.Repaint)] //[DefaultValue(-1)] //[RelatedImageList("TreeView.ImageList")] //public int ImageIndex { get; set; } // // Summary: // Gets or sets the key for the image associated with this tree node when the // node is in an unselected state. 
// // Returns: // The key for the image associated with this tree node when the node is in // an unselected state. //[TypeConverter(typeof(TreeViewImageKeyConverter))] //[Localizable(true)] //[DefaultValue("")] //[RefreshProperties(RefreshProperties.Repaint)] //[RelatedImageList("TreeView.ImageList")] //public string ImageKey { get; set; } // // Summary: // Gets the position of the tree node in the tree node collection. // // Returns: // A zero-based index value that represents the position of the tree node in // the System.Windows.Forms.TreeNode.Nodes collection. //public int Index { get; } // // Summary: // Gets a value indicating whether the tree node is in an editable state. // // Returns: // true if the tree node is in editable state; otherwise, false. //[Browsable(false)] //public bool IsEditing { get; } // // Summary: // Gets a value indicating whether the tree node is in the expanded state. // // Returns: // true if the tree node is in the expanded state; otherwise, false. //[Browsable(false)] //public bool IsExpanded { get; } // // Summary: // Gets a value indicating whether the tree node is in the selected state. // // Returns: // true if the tree node is in the selected state; otherwise, false. //[Browsable(false)] //public bool IsSelected { get; } // // Summary: // Gets a value indicating whether the tree node is visible or partially visible. // // Returns: // true if the tree node is visible or partially visible; otherwise, false. //[Browsable(false)] //public bool IsVisible { get; } // // Summary: // Gets the last child tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the last child tree node. //[Browsable(false)] //public TreeNode LastNode { get; } // // Summary: // Gets the zero-based depth of the tree node in the System.Windows.Forms.TreeView // control. // // Returns: // The zero-based depth of the tree node in the System.Windows.Forms.TreeView // control. //[Browsable(false)] public int Level { get { Contract.Ensures(Contract.Result<int>() >= 0); return default(int); } } // // Summary: // Gets or sets the name of the tree node. // // Returns: // A System.String that represents the name of the tree node. public string Name { get { // **F : From reflector Contract.Ensures(Contract.Result<string>() != null); return default(string); } set { // ** F: can set to null } } // // Summary: // Gets the next sibling tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the next sibling tree node. //[Browsable(false)] // ** F: Can return null //public TreeNode NextNode { get; } // // Summary: // Gets the next visible tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the next visible tree node. //[Browsable(false)] //public TreeNode NextVisibleNode { get; } // // Summary: // Gets or sets the font used to display the text on the tree node's label. // // Returns: // The System.Drawing.Font used to display the text on the tree node's label. //[Localizable(true)] //[DefaultValue("")] //public System.Drawing.Font NodeFont { get; set; } // // Summary: // Gets the collection of System.Windows.Forms.TreeNode objects assigned to // the current tree node. // // Returns: // A System.Windows.Forms.TreeNodeCollection that represents the tree nodes // assigned to the current tree node. 
//[ListBindable(false)] //[Browsable(false)] public TreeNodeCollection Nodes { get { Contract.Ensures(Contract.Result<TreeNodeCollection>() != null); return default(TreeNodeCollection); } } // // Summary: // Gets the parent tree node of the current tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the parent of the current // tree node. //[Browsable(false)] //public TreeNode Parent { get; } // // Summary: // Gets the previous sibling tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the previous sibling tree // node. //[Browsable(false)] //public TreeNode PrevNode { get; } // // Summary: // Gets the previous visible tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the previous visible tree // node. //[Browsable(false)] //public TreeNode PrevVisibleNode { get; } // // Summary: // Gets or sets the image list index value of the image that is displayed when // the tree node is in the selected state. // // Returns: // A zero-based index value that represents the image position in an System.Windows.Forms.ImageList. //[TypeConverter(typeof(TreeViewImageIndexConverter))] //[Localizable(true)] //[DefaultValue(-1)] //[RefreshProperties(RefreshProperties.Repaint)] //[RelatedImageList("TreeView.ImageList")] // *** F: not putting here ensures result >= 0, as from Reflector it seems that one can set it to a negative number, and no check is performed ... //public int SelectedImageIndex { get; set; } // // Summary: // Gets or sets the key of the image displayed in the tree node when it is in // a selected state. // // Returns: // The key of the image displayed when the tree node is in a selected state. //[TypeConverter(typeof(TreeViewImageKeyConverter))] //[DefaultValue("")] //[RefreshProperties(RefreshProperties.Repaint)] //[RelatedImageList("TreeView.ImageList")] //[Localizable(true)] // *** F: not putting here ensures result >= 0, as from Reflector it seems that one can set it to a negative number, and no check is performed ... //public string SelectedImageKey { get; set; } // // Summary: // Gets or sets the index of the image used to indicate the state of the System.Windows.Forms.TreeNode // when the parent System.Windows.Forms.TreeView has its System.Windows.Forms.TreeView.CheckBoxes // property set to false. // // Returns: // The index of the image used to indicate the state of the System.Windows.Forms.TreeNode. // // Exceptions: // System.ArgumentOutOfRangeException: // The specified index is less than -1 or greater than 14. //[Localizable(true)] //[RefreshProperties(RefreshProperties.Repaint)] //[RelatedImageList("TreeView.StateImageList")] //[DefaultValue(-1)] public int StateImageIndex { get { Contract.Ensures(Contract.Result<int>() >= -1); Contract.Ensures(Contract.Result<int>() <= 14); return default(int); } set { Contract.Requires(value >= -1); Contract.Requires(value <= 14); } } // // Summary: // Gets or sets the key of the image used to indicate the state of the System.Windows.Forms.TreeNode // when the parent System.Windows.Forms.TreeView has its System.Windows.Forms.TreeView.CheckBoxes // property set to false. // // Returns: // The key of the image used to indicate the state of the System.Windows.Forms.TreeNode. 
//[DefaultValue("")] //[TypeConverter(typeof(ImageKeyConverter))] //[Localizable(true)] //[RefreshProperties(RefreshProperties.Repaint)] //[RelatedImageList("TreeView.StateImageList")] //public string StateImageKey { get; set; } // // Summary: // Gets or sets the object that contains data about the tree node. // // Returns: // An System.Object that contains data about the tree node. The default is null. //[Localizable(false)] //[Bindable(true)] //[DefaultValue("")] //[TypeConverter(typeof(StringConverter))] //public object Tag { get; set; } // // Summary: // Gets or sets the text displayed in the label of the tree node. // // Returns: // The text displayed in the label of the tree node. //[Localizable(true)] public string Text { get { // **F : From reflector Contract.Ensures(Contract.Result<string>() != null); return default(string); } set { // ** F: can set to null } } // // Summary: // Gets or sets the text that appears when the mouse pointer hovers over a System.Windows.Forms.TreeNode. // // Returns: // Gets the text that appears when the mouse pointer hovers over a System.Windows.Forms.TreeNode. //[DefaultValue("")] //[Localizable(false)] //public string ToolTipText { get; set; } // // Summary: // Gets the parent tree view that the tree node is assigned to. // // Returns: // A System.Windows.Forms.TreeView that represents the parent tree view that // the tree node is assigned to, or null if the node has not been assigned to // a tree view. //[Browsable(false)] //public TreeView TreeView { get; } // Summary: // Initiates the editing of the tree node label. // // Exceptions: // System.InvalidOperationException: // System.Windows.Forms.TreeView.LabelEdit is set to false. //public void BeginEdit(); // // Summary: // Copies the tree node and the entire subtree rooted at this tree node. // // Returns: // The System.Object that represents the cloned System.Windows.Forms.TreeNode. //public virtual object Clone(); // // Summary: // Collapses the tree node. //public void Collapse(); // // Summary: // Collapses the System.Windows.Forms.TreeNode and optionally collapses its // children. // // Parameters: // ignoreChildren: // true to leave the child nodes in their current state; false to collapse the // child nodes. //public void Collapse(bool ignoreChildren); // // Summary: // Loads the state of the System.Windows.Forms.TreeNode from the specified System.Runtime.Serialization.SerializationInfo. // // Parameters: // serializationInfo: // The System.Runtime.Serialization.SerializationInfo that describes the System.Windows.Forms.TreeNode. // // context: // The System.Runtime.Serialization.StreamingContext that indicates the state // of the stream during deserialization. //protected virtual void Deserialize(SerializationInfo serializationInfo, StreamingContext context); // // Summary: // Ends the editing of the tree node label. // // Parameters: // cancel: // true if the editing of the tree node label text was canceled without being // saved; otherwise, false. //public void EndEdit(bool cancel); // // Summary: // Ensures that the tree node is visible, expanding tree nodes and scrolling // the tree view control as necessary. //public void EnsureVisible(); // // Summary: // Expands the tree node. //public void Expand(); // // Summary: // Expands all the child tree nodes. //public void ExpandAll(); // // Summary: // Returns the tree node with the specified handle and assigned to the specified // tree view control. 
// // Parameters: // tree: // The System.Windows.Forms.TreeView that contains the tree node. // // handle: // The handle of the tree node. // // Returns: // A System.Windows.Forms.TreeNode that represents the tree node assigned to // the specified System.Windows.Forms.TreeView control with the specified handle. //public static TreeNode FromHandle(TreeView tree, IntPtr handle); // // Summary: // Returns the number of child tree nodes. // // Parameters: // includeSubTrees: // true if the resulting count includes all tree nodes indirectly rooted at // this tree node; otherwise, false. // // Returns: // The number of child tree nodes assigned to the System.Windows.Forms.TreeNode.Nodes // collection. public int GetNodeCount(bool includeSubTrees) { Contract.Ensures(Contract.Result<int>() >= 0); return default(int); } // // Summary: // Removes the current tree node from the tree view control. //public void Remove(); // // Summary: // Saves the state of the System.Windows.Forms.TreeNode to the specified System.Runtime.Serialization.SerializationInfo. // // Parameters: // si: // The System.Runtime.Serialization.SerializationInfo that describes the System.Windows.Forms.TreeNode. // // context: // The System.Runtime.Serialization.StreamingContext that indicates the state // of the stream during serialization //protected virtual void Serialize(SerializationInfo si, StreamingContext context); // // Summary: // Toggles the tree node to either the expanded or collapsed state. //public void Toggle(); // // // Returns: // A System.String that represents the current System.Object. //public override string ToString(); } }
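// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source) of how these
// contract reference stubs are meant to be read: Contract.Ensures documents
// what callers may assume about results, Contract.Requires documents what
// callers must guarantee.  The comments below simply restate the contracts
// declared on TreeNode above.
// --------------------------------------------------------------------------
using System.Diagnostics.Contracts;

namespace System.Windows.Forms.Samples
{
    internal static class TreeNodeContractSample
    {
        internal static void Describe(TreeNode node)
        {
            Contract.Requires(node != null);

            // Level, Text and GetNodeCount are guaranteed non-negative / non-null
            // by the Ensures clauses in the stub, so no defensive checks are
            // needed on the caller side.
            int depth = node.Level;                  // >= 0
            string label = node.Text;                // never null
            int children = node.GetNodeCount(false); // >= 0

            // StateImageIndex is constrained to the range [-1, 14] on both
            // the getter and the setter.
            node.StateImageIndex = 14;

            Console.WriteLine("{0} at depth {1} with {2} children", label, depth, children);
        }
    }
}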
using System; using SubSonic.Schema; using SubSonic.DataProviders; using System.Data; namespace Solution.DataAccess.DataModel { /// <summary> /// Table: CardDetail /// Primary Key: Id /// </summary> public class CardDetailStructs: DatabaseTable { public CardDetailStructs(IDataProvider provider):base("CardDetail",provider){ ClassName = "CardDetail"; SchemaName = "dbo"; Columns.Add(new DatabaseColumn("Id", this) { IsPrimaryKey = true, DataType = DbType.Int32, IsNullable = false, AutoIncrement = true, IsForeignKey = false, MaxLength = 0, PropertyName = "Id" }); Columns.Add(new DatabaseColumn("emp_id", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = false, AutoIncrement = false, IsForeignKey = false, MaxLength = 50, PropertyName = "emp_id" }); Columns.Add(new DatabaseColumn("join_id", this) { IsPrimaryKey = false, DataType = DbType.Int32, IsNullable = false, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "join_id" }); Columns.Add(new DatabaseColumn("depart_id", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 30, PropertyName = "depart_id" }); Columns.Add(new DatabaseColumn("card_id", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 16, PropertyName = "card_id" }); Columns.Add(new DatabaseColumn("kind", this) { IsPrimaryKey = false, DataType = DbType.Int32, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "kind" }); Columns.Add(new DatabaseColumn("use_date", this) { IsPrimaryKey = false, DataType = DbType.DateTime, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "use_date" }); Columns.Add(new DatabaseColumn("Invalid_date", this) { IsPrimaryKey = false, DataType = DbType.DateTime, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "Invalid_date" }); Columns.Add(new DatabaseColumn("isbuban", this) { IsPrimaryKey = false, DataType = DbType.Int16, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "isbuban" }); Columns.Add(new DatabaseColumn("isunloss", this) { IsPrimaryKey = false, DataType = DbType.Int16, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "isunloss" }); Columns.Add(new DatabaseColumn("card_sn", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 20, PropertyName = "card_sn" }); Columns.Add(new DatabaseColumn("old_card_id", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 16, PropertyName = "old_card_id" }); Columns.Add(new DatabaseColumn("audit", this) { IsPrimaryKey = false, DataType = DbType.Int16, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "audit" }); Columns.Add(new DatabaseColumn("remark", this) { IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 50, PropertyName = "remark" }); Columns.Add(new DatabaseColumn("op_date", this) { IsPrimaryKey = false, DataType = DbType.DateTime, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 0, PropertyName = "op_date" }); Columns.Add(new DatabaseColumn("op_user", this) { 
IsPrimaryKey = false, DataType = DbType.AnsiString, IsNullable = true, AutoIncrement = false, IsForeignKey = false, MaxLength = 100, PropertyName = "op_user" }); } public IColumn Id{ get{ return this.GetColumn("Id"); } } public IColumn emp_id{ get{ return this.GetColumn("emp_id"); } } public IColumn join_id{ get{ return this.GetColumn("join_id"); } } public IColumn depart_id{ get{ return this.GetColumn("depart_id"); } } public IColumn card_id{ get{ return this.GetColumn("card_id"); } } public IColumn kind{ get{ return this.GetColumn("kind"); } } public IColumn use_date{ get{ return this.GetColumn("use_date"); } } public IColumn Invalid_date{ get{ return this.GetColumn("Invalid_date"); } } public IColumn isbuban{ get{ return this.GetColumn("isbuban"); } } public IColumn isunloss{ get{ return this.GetColumn("isunloss"); } } public IColumn card_sn{ get{ return this.GetColumn("card_sn"); } } public IColumn old_card_id{ get{ return this.GetColumn("old_card_id"); } } public IColumn audit{ get{ return this.GetColumn("audit"); } } public IColumn remark{ get{ return this.GetColumn("remark"); } } public IColumn op_date{ get{ return this.GetColumn("op_date"); } } public IColumn op_user{ get{ return this.GetColumn("op_user"); } } } }
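// --------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): obtaining the
// column metadata declared by CardDetailStructs.  "provider" stands for
// whichever SubSonic IDataProvider the project is configured with; it is
// supplied by the caller here.
// --------------------------------------------------------------------------
using SubSonic.DataProviders;
using SubSonic.Schema;

namespace Solution.DataAccess.DataModel.Samples
{
    internal static class CardDetailStructsSample
    {
        internal static IColumn GetCardIdColumn(IDataProvider provider)
        {
            // The table definition registers every column in its constructor.
            var table = new CardDetailStructs(provider);

            // card_id is declared above as a nullable AnsiString with MaxLength 16.
            return table.card_id;
        }
    }
}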
// Copyright (c) 2010-2013 SharpDX - Alexandre Mutel // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using System.Collections.Generic; using System.Dynamic; using System.Reflection; using RazorEngine.Templating; using SharpCore; using SharpCore.Logging; using SharpDoc.Model; namespace SharpDoc { /// <summary> /// Overrides default RazorEngine TemplateBase to provide additional /// methods (Import) and properties (Helpers, Param, Style). /// </summary> /// <typeparam name="T"></typeparam> public class TemplateHelperBase : TemplateBase<TemplateContext> { private IDictionary<string, object> _helpersDictionary; /// <summary> /// Initializes a new instance of the <see cref="TemplateHelperBase"/> class. /// </summary> public TemplateHelperBase() { Helpers = new ExpandoObject(); _helpersDictionary = (IDictionary<string, object>)Helpers; } /// <summary> /// Gets or sets the helpers. /// </summary> /// <value>The helpers.</value> public dynamic Helpers { get; private set; } /// <summary> /// Gets the param dynamic properties. /// </summary> /// <value>The param dynamic properties.</value> public dynamic Param { get { return Model.Param; } } /// <summary> /// Gets the style dynamic properties. /// </summary> /// <value>The style dynamic properties.</value> public dynamic Style { get { return Model.Style; }} /// <summary> /// Includes the specified name. /// </summary> /// <param name="name">The name.</param> /// <returns></returns> public override string Include(string name) { return base.Include(name, Model); } /// <summary> /// Registers the helper. /// </summary> /// <param name="name">The name.</param> /// <param name="template">The template.</param> /// <param name="helperMethod">The helper method.</param> private void RegisterHelper(string name, ITemplate template, MethodInfo helperMethod) { DynamicHelper dynamicHelper = null; if (!_helpersDictionary.ContainsKey(name)) { dynamicHelper = new DynamicHelper(); _helpersDictionary.Add(name, dynamicHelper); } else { dynamicHelper = (DynamicHelper)_helpersDictionary[name]; } dynamicHelper.RegisterHelper(template, helperMethod); } /// <summary> /// Imports the template with the specified name. /// </summary> /// <param name="name">The template name.</param> /// <returns> /// The result of the template with the specified name. 
/// </returns> public virtual string Import(string name) { // If helpers is already loaded, then return immediately if (_helpersDictionary.ContainsKey(name)) return ""; if (string.IsNullOrWhiteSpace(name)) throw new ArgumentException("The name of the template to include is required."); if (Service == null) throw new InvalidOperationException("No template service has been set of this template."); foreach (var methodInfo in Service.GetType().GetMethods(BindingFlags.NonPublic|BindingFlags.Instance)) { if ( methodInfo.Name == "ResolveTemplate" && methodInfo.GetGenericArguments().Length == 1 && methodInfo.GetParameters().Length == 2) { try { methodInfo.MakeGenericMethod(typeof (TemplateContext)).Invoke(Service, new object[] {name, Model}); } catch (TargetInvocationException targetEx) { if (targetEx.InnerException is TemplateCompilationException) { var ex = (TemplateCompilationException) targetEx.InnerException; string location; // Retrieves the location of the template Model.GetTemplate(name, out location); foreach (var compilerError in ex.Errors) { Logger.PushLocation(location, compilerError.Line, compilerError.Column); if (compilerError.IsWarning) { Logger.Warning("{0}: {1}", compilerError.ErrorNumber, compilerError.ErrorText); } else { Logger.Error("{0}: {1}", compilerError.ErrorNumber, compilerError.ErrorText); } Logger.PopLocation(); } Logger.PopLocation(); Logger.Fatal("Error when compiling template [{0}]", name); } throw targetEx.InnerException; } break; } } var templateCacheField = Service.GetType().GetField("templateCache", BindingFlags.Instance|BindingFlags.NonPublic); var templateMap = (IDictionary<string, ITemplate>)templateCacheField.GetValue(Service); var template = templateMap[name]; foreach (var method in template.GetType().GetMethods(BindingFlags.Instance | BindingFlags.Public)) { if (typeof(TemplateWriter).IsAssignableFrom(method.ReturnType)) { RegisterHelper(name, template, method); } } return ""; } protected string ToUrl(IModelReference modelRef, string content = null, bool forceLocal = false, string attributes = null, bool useSelf = true) { return Model.ToUrl(modelRef, content, forceLocal, attributes, useSelf); } protected string ToUrl(string id, string content = null, bool forceLocal = false, string attributes = null, bool useSelf = true) { return Model.ToUrl(id, content, forceLocal, attributes, null, useSelf); } /// <summary> /// Copies the content of the directory. /// </summary> /// <param name="directory">The directory.</param> public void CopyDirectoryContent(string directory) { @Model.CopyStyleContent(directory); } /// <summary> /// Resolves a file from template directories. /// </summary> /// <param name="file">The file.</param> /// <returns>The path to the file</returns> public string ResolveFile(string file) { return Model.ResolvePath(file); } /// <summary> /// Resolves and load a file from template directories. /// </summary> /// <param name="file">The file.</param> /// <returns>The content of the file</returns> public string Loadfile(string file) { return Model.Loadfile(file); } /// <summary> /// Perform regular expression expansion. /// </summary> /// <param name="content">The content to replace.</param> /// <returns>The content replaced</returns> public string TagExpand(string content) { return Model.TagExpand(content); } /// <summary> /// Parses the specified template name. 
/// </summary> /// <param name="templateName">Name of the template.</param> /// <returns></returns> public string Parse(string templateName) { return Model.Parse(templateName); } /// <summary> /// Escapes the specified content. /// </summary> /// <param name="content">The content.</param> /// <returns></returns> public string Escape(string content) { if (content == null) return "NULLERROR"; return Utility.EscapeHtml(content); } } }
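// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the SharpDoc sources): TemplateHelperBase
// keeps one helper entry per imported template inside an ExpandoObject, so a
// Razor template can call "@Helpers.SomeTemplate.SomeMethod(...)". The
// HelperRegistrySketch/PrintHelper names below are hypothetical and only
// demonstrate the ExpandoObject-as-registry pattern; the real class registers
// RazorEngine templates and MethodInfo instances rather than plain delegates.
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Dynamic;

namespace SharpDoc.Examples
{
    public static class HelperRegistrySketch
    {
        public static void Run()
        {
            // The dynamic view is what templates see; the dictionary view is
            // what the registration code manipulates (same object, two views).
            dynamic helpers = new ExpandoObject();
            var registry = (IDictionary<string, object>)helpers;

            // Register a "helper" under the name of the template it came from.
            registry["PrintHelper"] = new Func<string, string>(text => $"<b>{text}</b>");

            // A template would then invoke it dynamically, e.g. @Helpers.PrintHelper(...).
            var bold = (string)helpers.PrintHelper("contents");
            Console.WriteLine(bold); // <b>contents</b>
        }
    }
}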
/******************************************************************************* * Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and * limitations under the License. * ***************************************************************************** * __ _ _ ___ * ( )( \/\/ )/ __) * /__\ \ / \__ \ * (_)(_) \/\/ (___/ * * AWS SDK for .NET */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; namespace Amazon.EC2.Model { /// <summary> /// Modifies an attribute of an instance. /// </summary> [XmlRootAttribute(IsNullable = false)] public class ModifyInstanceAttributeRequest : EC2Request { private string instanceIdField; private string attributeField; private string valueField; private List<InstanceBlockDeviceMappingParameter> blockDeviceMappingField; private List<string> groupIdField; /// <summary> /// The instance ID. /// </summary> [XmlElementAttribute(ElementName = "InstanceId")] public string InstanceId { get { return this.instanceIdField; } set { this.instanceIdField = value; } } /// <summary> /// Sets the instance ID. /// </summary> /// <param name="instanceId">The instance ID.</param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public ModifyInstanceAttributeRequest WithInstanceId(string instanceId) { this.instanceIdField = instanceId; return this; } /// <summary> /// Checks if InstanceId property is set /// </summary> /// <returns>true if InstanceId property is set</returns> public bool IsSetInstanceId() { return this.instanceIdField != null; } /// <summary> /// Name of the attribute to modify. /// </summary> /// <remarks> /// Valid values: /// "instanceType", "kernel", "ramdisk", "userData", "disableApiTermination", /// "instanceInitiatedShutdownBehavior", "sourceDestCheck", "rootDeviceName", /// "blockDeviceMapping" and "ebsOptimized" /// </remarks> [XmlElementAttribute(ElementName = "Attribute")] public string Attribute { get { return this.attributeField; } set { this.attributeField = value; } } /// <summary> /// Sets the name of the attribute to modify. /// </summary> /// <param name="attribute">Name of the attribute to modify. Values are /// "instanceType", "kernel", "ramdisk", "userData", "disableApiTermination", /// "instanceInitiatedShutdownBehavior", "sourceDestCheck", "rootDeviceName", /// "blockDeviceMapping" and "ebsOptimized"</param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public ModifyInstanceAttributeRequest WithAttribute(string attribute) { this.attributeField = attribute; return this; } /// <summary> /// Checks if Attribute property is set /// </summary> /// <returns>true if Attribute property is set</returns> public bool IsSetAttribute() { return this.attributeField != null; } /// <summary> /// New value of the attribute. 
/// </summary> [XmlElementAttribute(ElementName = "Value")] public string Value { get { return this.valueField; } set { this.valueField = value; } } /// <summary> /// Sets the new value of the attribute. /// </summary> /// <param name="value">New value of the attribute.</param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public ModifyInstanceAttributeRequest WithValue(string value) { this.valueField = value; return this; } /// <summary> /// Checks if Value property is set /// </summary> /// <returns>true if Value property is set</returns> public bool IsSetValue() { return this.valueField != null; } /// <summary> /// New mapping that defines native device names to use when exposing virtual devices. /// </summary> [XmlElementAttribute(ElementName = "BlockDeviceMapping")] public List<InstanceBlockDeviceMappingParameter> BlockDeviceMapping { get { if (this.blockDeviceMappingField == null) { this.blockDeviceMappingField = new List<InstanceBlockDeviceMappingParameter>(); } return this.blockDeviceMappingField; } set { this.blockDeviceMappingField = value; } } /// <summary> /// Sets the new mapping that defines native device names to use when exposing virtual devices. /// </summary> /// <param name="list">Modifies the mapping that defines native device names /// to use when exposing virtual devices.</param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public ModifyInstanceAttributeRequest WithBlockDeviceMapping(params InstanceBlockDeviceMappingParameter[] list) { foreach (InstanceBlockDeviceMappingParameter item in list) { BlockDeviceMapping.Add(item); } return this; } /// <summary> /// Checks if BlockDeviceMapping property is set /// </summary> /// <returns>true if BlockDeviceMapping property is set</returns> public bool IsSetBlockDeviceMapping() { return (BlockDeviceMapping.Count > 0); } /// <summary> /// New security groups that an instance belongs to. /// </summary> /// <remarks> /// This is applicable only to instances running in a VPC. Use /// this parameter when you want to change the security /// groups an instance is in. The new set of groups you specify /// replaces the current set. You must specify at least one /// group, even if it's just the default security group in the VPC. /// </remarks> [XmlElementAttribute(ElementName = "GroupId")] public List<string> GroupId { get { if (this.groupIdField == null) { this.groupIdField = new List<string>(); } return this.groupIdField; } set { this.groupIdField = value; } } /// <summary> /// Sets the new security groups that an instance belongs to. /// </summary> /// <param name="list">This is applicable only to instances running in a VPC. Use /// this parameter when you want to change the security /// groups an instance is in. The new set of groups you specify /// replaces the current set. You must specify at least one /// group, even if it's just the default security group in the VPC.</param> /// <returns>this instance</returns> [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. 
See http://aws.amazon.com/sdkfornet/#version2 for more information.")] public ModifyInstanceAttributeRequest WithGroupId(params string[] list) { foreach (string item in list) { GroupId.Add(item); } return this; } /// <summary> /// Checks if GroupId property is set /// </summary> /// <returns>true if GroupId property is set</returns> public bool IsSetGroupId() { return (GroupId.Count > 0); } } }
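// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the AWS SDK sources): building a
// ModifyInstanceAttributeRequest with the properties declared above. The
// instance id is a placeholder; actually sending the request through an EC2
// client (e.g. AmazonEC2Client.ModifyInstanceAttribute) is outside this sketch.
// ----------------------------------------------------------------------------
using System;
using Amazon.EC2.Model;

namespace Amazon.EC2.Examples
{
    public static class ModifyInstanceAttributeSketch
    {
        public static ModifyInstanceAttributeRequest BuildDisableApiTerminationRequest()
        {
            // Set a single scalar attribute ("disableApiTermination") to "true".
            var request = new ModifyInstanceAttributeRequest
            {
                InstanceId = "i-0123456789abcdef0",   // placeholder instance id
                Attribute = "disableApiTermination",
                Value = "true"
            };

            // The IsSetXxx helpers simply report whether a field was assigned.
            Console.WriteLine(request.IsSetInstanceId()); // True
            Console.WriteLine(request.IsSetGroupId());    // False (no VPC group change requested)
            return request;
        }
    }
}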
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsLro { using System; using System.Linq; using System.Collections.Generic; using System.Diagnostics; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using Microsoft.Rest.Azure; using Models; /// <summary> /// Long-running Operation for AutoRest /// </summary> public partial class AutoRestLongRunningOperationTestService : ServiceClient<AutoRestLongRunningOperationTestService>, IAutoRestLongRunningOperationTestService, IAzureClient { /// <summary> /// The base URI of the service. /// </summary> public Uri BaseUri { get; set; } /// <summary> /// Gets or sets json serialization settings. /// </summary> public JsonSerializerSettings SerializationSettings { get; private set; } /// <summary> /// Gets or sets json deserialization settings. /// </summary> public JsonSerializerSettings DeserializationSettings { get; private set; } /// <summary> /// Gets Azure subscription credentials. /// </summary> public ServiceClientCredentials Credentials { get; private set; } /// <summary> /// Gets or sets the preferred language for the response. /// </summary> public string AcceptLanguage { get; set; } /// <summary> /// Gets or sets the retry timeout in seconds for Long Running Operations. /// Default value is 30. /// </summary> public int? LongRunningOperationRetryTimeout { get; set; } /// <summary> /// When set to true a unique x-ms-client-request-id value is generated and /// included in each request. Default is true. /// </summary> public bool? GenerateClientRequestId { get; set; } public virtual ILROsOperations LROs { get; private set; } public virtual ILRORetrysOperations LRORetrys { get; private set; } public virtual ILROSADsOperations LROSADs { get; private set; } public virtual ILROsCustomHeaderOperations LROsCustomHeader { get; private set; } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected AutoRestLongRunningOperationTestService(params DelegatingHandler[] handlers) : base(handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected AutoRestLongRunningOperationTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers) { this.Initialize(); } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. 
/// </param> protected AutoRestLongRunningOperationTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected AutoRestLongRunningOperationTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } this.BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='credentials'> /// Required. Gets Azure subscription credentials. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public AutoRestLongRunningOperationTestService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers) { if (credentials == null) { throw new ArgumentNullException("credentials"); } this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='credentials'> /// Required. Gets Azure subscription credentials. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public AutoRestLongRunningOperationTestService(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (credentials == null) { throw new ArgumentNullException("credentials"); } this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Gets Azure subscription credentials. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public AutoRestLongRunningOperationTestService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } if (credentials == null) { throw new ArgumentNullException("credentials"); } this.BaseUri = baseUri; this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the AutoRestLongRunningOperationTestService class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Gets Azure subscription credentials. 
/// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> public AutoRestLongRunningOperationTestService(Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new ArgumentNullException("baseUri"); } if (credentials == null) { throw new ArgumentNullException("credentials"); } this.BaseUri = baseUri; this.Credentials = credentials; if (this.Credentials != null) { this.Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes client properties. /// </summary> private void Initialize() { this.LROs = new LROsOperations(this); this.LRORetrys = new LRORetrysOperations(this); this.LROSADs = new LROSADsOperations(this); this.LROsCustomHeader = new LROsCustomHeaderOperations(this); this.BaseUri = new Uri("http://localhost"); this.AcceptLanguage = "en-US"; this.LongRunningOperationRetryTimeout = 30; this.GenerateClientRequestId = true; SerializationSettings = new JsonSerializerSettings { Formatting = Formatting.Indented, DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; SerializationSettings.Converters.Add(new ResourceJsonConverter()); DeserializationSettings = new JsonSerializerSettings { DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; DeserializationSettings.Converters.Add(new ResourceJsonConverter()); DeserializationSettings.Converters.Add(new CloudErrorJsonConverter()); } } }
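// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the generated client): constructing the
// AutoRestLongRunningOperationTestService against a local test server using
// the (Uri, ServiceClientCredentials) constructor shown above. TokenCredentials
// from Microsoft.Rest.ClientRuntime is assumed to be available in the test
// project; any ServiceClientCredentials implementation works.
// ----------------------------------------------------------------------------
using System;
using Microsoft.Rest;

namespace Fixtures.Azure.AcceptanceTestsLro.Examples
{
    public static class LroClientSketch
    {
        public static AutoRestLongRunningOperationTestService CreateClient()
        {
            ServiceClientCredentials credentials = new TokenCredentials("fake-token");

            // Initialize() has already applied the defaults (BaseUri, retry
            // timeout, serializer settings); only override what the test needs.
            var client = new AutoRestLongRunningOperationTestService(
                new Uri("http://localhost:3000"), credentials)
            {
                // Poll immediately instead of every 30 seconds so LRO tests run fast.
                LongRunningOperationRetryTimeout = 0
            };
            return client;
        }
    }
}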
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. using System; using System.Collections.Generic; using System.Linq; using System.Text.Json; using System.Threading.Tasks; using Azure.Core.TestFramework; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Tests { /// <summary> /// For recording these tests it is best to use an account where the change feed has been enabled and some live events are steadily generated. /// For example, one can create a simple Azure Function that runs every minute and manipulates a few blobs. /// </summary> public class BlobChangeFeedAsyncPagableTests : ChangeFeedTestBase { public BlobChangeFeedAsyncPagableTests(bool async, BlobClientOptions.ServiceVersion serviceVersion) : base(async, serviceVersion, null /* RecordedTestMode.Record /* to re-record */) { } [RecordedTest] [Ignore("For debugging larger Change Feeds locally")] public async Task Test() { BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(); IList<BlobChangeFeedEvent> list = await blobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { Console.WriteLine(e); } } [RecordedTest] [Ignore("For debugging larger Change Feeds locally")] public async Task TestHistorical() { BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(start: DateTime.Now.AddHours(-2), end: DateTime.Now.AddHours(-1)); IList<BlobChangeFeedEvent> list = await blobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { Console.WriteLine(e); } } [RecordedTest] [Ignore("For debugging larger Change Feeds locally")] public async Task TestLastHour() { BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(start: DateTime.Now, end: DateTime.Now); IList<BlobChangeFeedEvent> list = await blobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { Console.WriteLine(e); } } /// <summary> /// This test checks whether the tail of the change feed can be listened to. /// To set up recording, use an account where changes are generated quite frequently (i.e. every minute). /// This test runs long in recording mode as it waits multiple times for events. /// </summary> /// <returns></returns> [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task TestTailEvents() { // Uncomment when recording. //DateTimeOffset startTime = DateTimeOffset.Now; // Update and uncomment after recording. DateTimeOffset startTime = new DateTimeOffset(2020, 8, 10, 16, 00, 00, TimeSpan.Zero); TimeSpan pollInterval = Mode == RecordedTestMode.Playback ?
TimeSpan.Zero : TimeSpan.FromMinutes(3); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); Page<BlobChangeFeedEvent> lastPage = null; ISet<string> EventIdsPart1 = new HashSet<string>(); ISet<string> EventIdsPart2 = new HashSet<string>(); ISet<string> EventIdsPart3 = new HashSet<string>(); // Part 1 AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(start: startTime); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(); await foreach (var page in asyncEnumerable) { lastPage = page; foreach (var evt in page.Values) { EventIdsPart1.Add(evt.Id.ToString()); } } CollectionAssert.IsNotEmpty(EventIdsPart1); await Task.Delay(pollInterval); // Part 2 blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(lastPage.ContinuationToken); asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(); await foreach (var page in asyncEnumerable) { lastPage = page; foreach (var evt in page.Values) { EventIdsPart2.Add(evt.Id.ToString()); } } CollectionAssert.IsNotEmpty(EventIdsPart2); await Task.Delay(pollInterval); // Part 3 blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(lastPage.ContinuationToken); asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(); await foreach (var page in asyncEnumerable) { lastPage = page; foreach (var evt in page.Values) { EventIdsPart3.Add(evt.Id.ToString()); } } CollectionAssert.IsNotEmpty(EventIdsPart3); // Assert events are not duplicated CollectionAssert.IsEmpty(EventIdsPart1.Intersect(EventIdsPart2)); CollectionAssert.IsEmpty(EventIdsPart1.Intersect(EventIdsPart3)); CollectionAssert.IsEmpty(EventIdsPart2.Intersect(EventIdsPart3)); } [RecordedTest] [Ignore("For debugging larger Change Feeds locally")] public async Task PageSizeTest() { int pageSize = 100; BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); IAsyncEnumerator<Page<BlobChangeFeedEvent>> asyncEnumerator = blobChangeFeedClient.GetChangesAsync().AsPages(pageSizeHint: pageSize).GetAsyncEnumerator(); List<int> pageSizes = new List<int>(); while (await asyncEnumerator.MoveNextAsync()) { pageSizes.Add(asyncEnumerator.Current.Values.Count); } // All pages except the last should have a count == pageSize. 
for (int i = 0; i < pageSizes.Count - 1; i++) { Assert.AreEqual(pageSize, pageSizes[i]); } } [RecordedTest] [Ignore("For debugging larger Change Feeds locally")] public async Task CursorTest() { BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 500); Page<BlobChangeFeedEvent> page = await asyncEnumerable.FirstAsync(); foreach (BlobChangeFeedEvent changeFeedEvent in page.Values) { Console.WriteLine(changeFeedEvent); } Console.WriteLine("break"); string continuation = page.ContinuationToken; AsyncPageable<BlobChangeFeedEvent> cursorBlobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> list = await cursorBlobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { Console.WriteLine(e); } } [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task CanReadTillEnd() { // Uncomment when recording. //DateTimeOffset startTime = DateTimeOffset.Now; // Update and uncomment after recording. DateTimeOffset startTime = new DateTimeOffset(2020, 7, 31, 19, 00, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(startTime); IList<BlobChangeFeedEvent> list = await blobChangeFeedAsyncPagable.ToListAsync(); CollectionAssert.IsNotEmpty(list); } [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task ResumeFromTheMiddleOfTheChunk() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. 
DateTimeOffset startTime = new DateTimeOffset(2020, 7, 30, 23, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 7, 30, 23, 15, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); ISet<string> AllEventIds = new HashSet<string>(); await foreach (BlobChangeFeedEvent e in blobChangeFeedAsyncPagable) { AllEventIds.Add(e.Id.ToString()); } // Iterate over first two pages ISet<string> EventIdsPart1 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; int pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart1.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } string continuation = lastPage.ContinuationToken; long blockOffset = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor.ShardCursors.First().BlockOffset; Assert.Greater(blockOffset, 0, "Making sure we actually finish in the middle of chunk, if this fails play with test data to make it pass"); // Iterate over next two pages ISet<string> EventIdsPart2 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); lastPage = null; pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart2.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } continuation = lastPage.ContinuationToken; blockOffset = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor.ShardCursors.First().BlockOffset; Assert.Greater(blockOffset, 0, "Making sure we actually finish in the middle of chunk, if this fails play with test data to make it pass"); // Iterate over remaining ISet<string> EventIdsTail = new HashSet<string>(); AsyncPageable<BlobChangeFeedEvent> cursorBlobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> list = await cursorBlobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { EventIdsTail.Add(e.Id.ToString()); } ISet<string> AllEventIdsFromResumingIteration = new HashSet<string>(); AllEventIdsFromResumingIteration.UnionWith(EventIdsPart1); AllEventIdsFromResumingIteration.UnionWith(EventIdsPart2); AllEventIdsFromResumingIteration.UnionWith(EventIdsTail); Assert.Greater(AllEventIds.Count, 0); Assert.Greater(EventIdsPart1.Count, 0); Assert.Greater(EventIdsPart2.Count, 0); Assert.Greater(EventIdsTail.Count, 0); Assert.AreEqual(AllEventIds.Count, EventIdsPart1.Count + EventIdsPart2.Count + EventIdsTail.Count); CollectionAssert.AreEqual(AllEventIds, AllEventIdsFromResumingIteration); } /// <summary> /// This test requires an account with changefeed where multiple shards has been created. /// However. Some shards should be empty. Easiest way to set this up is to have just one blob and keep modifying it. 
/// Changes related to same blobName are guaranteed to end up in same shard. /// </summary> /// <returns></returns> [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task ResumeFromTheMiddleOfTheChunkWithSomeEmptyShards() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. DateTimeOffset startTime = new DateTimeOffset(2020, 8, 5, 17, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 8, 5, 17, 15, 00, TimeSpan.Zero); int expectedNonEmptyShards = 1; BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); ISet<string> AllEventIds = new HashSet<string>(); await foreach (BlobChangeFeedEvent e in blobChangeFeedAsyncPagable) { AllEventIds.Add(e.Id.ToString()); } // Iterate over first two pages ISet<string> EventIdsPart1 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; int pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart1.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } string continuation = lastPage.ContinuationToken; var currentSegmentCursor = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor; Assert.AreEqual(expectedNonEmptyShards, currentSegmentCursor.ShardCursors.Count); Assert.IsNotNull(currentSegmentCursor.ShardCursors.Find(x => x.BlockOffset > 0), "Making sure we actually finish some shard in the middle of chunk, if this fails play with test data to make it pass"); // Iterate over next two pages ISet<string> EventIdsPart2 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); lastPage = null; pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart2.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } continuation = lastPage.ContinuationToken; currentSegmentCursor = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor; Assert.AreEqual(expectedNonEmptyShards, currentSegmentCursor.ShardCursors.Count); Assert.IsNotNull(currentSegmentCursor.ShardCursors.Find(x => x.BlockOffset > 0), "Making sure we actually finish some shard in the middle of chunk, if this fails play with test data to make it pass"); // Iterate over remaining ISet<string> EventIdsTail = new HashSet<string>(); AsyncPageable<BlobChangeFeedEvent> cursorBlobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> list = await cursorBlobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { EventIdsTail.Add(e.Id.ToString()); } ISet<string> AllEventIdsFromResumingIteration = new HashSet<string>(); AllEventIdsFromResumingIteration.UnionWith(EventIdsPart1); 
AllEventIdsFromResumingIteration.UnionWith(EventIdsPart2); AllEventIdsFromResumingIteration.UnionWith(EventIdsTail); Assert.Greater(AllEventIds.Count, 0); Assert.Greater(EventIdsPart1.Count, 0); Assert.Greater(EventIdsPart2.Count, 0); Assert.Greater(EventIdsTail.Count, 0); Assert.AreEqual(AllEventIds.Count, EventIdsPart1.Count + EventIdsPart2.Count + EventIdsTail.Count); CollectionAssert.AreEqual(AllEventIds, AllEventIdsFromResumingIteration); } /// <summary> /// This test requires an account with changefeed where multiple shards has been created all with events. /// Easiest way to set this up is to modify lot of random blobs (i.e. with names that contain GUIDs). /// </summary> /// <returns></returns> [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task ResumeFromTheMiddleOfTheChunkWithManyNonEmptyShards() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. DateTimeOffset startTime = new DateTimeOffset(2020, 8, 5, 17, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 8, 5, 17, 15, 00, TimeSpan.Zero); int expectedShardCount = 3; BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); ISet<string> AllEventIds = new HashSet<string>(); await foreach (BlobChangeFeedEvent e in blobChangeFeedAsyncPagable) { AllEventIds.Add(e.Id.ToString()); } // Iterate over first two pages ISet<string> EventIdsPart1 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; int pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart1.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } string continuation = lastPage.ContinuationToken; var currentSegmentCursor = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor; Assert.AreEqual(currentSegmentCursor.ShardCursors.Count, expectedShardCount); Assert.IsNotNull(currentSegmentCursor.ShardCursors.Find(x => x.BlockOffset > 0), "Making sure we actually finish some shard in the middle of chunk, if this fails play with test data to make it pass"); // Iterate over next two pages ISet<string> EventIdsPart2 = new HashSet<string>(); blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); lastPage = null; pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { EventIdsPart2.Add(e.Id.ToString()); } pages++; lastPage = page; if (pages > 2) { break; } } continuation = lastPage.ContinuationToken; currentSegmentCursor = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor).CurrentSegmentCursor; Assert.AreEqual(currentSegmentCursor.ShardCursors.Count, expectedShardCount); Assert.IsNotNull(currentSegmentCursor.ShardCursors.Find(x => x.BlockOffset > 0), "Making sure we actually finish some shard in the middle of chunk, if this fails 
play with test data to make it pass"); // Iterate over remaining ISet<string> EventIdsTail = new HashSet<string>(); AsyncPageable<BlobChangeFeedEvent> cursorBlobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> list = await cursorBlobChangeFeedAsyncPagable.ToListAsync(); foreach (BlobChangeFeedEvent e in list) { EventIdsTail.Add(e.Id.ToString()); } ISet<string> AllEventIdsFromResumingIteration = new HashSet<string>(); AllEventIdsFromResumingIteration.UnionWith(EventIdsPart1); AllEventIdsFromResumingIteration.UnionWith(EventIdsPart2); AllEventIdsFromResumingIteration.UnionWith(EventIdsTail); Assert.Greater(AllEventIds.Count, 0); Assert.Greater(EventIdsPart1.Count, 0); Assert.Greater(EventIdsPart2.Count, 0); Assert.Greater(EventIdsTail.Count, 0); Assert.AreEqual(AllEventIds.Count, EventIdsPart1.Count + EventIdsPart2.Count + EventIdsTail.Count); CollectionAssert.AreEqual(AllEventIds, AllEventIdsFromResumingIteration); } [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task ResumeFromEndInThePastYieldsEmptyResult() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. DateTimeOffset startTime = new DateTimeOffset(2020, 7, 30, 23, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 7, 30, 23, 15, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); ISet<string> AllEventIds = new HashSet<string>(); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { AllEventIds.Add(e.Id.ToString()); } lastPage = page; } string continuation = lastPage.ContinuationToken; // Act blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> tail = await blobChangeFeedAsyncPagable.ToListAsync(); // Assert Assert.AreEqual(0, tail.Count); Assert.Greater(AllEventIds.Count, 0); } [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task ImmediateResumeFromEndOfCurrentHourYieldsEmptyResult() { // Uncomment when recording. //DateTimeOffset startTime = DateTimeOffset.Now; // Update and uncomment after recording. 
DateTimeOffset startTime = new DateTimeOffset(2020, 8, 11, 21, 00, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime); ISet<string> AllEventIds = new HashSet<string>(); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { AllEventIds.Add(e.Id.ToString()); } lastPage = page; } string continuation = lastPage.ContinuationToken; // Act blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync(continuation); IList<BlobChangeFeedEvent> tail = await blobChangeFeedAsyncPagable.ToListAsync(); // Assert Assert.AreEqual(0, tail.Count); Assert.Greater(AllEventIds.Count, 0); } /// <summary> /// To setup account for this test have a steady stream of events (i.e. some changes every 1 minute) that covers at least from an hour before start time /// to an hour after end time. /// </summary> /// <returns></returns> [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task TestAlreadyRoundedBoundaries() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. DateTimeOffset startTime = new DateTimeOffset(2020, 8, 5, 16, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 8, 5, 18, 00, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); var eventList = new List<BlobChangeFeedEvent>(await blobChangeFeedAsyncPagable.ToListAsync()); // Assert Assert.Greater(eventList.Count, 1); Assert.IsNull(eventList.Find(e => e.EventTime < startTime.AddMinutes(-15)), "No event 15 minutes before start is present"); Assert.IsNull(eventList.Find(e => e.EventTime > endTime.AddMinutes(15)), "No event 15 minutes after end is present"); Assert.IsNotNull(eventList.Find(e => e.EventTime < startTime.AddMinutes(15)), "There is some event 15 minutes after start"); Assert.IsNotNull(eventList.Find(e => e.EventTime > endTime.AddMinutes(-15)), "There is some event 15 minutes before end"); } /// <summary> /// To setup account for this test have a steady stream of events (i.e. some changes every 1 minute) that covers at least from an hour before start time /// to an hour after end time. /// </summary> [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task TestNonRoundedBoundaries() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. 
DateTimeOffset startTime = new DateTimeOffset(2020, 8, 5, 16, 24, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 8, 5, 18, 35, 00, TimeSpan.Zero); DateTimeOffset roundedStartTime = new DateTimeOffset(2020, 8, 5, 16, 00, 00, TimeSpan.Zero); DateTimeOffset roundedEndTime = new DateTimeOffset(2020, 8, 5, 19, 00, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Collect all events within range AsyncPageable<BlobChangeFeedEvent> blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); var eventList = new List<BlobChangeFeedEvent>(await blobChangeFeedAsyncPagable.ToListAsync()); // Assert Assert.Greater(eventList.Count, 1); Assert.IsNull(eventList.Find(e => e.EventTime < roundedStartTime.AddMinutes(-15)), "No event 15 minutes before start is present"); Assert.IsNull(eventList.Find(e => e.EventTime > roundedEndTime.AddMinutes(15)), "No event 15 minutes after end is present"); Assert.IsNotNull(eventList.Find(e => e.EventTime < roundedStartTime.AddMinutes(15)), "There is some event 15 minutes after start"); Assert.IsNotNull(eventList.Find(e => e.EventTime > roundedEndTime.AddMinutes(-15)), "There is some event 15 minutes before end"); } [RecordedTest] [PlaybackOnly("Changefeed E2E tests require previously generated events")] public async Task CursorFormatTest() { // This is hardcoded for playback stability. Feel free to modify but make sure recordings match. DateTimeOffset startTime = new DateTimeOffset(2020, 7, 30, 23, 00, 00, TimeSpan.Zero); DateTimeOffset endTime = new DateTimeOffset(2020, 7, 30, 23, 15, 00, TimeSpan.Zero); BlobServiceClient service = GetServiceClient_SharedKey(); BlobChangeFeedClient blobChangeFeedClient = service.GetChangeFeedClient(); // Iterate over first two pages var blobChangeFeedAsyncPagable = blobChangeFeedClient.GetChangesAsync( start: startTime, end: endTime); IAsyncEnumerable<Page<BlobChangeFeedEvent>> asyncEnumerable = blobChangeFeedAsyncPagable.AsPages(pageSizeHint: 50); Page<BlobChangeFeedEvent> lastPage = null; int pages = 0; await foreach (Page<BlobChangeFeedEvent> page in asyncEnumerable) { foreach (BlobChangeFeedEvent e in page.Values) { Console.WriteLine(e); } pages++; lastPage = page; if (pages > 2) { break; } } // Act string continuation = lastPage.ContinuationToken; // Verify // You may need to update expected values when re-recording var cursor = (JsonSerializer.Deserialize(continuation, typeof(ChangeFeedCursor)) as ChangeFeedCursor); Assert.AreEqual(new DateTimeOffset(2020, 7, 31, 00, 00, 00, TimeSpan.Zero), cursor.EndTime); Assert.AreEqual(1, cursor.CursorVersion); Assert.AreEqual("emilydevtest.blob.core.windows.net", cursor.UrlHost); var currentSegmentCursor = cursor.CurrentSegmentCursor; Assert.AreEqual("idx/segments/2020/07/30/2300/meta.json", currentSegmentCursor.SegmentPath); Assert.AreEqual("log/00/2020/07/30/2300/", currentSegmentCursor.CurrentShardPath); Assert.AreEqual(1, currentSegmentCursor.ShardCursors.Count); var shardCursor = currentSegmentCursor.ShardCursors.First(); Assert.AreEqual("log/00/2020/07/30/2300/00000.avro", shardCursor.CurrentChunkPath); Assert.AreEqual(96253, shardCursor.BlockOffset); Assert.AreEqual(0, shardCursor.EventIndex); } } }
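// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the test sources): the resume pattern the
// tests above exercise. Read a few pages of change feed events, keep the page's
// ContinuationToken, and later resume from exactly that position. The
// connection string is a placeholder; GetChangesAsync/AsPages are the same
// APIs the tests call.
// ----------------------------------------------------------------------------
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.ChangeFeed;

namespace Azure.Storage.Blobs.ChangeFeed.Examples
{
    public static class ChangeFeedResumeSketch
    {
        public static async Task<string> ReadFirstPageAndGetCursorAsync(string connectionString)
        {
            BlobChangeFeedClient changeFeedClient =
                new BlobServiceClient(connectionString).GetChangeFeedClient();

            string continuationToken = null;
            await foreach (Page<BlobChangeFeedEvent> page in
                changeFeedClient.GetChangesAsync().AsPages(pageSizeHint: 50))
            {
                foreach (BlobChangeFeedEvent changeFeedEvent in page.Values)
                {
                    Console.WriteLine($"{changeFeedEvent.EventTime}: {changeFeedEvent.EventType}");
                }

                // The token encodes the segment/shard/chunk position (see CursorFormatTest above).
                continuationToken = page.ContinuationToken;
                break; // stop after the first page for brevity
            }
            return continuationToken;
        }

        public static async Task ResumeAsync(string connectionString, string continuationToken)
        {
            BlobChangeFeedClient changeFeedClient =
                new BlobServiceClient(connectionString).GetChangeFeedClient();

            // Resuming from the token yields only events not seen before.
            await foreach (BlobChangeFeedEvent changeFeedEvent in
                changeFeedClient.GetChangesAsync(continuationToken))
            {
                Console.WriteLine(changeFeedEvent);
            }
        }
    }
}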
/// <summary> /// Copyright 2013 The Loon Authors /// Licensed under the Apache License, Version 2.0 (the "License"); you may not /// use this file except in compliance with the License. You may obtain a copy of /// the License at /// http://www.apache.org/licenses/LICENSE-2.0 /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT /// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the /// License for the specific language governing permissions and limitations under /// the License. /// </summary> /// using Loon.Core.Geom; using Loon.Utils; namespace Loon.Physics { public class PPhysWorld { private PBody[] bodies; private long collisionDetectionTime; private long collisionSolveTime; private Vector2f gravity; private int iterations; private PJoint[] joints; private int numBodies; private int numJoints; private int numShapes; private int numSolvers; private long positionUpdateTime; private PSweepAndPrune sap; private PShape[] shapes; private PSolver[] solvers; private long totalStepTime; public PPhysWorld() { this.iterations = 10; this.gravity = new Vector2f(0.0F, 9.80665F); this.bodies = new PBody[1024]; this.joints = new PJoint[1024]; this.shapes = new PShape[1024]; this.solvers = new PSolver[1024]; this.sap = new PSweepAndPrune(); } public bool RemoveBody(PBody b) { return RemoveBody(b, false); } public bool RemoveBody(PBody b, bool identity) { object[] items = this.bodies; if (identity || b == null) { for (int i = 0; i < numBodies; i++) { if (items[i] == (object) b) { RemoveBody(i); return true; } } } else { for (int i_0 = 0; i_0 < numBodies; i_0++) { if (b.Equals(items[i_0])) { RemoveBody(i_0); return true; } } } return false; } public void AddBody(PBody b) { if (numBodies + 1 >= bodies.Length) { bodies = (PBody[]) CollectionUtils .CopyOf(bodies, bodies.Length * 2); } b.w = this; for (int i = 0; i < b.numShapes; i++) { AddShape(b.shapes[i]); } bodies[numBodies] = b; numBodies++; } public void AddJoint(PJoint j) { if (numJoints + 1 >= joints.Length) { joints = (PJoint[]) CollectionUtils.CopyOf(joints, joints.Length * 2); } joints[numJoints] = j; numJoints++; } internal void AddShape(PShape s) { if (s._type == PShapeType.CONCAVE_SHAPE) { PConcavePolygonShape c = (PConcavePolygonShape) s; for (int i = 0; i < c.numConvexes; i++) { AddShape(((PShape) (c.convexes[i]))); } return; } if (numShapes + 1 >= shapes.Length) { shapes = (PShape[]) CollectionUtils.CopyOf(shapes, shapes.Length * 2); } shapes[numShapes] = s; s._sapAABB.Set(sap, s, s._aabb); numShapes++; } private void AddSolver(PSolver s) { if (numSolvers + 1 >= solvers.Length) { solvers = (PSolver[]) CollectionUtils.CopyOf(solvers, solvers.Length * 2); } solvers[numSolvers] = s; numSolvers++; } private PCollisionChooser cc = new PCollisionChooser(); private void Collide(long st) { PSortableObject[] obj = sap.Sort(); if (sap.checkX) { for (int i = 0; i < sap.numObject; i++) if (obj[i].begin) { PSortableObject end = obj[i].aabb.endX; PShape s1 = obj[i].parent; int j = i; do { j++; if (obj[j].begin) { PShape s2 = obj[j].parent; if ((!s1._parent.fix || !s2._parent.fix) && s1._parent != s2._parent && s1._aabb.IsHit(s2._aabb)) CollisionShape(s1, s2, cc); } } while (obj[j] != end && j < sap.numObject); } } else { for (int i_0 = 0; i_0 < sap.numObject; i_0++) if (obj[i_0].begin) { PSortableObject end_1 = obj[i_0].aabb.endY; PShape s1_2 = obj[i_0].parent; int j_3 = i_0; do { j_3++; if (obj[j_3].begin) { PShape s2_4 = 
obj[j_3].parent; if ((!s1_2._parent.fix || !s2_4._parent.fix) && s1_2._parent != s2_4._parent && s1_2._aabb.IsHit(s2_4._aabb)) CollisionShape(s1_2, s2_4, cc); } } while (obj[j_3] != end_1 && j_3 < sap.numObject); } } long en = (System.DateTime.Now.Ticks*100); collisionDetectionTime = en - st; for (int i_5 = 0; i_5 < numSolvers; i_5++) { if (solvers[i_5].rem) { RemoveSolver(i_5); i_5--; } } } private void CollisionShape(PShape s1, PShape s2, PCollisionChooser cc_0) { PContact[] cs = new PContact[2]; int num = cc_0.Collide(s1, s2, cs); if (num == 0) { return; } bool found = false; for (int f = 0; f < numSolvers; f++) { if (s1 != solvers[f].s1 || s2 != solvers[f].s2) { continue; } solvers[f].Update(cs, num); found = true; break; } if (!found) { PSolver solver = new PSolver(s1, s2, cs, num); AddSolver(solver); } } public PBody[] GetBodies() { return (PBody[]) CollectionUtils.CopyOf(bodies, numBodies); } public PBody[] Inner_bodies() { return bodies; } public int Size() { return numBodies; } public long GetCollisionDetectionTime() { return collisionDetectionTime; } public long GetCollisionSolveTime() { return collisionSolveTime; } public Vector2f GetGravity() { return gravity.Clone(); } public float GetIterations() { return (float) iterations; } public PJoint[] GetJoints() { return (PJoint[]) CollectionUtils.CopyOf(joints, numJoints); } public long GetPositionUpdateTime() { return positionUpdateTime; } public PShape[] GetShapes() { return (PShape[]) CollectionUtils.CopyOf(shapes, numShapes); } public PSolver[] GetSolvers() { return (PSolver[]) CollectionUtils.CopyOf(solvers, numSolvers); } public long GetTotalStepTime() { return totalStepTime; } private void RemoveBody(int index) { for (int i = 0; i < bodies[index].numShapes; i++) { PShape s = bodies[index].shapes[i]; if (s._type == PShapeType.CONCAVE_SHAPE) { PConcavePolygonShape c = (PConcavePolygonShape) s; for (int j = 0; j < c.numConvexes; j++) { c.convexes[j]._rem = true; } } else { s._rem = true; } } if (index != numBodies - 1) { System.Array.Copy((bodies),index + 1,(bodies),index,numBodies - index - 1); } numBodies--; } private void RemoveJoint(int index) { if (index != numJoints - 1) { System.Array.Copy((joints),index + 1,(joints),index,numJoints - index - 1); } numJoints--; } private void RemoveShape(int index) { if (shapes[index]._type == PShapeType.CONCAVE_SHAPE) { PConcavePolygonShape c = (PConcavePolygonShape) shapes[index]; for (int i = 0; i < c.numConvexes; i++) { c.convexes[i]._rem = true; } } shapes[index]._sapAABB.Remove(); if (index != numShapes - 1) { System.Array.Copy((shapes),index + 1,(shapes),index,numShapes - index - 1); } numShapes--; } private void RemoveSolver(int index) { if (index != numSolvers - 1) { System.Array.Copy((solvers),index + 1,(solvers),index,numSolvers - index - 1); } numSolvers--; } public void SetGravity(float gx, float gy) { gravity.Set(gx, gy); } public void SetIterations(int iterations_0) { this.iterations = iterations_0; } private void Solve(float dt) { long st = (System.DateTime.Now.Ticks*100); for (int i = 0; i < numSolvers; i++) { solvers[i].PreSolve(); } for (int i_0 = 0; i_0 < numJoints; i_0++) { joints[i_0].PreSolve(dt); } for (int j = 0; j < iterations; j++) { for (int i_1 = 0; i_1 < numJoints; i_1++) { joints[i_1].SolveVelocity(dt); } for (int i_2 = 0; i_2 < numSolvers; i_2++) { solvers[i_2].SolveVelocity(); } } long en = (System.DateTime.Now.Ticks*100); collisionSolveTime = en - st; st = (System.DateTime.Now.Ticks*100); for (int i_3 = 0; i_3 < numBodies; i_3++) if (!bodies[i_3].fix) { 
PBody b = bodies[i_3]; b.correctVel.x = b.vel.x * dt; b.correctVel.y = b.vel.y * dt; b.correctAngVel = b.angVel * dt; } en = (System.DateTime.Now.Ticks*100); positionUpdateTime += en - st; st = (System.DateTime.Now.Ticks*100); for (int j_4 = 0; j_4 < iterations; j_4++) { for (int i_5 = 0; i_5 < numJoints; i_5++) { joints[i_5].SolvePosition(); } for (int i_6 = 0; i_6 < numSolvers; i_6++) { solvers[i_6].SolvePosition(); } } en = (System.DateTime.Now.Ticks*100); collisionSolveTime += en - st; st = (System.DateTime.Now.Ticks*100); for (int i_7 = 0; i_7 < numBodies; i_7++) { PBody b_8 = bodies[i_7]; if (b_8.fix) { b_8.angVel = 0.0F; b_8.vel.Set(0.0F, 0.0F); } else { b_8.pos.x += b_8.correctVel.x; b_8.pos.y += b_8.correctVel.y; b_8.ang += b_8.correctAngVel; } b_8.Update(); } for (int i_9 = 0; i_9 < numJoints; i_9++) { joints[i_9].Update(); } en = (System.DateTime.Now.Ticks*100); positionUpdateTime += en - st; } public void Step(float dt) { long st = (System.DateTime.Now.Ticks*100); for (int i = 0; i < numBodies; i++) if (bodies[i].rem) { RemoveBody(i); i--; } else { bodies[i].Update(); if (!bodies[i].fix) { PBody b = bodies[i]; b.vel.x += gravity.x * dt; b.vel.y += gravity.y * dt; } } for (int i_0 = 0; i_0 < numShapes; i_0++) { if (shapes[i_0]._rem) { RemoveShape(i_0); i_0--; } } for (int i_1 = 0; i_1 < numJoints; i_1++) { if (joints[i_1].rem) { RemoveJoint(i_1); i_1--; } else { joints[i_1].Update(); } } long en = (System.DateTime.Now.Ticks*100); positionUpdateTime = en - st; Collide(en); Solve(dt); long totalEn = (System.DateTime.Now.Ticks*100); totalStepTime = totalEn - st; } public void Update() { for (int i = 0; i < numBodies; i++) { if (bodies[i].rem) { RemoveBody(i); i--; } else { bodies[i].Update(); } } for (int i_0 = 0; i_0 < numShapes; i_0++) { if (shapes[i_0]._rem) { RemoveShape(i_0); i_0--; } } for (int i_1 = 0; i_1 < numJoints; i_1++) { if (joints[i_1].rem) { RemoveJoint(i_1); i_1--; } else { joints[i_1].Update(); } } } } }
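// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the Loon sources): driving PPhysWorld from a
// fixed-timestep loop. Only members shown above (SetGravity, SetIterations,
// AddBody, Step, Size, GetCollisionSolveTime) are used; construction of the
// PBody instances is deliberately left out because it belongs to other files.
// ----------------------------------------------------------------------------
using System;
using Loon.Physics;

namespace Loon.Physics.Examples
{
    public static class PhysWorldLoopSketch
    {
        public static void Run(PBody[] bodiesToSimulate)
        {
            var world = new PPhysWorld();
            world.SetGravity(0.0f, 9.80665f); // same default the constructor uses
            world.SetIterations(10);          // more iterations = stiffer but slower solve

            foreach (PBody body in bodiesToSimulate)
            {
                world.AddBody(body);
            }

            // Fixed 60 Hz timestep; each Step() removes flagged bodies, applies
            // gravity, runs the sweep-and-prune broad phase, the contact solver
            // iterations and finally position integration.
            const float dt = 1.0f / 60.0f;
            for (int frame = 0; frame < 600; frame++)
            {
                world.Step(dt);
            }

            Console.WriteLine("Simulated {0} bodies; collision solve counter = {1}",
                world.Size(), world.GetCollisionSolveTime());
        }
    }
}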
using System; using System.Collections; using System.Collections.Generic; using System.IO; using Castle.Services.Transaction; using Cuyahoga.Core.Service.Files; using log4net; using Cuyahoga.Core.Domain; using Cuyahoga.Core.DataAccess; namespace Cuyahoga.Core.Service.SiteStructure { /// <summary> /// Provides functionality to manage site instances. /// </summary> [Transactional] public class SiteService : ISiteService { private static readonly ILog log = LogManager.GetLogger(typeof(SiteService)); private ISiteStructureDao _siteStructureDao; private ICommonDao _commonDao; private IFileService _fileService; /// <summary> /// Constructor. /// </summary> /// <param name="siteStructureDao"></param> /// <param name="commonDao"></param> /// <param name="fileService"></param> public SiteService(ISiteStructureDao siteStructureDao, ICommonDao commonDao, IFileService fileService) { this._siteStructureDao = siteStructureDao; this._commonDao = commonDao; this._fileService = fileService; } #region ISiteService Members public Site GetSiteById(int siteId) { return (Site)this._commonDao.GetObjectById(typeof(Site), siteId); } public Site GetSiteBySiteUrl(string siteUrl) { Site site = this._siteStructureDao.GetSiteBySiteUrl(siteUrl); // Try to resolve the site via SiteAlias if (site == null) { SiteAlias sa = this._siteStructureDao.GetSiteAliasByUrl(siteUrl); if (sa != null) { site = sa.Site; } } return site; } public SiteAlias GetSiteAliasById(int siteAliasId) { return (SiteAlias)this._commonDao.GetObjectById(typeof(SiteAlias), siteAliasId); } public SiteAlias GetSiteAliasByUrl(string url) { return this._siteStructureDao.GetSiteAliasByUrl(url); } public IList<SiteAlias> GetSiteAliasesBySite(Site site) { return this._siteStructureDao.GetSiteAliasesBySite(site); } public IList GetAllSites() { return this._commonDao.GetAll(typeof(Site)); } [Transaction(TransactionMode.RequiresNew)] public virtual void CreateSite(Site site, string siteDataRoot, IList<Template> templatesToCopy, string systemTemplatesDirectory) { try { // 1. Add global roles to site IList<Role> roles = this._commonDao.GetAll<Role>(); foreach (Role role in roles) { if (role.IsGlobal) { site.Roles.Add(role); } } // 2. Save site in database this._commonDao.SaveObject(site); // 3. Create SiteData folder structure if (! this._fileService.CheckIfDirectoryIsWritable(siteDataRoot)) { throw new IOException(string.Format("Unable to create the site because the directory {0} is not writable.", siteDataRoot)); } string siteDataPhysicalDirectory = Path.Combine(siteDataRoot, site.Id.ToString()); this._fileService.CreateDirectory(siteDataPhysicalDirectory); this._fileService.CreateDirectory(Path.Combine(siteDataPhysicalDirectory, "UserFiles")); this._fileService.CreateDirectory(Path.Combine(siteDataPhysicalDirectory, "index")); string siteTemplatesDirectory = Path.Combine(siteDataPhysicalDirectory, "Templates"); this._fileService.CreateDirectory(siteTemplatesDirectory); // 4. Copy templates IList<string> templateDirectoriesToCopy = new List<string>(); foreach (Template template in templatesToCopy) { string templateDirectoryName = template.BasePath.Substring(template.BasePath.IndexOf("/") + 1); if (! 
templateDirectoriesToCopy.Contains(templateDirectoryName)) { templateDirectoriesToCopy.Add(templateDirectoryName); } Template newTemplate = template.GetCopy(); newTemplate.Site = site; site.Templates.Add(newTemplate); this._commonDao.SaveOrUpdateObject(newTemplate); this._commonDao.SaveOrUpdateObject(site); } foreach (string templateDirectory in templateDirectoriesToCopy) { string sourceDir = Path.Combine(systemTemplatesDirectory, templateDirectory); string targetDir = Path.Combine(siteTemplatesDirectory, templateDirectory); this._fileService.CopyDirectoryContents(sourceDir, targetDir); } } catch (Exception ex) { log.Error("An unexpected error occurred while creating a new site.", ex); throw; } } [Transaction(TransactionMode.RequiresNew)] public virtual void SaveSite(Site site) { try { // We need to use a specific DAO to also enable clearing the query cache. this._siteStructureDao.SaveSite(site); } catch (Exception ex) { log.Error("Error saving site", ex); throw; } } [Transaction(TransactionMode.RequiresNew)] public virtual void DeleteSite(Site site) { if (site.RootNodes.Count > 0) { throw new Exception("Can't delete a site when there are still related nodes. Please delete all nodes before deleting an entire site."); } else { IList<SiteAlias> aliases = this._siteStructureDao.GetSiteAliasesBySite(site); if (aliases.Count > 0) { throw new Exception("Unable to delete a site that still has related aliases."); } else { try { // We need to use a specific DAO to also enable clearing the query cache. this._siteStructureDao.DeleteSite(site); } catch (Exception ex) { log.Error("Error deleting site", ex); throw; } } } } [Transaction(TransactionMode.RequiresNew)] public virtual void SaveSiteAlias(SiteAlias siteAlias) { try { // We need to use a specific DAO to also enable clearing the query cache. this._siteStructureDao.SaveSiteAlias(siteAlias); } catch (Exception ex) { log.Error("Error saving site alias", ex); throw; } } [Transaction(TransactionMode.RequiresNew)] public virtual void DeleteSiteAlias(SiteAlias siteAlias) { try { // We need to use a specific DAO to also enable clearing the query cache. this._siteStructureDao.DeleteSiteAlias(siteAlias); } catch (Exception ex) { log.Error("Error deleting site alias", ex); throw; } } #endregion } }
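// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the Cuyahoga sources): how a caller might
// drive ISiteService.CreateSite as implemented above. The service instance, the
// already-populated Site object and the template list are assumed to come from
// the application's container and UI layer; the directory paths and url are
// placeholders. CreateSite adds the global roles, persists the site, builds the
// SiteData folder tree and copies the selected templates inside a RequiresNew
// transaction.
// ----------------------------------------------------------------------------
using System.Collections.Generic;
using Cuyahoga.Core.Domain;
using Cuyahoga.Core.Service.SiteStructure;

namespace Cuyahoga.Core.Examples
{
    public static class SiteProvisioningSketch
    {
        public static void Provision(ISiteService siteService, Site newSite, IList<Template> templatesToCopy)
        {
            // Placeholder paths; in the web application these come from configuration.
            const string siteDataRoot = @"C:\inetpub\cuyahoga\SiteData";
            const string systemTemplatesDirectory = @"C:\inetpub\cuyahoga\Templates";

            siteService.CreateSite(newSite, siteDataRoot, templatesToCopy, systemTemplatesDirectory);

            // After creation the site can be resolved by its url again,
            // falling back to aliases when the main url does not match.
            Site roundTripped = siteService.GetSiteBySiteUrl("http://www.example.com/");
        }
    }
}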
// // Copyright (c) Microsoft Corporation. All rights reserved. // //#define DEBUG_SAVE_STATE //#define DEBUG_RELOAD_STATE using System.ComponentModel; using Microsoft.Zelig.CodeGeneration.IR.Transformations; namespace Microsoft.Zelig.FrontEnd { using System; using System.Collections.Generic; using System.Text; using System.IO; using Importer = Microsoft.Zelig.MetaData.Importer; using Normalized = Microsoft.Zelig.MetaData.Normalized; using IR = Microsoft.Zelig.CodeGeneration.IR; using RT = Microsoft.Zelig.Runtime; using TS = Microsoft.Zelig.Runtime.TypeSystem; using Cfg = Microsoft.Zelig.Configuration.Environment; using ARM = Microsoft.Zelig.Emulation.ArmProcessor; class Bench : TS.IEnvironmentProvider, TS.IConfigurationProvider, MetaData.IMetaDataResolverHelper, MetaData.ISymbolResolverHelper, IR.Transformations.ConstraintSystemCollector.IVisualizer { const string c_ZeligSourceCode = "/ZeligSourceCode"; public class RawImage { public string SectionName; public uint RangeStart; public uint RangeEnd; } public class TypeSystemForFrontEnd : IR.TypeSystemForCodeTransformation { readonly Bench m_owner; public TypeSystemForFrontEnd( Bench owner, TS.IEnvironmentProvider env ) : base( env ) { m_owner = owner; } //--// protected override void NotifyCompilationPhase( IR.CompilationSteps.PhaseDriver phase ) { m_owner.NotifyCompilationPhase( phase ); } public override IR.SourceCodeTracker GetSourceCodeTracker( ) { return m_owner.m_sourceCodeTracker; } } //--// // // State // const string SymbolSuffix = ".pdb"; const string SourceCodeDatabaseSuffix = ".srcdb"; //--// // // Bench is a root object in the system. This makes it easier to put any object in the watch window from the global hierarchy. // private static Bench s_pThis; private string m_outputName; private string m_outputDir; private string m_targetFile; private string m_entryPointName; private bool m_fReloadState; private bool m_fDumpIL; private bool m_fDumpIRpre; private bool m_fDumpIRpost; private bool m_fDumpIR; private bool m_fDumpIRXMLpre; private bool m_fDumpIRXMLpost; private bool m_fDumpIRXML; private bool m_fDumpCFG; private bool m_fDumpLLVMIR; private bool m_fDumpLLVMIR_TextRepresentation; private bool m_fDumpASM; private bool m_fDumpASMDIR; private bool m_fDumpHEX; private bool m_fSkipReadOnly; private uint m_nativeIntSize; private List< RawImage > m_dumpRawImage; private string m_libraryLocation_HostBuild; private string m_libraryLocation_TargetBuild; private List< string > m_references; private List< string > m_searchOrder; private List< string > m_importDirectories; private List< string > m_importLibraries; private IR.CompilationSteps.DelegationCache m_delegationCache; private MetaData.MetaDataResolver m_resolver; private TypeSystemForFrontEnd m_typeSystem; private IR.CompilationSteps.Controller m_controller; private Cfg.CompilationSetupCategory m_compilationSetup; private GrowOnlyHashTable< string, object > m_configurationOptions; private List<String> m_disabledPhases; private Cfg.ProductCategory m_product; private Cfg.MemoryMapCategory m_memoryMap; private IR.SourceCodeTracker m_sourceCodeTracker; //--// private Cfg.Manager m_configurationManager; private PerformanceCounters.Timing m_timing; private long m_lastTiming; // // Constructor Methods // private Bench( ) { m_timing.Start( ); #if DEBUG #if DEBUG_INSTRUMENTATION Environment.SetEnvironmentVariable( "Flavor", "Instrumentation" ); #else Environment.SetEnvironmentVariable( "Flavor", "Debug" ); #endif #else Environment.SetEnvironmentVariable( "Flavor", "Release" ); #endif 
//--// m_outputDir = "."; m_nativeIntSize = 32; m_dumpRawImage = new List<RawImage>( ); m_references = new List<string>( ); m_searchOrder = new List<string>( ); m_importDirectories = new List<string>( ); m_importLibraries = new List<string>( ); m_resolver = new Zelig.MetaData.MetaDataResolver( this ); m_configurationOptions = HashTableFactory.New<string, object>( ); m_disabledPhases = new List<String>( ); m_sourceCodeTracker = new IR.SourceCodeTracker( ); } //--// private string GetTime( ) { long newTiming = m_timing.Sample( ); long diffTiming = newTiming - m_lastTiming; m_lastTiming = newTiming; return string.Format( "{0,10:F3} [{1,8:F3}]", ( float )PerformanceCounters.Timing.ToMicroSeconds( newTiming ) / ( 1000 * 1000 ), ( float )PerformanceCounters.Timing.ToMicroSeconds( diffTiming ) / ( 1000 * 1000 ) ); } //--// private string AddSearchDirectory( string dir ) { if( m_searchOrder.Contains( dir ) == false ) { m_searchOrder.Add( dir ); } return dir; } void NotifyCompilationPhase( IR.CompilationSteps.PhaseDriver phase ) { Console.WriteLine( "{0}: Phase: {1}", GetTime( ), phase ); #if DEBUG_SAVE_STATE if(phase == Microsoft.Zelig.CodeGeneration.IR.CompilationSteps.Phase.GenerateImage) { SaveIrToDisk( "temp.ZeligImage", m_typeSystem ); } #endif //// if(phase == IR.CompilationSteps.Phase.ReduceTypeSystem + 1) //// { //// string filePrefix = Path.Combine( m_outputDir, m_outputName ); //// //// DumpIRAsText( filePrefix + ".ZeligIR_post" ); //// } //// switch(phase) //// { //// case Microsoft.Zelig.CodeGeneration.IR.CompilationSteps.Phase.Optimizations: //// case Microsoft.Zelig.CodeGeneration.IR.CompilationSteps.Phase.AllocateRegisters: //// Console.WriteLine( "Press ENTER" ); //// Console.ReadLine(); //// break; //// } } //--// private static string RemoveFileExtension( string file ) { return System.IO.Path.GetDirectoryName( file ) + @"\" + System.IO.Path.GetFileNameWithoutExtension( file ); } private static string Expand( string file ) { return Environment.ExpandEnvironmentVariables( file ); } private Importer.MetaData LoadAssembly( string file ) { byte[] image = System.IO.File.ReadAllBytes( file ); Importer.PELoader pe = new Importer.PELoader( file, image ); Importer.MetaData md = Importer.MetaData.loadMetaData( file, this, pe, Importer.MetaDataLoaderFlags.LoadCode | Importer.MetaDataLoaderFlags.LoadDebugInfo ); return md; } //--// object TS.IEnvironmentProvider.GetService( Type t ) { if( t == typeof( Normalized.IMetaDataBootstrap ) ) { return m_resolver; } if( t == typeof( TS.IConfigurationProvider ) ) { return this; } if( t == typeof( IR.CompilationSteps.DelegationCache ) ) { return m_delegationCache; } return null; } //--// bool TS.IConfigurationProvider.GetValue( string optionName, out object val ) { return m_configurationOptions.TryGetValue( optionName, out val ); } //--// void IR.Transformations.ConstraintSystemCollector.IVisualizer.DisplayGraph( IR.Transformations.ConstraintSystemCollector.GraphState gs ) { Microsoft.Zelig.Tools.InequalityGraphVisualization.Viewer.Show( gs ); } //--// Importer.PdbInfo.PdbFile MetaData.ISymbolResolverHelper.ResolveAssemblySymbols( string file ) { try { string root = RemoveFileExtension( file ); string symbolFile = root + SymbolSuffix; string sourceFile = root + SourceCodeDatabaseSuffix; FileInfo symbolInfo = new FileInfo( symbolFile ); if( symbolInfo.Exists ) { byte[] image = System.IO.File.ReadAllBytes( symbolFile ); Importer.PdbInfo.PdbFile pdbFile = new Importer.PdbInfo.PdbFile( new Importer.ArrayReader( image ) ); Importer.PdbInfo.DataStream 
streamSrc = pdbFile.GetStream( c_ZeligSourceCode ); if( streamSrc != null ) { try { IR.SourceCodeTracker sct = new IR.SourceCodeTracker( ); using( var stream = new MemoryStream( streamSrc.Payload ) ) { using( var ctx = IR.TypeSystemSerializer.GetDeserializationContext( stream, null, null, 0 ) ) { ctx.TransformGeneric( ref sct ); } } m_sourceCodeTracker.Merge( sct ); } catch { } } return pdbFile; } } catch { } return null; } Importer.MetaData MetaData.IMetaDataResolverHelper.ResolveAssemblyReference( string name, MetaData.MetaDataVersion ver ) { // // Force use of our version of mscorlib. // if( name == "mscorlib" ) { return CheckAndLoad( m_libraryLocation_TargetBuild, name ); } Importer.MetaData md; foreach( string dir in m_searchOrder ) { md = CheckAndLoad( dir, name, ver ); if( md != null ) { return md; } } return null; } //--// private Importer.MetaData CheckAndLoad( string dir, string name ) { string file = Path.Combine( dir, name + ".dll" ); if( System.IO.File.Exists( file ) ) { try { return LoadAssembly( file ); } catch { } } return null; } private Importer.MetaData CheckAndLoad( string dir, string name, MetaData.MetaDataVersion ver ) { Importer.MetaData md = CheckAndLoad( dir, name ); if( md != null ) { if( md.Assembly.Name == name ) { if( ver == null || md.Assembly.Version.IsCompatible( ver, false ) ) { return md; } } } return null; } //--// private void EmbedSourceCodeAll( string sDirectory ) { DirectoryInfo dir = new DirectoryInfo( sDirectory ); if( dir.Exists == false ) { Console.WriteLine( "Cannot find directory '{0}'", sDirectory ); } else { foreach( FileInfo file in dir.GetFiles( ) ) { if( string.Compare( file.Extension, SymbolSuffix, true ) == 0 ) { try { EmbedSourceCode( file.FullName ); } catch { } } } } } private void EmbedSourceCode( string file ) { try { var fi = new System.IO.FileInfo( file ); if( fi.Exists == false ) { Console.WriteLine( "Cannot find file '{0}'", file ); } else if( m_fSkipReadOnly && fi.IsReadOnly ) { Console.WriteLine( "Skipping read-only file '{0}'", file ); } else { IR.SourceCodeTracker sct = new IR.SourceCodeTracker( ); byte[] image = System.IO.File.ReadAllBytes( file ); Importer.PdbInfo.PdbFile pdbFile = new Importer.PdbInfo.PdbFile( new Importer.ArrayReader( image ) ); foreach( var pdbFunc in pdbFile.Functions ) { foreach( var pdbLine in pdbFunc.LineBlocks ) { IR.SourceCodeTracker.SourceCode sc = sct.GetSourceCode( pdbLine.File.Name ); if( sc != null ) { sc.UsingCachedValues = true; } } } { MemoryStream output = new MemoryStream( ); using( IR.TransformationContextForCodeTransformation ctx = IR.TypeSystemSerializer.GetSerializationContext( output ) ) { ctx.TransformGeneric( ref sct ); } Importer.PdbInfo.DataStream stream = pdbFile.CreateNewStream( c_ZeligSourceCode ); stream.Payload = output.ToArray( ); } System.IO.File.WriteAllBytes( file, pdbFile.Emit( ) ); Console.WriteLine( "Embedded source code in '{0}'", file ); } } catch( Importer.PdbInfo.PdbException ) { Console.WriteLine( "Skipping '{0}', unrecognized PDB format", file ); } } //--//--// private void SaveIrToDisk( string file, IR.TypeSystemForCodeTransformation typeSystem ) { using( System.IO.FileStream stream = new System.IO.FileStream( file, FileMode.Create, FileAccess.Write, FileShare.None, 1024 * 1024 ) ) { IR.TypeSystemSerializer.Serialize( stream, typeSystem ); } } private IR.TypeSystemForCodeTransformation LoadIrFromDisk( string file, IR.TypeSystemSerializer.CreateInstance callback ) { using( System.IO.FileStream stream = new System.IO.FileStream( file, FileMode.Open ) ) { return 
IR.TypeSystemSerializer.Deserialize( stream, callback, null, 0 ); } } private object CreateInstanceForType( Type t ) { if( t == typeof( IR.TypeSystemForCodeTransformation ) ) { return new TypeSystemForFrontEnd( this, this ); } return null; } //--// private void DumpIRAsText( string file ) { using( var ird = new IR.TextIntermediateRepresentationDumper( file ) ) { var types = m_typeSystem.Types.ToArray( ); Array.Sort( types, ( x, y ) => x.ToString( ).CompareTo( y.ToString( ) ) ); foreach( var td in types ) { ird.WriteLine( "###############################################################################" ); ird.WriteLine( ); ird.WriteLine( "Type: {0} [Size={1}]", td, td.ValidLayout ? td.Size : uint.MaxValue ); if( td.Extends != null ) { ird.WriteLine( " Extends: {0}", td.Extends ); } foreach( var itf in td.Interfaces ) { ird.WriteLine( " Interface: {0}", itf ); } foreach( var fd in td.Fields ) { ird.WriteLine( " Field: {0} [Offset={1}]", fd, fd.ValidLayout ? fd.Offset : -1 ); } ird.WriteLine( ); var methods = ArrayUtility.CopyNotNullArray( td.Methods ); Array.Sort( methods, ( x, y ) => x.ToShortString( ).CompareTo( y.ToShortString( ) ) ); foreach( var md in methods ) { IR.ControlFlowGraphState cfg = IR.TypeSystemForCodeTransformation.GetCodeForMethod( md ); if( cfg != null ) { ird.WriteLine( " Method {0}", md ); ird.WriteLine( ); cfg.Dump( ird ); } else { ird.WriteLine( " NoCodeMethod {0}", md ); } ird.WriteLine( ); } } } } private void DumpIRAsXML(string file) { // Collect all methods into a list var allMethods = new List<TS.MethodRepresentation>(); m_typeSystem.EnumerateMethods(method => allMethods.Add(method)); // Initialize the XML IRDumper var doc = new System.Xml.XmlDocument(); var node = XmlHelper.AddElement(doc, "Methods"); var irDumper = new IR.XmlIntermediateRepresentationDumper(doc, node); // Dump each method alphabetically foreach (var method in irDumper.Sort(allMethods)) { IR.TypeSystemForCodeTransformation.GetCodeForMethod(method)?.Dump(irDumper); } doc.Save(file); } //--// private void InitializeConfigurationManager( ) { if( m_configurationManager == null ) { m_configurationManager = new Cfg.Manager( ); m_configurationManager.AddAllAssemblies( ); m_configurationManager.ComputeAllPossibleValuesForFields( ); } } private void SearchConfigurationOptions( Cfg.AbstractCategory category ) { foreach( var valueCtx in category.SearchValuesWithAttributes( typeof( Cfg.LinkToConfigurationOptionAttribute ) ) ) { var attrib = valueCtx.GetAttribute<Cfg.LinkToConfigurationOptionAttribute>( ); m_configurationOptions[ attrib.Name ] = valueCtx.Value; } } private IEnumerable<T> GetConfigurationOptions<T>( ) where T : Cfg.AbstractCategory { InitializeConfigurationManager( ); foreach( Cfg.AbstractCategory value in m_configurationManager.AllValues ) { if( value is T ) { value.ApplyDefaultValues( ); yield return ( T )value; } } } private T GetConfigurationOption<T>( Type target ) where T : Cfg.AbstractCategory { foreach( var option in GetConfigurationOptions<T>( ) ) { if( option.GetType( ) == target ) { return option; } } return null; } private static string[] RecombineArgs( string[] args ) { List<string> arguments = new List<string>(args.Length); for(int i = 0; i < args.Length; ++i) { // An argument like "C:\my directory\with\a\space" will be split as // "C:\my" and "irectory\with\a\space" acrosss two entries in args. // We need to re-combine those entries // We must find a matching pair of double quotes. // Matching quotes must appear in the same or next argument. 
if( args[ i ].StartsWith( "\"" ) && !args[ i ].EndsWith("\"") ) { // Look in the very next argument. // if we are at the last entry in args already, then // we have an unmatched quote we may be able to recover from if( i == args.Length - 1 ) { // ignore and hope for the best } else if( args[i + 1].EndsWith("\"") ) { args[ i + 1 ] = args[ i ] + " " + args[ i + 1 ]; ++i; } else { // no matching double-quotes return null; } } arguments.Add( args[i] ); } return arguments.ToArray( ); } private bool Parse( string[] args ) { if( args != null ) { for( int i = 0; i < args.Length; i++ ) { string arg = args[ i ]; if( arg == string.Empty ) { continue; } if( arg.StartsWith( "/" ) || arg.StartsWith( "-" ) ) { string option = arg.Substring( 1 ); if( IsMatch( option, "Cfg" ) ) { string file; if( !GetArgument( arg, args, ref i, out file, true ) ) { return false; } using( System.IO.StreamReader stream = new System.IO.StreamReader( file ) ) { string line; while( ( line = stream.ReadLine( ) ) != null ) { if( line.StartsWith( "#" ) ) { continue; } var arguments = line.Split( new char[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries ); string[] recombinedArgs = RecombineArgs( arguments ); if(recombinedArgs == null ) { Console.WriteLine( String.Format( "Arguments at line '{0}' could not be recombined", line ) ); return false; } if( Parse( recombinedArgs ) == false ) { return false; } } } } else if( IsMatch( option, "DumpIL" ) ) { m_fDumpIL = true; } else if( IsMatch( option, "ReloadState" ) ) { m_fReloadState = true; } else if( IsMatch( option, "DumpIRpre" ) ) { m_fDumpIRpre = true; } else if( IsMatch( option, "DumpIRpost" ) ) { m_fDumpIRpost = true; } else if( IsMatch( option, "DumpIR" ) ) { m_fDumpIR = true; } else if (IsMatch(option, "DumpIRXMLpre")) { m_fDumpIRXMLpre = true; } else if (IsMatch(option, "DumpIRXMLpost")) { m_fDumpIRXMLpost = true; } else if (IsMatch(option, "DumpIRXML")) { m_fDumpIRXML = true; } else if( IsMatch( option, "DumpLLVMIR" ) ) { m_fDumpLLVMIR = true; } else if( IsMatch( option, "DumpLLVMIR_TextRepresentation" ) ) { m_fDumpLLVMIR_TextRepresentation = true; } else if( IsMatch( option, "DumpCFG" ) ) { m_fDumpCFG = true; } else if( IsMatch( option, "DumpASM" ) ) { m_fDumpASM = true; } else if( IsMatch( option, "DumpASMDIR" ) ) { m_fDumpASMDIR = true; } else if( IsMatch( option, "DumpHEX" ) ) { m_fDumpHEX = true; } else if( IsMatch( option, "DumpRAW" ) ) { string section; uint rangeStart; uint rangeEnd; if( !GetArgument( arg, args, ref i, out section, false ) || !GetArgument( arg, args, ref i, out rangeStart, true ) || !GetArgument( arg, args, ref i, out rangeEnd, true ) ) { return false; } m_dumpRawImage.Add( new RawImage { SectionName = section, RangeStart = rangeStart, RangeEnd = rangeEnd } ); } else if( IsMatch( option, "HostAssemblyDir" ) ) { string dir; if( !GetArgument( arg, args, ref i, out dir, true ) ) { return false; } m_libraryLocation_HostBuild = AddSearchDirectory( dir ); } else if( IsMatch( option, "DeviceAssemblyDir" ) ) { string dir; if( !GetArgument( arg, args, ref i, out dir, true ) ) { return false; } m_libraryLocation_TargetBuild = AddSearchDirectory( dir ); } else if( IsMatch( option, "ImportDirectory" ) ) { string dir; if( !GetArgument( arg, args, ref i, out dir, true ) ) { return false; } dir = dir.ToLower( ); if( !m_importDirectories.Contains( dir ) ) { m_importDirectories.Add( dir ); } } else if( IsMatch( option, "ImportLibrary" ) ) { string file; if( !GetArgument( arg, args, ref i, out file, true ) ) { return false; } file = file.ToLower( ); if( 
!m_importLibraries.Contains( file ) ) { m_importLibraries.Add( file ); } } else if( IsMatch( option, "MaxProcs" ) ) { uint iMaxProcs; if( !GetArgument( arg, args, ref i, out iMaxProcs, false ) ) { return false; } IR.CompilationSteps.ParallelTransformationsHandler.MaximumNumberOfProcessorsToUse = ( int )iMaxProcs; } else if( IsMatch( option, "OutputName" ) ) { string name; if( !GetArgument( arg, args, ref i, out name, false ) ) { return false; } m_outputName = name; } else if( IsMatch( option, "NativeIntSize" ) ) { string name; if( !GetArgument( arg, args, ref i, out name, false ) ) { return false; } if( !UInt32.TryParse( name, out m_nativeIntSize ) ) return false; } else if( IsMatch( option, "OutputDir" ) ) { string dir; if( !GetArgument( arg, args, ref i, out dir, true ) ) { return false; } m_outputDir = dir; } else if( IsMatch( option, "Reference" ) ) { string reference; if( !GetArgument( arg, args, ref i, out reference, false ) ) { return false; } m_references.Add( reference ); } else if( IsMatch( option, "CompilationSetup" ) ) { string compilationSetup; if( !GetArgument( arg, args, ref i, out compilationSetup, false ) ) { return false; } m_compilationSetup = null; foreach( var setup in GetConfigurationOptions<Cfg.CompilationSetupCategory>( ) ) { if( setup.GetType( ).FullName == compilationSetup ) { m_compilationSetup = setup; break; } } if( m_compilationSetup == null ) { Console.WriteLine( "Cannot find definition for compilation setup '{0}'", compilationSetup ); return false; } SearchConfigurationOptions( m_compilationSetup ); m_product = GetConfigurationOption<Cfg.ProductCategory>( m_compilationSetup.Product ); if( m_product == null ) { Console.Error.WriteLine( "Cannot compile without a product definition!" ); return false; } SearchConfigurationOptions( m_product ); m_memoryMap = GetConfigurationOption<Cfg.MemoryMapCategory>( m_compilationSetup.MemoryMap ); if( m_memoryMap == null ) { Console.Error.WriteLine( "Cannot compile without a memory map!" 
); return false; } SearchConfigurationOptions( m_memoryMap ); } else if( IsMatch( option, "CompilationOption" ) ) { string type; string name; string value; if( !GetArgument( arg, args, ref i, out type, false ) || !GetArgument( arg, args, ref i, out name, false ) || !GetArgument( arg, args, ref i, out value, false ) ) { return false; } Type t = Type.GetType( type ); if( t == null ) { Console.Error.WriteLine( "Cannot find type '{0}'", type ); return false; } try { object res = t.InvokeMember( "Parse", System.Reflection.BindingFlags.InvokeMethod | System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public, null, null, new object[] { value } ); m_configurationOptions[ name ] = res; } catch( Exception ex ) { Console.Error.WriteLine( "Cannot parse value '{0}': {1}", value, ex ); return false; } } else if( IsMatch( option, "CompilationPhaseDisabled" ) ) { string phase; if( !GetArgument( arg, args, ref i, out phase, false ) ) { return false; } m_disabledPhases.Add( phase.Trim( ) ); } else if( IsMatch( option, "Include" ) ) { string dir; if( !GetArgument( arg, args, ref i, out dir, true ) ) { return false; } AddSearchDirectory( dir ); } else if( IsMatch( option, "SkipReadOnly" ) ) { m_fSkipReadOnly = true; } else if( IsMatch( option, "EmbedSourceCode" ) ) { string sFile; if( !GetArgument( arg, args, ref i, out sFile, true ) ) { return false; } EmbedSourceCode( sFile ); return false; } else if( IsMatch( option, "EmbedSourceCodeAll" ) ) { string sDir; if( !GetArgument( arg, args, ref i, out sDir, true ) ) { return false; } EmbedSourceCodeAll( sDir ); return false; } else if( IsMatch( option, "EntryPoint" ) ) { string sEP; if( !GetArgument( arg, args, ref i, out sEP, true ) ) { return false; } m_entryPointName = sEP; } else { Console.WriteLine( "Unrecognized option: {0}", option ); return false; } } else { arg = Expand( arg ); if( File.Exists( arg ) == false ) { Console.WriteLine( "Cannot find '{0}'", arg ); return false; } if( m_targetFile != null ) { Console.WriteLine( "ERROR: Only one target file per compilation." ); } m_targetFile = arg; m_searchOrder.Insert( 0, System.IO.Path.GetDirectoryName( arg ) ); } } return true; } return false; } private static bool IsMatch( string arg, string cmd ) { return String.Compare( arg, cmd, StringComparison.OrdinalIgnoreCase ) == 0; } private static bool GetArgument( string arg, string[] args, ref int i, out string value, bool fExpand ) { if( i + 1 < args.Length ) { i++; value = args[ i ]; if( fExpand ) { value = Expand( value ); } return true; } Console.WriteLine( "Option '{0}' needs an argument", arg ); value = null; return false; } private static bool GetArgument( string arg, string[] args, ref int i, out uint value, bool fCanBeHex ) { string str; if( GetArgument( arg, args, ref i, out str, false ) ) { if( uint.TryParse( str, out value ) ) { return true; } if( fCanBeHex ) { if( str.StartsWith( "0x" ) && uint.TryParse( str.Substring( 2 ), System.Globalization.NumberStyles.AllowHexSpecifier, null, out value ) ) { return true; } } Console.WriteLine( "Option '{0}' needs a numeric argument, got '{1}'", arg, str ); } value = 0; return false; } //--// private bool ValidateOptions( ) { /* if( m_compilationSetup == null ) { Console.Error.WriteLine( "Cannot compile without a compilation setup!" 
); return false; }*/ return true; } //--// private void Compile( ) { if( m_outputName == null ) { m_outputName = Path.GetFileNameWithoutExtension( m_targetFile ); } string filePrefix = Path.Combine( m_outputDir, m_outputName ); /*FileStream fs = new FileStream( filePrefix + "_cout.txt", FileMode.Create ); StreamWriter sw = new StreamWriter( fs ); Console.SetOut( sw );*/ //--// // // We need this assembly, for all the extra stuff about open classes. // if( m_fReloadState == false ) { MetaData.IMetaDataResolverHelper resolver = this; Importer.MetaData md; md = resolver.ResolveAssemblyReference( "Microsoft.Zelig.Runtime", null ); m_resolver.Add( md ); m_resolver.ResolveAll( ); md = resolver.ResolveAssemblyReference( "mscorlib", null ); m_resolver.Add( md ); m_resolver.ResolveAll( ); //--// md = LoadAssembly( m_targetFile ); m_resolver.Add( md ); m_resolver.ResolveAll( ); //--// foreach( string reference in m_references ) { md = resolver.ResolveAssemblyReference( reference, null ); m_resolver.Add( md ); m_resolver.ResolveAll( ); } } //--// #if DEBUG_RELOAD_STATE m_typeSystem = (TypeSystemForFrontEnd)LoadIrFromDisk( "temp.ZeligImage", CreateInstanceForType ); m_typeSystem.GenerateImage( Microsoft.Zelig.CodeGeneration.IR.CompilationSteps.Phase.GenerateImage ); m_controller = new IR.CompilationSteps.Controller( m_typeSystem ); if(m_fDumpIL) { } if(m_fDumpIRpre) { } #else if( m_fReloadState == false ) { m_typeSystem = new TypeSystemForFrontEnd( this, this ); m_delegationCache = new IR.CompilationSteps.DelegationCache( m_typeSystem ); m_typeSystem.InitializeForCompilation( ); //--// var pa = ( IR.Abstractions.Platform )Activator.CreateInstance( m_compilationSetup.Platform, m_typeSystem, m_memoryMap ); m_typeSystem.PlatformAbstraction = pa; //--// var cc = ( IR.Abstractions.CallingConvention )Activator.CreateInstance( m_compilationSetup.CallingConvention, m_typeSystem ); m_typeSystem.CallingConvention = cc; //--// Console.WriteLine( "{0}: ConvertToIR", GetTime( ) ); foreach( Normalized.MetaDataAssembly asml in m_resolver.NormalizedAssemblies ) { m_typeSystem.ImportAssembly( asml ); } Console.WriteLine( "{0}: Done", GetTime( ) ); //--// Console.WriteLine( "{0}: ResolveAll", GetTime( ) ); m_typeSystem.ResolveAll( ); Console.WriteLine( "{0}: Done", GetTime( ) ); //LLVM/DWARF debugging support. 
m_typeSystem.EnumerateMethods( delegate( TS.MethodRepresentation md ) { var cfg=IR.TypeSystemForCodeTransformation.GetCodeForMethod( md ); if( cfg != null ) { Debugging.DebugInfo di = null; foreach( var op in cfg.DataFlow_SpanningTree_Operators ) { if( op.DebugInfo != null ) { di = op.DebugInfo; break; } } if( di != null ) { m_typeSystem.Module.DebugInfoForMethods.Add( md, new Debugging.DebugInfo( di.SrcFileName, di.MethodName, di.BeginLineNumber, di.BeginColumn, di.EndLineNumber, di.EndColumn ) ); } } } ); //--// Directory.CreateDirectory( Path.GetDirectoryName( filePrefix ) ); if( m_fDumpIL ) { DirectoryInfo di = Directory.CreateDirectory( filePrefix ); string oldCD = Environment.CurrentDirectory; Environment.CurrentDirectory = di.FullName; foreach( Normalized.MetaDataAssembly asmlNormalized in m_resolver.NormalizedAssemblies ) { using( MetaData.MetaDataDumper writer = new MetaData.MetaDataDumper( asmlNormalized.Name, asmlNormalized.Version ) ) { writer.Process( asmlNormalized, true ); } } Environment.CurrentDirectory = oldCD; } if( m_fDumpCFG ) { DirectoryInfo di = Directory.CreateDirectory( filePrefix + "CFG" ); string oldCD = Environment.CurrentDirectory; Environment.CurrentDirectory = di.FullName; foreach( var t in m_typeSystem.Types ) { foreach( var m in t.Methods ) { IR.ControlFlowGraphStateForCodeTransformation cfg = IR.TypeSystemForCodeTransformation.GetCodeForMethod( m ); if( cfg != null ) { cfg.DumpToFile( m.m_identity.ToString( ) + ".txt" ); } } } Environment.CurrentDirectory = oldCD; } if( m_fDumpIRpre ) { using( System.IO.TextWriter writer = new System.IO.StreamWriter( filePrefix + ".TypeSystemDump.IrTxtpre", false, System.Text.Encoding.ASCII ) ) { writer.WriteLine( "======================" ); writer.WriteLine( "==== Types table ====" ); writer.WriteLine( "======================" ); foreach( TS.TypeRepresentation td in m_typeSystem.Types ) { writer.WriteLine( "Type : {0}", td ); writer.WriteLine( "Assembly: {0}", td.Owner.Name ); writer.WriteLine( "Version : {0}", td.Owner.Version ); } writer.WriteLine( "" ); writer.WriteLine( "" ); writer.WriteLine( "" ); writer.WriteLine( "======================" ); writer.WriteLine( "==== Type Details ====" ); writer.WriteLine( "======================" ); foreach( TS.TypeRepresentation td in m_typeSystem.Types ) { writer.WriteLine( "Type: {0}", td ); foreach( TS.TypeRepresentation itf in td.Interfaces ) { writer.WriteLine( " Interface: {0}", itf ); } foreach( TS.FieldRepresentation fd in td.Fields ) { writer.WriteLine( " Field: {0}", fd ); } foreach( TS.MethodRepresentation md in td.Methods ) { writer.WriteLine( " Method: {0}", md ); } writer.WriteLine( ); } } DumpIRAsText( filePrefix + ".ZeligIR_Pre" ); } if (m_fDumpIRXMLpre) { DumpIRAsXML(filePrefix + ".ZeligIR_Pre.xml"); } //--// m_typeSystem.NativeImportDirectories = m_importDirectories; m_typeSystem.NativeImportLibraries = m_importLibraries; Console.WriteLine( "{0}: ExecuteSteps", GetTime( ) ); m_controller = new IR.CompilationSteps.Controller( m_typeSystem, m_disabledPhases ); m_controller.ExecuteSteps( ); if( m_fDumpIRpost ) { using( System.IO.TextWriter writer = new System.IO.StreamWriter( filePrefix + ".TypeSystemDump.IrTxtpost", false, System.Text.Encoding.ASCII ) ) { writer.WriteLine( "======================" ); writer.WriteLine( "==== Types table ====" ); writer.WriteLine( "======================" ); foreach( TS.TypeRepresentation td in m_typeSystem.Types ) { writer.WriteLine( "Type : {0}", td ); writer.WriteLine( "Assembly: {0}", td.Owner.Name ); writer.WriteLine( "Version : 
{0}", td.Owner.Version ); } writer.WriteLine( "" ); writer.WriteLine( "" ); writer.WriteLine( "" ); writer.WriteLine( "======================" ); writer.WriteLine( "==== Type Details ====" ); writer.WriteLine( "======================" ); foreach( TS.TypeRepresentation td in m_typeSystem.Types ) { writer.WriteLine( "Type : {0}", td ); foreach( TS.TypeRepresentation itf in td.Interfaces ) { writer.WriteLine( " Interface: {0}", itf ); } foreach( TS.FieldRepresentation fd in td.Fields ) { writer.WriteLine( " Field: {0}", fd ); } foreach( TS.MethodRepresentation md in td.Methods ) { writer.WriteLine( " Method: {0}", md ); } writer.WriteLine( ); } } DumpIRAsText( filePrefix + ".ZeligIR_Post" ); } if (m_fDumpIRXMLpost) { DumpIRAsXML(filePrefix + ".ZeligIR_Post.xml"); } SaveIrToDisk( "temp.ZeligImage", m_typeSystem ); } else { m_typeSystem = ( TypeSystemForFrontEnd )LoadIrFromDisk( "temp.ZeligImage", CreateInstanceForType ); m_delegationCache = new IR.CompilationSteps.DelegationCache( m_typeSystem ); //--// Console.WriteLine( "{0}: ExecuteSteps", GetTime( ) ); m_controller = new IR.CompilationSteps.Controller( m_typeSystem ); m_controller.ExecuteSteps( true ); } Console.WriteLine( "{0}: Done", GetTime( ) ); Console.Out.Flush( ); #endif // //--// if( PerformanceCounters.ContextualTiming.IsEnabled( ) ) { Console.WriteLine( "{0}: Dumping Performance Counters", GetTime( ) ); using( System.IO.StreamWriter output = new System.IO.StreamWriter( filePrefix + "_timing.type.txt", false, System.Text.Encoding.ASCII ) ) { PerformanceCounters.ContextualTiming.DumpAllByType( output ); } using( System.IO.StreamWriter output = new System.IO.StreamWriter( filePrefix + "_timing.reason.txt", false, System.Text.Encoding.ASCII ) ) { PerformanceCounters.ContextualTiming.DumpAllByReason( output ); } Console.WriteLine( "{0}: Done", GetTime( ) ); } // // We don't need to serialized the prohibited set. 
// Console.WriteLine( "{0}: Saving Results for {1}", GetTime( ), filePrefix ); m_typeSystem.DropCompileTimeObjects( ); { Console.WriteLine( "{0}: Image...", GetTime( ) ); SaveIrToDisk( filePrefix + ".ZeligImage", m_typeSystem ); Console.WriteLine( "{0}: Image done", GetTime( ) ); } if( m_fDumpIR ) { Console.WriteLine( "{0}: IR dump...", GetTime( ) ); DumpIRAsText( filePrefix + ".ZeligIR" ); Console.WriteLine( "{0}: IR dump done", GetTime( ) ); } if (m_fDumpIRXML) { Console.WriteLine("{0}: IR dump (XML)...", GetTime()); DumpIRAsXML(filePrefix + ".ZeligIR.xml"); Console.WriteLine("{0}: IR dump (XML) done", GetTime()); } if( m_fDumpASMDIR ) { Console.WriteLine( "{0}: ASMDIR dump...", GetTime( ) ); m_typeSystem.DisassembleImage( filePrefix + ".asmdir" ); Console.WriteLine( "{0}: ASMDIR dump done", GetTime( ) ); } if( m_fDumpASM ) { Console.WriteLine( "{0}: ASM dump...", GetTime( ) ); m_typeSystem.DisassembleImageInOneFile( filePrefix + ".asm" ); Console.WriteLine( "{0}: ASM dump done", GetTime( ) ); } if( m_fDumpHEX ) { Console.WriteLine( "{0}: HEX file...", GetTime( ) ); using( FileStream stream = new FileStream( filePrefix + ".hex", FileMode.Create ) ) { foreach( var section in m_typeSystem.Image ) { ARM.SRecordParser.Encode( stream, section.Payload, section.Address ); } TS.MethodRepresentation mdEntrypoint = m_typeSystem.TryGetHandler( Microsoft.Zelig.Runtime.HardwareException.Reset ); if( mdEntrypoint != null ) { IR.ControlFlowGraphStateForCodeTransformation cfg = IR.TypeSystemForCodeTransformation.GetCodeForMethod( mdEntrypoint ); if( cfg != null ) { IR.ImageBuilders.SequentialRegion reg = m_typeSystem.ImageBuilder.GetAssociatedRegion( cfg ); if( reg != null ) { ARM.SRecordParser.EncodeEntrypoint( stream, reg.BaseAddress.ToUInt32( ) ); } } } } Console.WriteLine( "{0}: HEX file done", GetTime( ) ); } if( m_fDumpLLVMIR ) { m_typeSystem.Module.DumpToFile( filePrefix + ".bc", false ); } if( m_fDumpLLVMIR_TextRepresentation ) { m_typeSystem.Module.DumpToFile( filePrefix + ".ll", true ); } foreach( var raw in m_dumpRawImage ) { var mem = new MemoryStream( ); foreach( var section in m_typeSystem.Image ) { uint sectionStart = section.Address; uint sectionEnd = sectionStart + ( uint )section.Payload.Length; uint sectionStart2 = Math.Max( sectionStart, raw.RangeStart ); uint sectionEnd2 = Math.Min( sectionEnd, raw.RangeEnd ); if( sectionStart2 < sectionEnd2 ) { uint offset = sectionStart2 - raw.RangeStart; mem.SetLength( offset ); mem.Seek( offset, SeekOrigin.Begin ); mem.Write( section.Payload, 0, section.Payload.Length ); } } if( mem.Length > 0 ) { Console.WriteLine( "{0}: RAW file...", GetTime( ) ); using( FileStream stream = new FileStream( filePrefix + "." + raw.SectionName + ".bin", FileMode.Create ) ) { var buf = mem.ToArray( ); stream.Write( buf, 0, buf.Length ); } Console.WriteLine( "{0}: RAW file done", GetTime( ) ); } } Console.WriteLine( "{0}: Done", GetTime( ) ); } //--// public static void Main( string[] args ) { Bench bench = new Bench( ); Bench.s_pThis = bench; try { // path with space need to be re-assembled string[] recombinedArgs = RecombineArgs( args ); if( recombinedArgs != null && bench.Parse( recombinedArgs ) ) { if( bench.ValidateOptions( ) ) { bench.Compile( ); } } } catch( Importer.SilentCompilationAbortException ) { } catch( Exception ex ) { Console.Error.WriteLine( "Caught exception: {0}", ex ); } finally { if( System.Diagnostics.Debugger.IsAttached ) { Console.WriteLine( "Press <enter> to exit" ); Console.ReadLine( ); } } } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.AcceptanceTestsBodyDateTimeRfc1123 { using System; using System.Collections; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Models; /// <summary> /// Extension methods for Datetimerfc1123. /// </summary> public static partial class Datetimerfc1123Extensions { /// <summary> /// Get null datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetNull(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetNullAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get null datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetNullAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetNullWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Get invalid datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetInvalid(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetInvalidAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get invalid datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetInvalidAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetInvalidWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Get overflow datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetOverflow(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetOverflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get overflow datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<DateTime?> GetOverflowAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetOverflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Get underflow datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetUnderflow(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetUnderflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get underflow datetime value /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetUnderflowAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetUnderflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Put max datetime value Fri, 31 Dec 9999 23:59:59 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> public static void PutUtcMaxDateTime(this IDatetimerfc1123 operations, DateTime datetimeBody) { Task.Factory.StartNew(s => ((IDatetimerfc1123)s).PutUtcMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Put max datetime value Fri, 31 Dec 9999 23:59:59 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task PutUtcMaxDateTimeAsync(this IDatetimerfc1123 operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken)) { await operations.PutUtcMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Get max datetime value fri, 31 dec 9999 23:59:59 gmt /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetUtcLowercaseMaxDateTime(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetUtcLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get max datetime value fri, 31 dec 9999 23:59:59 gmt /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<DateTime?> GetUtcLowercaseMaxDateTimeAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Get max datetime value FRI, 31 DEC 9999 23:59:59 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetUtcUppercaseMaxDateTime(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetUtcUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get max datetime value FRI, 31 DEC 9999 23:59:59 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<DateTime?> GetUtcUppercaseMaxDateTimeAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Put min datetime value Mon, 1 Jan 0001 00:00:00 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> public static void PutUtcMinDateTime(this IDatetimerfc1123 operations, DateTime datetimeBody) { Task.Factory.StartNew(s => ((IDatetimerfc1123)s).PutUtcMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Put min datetime value Mon, 1 Jan 0001 00:00:00 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='datetimeBody'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task PutUtcMinDateTimeAsync(this IDatetimerfc1123 operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken)) { await operations.PutUtcMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false); } /// <summary> /// Get min datetime value Mon, 1 Jan 0001 00:00:00 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static DateTime? GetUtcMinDateTime(this IDatetimerfc1123 operations) { return Task.Factory.StartNew(s => ((IDatetimerfc1123)s).GetUtcMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Get min datetime value Mon, 1 Jan 0001 00:00:00 GMT /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<DateTime?> GetUtcMinDateTimeAsync(this IDatetimerfc1123 operations, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetUtcMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } } }
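// A small usage sketch (not generated code) that exercises the extension methods above. The
// IDatetimerfc1123 instance would normally come from the generated service client, which is not
// shown in this file, so it is passed in by the caller.
namespace Fixtures.AcceptanceTestsBodyDateTimeRfc1123
{
    using System;
    using System.Threading.Tasks;

    public static class Rfc1123UsageSketch
    {
        public static async Task ExerciseAsync(IDatetimerfc1123 operations)
        {
            // Write the documented boundary values (Fri, 31 Dec 9999 23:59:59 GMT and
            // Mon, 1 Jan 0001 00:00:00 GMT)...
            await operations.PutUtcMaxDateTimeAsync(new DateTime(9999, 12, 31, 23, 59, 59, DateTimeKind.Utc));
            await operations.PutUtcMinDateTimeAsync(new DateTime(1, 1, 1, 0, 0, 0, DateTimeKind.Utc));

            // ...and read a few values back; a missing body surfaces as a null DateTime?.
            DateTime? lowercaseMax = await operations.GetUtcLowercaseMaxDateTimeAsync();
            DateTime? min = await operations.GetUtcMinDateTimeAsync();
            DateTime? none = await operations.GetNullAsync();

            Console.WriteLine("{0} / {1} / has value: {2}", lowercaseMax, min, none.HasValue);
        }
    }
}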
using System; using System.Collections.Generic; namespace QarnotSDK { /// <summary> /// Represents an error that occurs during a pool execution. /// </summary> public class QPoolError { /// <summary> /// Qarnot error code. /// Note: error code descriptions are available here: /// https://computing.qarnot.com/developers/develop/common-errors /// </summary> public string Code { get; set; } /// <summary> /// Human-readable error message. /// </summary> public string Message { get; set; } /// <summary> /// Debug code to send to the Qarnot support team. /// </summary> public string Debug { get; set; } internal QPoolError() { } /// <summary> /// Returns all the information about this error in one string. /// </summary> /// <returns></returns> public override string ToString() { if (String.IsNullOrEmpty(Debug)) return String.Format("[{0}] {1}", Code, Message); else return String.Format("[{0}] {1} ({2})", Code, Message, Debug); } } /// <summary> /// Represents a public TCP port of a pool. /// </summary> public class QPoolStatusActiveForwards { /// <summary> /// Port of the application running on the compute node. /// </summary> public UInt16 ApplicationPort { get; set; } /// <summary> /// Port where this application can be reached on the public host. /// </summary> public UInt16 ForwarderPort { get; set; } /// <summary> /// Public host where this application can be reached. /// </summary> public string ForwarderHost { get; set; } internal QPoolStatusActiveForwards() { } } /// <summary> /// Represents the pool elastic properties. /// </summary> internal class QPoolElasticProperty { public const bool DEFAULT_IS_ELASTIC = false; public const uint DEFAULT_MIN_TOTAL_SLOTS = 0; public const uint DEFAULT_MAX_TOTAL_SLOTS = 0; public const uint DEFAULT_MIN_IDLING_SLOTS = 0; public const uint DEFAULT_RESIZE_PERIOD = 90; public const float DEFAULT_RAMP_RESIZE_FACTOR = 0.4f; public const uint DEFAULT_MIN_IDLING_TIME = 90; /// <summary> /// Whether the elastic behaviour is activated. /// </summary> public bool IsElastic { get; set; } = DEFAULT_IS_ELASTIC; /// <summary> /// Lower bound of the number of slots. /// </summary> public uint MinTotalSlots { get; set; } = DEFAULT_MIN_TOTAL_SLOTS; /// <summary> /// Upper bound of the number of slots. /// </summary> public uint MaxTotalSlots { get; set; } = DEFAULT_MAX_TOTAL_SLOTS; /// <summary> /// Keep a number of slots doing nothing, but up and waiting. /// </summary> public uint MinIdleSlots { get; set; } = DEFAULT_MIN_IDLING_SLOTS; /// <summary> /// Period of time for resizing idling slots (in seconds). /// </summary> /// <remarks> ResizePeriod minimum is 90 seconds </remarks> public uint ResizePeriod { get; set; } = DEFAULT_RESIZE_PERIOD; /// <summary> /// In order to close or open slots progressively. /// </summary> /// <remarks> RampResizeFactor is in [0:1[ </remarks> public float RampResizeFactor { get; set; } = DEFAULT_RAMP_RESIZE_FACTOR; /// <summary> /// When a slot is empty, wait MinIdleTimeSeconds seconds before allowing the slot to be closed or reused. /// </summary> public uint MinIdleTimeSeconds { get; set; } = DEFAULT_MIN_IDLING_TIME; internal QPoolElasticProperty() {} } /// <summary> /// Represents the status and the statistics of a running pool node. /// </summary> public class QPoolStatusPerRunningInstanceInfo { /// <summary> /// Phase. /// </summary> public string Phase { get; set; } /// <summary> /// Instance Id. /// </summary> public UInt32 InstanceId { get; set; } /// <summary> /// Core Count.
/// </summary> public UInt32 CoreCount { get; set; } /// <summary> /// Maximum frequency in Ghz. /// </summary> public float MaxFrequencyGHz { get; set; } /// <summary> /// Current frequency in Ghz. /// </summary> public float CurrentFrequencyGHz { get; set; } /// <summary> /// Cpu usage in percentage (0 to 100). /// </summary> public float CpuUsage { get; set; } /// <summary> /// Maximum memory in MB. /// </summary> public float MaxMemoryMB { get; set; } /// <summary> /// Current memory usage in MB. /// </summary> public float CurrentMemoryMB { get; set; } /// <summary> /// Inbound network traffic in Kbps. /// </summary> public float NetworkInKbps { get; set; } /// <summary> /// Outbound network traffic in Kbps. /// </summary> public float NetworkOutKbps { get; set; } /// <summary> /// Progress. /// </summary> public float Progress { get; set; } /// <summary> /// Execution time in seconds. /// </summary> public float ExecutionTimeSec { get; set; } /// <summary> /// Virtual execution time Ghz. /// Note: /// - An execution time of 10 seconds @4Ghz will return here 40 seconds. /// - An execution time of 10 seconds @0.5Ghz will return here 5 seconds. /// </summary> public float ExecutionTimeGHz { get; set; } /// <summary> /// Processor model. /// </summary> public string CpuModel { get; set; } /// <summary> /// Memory usage in percent (0.0f to 1.0f). /// </summary> public float MemoryUsage { get; set; } /// <summary> /// Inbound ports forwarding information. /// </summary> public List<QPoolStatusActiveForwards> ActiveForwards { get; set; } internal QPoolStatusPerRunningInstanceInfo() { ActiveForwards = new List<QPoolStatusActiveForwards>(); } } /// <summary> /// Represents the execution time by cpu infos. /// </summary> public class QPoolStatusExecutionTimeByCpuModel { /// <summary> /// CPU name and Model. /// </summary> public string Model { get; set; } /// <summary> /// CPU time used in seconds. /// </summary> public double Time { get; set; } /// <summary> /// CPU core numbers. /// </summary> public uint Core { get; set; } internal QPoolStatusExecutionTimeByCpuModel() { } } /// <summary> /// Represents the execution cpu ratio for each task. /// </summary> public class QPoolStatusExecutionTimeGhzByCpuModel { /// <summary> /// CPU name and Model. /// </summary> public string Model { get; set; } /// <summary> /// CPU gigahertz time used in seconds. /// </summary> public double TimeGhz { get; set; } /// <summary> /// CPU core numbers. /// </summary> public uint Core { get; set; } /// <summary> /// CPU clock ratio. /// </summary> public double ClockRatio { get; set; } internal QPoolStatusExecutionTimeGhzByCpuModel() { } } /// <summary> /// Represents the statistics of a running pool. /// </summary> public class QPoolStatusRunningInstancesInfo { /// <summary> /// Last information update timestamp. /// </summary> public DateTime Timestamp { get; set; } /// <summary> /// Average Frequency in GHz. /// </summary> public float AverageFrequencyGHz { get; set; } /// <summary> /// Maximum Frequency in GHz. /// </summary> public float MaxFrequencyGHz { get; set; } /// <summary> /// Minimum Frequency in GHz. /// </summary> public float MinFrequencyGHz { get; set; } /// <summary> /// Average Maximum Frequency in GHz. /// </summary> public float AverageMaxFrequencyGHz { get; set; } /// <summary> /// Average CPU Usage. /// </summary> public float AverageCpuUsage { get; set; } /// <summary> /// Cluster Power Indicator. 
/// </summary> public float ClusterPowerIndicator { get; set; } /// <summary> /// Average Memory Usage. /// </summary> public float AverageMemoryUsage { get; set; } /// <summary> /// Average Network Input in Kbps. /// </summary> public float AverageNetworkInKbps { get; set; } /// <summary> /// Average Network Output in Kbps. /// </summary> public float AverageNetworkOutKbps { get; set; } /// <summary> /// Total Network Input in Kbps. /// </summary> public float TotalNetworkInKbps { get; set; } /// <summary> /// Total Network Output in Kbps. /// </summary> public float TotalNetworkOutKbps { get; set; } /// <summary> /// Per running instance information (see QPoolStatusPerRunningInstanceInfo). /// </summary> public List<QPoolStatusPerRunningInstanceInfo> PerRunningInstanceInfo { get; set; } internal QPoolStatusRunningInstancesInfo() { PerRunningInstanceInfo = new List<QPoolStatusPerRunningInstanceInfo>(); } } /// <summary> /// Represents the status of a running pool. /// </summary> public class QPoolStatus { /// <summary> /// Retrieve the instance download progress indicator /// </summary> public float DownloadProgress { get; set; } /// <summary> /// Retrieve the instance execution progress indicator /// </summary> public float ExecutionProgress { get; set; } /// <summary> /// Retrieve the instance upload progress indicator /// </summary> public float UploadProgress { get; set; } /// <summary> /// The pool instance number. /// </summary> public uint InstanceCount { get; set; } /// <summary> /// Download time (in seconds) /// </summary> public long DownloadTimeSec { get; set; } /// <summary> /// Execution time (in seconds) /// </summary> public long ExecutionTimeSec { get; set; } /// <summary> /// Upload time (in seconds) /// </summary> public long UploadTimeSec { get; set; } /// <summary> /// Range for the succeeded instances /// </summary> public string SucceededRange { get; set; } /// <summary> /// Range for the executed instances /// </summary> public string ExecutedRange { get; set; } /// <summary> /// Range for the failed instances /// </summary> public string FailedRange { get; set; } /// <summary> /// Running instances information (see QPoolStatusRunningInstancesInfo) /// </summary> public QPoolStatusRunningInstancesInfo RunningInstancesInfo { get; set; } /// <summary> /// Execution cpu time for each cpu model /// </summary> public List<QPoolStatusExecutionTimeByCpuModel> ExecutionTimeByCpuModel { get; set; } /// <summary> /// Execution cpu ratio for each cpu model /// </summary> public List<QPoolStatusExecutionTimeGhzByCpuModel> ExecutionTimeGhzByCpuModel { get; set; } internal QPoolStatus() { ExecutionTimeByCpuModel = new List<QPoolStatusExecutionTimeByCpuModel>(); ExecutionTimeGhzByCpuModel = new List<QPoolStatusExecutionTimeGhzByCpuModel>(); } } /// <summary> /// Represents the preparation commands and environment of each task run by this pool. /// </summary> public class PoolPreparationTask { /// <summary> /// Command line value. /// </summary> /// <value></value> public string CommandLine { get; set; } /// <summary> /// Command line constructor.
/// </summary> /// <param name="commandLine">command line to execute before launching a task</param> public PoolPreparationTask(string commandLine) { CommandLine = commandLine; } } internal class PoolApi { public override string ToString() { return string.Format("[PoolApi: Name={0}, Profile={1}, InstanceCount={2}, State={3}, CreationDate={4}, Uuid={5}]", Name, Profile, InstanceCount, State, CreationDate, Uuid); } public string Name { get; set; } public string Profile { get; set; } public uint InstanceCount { get; set; } public uint? RunningInstanceCount { get; set; } public uint? RunningCoreCount { get; set; } public TimeSpan? ExecutionTime { get; set; } public TimeSpan? WallTime { get; set; } public DateTime EndDate { get; set; } public List<string> ResourceBuckets { get; set; } public List<ApiAdvancedResourceBucket> AdvancedResourceBuckets { get; set; } public string State { get; set; } public string PreviousState { get; set; } public DateTime StateTransitionTime { get; set; } public DateTime PreviousStateTransitionTime { get; set; } public DateTime LastModified { get; set; } public List<QPoolError> Errors { get; set; } public DateTime CreationDate { get; set; } public List<KeyValHelper> Constants { get; set; } public List<KeyValHelper> Constraints { get; set; } public Dictionary<string, string> Labels { get; set; } public List<String> Tags { get; set; } public Guid Uuid { get; set; } public string Shortname { get; set; } public QPoolStatus Status { get; set; } public QPoolElasticProperty ElasticProperty { get; set; } public PoolPreparationTask PreparationTask { get; set; } public bool AutoDeleteOnCompletion { get; set; } public TimeSpan CompletionTimeToLive { get; set; } public bool? TaskDefaultWaitForPoolResourcesSynchronization { get; set; } public int QueuedOrRunningTaskInstancesCount { get; set; } public int TotalSlotCapacity { get; set; } public double PoolUsage { get; set; } public HardwareConstraints HardwareConstraints { get; set; } internal PoolApi() { Constants = new List<KeyValHelper>(); Constraints = new List<KeyValHelper>(); Labels = new Dictionary<string, string>(); Tags = new List<String>(); ResourceBuckets = new List<String>(); AdvancedResourceBuckets = new List<ApiAdvancedResourceBucket>(); Errors = new List<QPoolError>(); ElasticProperty = new QPoolElasticProperty(); AutoDeleteOnCompletion = false; } } }
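// A small read-only sketch (not part of the SDK) showing how the status DTOs above can be
// consumed. The QPoolStatus instance is assumed to be retrieved from a pool object elsewhere in
// the SDK, since the DTO constructors above are internal.
using System;

namespace QarnotSDK
{
    public static class PoolStatusSketch
    {
        public static void PrintSummary(QPoolStatus status)
        {
            if (status == null) return;

            Console.WriteLine("Progress: download {0:F1} / execution {1:F1} / upload {2:F1}",
                status.DownloadProgress, status.ExecutionProgress, status.UploadProgress);

            var running = status.RunningInstancesInfo;
            if (running == null) return;

            Console.WriteLine("{0} running instance(s), average CPU usage {1:F1}",
                running.PerRunningInstanceInfo.Count, running.AverageCpuUsage);

            foreach (QPoolStatusPerRunningInstanceInfo instance in running.PerRunningInstanceInfo)
            {
                Console.WriteLine(" - instance {0}: {1} core(s) @ {2:F2} GHz, CPU {3:F0}%, {4:F0} MB used",
                    instance.InstanceId, instance.CoreCount, instance.CurrentFrequencyGHz,
                    instance.CpuUsage, instance.CurrentMemoryMB);
            }
        }
    }
}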
/* Software Developer: Fred Ekstrand Copyright (C) 2016 by: Fred Ekstrand Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE SOFTWARE DEVLOPER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of the software developer shall not be used in advertising or otherwise to promote the sale, use or other dealings in this "Software" without prior written authorization from the software developer. */ using System; using System.Collections.Generic; using System.Text; namespace Ekstrand.Encryption.Ciphers { /// <summary> /// Abstract base class defining common operations. /// </summary> /// <seealso cref="IRotor" /> [Serializable] public abstract class AbstractRotor : IRotor { #region Class Variables #pragma warning disable CS1591 // Missing XML comment for publicly visible type or member protected int m_LstPosition = LAST_POSITION; // Last position before incrementation to determine if rotor has cycled. protected Dictionary<byte, byte> m_Translation; // Rotor translation from input to output byte value. protected const int ELEMENT_SIZE = 256; // Number of elements to cover each byte value 0 - 255. protected bool m_SettingsInUse = false; // Guard from changing settings while in use. Force user to use reset. #pragma warning restore CS1591 // Missing XML comment for publicly visible type or member private IRotor m_NextRotor = null; // Reference to next rotor. private int m_StartPosition = 0; // Rotor start position. private int m_Incrementation = 1; // Rotor incrementation amount private RotorTurnDirection m_TurnDirection = RotorTurnDirection.CW; #pragma warning disable CS1591 // Missing XML comment for publicly visible type or member protected const int START_POSITION = 0; protected const int INCREMENTATION = 1; protected const int LAST_POSITION = -1; protected const string SUBSTITUTIONSETERR = "Can not change end points while in use. You must Reset first."; protected const string ARRAYSIZENOT256 = "Array size must be 256"; protected const string DUPLICATE_SIDE_A_ENDPOINTS = "Duplicate Side-A end points."; protected const string DUPLICATE_SIDE_B_ENDPOINTS = "Duplicate Side-B end points."; protected const string STARTPOINT_VALUE = "Start position must be between (0-255)"; protected const string INCREMENTATION_VALUE = "Incrementation value must be between (1-255)"; protected const string SUBSTITUTIONSET_NULL = "SubstitutionSet is null"; #pragma warning restore CS1591 // Missing XML comment for publicly visible type or member #endregion #region Constructor /// <summary> /// Initializes a new instance of the <see cref="AbstractRotor"/> class. 
/// </summary> internal AbstractRotor() { m_Translation = new Dictionary<byte, byte>(); } #endregion #region Methods /// <summary> /// Clears all settings. /// </summary> public abstract void Clear(); /// <summary> /// Encodes or decodes a byte. /// </summary> /// <param name="value">The value.</param> /// <param name="encode">Mode of operation: "true" to encode, "false" to decode.</param> /// <returns> /// The encoded/decoded byte. /// </returns> public abstract byte ProcessByte(byte value, bool encode); /// <summary> /// Resets the rotor back to its start state. /// </summary> public abstract void Reset(); /// <summary> /// Turns the rotor. /// </summary> public abstract void Turn(); /// <summary> /// Populates the Translation points. /// </summary> /// <param name="values">The values.</param> protected void PopulateTransulationPoints(EndPointPair<byte, byte>[] values) { ValidateTransulationPoints(values); for (int i = 0; i < values.Length; i++) { m_Translation.Add(values[i].SideA, values[i].SideB); } } /// <summary> /// Validates the Translation points. /// </summary> /// <param name="values">The values.</param> /// <exception cref="System.NullReferenceException">SubstitutionSet is null</exception> /// <exception cref="System.InvalidOperationException">Can not change end points while in use. You must Reset first.</exception> /// <exception cref="System.ArgumentException"> /// Array size must be 256 /// or /// Duplicate Side-A end points. /// or /// Duplicate Side-B end points. /// </exception> protected void ValidateTransulationPoints(EndPointPair<byte, byte>[] values) { if(values == null) { throw new NullReferenceException(SUBSTITUTIONSET_NULL); } if (m_SettingsInUse) { throw new InvalidOperationException(SUBSTITUTIONSETERR); } if (values.Length != ELEMENT_SIZE) { throw new ArgumentException(ARRAYSIZENOT256); } byte valueA = 0; byte valueB = 0; for (int i = 0; i < values.Length; i++) { valueA = values[i].SideA; valueB = values[i].SideB; for (int j = i + 1; j < values.Length; j++) { if (valueA == values[j].SideA) { throw new ArgumentException(DUPLICATE_SIDE_A_ENDPOINTS); } if (valueB == values[j].SideB) { throw new ArgumentException(DUPLICATE_SIDE_B_ENDPOINTS); } } } } /// <summary> /// Determines whether this instance has cycled. /// </summary> protected virtual void HasCycled() { if (m_LstPosition != -1 && Position == StartPosition) { Cycled = true; return; } else { Cycled = false; return; } } #endregion #region Properties /// <summary> /// Gets the current position of the rotor. /// </summary> /// <value> /// Current rotor position. /// </value> public int Position { get; internal set; } /// <summary> /// Gets or sets the next rotor. /// </summary> /// <value> /// The next rotor. /// </value> /// <exception cref="System.InvalidOperationException">Can not change end points while in use. You must Reset first.</exception> public IRotor NextRotor { get { return m_NextRotor; } set { if (m_SettingsInUse) { throw new InvalidOperationException(SUBSTITUTIONSETERR); } m_NextRotor = value; } } /// <summary> /// Gets or sets the start position. /// </summary> /// <value> /// Integer value (0 - 255) for the position at which the rotor starts. /// </value> /// <exception cref="System.InvalidOperationException">Can not change end points while in use.
You must Reset first.</exception> public int StartPosition { get { return m_StartPosition; } set { if (m_SettingsInUse) { throw new InvalidOperationException(SUBSTITUTIONSETERR); } else if(value > 255 || value < 0) { throw new InvalidOperationException(STARTPOINT_VALUE); } else { m_StartPosition = value; Position = value; } } } /// <summary> /// Gets or sets the substitution set. /// </summary> /// <value> /// Array of EndPointPair&lt;byte, byte&gt; /// </value> public virtual EndPointPair<byte, byte>[] SubstitutionSet { get { if (m_Translation.Count == 0) { return null; } EndPointPair<byte, byte>[] Set = new EndPointPair<byte, byte>[ELEMENT_SIZE]; for (int i = 0; i < m_Translation.Count; i++) { Set[i] = new EndPointPair<byte, byte>((byte)i, m_Translation[(byte)i]); } return Set; } set { ValidateTransulationPoints(value); m_Translation.Clear(); PopulateTransulationPoints(value); } } /// <summary> /// Gets or sets the turn direction. /// </summary> /// <value> /// RotorTurnDirection enum value. /// </value> /// <exception cref="System.InvalidOperationException">Can not change end points while in use. You must Reset first.</exception> public RotorTurnDirection TurnDirection { get { return m_TurnDirection; } set { if (m_SettingsInUse) { throw new InvalidOperationException(SUBSTITUTIONSETERR); } m_TurnDirection = value; } } /// <summary> /// Gets a value indicating whether this <see cref="IRotor" /> has cycled. /// </summary> /// <value> /// <c>true</c> if cycled; otherwise, <c>false</c>. /// </value> public bool Cycled { get; internal set; } /// <summary> /// Gets or Sets the incrementation of the rotor. /// </summary> /// <value> /// The incrementation. /// </value> /// <exception cref="System.InvalidOperationException">Can not change end points while in use. You must Reset first.</exception> public virtual int Incrementation { get { return m_Incrementation; } set { if (m_SettingsInUse) { throw new InvalidOperationException(SUBSTITUTIONSETERR); } if(value < 1 || value > 255) { throw new InvalidOperationException(INCREMENTATION_VALUE); } m_Incrementation = value; } } #endregion } }
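// --------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the library source above). AbstractRotor
// leaves Clear/ProcessByte/Reset/Turn to concrete rotors; the hypothetical ExampleRotor
// below shows one plausible way those members interact with m_Translation, Position,
// Incrementation and HasCycled(). It assumes it is compiled into the same assembly and
// namespace as AbstractRotor (the base constructor and several property setters are
// internal), and its stepping/offset rules are assumptions, not the shipped ones.
using System;
using System.Collections.Generic;

namespace Ekstrand.Encryption.Ciphers
{
    [Serializable]
    public class ExampleRotor : AbstractRotor
    {
        public ExampleRotor() : base() { }

        public override byte ProcessByte(byte value, bool encode)
        {
            m_SettingsInUse = true; // lock configuration once processing starts
            if (encode)
            {
                // Offset by the rotor position, then map Side-A -> Side-B.
                byte shifted = (byte)((value + Position) % ELEMENT_SIZE);
                return m_Translation[shifted];
            }

            // Decode: reverse lookup Side-B -> Side-A, then undo the position offset.
            foreach (KeyValuePair<byte, byte> pair in m_Translation)
            {
                if (pair.Value == value)
                {
                    return (byte)(((pair.Key - Position) % ELEMENT_SIZE + ELEMENT_SIZE) % ELEMENT_SIZE);
                }
            }
            return value; // unreachable for a full 256-entry substitution set
        }

        public override void Turn()
        {
            m_LstPosition = Position;
            int step = TurnDirection == RotorTurnDirection.CW ? Incrementation : -Incrementation;
            Position = ((Position + step) % ELEMENT_SIZE + ELEMENT_SIZE) % ELEMENT_SIZE;

            HasCycled();
            if (Cycled && NextRotor != null)
            {
                NextRotor.Turn(); // odometer-style carry into the next rotor
            }
        }

        public override void Reset()
        {
            Position = StartPosition;
            m_LstPosition = LAST_POSITION;
            Cycled = false;
            m_SettingsInUse = false; // settings may be changed again after a reset
        }

        public override void Clear()
        {
            m_Translation.Clear();
            Reset();
        }
    }
}
// --------------------------------------------------------------------------------------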
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gaxgrpc = Google.Api.Gax.Grpc; using lro = Google.LongRunning; using wkt = Google.Protobuf.WellKnownTypes; using grpccore = Grpc.Core; using moq = Moq; using st = System.Threading; using stt = System.Threading.Tasks; using xunit = Xunit; namespace Google.Cloud.AIPlatform.V1.Tests { /// <summary>Generated unit tests.</summary> public sealed class GeneratedModelServiceClientTest { [xunit::FactAttribute] public void GetModelRequestObject() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model response = client.GetModel(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelRequestObjectAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", 
Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Model>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model responseCallSettings = await client.GetModelAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Model responseCancellationToken = await client.GetModelAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModel() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model response = client.GetModel(request.Name); xunit::Assert.Same(expectedResponse, response); 
mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Model>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model responseCallSettings = await client.GetModelAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Model responseCancellationToken = await client.GetModelAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelResourceNames() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new 
wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model response = client.GetModel(request.ModelName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelResourceNamesAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelRequest request = new GetModelRequest { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.GetModelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Model>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model responseCallSettings = await client.GetModelAsync(request.ModelName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Model responseCancellationToken = await client.GetModelAsync(request.ModelName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void UpdateModelRequestObject() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); UpdateModelRequest request = new UpdateModelRequest { Model = new Model(), UpdateMask = new wkt::FieldMask(), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), 
DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.UpdateModel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model response = client.UpdateModel(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task UpdateModelRequestObjectAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); UpdateModelRequest request = new UpdateModelRequest { Model = new Model(), UpdateMask = new wkt::FieldMask(), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.UpdateModelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Model>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model responseCallSettings = await client.UpdateModelAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Model responseCancellationToken = await client.UpdateModelAsync(request, st::CancellationToken.None); 
xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void UpdateModel() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); UpdateModelRequest request = new UpdateModelRequest { Model = new Model(), UpdateMask = new wkt::FieldMask(), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = "artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.UpdateModel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model response = client.UpdateModel(request.Model, request.UpdateMask); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task UpdateModelAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); UpdateModelRequest request = new UpdateModelRequest { Model = new Model(), UpdateMask = new wkt::FieldMask(), }; Model expectedResponse = new Model { ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"), DisplayName = "display_name137f65c2", Description = "description2cf9da67", PredictSchemata = new PredictSchemata(), MetadataSchemaUri = "metadata_schema_uric874bf0a", Metadata = new wkt::Value(), TrainingPipelineAsTrainingPipelineName = TrainingPipelineName.FromProjectLocationTrainingPipeline("[PROJECT]", "[LOCATION]", "[TRAINING_PIPELINE]"), ContainerSpec = new ModelContainerSpec(), SupportedDeploymentResourcesTypes = { Model.Types.DeploymentResourcesType.DedicatedResources, }, SupportedInputStorageFormats = { "supported_input_storage_formats82472d65", }, SupportedOutputStorageFormats = { "supported_output_storage_formats9adca4ec", }, CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), DeployedModels = { new DeployedModelRef(), }, Etag = "etage8ad7218", Labels = { { "key8a0b6e3c", "value60c16320" }, }, SupportedExportFormats = { new Model.Types.ExportFormat(), }, ExplanationSpec = new ExplanationSpec(), EncryptionSpec = new EncryptionSpec(), ArtifactUri = 
"artifact_uri469c7020", }; mockGrpcClient.Setup(x => x.UpdateModelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Model>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); Model responseCallSettings = await client.UpdateModelAsync(request.Model, request.UpdateMask, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Model responseCancellationToken = await client.UpdateModelAsync(request.Model, request.UpdateMask, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluationRequestObject() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation response = client.GetModelEvaluation(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationRequestObjectAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluation>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation responseCallSettings = await client.GetModelEvaluationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluation 
responseCancellationToken = await client.GetModelEvaluationAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluation() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation response = client.GetModelEvaluation(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluation>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation responseCallSettings = await client.GetModelEvaluationAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluation responseCancellationToken = await client.GetModelEvaluationAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluationResourceNames() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { 
ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation response = client.GetModelEvaluation(request.ModelEvaluationName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationResourceNamesAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationRequest request = new GetModelEvaluationRequest { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), }; ModelEvaluation expectedResponse = new ModelEvaluation { ModelEvaluationName = ModelEvaluationName.FromProjectLocationModelEvaluation("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]"), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), SliceDimensions = { "slice_dimensions7d96a205", }, ModelExplanation = new ModelExplanation(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluation>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluation responseCallSettings = await client.GetModelEvaluationAsync(request.ModelEvaluationName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluation responseCancellationToken = await client.GetModelEvaluationAsync(request.ModelEvaluationName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluationSliceRequestObject() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), 
CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSlice(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice response = client.GetModelEvaluationSlice(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationSliceRequestObjectAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSliceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluationSlice>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice responseCallSettings = await client.GetModelEvaluationSliceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluationSlice responseCancellationToken = await client.GetModelEvaluationSliceAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluationSlice() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSlice(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice response = client.GetModelEvaluationSlice(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationSliceAsync() { 
moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSliceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluationSlice>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice responseCallSettings = await client.GetModelEvaluationSliceAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluationSlice responseCancellationToken = await client.GetModelEvaluationSliceAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetModelEvaluationSliceResourceNames() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSlice(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice response = client.GetModelEvaluationSlice(request.ModelEvaluationSliceName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetModelEvaluationSliceResourceNamesAsync() { moq::Mock<ModelService.ModelServiceClient> mockGrpcClient = new moq::Mock<ModelService.ModelServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetModelEvaluationSliceRequest request = new GetModelEvaluationSliceRequest { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", 
"[MODEL]", "[EVALUATION]", "[SLICE]"), }; ModelEvaluationSlice expectedResponse = new ModelEvaluationSlice { ModelEvaluationSliceName = ModelEvaluationSliceName.FromProjectLocationModelEvaluationSlice("[PROJECT]", "[LOCATION]", "[MODEL]", "[EVALUATION]", "[SLICE]"), Slice = new ModelEvaluationSlice.Types.Slice(), MetricsSchemaUri = "metrics_schema_uriad9a097a", Metrics = new wkt::Value(), CreateTime = new wkt::Timestamp(), }; mockGrpcClient.Setup(x => x.GetModelEvaluationSliceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ModelEvaluationSlice>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ModelServiceClient client = new ModelServiceClientImpl(mockGrpcClient.Object, null); ModelEvaluationSlice responseCallSettings = await client.GetModelEvaluationSliceAsync(request.ModelEvaluationSliceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ModelEvaluationSlice responseCancellationToken = await client.GetModelEvaluationSliceAsync(request.ModelEvaluationSliceName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } } }
using System; using System.Collections.Generic; using System.Text; using System.Drawing.Printing; using System.Drawing; using DowUtils; namespace Factotum { class RsPartition : ReportSection { public RsPartition(MainReport report, ReportSectionRules rules, int subsections) : base(report, rules, subsections) { } public override bool IsIncluded() { return (rpt.eInspection != null && rpt.eInspection.InspectionHasGrid && rpt.eGrid.GridStartRow != null & rpt.eGrid.GridStartCol != null && rpt.eGrid.GridEndRow != null & rpt.eGrid.GridEndCol != null); } public override bool CanFitSome(PrintPageEventArgs args, float Y) { // If we can't fit the whole thing, put it on the next page. int padding = 5; int tablePadding = 2; int partitions = getPartitionCount(); return (args.MarginBounds.Bottom - Y - rpt.footerHeight > rpt.regTextFont.Height * partitions + rpt.boldRegTextFont.Height + padding * 3 + tablePadding * (partitions + 1)); } public override bool Print(PrintPageEventArgs args, float Y) { // Todo: use measurement graphics for row height? Graphics g = args.Graphics; Graphics measure = args.PageSettings.PrinterSettings.CreateMeasurementGraphics(); int leftX = args.MarginBounds.X; int centerX = (int)(leftX + args.MarginBounds.Width / 2); int rightX = leftX + args.MarginBounds.Width; int curX = leftX; int padding = 5; float startY = Y + padding; float curY = startY; float maxY = startY; int rows; int cols; int colHeadRows; int rowHeadCols; int headColWidth; int dataColWidth; int tablePadding; // Always include rows for U/S Ext, U/S Main, D/S Ext int partitionCount = getPartitionCount(); string[,] partitions; Font curFont = rpt.regTextFont; // Get the partition table. if (!GetPartitionArray(partitionCount, out partitions, out rows)) { g.DrawString("No Partition Information", curFont, Brushes.Black, leftX, curY); } else { // Put the title left-aligned. g.DrawString("Partition Information", curFont, Brushes.Black, leftX, curY); curY += curFont.Height + padding; // Put the partition table left-aligned cols = 5; colHeadRows = 1; rowHeadCols = 1; headColWidth = 75; dataColWidth = 40; tablePadding = 2; curY = DrawTable(partitions, rows, cols, colHeadRows, rowHeadCols, g, curX, curY, headColWidth, dataColWidth, tablePadding, rpt.boldRegTextFont, rpt.regTextFont, true, true); if (curY > maxY) maxY = curY; } // Insert the appropriate graphic based on the column layout for the grid. curX = 305; curY = startY; if (!rpt.eGrid.GridHideColumnLayoutGraphic) { g.DrawImage((rpt.eGrid.GridIsColumnCCW ? Properties.Resources.orientation_ccw : Properties.Resources.orientation_cw), curX, curY, 75, 75); curY += 75; if (curY > maxY) maxY = curY; } // The gridInfo table string[,] gridInfo = GetGridInfoArray(); curX = rightX - 400; curY = startY; curFont = rpt.regTextFont; // Put the title. 
string s = "Grid Information"; g.DrawString(s, curFont, Brushes.Black, curX, curY); curY += curFont.Height + padding; rows = 3; cols = 4; colHeadRows = 1; rowHeadCols = 1; tablePadding = 2; int[] colWidths = new int[] { 40, 50, 50, 260 }; // Draw the table curY = DrawTable(gridInfo, rows, cols, colHeadRows, rowHeadCols, g, curX, curY, colWidths, tablePadding, rpt.boldRegTextFont, rpt.regTextFont, true, true); if (curY > maxY) maxY = curY; maxY += padding; hr(args, maxY); this.Y = maxY; return true; } private bool GetPartitionArray(int partitionCount, out string[,] table, out int rows) { table = new string[partitionCount + 1, 5]; rows = 0; if (rpt.eGrid.GridUpExtStartRow == null && rpt.eGrid.GridUpExtEndRow == null && rpt.eGrid.GridUpMainStartRow == null && rpt.eGrid.GridUpMainEndRow == null && rpt.eGrid.GridDnMainStartRow == null && rpt.eGrid.GridDnMainEndRow == null && rpt.eGrid.GridDnExtStartRow == null && rpt.eGrid.GridDnExtEndRow == null && rpt.eGrid.GridBranchStartRow == null && rpt.eGrid.GridBranchEndRow == null && rpt.eGrid.GridBranchExtStartRow == null && rpt.eGrid.GridBranchExtEndRow == null) return false; int row, col; int gridStartRow = (int)rpt.eGrid.GridStartRow; int gridEndRow = (int)rpt.eGrid.GridEndRow; int gridStartCol = (int)rpt.eGrid.GridStartCol; int gridEndCol = (int)rpt.eGrid.GridEndCol; int gridRows = gridEndRow - gridStartRow + 1; int gridCols = gridEndCol - gridStartCol + 1; // No Heading row row = 0; // Min row col = 0; table[row, col] = ""; col++; table[row, col] = "S row"; col++; table[row, col] = "E row"; col++; table[row, col] = "S col"; col++; table[row, col] = "E col"; if (rpt.eGrid.GridUpExtStartRow != null && rpt.eGrid.GridUpExtEndRow != null) { row++; // U/S Ext row col = 0; table[row, col] = "U/S Ext"; col++; table[row, col] = formatRow(rpt.eGrid.GridUpExtStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridUpExtEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } if (rpt.eGrid.GridUpMainStartRow != null && rpt.eGrid.GridUpMainEndRow != null) { row++; // U/S Main Row col = 0; table[row, col] = ( rpt.eGrid.GridDnMainStartRow != null && rpt.eGrid.GridDnMainEndRow != null ? 
"U/S Main" : "Main"); col++; table[row, col] = formatRow(rpt.eGrid.GridUpMainStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridUpMainEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } if (rpt.eGrid.GridDnMainStartRow != null && rpt.eGrid.GridDnMainEndRow != null) { row++; // D/S Main col = 0; table[row, col] = "D/S Main"; col++; table[row, col] = formatRow(rpt.eGrid.GridDnMainStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridDnMainEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } if (rpt.eGrid.GridDnExtStartRow != null && rpt.eGrid.GridDnExtEndRow != null) { row++; // D/S Ext col = 0; table[row, col] = "D/S Ext"; col++; table[row, col] = formatRow(rpt.eGrid.GridDnExtStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridDnExtEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } if (rpt.eGrid.GridBranchStartRow != null && rpt.eGrid.GridBranchEndRow != null) { row++; // Branch col = 0; table[row, col] = "Branch"; col++; table[row, col] = formatRow(rpt.eGrid.GridBranchStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridBranchEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } if (rpt.eGrid.GridBranchExtStartRow != null && rpt.eGrid.GridBranchExtEndRow != null) { row++; // Branch Ext col = 0; table[row, col] = "Br. Ext"; col++; table[row, col] = formatRow(rpt.eGrid.GridBranchExtStartRow); col++; table[row, col] = formatRow(rpt.eGrid.GridBranchExtEndRow); col++; table[row, col] = formatCol(gridStartCol); col++; table[row, col] = formatCol(gridEndCol); } rows = row+1; return true; } private string[,] GetGridInfoArray() { // First make a 2D string array to contain all the data that we want to present string[,] table = new string[3, 4]; string xy; // Heading row int row = 0; int col = 0; int gridStartRow = (int)rpt.eGrid.GridStartRow; int gridEndRow = (int)rpt.eGrid.GridEndRow; int gridStartCol = (int)rpt.eGrid.GridStartCol; int gridEndCol = (int)rpt.eGrid.GridEndCol; int gridRows = gridEndRow - gridStartRow + 1; int gridCols = gridEndCol - gridStartCol + 1; table[row, col] = ""; col++; table[row, col] = "Size"; col++; table[row, col] = "Ct."; col++; table[row, col] = "Zero Reference"; row++; col = 0; table[row, col] = "Row"; col++; table[row, col] = Util.GetFormattedDecimal_NA(rpt.eGrid.GridAxialDistance); col++; table[row, col] = Util.GetFormattedInt_NA(gridRows); col++; if (rpt.eGrid.GridAxialLocOverride == null) { if (rpt.eGrid.GridAxialDistance == null) xy = "N/A - Axial Distance not set"; else // Unless we have an override set, the start location is 3 grid bands (2 grid widths) U/S Weld 1 // Note: the first grid band is just slightly U/S of the weld. 
xy = String.Format("{0}\" U/S Weld 1", 2 * rpt.eGrid.GridAxialDistance); } else xy = rpt.eGrid.GridAxialLocOverride; table[row, col] = xy; row++; col = 0; table[row, col] = "Col"; col++; table[row, col] = Util.GetFormattedDecimal_NA(rpt.eGrid.GridRadialDistance); col++; table[row, col] = Util.GetFormattedInt_NA(gridCols); col++; xy = rpt.eGrid.GridRadialLocation; if (xy == null) xy = "N/A"; table[row, col] = xy; return table; } private int getPartitionCount() { // Always include rows for U/S Ext, U/S Main, D/S Ext int partitionCount = 3; // add one for D/S Main if (rpt.eComponent.ComponentHasDs) partitionCount++; // add for Branch and BranchExt if (rpt.eComponent.ComponentHasBranch) partitionCount += 2; return partitionCount; } private string formatRow(short? i) { if (i == null) return "N/A"; return (i+1).ToString(); } private string formatCol(short? i) { if (i == null) return "N/A"; return EMeasurement.GetColLabel((short)i); } private string formatCol(int? i) { if (i == null) return "N/A"; return EMeasurement.GetColLabel((short)i); } } }
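// --------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the Factotum source above). RsPartition
// hands DrawTable rectangular string[,] arrays whose first row holds column headings
// and whose first column holds row headings, with colHeadRows/rowHeadCols telling the
// renderer how many of each to treat as headers. The hypothetical helper below builds
// a tiny table in that same shape; the values are made up and it touches no report types.
internal static class ExampleReportTables
{
    // Returns a 3x3 table: 1 heading row, 1 row-header column, 2x2 data cells.
    internal static string[,] BuildGridInfoShapedTable()
    {
        string[,] table = new string[3, 3];

        table[0, 0] = "";    table[0, 1] = "Size"; table[0, 2] = "Ct.";
        table[1, 0] = "Row"; table[1, 1] = "0.50"; table[1, 2] = "12";
        table[2, 0] = "Col"; table[2, 1] = "1.00"; table[2, 2] = "36";

        // A caller would pass this to DrawTable with rows = 3, cols = 3,
        // colHeadRows = 1 and rowHeadCols = 1, mirroring RsPartition.Print.
        return table;
    }
}
// --------------------------------------------------------------------------------------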
#if FRB_MDX || XNA3 #define SUPPORTS_FRB_DRAWN_GUI #endif using System; using System.Collections.Generic; using System.Text; #if FRB_MDX using Keys = Microsoft.DirectX.DirectInput.Key; using Vector2 = Microsoft.DirectX.Vector2; using Vector3 = Microsoft.DirectX.Vector3; using Matrix = Microsoft.DirectX.Matrix; using FlatRedBall.Math; #elif FRB_XNA using Microsoft.Xna.Framework.Input; using Microsoft.Xna.Framework; using Keys = Microsoft.Xna.Framework.Input.Keys; #endif using FlatRedBall; using FlatRedBall.Gui; using FlatRedBall.Input; using FlatRedBall.Math; namespace EditorObjects { public static class CameraMethods { #region Fields public static Axis mUpAxis; public static Vector3 CenterTarget; #endregion #region Properties public static Axis UpAxis { get { return mUpAxis; } set { mUpAxis = value; } } #endregion #region Static Constructor static CameraMethods() { UpAxis = Axis.Y; } #endregion public static void FocusOn(Sprite sprite) { float distanceAway = 10 * Math.Max(sprite.ScaleX, sprite.ScaleY); SetFocusValues(distanceAway, sprite.Position); } private static void SetFocusValues(float distanceAway, Vector3 targetPosition) { float minimumDistanceFromClipPlane = SpriteManager.Camera.NearClipPlane * 1.05f; float maximumDistanceFromClipPlane = SpriteManager.Camera.FarClipPlane * .95f; distanceAway = Math.Max(distanceAway, minimumDistanceFromClipPlane); distanceAway = Math.Min(distanceAway, maximumDistanceFromClipPlane); #if FRB_MDX SpriteManager.Camera.Position = targetPosition - distanceAway * SpriteManager.Camera.RotationMatrix.Forward(); #else SpriteManager.Camera.Position = targetPosition - distanceAway * SpriteManager.Camera.RotationMatrix.Forward; #endif CameraMethods.CenterTarget = targetPosition; } public static void MouseCameraControl(Camera camera) { Cursor cursor = GuiManager.Cursor; if (cursor.WindowOver == null && GuiManager.DominantWindowActive == false && !cursor.MiddlePush #if SUPPORTS_FRB_DRAWN_GUI && cursor.WindowMiddleButtonPushed == null #endif #if! 
FRB_MDX && FlatRedBallServices.Game.IsActive #endif ) { float pixelSize = 1/camera.PixelsPerUnitAt(0); // middle-click drag moves the camera if (InputManager.Mouse.ButtonDown(FlatRedBall.Input.Mouse.MouseButtons.MiddleButton)) { camera.X += -InputManager.Mouse.XChange * pixelSize; camera.Y += InputManager.Mouse.YChange * pixelSize; } // double-click the middle-mouse button to center on the mouse's position if (InputManager.Mouse.ButtonDoubleClicked(FlatRedBall.Input.Mouse.MouseButtons.MiddleButton)) { cursor.GetCursorPosition(camera, 0); } // mouse-wheel scrolling zooms in and out if (InputManager.Mouse.ScrollWheelChange != 0) { if (camera.Orthogonal == false) camera.Z *= 1 + System.Math.Sign(InputManager.Mouse.ScrollWheelChange) * -.1f; else { camera.OrthogonalHeight *= 1 + System.Math.Sign(InputManager.Mouse.ScrollWheelChange) * -.1f; camera.OrthogonalWidth *= 1 + System.Math.Sign(InputManager.Mouse.ScrollWheelChange) * -.1f; } } } } public static void MouseCameraControl3D(Camera camera) { Cursor cursor = GuiManager.Cursor; if (cursor.WindowOver == null && GuiManager.DominantWindowActive == false && !cursor.MiddlePush #if SUPPORTS_FRB_DRAWN_GUI && cursor.WindowMiddleButtonPushed == null #endif #if FRB_XNA && FlatRedBallServices.Game.IsActive #endif ) { #region ScrollWheel zooms in/out if (InputManager.Mouse.ScrollWheelChange != 0) { int zoomValue = System.Math.Sign(InputManager.Mouse.ScrollWheelChange); ZoomBy(camera, zoomValue); } #endregion #region Alt+Right Mouse Button Down - Zoom if ((InputManager.Keyboard.KeyDown(Keys.LeftAlt) || InputManager.Keyboard.KeyDown(Keys.RightAlt)) && InputManager.Mouse.ButtonDown(FlatRedBall.Input.Mouse.MouseButtons.RightButton)) { if (InputManager.Mouse.YVelocity != 0) { ZoomBy(camera, (-InputManager.Mouse.YVelocity/512.0f )); } } #endregion #region Alt+Middle Mouse Button - Rotate if ((InputManager.Keyboard.KeyDown(Keys.LeftAlt) || InputManager.Keyboard.KeyDown(Keys.RightAlt)) && InputManager.Mouse.ButtonDown(FlatRedBall.Input.Mouse.MouseButtons.MiddleButton)) { Vector3 upVector; switch(mUpAxis) { case Axis.X: upVector = new Vector3(1,0,0); break; case Axis.Y: upVector = new Vector3(0,1,0); break; case Axis.Z: upVector = new Vector3(0,0,1); break; default: upVector = new Vector3(0, 1, 0); break; } InputManager.Mouse.ControlPositionedObjectOrbit(camera, CenterTarget, false, upVector); } #endregion #region MiddleMouseButtonPan else if (InputManager.Mouse.ButtonDown(FlatRedBall.Input.Mouse.MouseButtons.MiddleButton)) { float distanceAway = (camera.Position - CenterTarget).Length(); const float multiplier = .0015f; #if FRB_MDX Vector3 cameraRight = camera.RotationMatrix.Right(); Vector3 cameraUp = camera.RotationMatrix.Up(); #else Vector3 cameraRight = camera.RotationMatrix.Right; Vector3 cameraUp = camera.RotationMatrix.Up; #endif Vector3 offset = -InputManager.Mouse.XChange * distanceAway * multiplier * cameraRight + InputManager.Mouse.YChange * distanceAway * multiplier * cameraUp; camera.Position += offset; CenterTarget += offset; } #endregion } } private static void ZoomBy(Camera camera, float zoomValue) { if (camera.Orthogonal == false) { Vector3 distanceFromTarget = camera.Position - CenterTarget; distanceFromTarget *= 1 + zoomValue * -.1f; camera.Position = CenterTarget + distanceFromTarget; } else { camera.OrthogonalHeight *= 1 + zoomValue * -.1f; camera.OrthogonalWidth *= 1 + zoomValue * -.1f; } } public static void KeyboardCameraControl(Camera camera) { if (InputManager.ReceivingInput == null) { // movement should be time based, so use velocity 
#region if ortho

                if (camera.Orthogonal)
                {
                    if (InputManager.Keyboard.KeyDown(Keys.Up))
                        camera.YVelocity = camera.OrthogonalHeight;
                    else if (InputManager.Keyboard.KeyDown(Keys.Down))
                        camera.YVelocity = -camera.OrthogonalHeight;
                    else
                        camera.YVelocity = 0;

                    if (InputManager.Keyboard.KeyDown(Keys.Left))
                        camera.XVelocity = -camera.OrthogonalWidth;
                    else if (InputManager.Keyboard.KeyDown(Keys.Right))
                        camera.XVelocity = camera.OrthogonalWidth;
                    else
                        camera.XVelocity = 0;

                    // +/- zoom the orthogonal view by scaling its width and height
                    // TODO: Make this time based
#if FRB_MDX
                    if (InputManager.Keyboard.KeyDown(Keys.Equals))
#elif FRB_XNA
                    if (InputManager.Keyboard.KeyDown(Keys.OemPlus))
#endif
                    {
                        camera.OrthogonalWidth *= .98f;
                        camera.OrthogonalHeight *= .98f;
                    }
#if FRB_MDX
                    else if (InputManager.Keyboard.KeyDown(Keys.Minus))
#elif FRB_XNA
                    else if (InputManager.Keyboard.KeyDown(Keys.OemMinus))
#endif
                    {
                        camera.OrthogonalWidth *= 1.02f;
                        camera.OrthogonalHeight *= 1.02f;
                    }
                }
#endregion

#region 3D view
                else
                {
                    // Y axis movement by pushing UP/DOWN
                    if (InputManager.Keyboard.KeyDown(Keys.Up))
                        camera.YVelocity = -FlatRedBall.Math.MathFunctions.ForwardVector3.Z * camera.Z;
                    else if (InputManager.Keyboard.KeyDown(Keys.Down))
                        camera.YVelocity = FlatRedBall.Math.MathFunctions.ForwardVector3.Z * camera.Z;
                    else
                        camera.YVelocity = 0;

                    // X axis movement by pushing LEFT/RIGHT
                    if (InputManager.Keyboard.KeyDown(Keys.Left))
                        camera.XVelocity = FlatRedBall.Math.MathFunctions.ForwardVector3.Z * camera.Z;
                    else if (InputManager.Keyboard.KeyDown(Keys.Right))
                        camera.XVelocity = -FlatRedBall.Math.MathFunctions.ForwardVector3.Z * camera.Z;
                    else
                        camera.XVelocity = 0;

                    // Z axis movement by pushing +/-
#if FRB_MDX
                    if (InputManager.Keyboard.KeyDown(Keys.Equals))
#elif FRB_XNA
                    if (InputManager.Keyboard.KeyDown(Keys.OemPlus))
#endif
                    {
                        camera.ZVelocity = -camera.Z;
                    }
#if FRB_MDX
                    else if (InputManager.Keyboard.KeyDown(Keys.Minus))
#elif FRB_XNA
                    else if (InputManager.Keyboard.KeyDown(Keys.OemMinus))
#endif
                    {
                        camera.ZVelocity = camera.Z;
#if FRB_MDX
                        // It's been the case before that users have gotten to a camera Z of 0.
                        // When this occurs the camera does not respond to keyboard input commands.
                        // To remedy this problem, simply move the camera back slightly if the camera is too far forward.
                        // Later, when implementing true 3D navigation, this will have to be fixed.
                        if (camera.Z > -.006f)
                            camera.Z = -.006f;
#endif
                    }
                    else
                        camera.ZVelocity = 0;
                }
#endregion

#region set maximum bounds for the camera
                if (camera.X > 200000) camera.X = 200000;
                if (camera.X < -200000) camera.X = -200000;
                if (camera.Y > 200000) camera.Y = 200000;
                if (camera.Y < -200000) camera.Y = -200000;
                if (camera.Z < -200000) camera.Z = -200000;
#endregion

#region reset the position of the camera if any of its positions have a float.NaN value
                if (float.IsNaN(camera.X)) camera.X = 0;
                if (float.IsNaN(camera.Y)) camera.Y = 0;
                if (float.IsNaN(camera.Z)) camera.Z = FlatRedBall.Math.MathFunctions.ForwardVector3.Z * 50;
#endregion
            }
        }

        public static void CameraControlFps(Camera camera)
        {
            GuiManager.Cursor.StaticPosition = true;

            Vector3 up = new Vector3(0, 1, 0);

            camera.Velocity = new Vector3();

            Keys forwardKey = Keys.W;
            Keys backKey = Keys.S;
            Keys leftKey = Keys.A;
            Keys rightKey = Keys.D;

            FlatRedBall.Input.Keyboard keyboard = InputManager.Keyboard;

            float movementSpeed = 7;

            // W/S move along the third row of the rotation matrix (the camera's forward/back axis);
            // A/D strafe along the first row (the camera's right axis).
            if (keyboard.KeyDown(forwardKey))
            {
                camera.Velocity += new Vector3(camera.RotationMatrix.M31, camera.RotationMatrix.M32, camera.RotationMatrix.M33) * movementSpeed;
            }
            else if (keyboard.KeyDown(backKey))
            {
                camera.Velocity += new Vector3(camera.RotationMatrix.M31, camera.RotationMatrix.M32, camera.RotationMatrix.M33) * -movementSpeed;
            }
            if (keyboard.KeyDown(leftKey))
            {
                camera.Velocity += new Vector3(camera.RotationMatrix.M11, camera.RotationMatrix.M12, camera.RotationMatrix.M13) * -movementSpeed;
            }
            if (keyboard.KeyDown(rightKey))
            {
                camera.Velocity += new Vector3(camera.RotationMatrix.M11, camera.RotationMatrix.M12, camera.RotationMatrix.M13) * movementSpeed;
            }

#if FRB_XNA
            // Mouse movement rotates the camera: pitch around the camera's right vector, yaw around world up.
            // These values may be way too fast/slow because I modified it to use pixels rather
            // than the somewhat arbitrary world coordinates
            camera.RotationMatrix *= Matrix.CreateFromAxisAngle(
                camera.RotationMatrix.Right,
                -.2f * GuiManager.Cursor.ScreenYChange * TimeManager.SecondDifference);

            camera.RotationMatrix *= Matrix.CreateFromAxisAngle(
                up,
                -.2f * GuiManager.Cursor.ScreenXChange * TimeManager.SecondDifference);
#elif FRB_MDX
            camera.RotationMatrix *= Matrix.RotationAxis(
                new Vector3(camera.RotationMatrix.M11, camera.RotationMatrix.M12, camera.RotationMatrix.M13),
                -.2f * GuiManager.Cursor.YVelocity * TimeManager.SecondDifference);

            camera.RotationMatrix *= Matrix.RotationAxis(
                up,
                -.2f * GuiManager.Cursor.XVelocity * TimeManager.SecondDifference);
#endif
        }
    }
}
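// Usage sketch (not part of the file above): how these static camera helpers are typically
// driven once per frame. The containing class name used here, CameraMethods, is an assumption
// for illustration, as is using SpriteManager.Camera as the camera being controlled; substitute
// the real class name from the file above in an actual project.
public static class CameraControlUsageExample
{
    public static void EveryFrameActivity()
    {
        var camera = FlatRedBall.SpriteManager.Camera;   // default FlatRedBall camera

        // Orbit (Alt+middle), pan (middle drag) and zoom (wheel / Alt+right drag) with the mouse.
        CameraMethods.MouseCameraControl3D(camera);

        // Arrow keys move the camera; +/- zoom the orthogonal view or dolly the 3D camera.
        CameraMethods.KeyboardCameraControl(camera);
    }
}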
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the ec2-2014-10-01.normal.json service model. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.EC2.Model { /// <summary> /// Container for the parameters to the ModifyInstanceAttribute operation. /// Modifies the specified attribute of the specified instance. You can specify only one /// attribute at a time. /// /// /// <para> /// To modify some attributes, the instance must be stopped. For more information, see /// <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_ChangingAttributesWhileInstanceStopped.html">Modifying /// Attributes of a Stopped Instance</a> in the <i>Amazon Elastic Compute Cloud User Guide /// for Linux</i>. /// </para> /// </summary> public partial class ModifyInstanceAttributeRequest : AmazonEC2Request { private InstanceAttributeName _attribute; private List<InstanceBlockDeviceMappingSpecification> _blockDeviceMappings = new List<InstanceBlockDeviceMappingSpecification>(); private bool? _disableApiTermination; private bool? _ebsOptimized; private List<string> _groups = new List<string>(); private string _instanceId; private string _instanceInitiatedShutdownBehavior; private string _instanceType; private string _kernel; private string _ramdisk; private bool? _sourceDestCheck; private string _sriovNetSupport; private string _userData; private string _value; /// <summary> /// Empty constructor used to set properties independently even when a simple constructor is available /// </summary> public ModifyInstanceAttributeRequest() { } /// <summary> /// Instantiates ModifyInstanceAttributeRequest with the parameterized properties /// </summary> /// <param name="instanceId">The ID of the instance.</param> /// <param name="attribute">The name of the attribute.</param> public ModifyInstanceAttributeRequest(string instanceId, InstanceAttributeName attribute) { _instanceId = instanceId; _attribute = attribute; } /// <summary> /// Gets and sets the property Attribute. /// <para> /// The name of the attribute. /// </para> /// </summary> public InstanceAttributeName Attribute { get { return this._attribute; } set { this._attribute = value; } } // Check to see if Attribute property is set internal bool IsSetAttribute() { return this._attribute != null; } /// <summary> /// Gets and sets the property BlockDeviceMappings. /// <para> /// Modifies the <code>DeleteOnTermination</code> attribute for volumes that are currently /// attached. The volume must be owned by the caller. If no value is specified for <code>DeleteOnTermination</code>, /// the default is <code>true</code> and the volume is deleted when the instance is terminated. /// </para> /// /// <para> /// To add instance store volumes to an Amazon EBS-backed instance, you must add them /// when you launch the instance. 
For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html#Using_OverridingAMIBDM">Updating /// the Block Device Mapping when Launching an Instance</a> in the <i>Amazon Elastic Compute /// Cloud User Guide for Linux</i>. /// </para> /// </summary> public List<InstanceBlockDeviceMappingSpecification> BlockDeviceMappings { get { return this._blockDeviceMappings; } set { this._blockDeviceMappings = value; } } // Check to see if BlockDeviceMappings property is set internal bool IsSetBlockDeviceMappings() { return this._blockDeviceMappings != null && this._blockDeviceMappings.Count > 0; } /// <summary> /// Gets and sets the property DisableApiTermination. /// <para> /// If the value is <code>true</code>, you can't terminate the instance using the Amazon /// EC2 console, CLI, or API; otherwise, you can. /// </para> /// </summary> public bool DisableApiTermination { get { return this._disableApiTermination.GetValueOrDefault(); } set { this._disableApiTermination = value; } } // Check to see if DisableApiTermination property is set internal bool IsSetDisableApiTermination() { return this._disableApiTermination.HasValue; } /// <summary> /// Gets and sets the property EbsOptimized. /// <para> /// Specifies whether the instance is optimized for EBS I/O. This optimization provides /// dedicated throughput to Amazon EBS and an optimized configuration stack to provide /// optimal EBS I/O performance. This optimization isn't available with all instance types. /// Additional usage charges apply when using an EBS Optimized instance. /// </para> /// </summary> public bool EbsOptimized { get { return this._ebsOptimized.GetValueOrDefault(); } set { this._ebsOptimized = value; } } // Check to see if EbsOptimized property is set internal bool IsSetEbsOptimized() { return this._ebsOptimized.HasValue; } /// <summary> /// Gets and sets the property Groups. /// <para> /// [EC2-VPC] Changes the security groups of the instance. You must specify at least one /// security group, even if it's just the default security group for the VPC. You must /// specify the security group ID, not the security group name. /// </para> /// /// <para> /// For example, if you want the instance to be in sg-1a1a1a1a and sg-9b9b9b9b, specify /// <code>GroupId.1=sg-1a1a1a1a</code> and <code>GroupId.2=sg-9b9b9b9b</code>. /// </para> /// </summary> public List<string> Groups { get { return this._groups; } set { this._groups = value; } } // Check to see if Groups property is set internal bool IsSetGroups() { return this._groups != null && this._groups.Count > 0; } /// <summary> /// Gets and sets the property InstanceId. /// <para> /// The ID of the instance. /// </para> /// </summary> public string InstanceId { get { return this._instanceId; } set { this._instanceId = value; } } // Check to see if InstanceId property is set internal bool IsSetInstanceId() { return this._instanceId != null; } /// <summary> /// Gets and sets the property InstanceInitiatedShutdownBehavior. /// <para> /// Specifies whether an instance stops or terminates when you initiate shutdown from /// the instance (using the operating system command for system shutdown). 
/// </para> /// </summary> public string InstanceInitiatedShutdownBehavior { get { return this._instanceInitiatedShutdownBehavior; } set { this._instanceInitiatedShutdownBehavior = value; } } // Check to see if InstanceInitiatedShutdownBehavior property is set internal bool IsSetInstanceInitiatedShutdownBehavior() { return this._instanceInitiatedShutdownBehavior != null; } /// <summary> /// Gets and sets the property InstanceType. /// <para> /// Changes the instance type to the specified value. For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html">Instance /// Types</a>. If the instance type is not valid, the error returned is <code>InvalidInstanceAttributeValue</code>. /// </para> /// </summary> public string InstanceType { get { return this._instanceType; } set { this._instanceType = value; } } // Check to see if InstanceType property is set internal bool IsSetInstanceType() { return this._instanceType != null; } /// <summary> /// Gets and sets the property Kernel. /// <para> /// Changes the instance's kernel to the specified value. We recommend that you use PV-GRUB /// instead of kernels and RAM disks. For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/UserProvidedKernels.html">PV-GRUB</a>. /// </para> /// </summary> public string Kernel { get { return this._kernel; } set { this._kernel = value; } } // Check to see if Kernel property is set internal bool IsSetKernel() { return this._kernel != null; } /// <summary> /// Gets and sets the property Ramdisk. /// <para> /// Changes the instance's RAM disk to the specified value. We recommend that you use /// PV-GRUB instead of kernels and RAM disks. For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/UserProvidedKernels.html">PV-GRUB</a>. /// </para> /// </summary> public string Ramdisk { get { return this._ramdisk; } set { this._ramdisk = value; } } // Check to see if Ramdisk property is set internal bool IsSetRamdisk() { return this._ramdisk != null; } /// <summary> /// Gets and sets the property SourceDestCheck. /// <para> /// Specifies whether source/destination checking is enabled. A value of <code>true</code> /// means that checking is enabled, and <code>false</code> means checking is disabled. /// This value must be <code>false</code> for a NAT instance to perform NAT. /// </para> /// </summary> public bool SourceDestCheck { get { return this._sourceDestCheck.GetValueOrDefault(); } set { this._sourceDestCheck = value; } } // Check to see if SourceDestCheck property is set internal bool IsSetSourceDestCheck() { return this._sourceDestCheck.HasValue; } /// <summary> /// Gets and sets the property SriovNetSupport. /// <para> /// Set to <code>simple</code> to enable enhanced networking for the instance. /// </para> /// /// <para> /// There is no way to disable enhanced networking at this time. /// </para> /// /// <para> /// This option is supported only for HVM instances. Specifying this option with a PV /// instance can make it unreachable. /// </para> /// </summary> public string SriovNetSupport { get { return this._sriovNetSupport; } set { this._sriovNetSupport = value; } } // Check to see if SriovNetSupport property is set internal bool IsSetSriovNetSupport() { return this._sriovNetSupport != null; } /// <summary> /// Gets and sets the property UserData. /// <para> /// Changes the instance's user data to the specified value. 
        /// </para>
        /// </summary>
        public string UserData
        {
            get { return this._userData; }
            set { this._userData = value; }
        }

        // Check to see if UserData property is set
        internal bool IsSetUserData()
        {
            return this._userData != null;
        }

        /// <summary>
        /// Gets and sets the property Value.
        /// <para>
        /// A new value for the attribute. Use only with the <code>kernel</code>, <code>ramdisk</code>,
        /// <code>userData</code>, <code>disableApiTermination</code>, or <code>instanceInitiatedShutdownBehavior</code>
        /// attribute.
        /// </para>
        /// </summary>
        public string Value
        {
            get { return this._value; }
            set { this._value = value; }
        }

        // Check to see if Value property is set
        internal bool IsSetValue()
        {
            return this._value != null;
        }

    }
}
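// Usage sketch (not generated code): one way this request type is commonly used. The request
// properties come from the class above; the instance ID and target instance type are
// placeholders, and the synchronous AmazonEC2Client.ModifyInstanceAttribute call is assumed to
// be available (as on the desktop framework builds of the SDK) with credentials and region
// taken from the application's AWS configuration.
public static class ModifyInstanceAttributeExample
{
    public static void ChangeInstanceType()
    {
        using (var ec2 = new Amazon.EC2.AmazonEC2Client())
        {
            // The instance must be stopped before its type can be modified.
            var request = new Amazon.EC2.Model.ModifyInstanceAttributeRequest
            {
                InstanceId = "i-12345678",   // placeholder instance ID
                InstanceType = "m3.medium"   // placeholder target instance type
            };
            ec2.ModifyInstanceAttribute(request);
        }
    }
}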
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace ApplicationGateway { using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// SubnetsOperations operations. /// </summary> internal partial class SubnetsOperations : IServiceOperations<NetworkClient>, ISubnetsOperations { /// <summary> /// Initializes a new instance of the SubnetsOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal SubnetsOperations(NetworkClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the NetworkClient /// </summary> public NetworkClient Client { get; private set; } /// <summary> /// Deletes the specified subnet. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='subnetName'> /// The name of the subnet. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string subnetName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Send request AzureOperationResponse _response = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, subnetName, customHeaders, cancellationToken).ConfigureAwait(false); return await Client.GetPostOrDeleteOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets the specified subnet by virtual network and resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='subnetName'> /// The name of the subnet. /// </param> /// <param name='expand'> /// Expands referenced resources. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<AzureOperationResponse<Subnet>> GetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string subnetName, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (virtualNetworkName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName"); } if (subnetName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subnetName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("virtualNetworkName", virtualNetworkName); tracingParameters.Add("subnetName", subnetName); tracingParameters.Add("expand", expand); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName)); _url = _url.Replace("{subnetName}", System.Uri.EscapeDataString(subnetName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (expand != null) { _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(expand))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<Subnet>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Subnet>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } 
/// <summary> /// Creates or updates a subnet in the specified virtual network. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='subnetName'> /// The name of the subnet. /// </param> /// <param name='subnetParameters'> /// Parameters supplied to the create or update subnet operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse<Subnet>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string subnetName, Subnet subnetParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Send Request AzureOperationResponse<Subnet> _response = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, subnetName, subnetParameters, customHeaders, cancellationToken).ConfigureAwait(false); return await Client.GetPutOrPatchOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets all subnets in a virtual network. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<AzureOperationResponse<IPage<Subnet>>> ListWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (virtualNetworkName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("virtualNetworkName", virtualNetworkName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<Subnet>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<Subnet>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return 
_result; } /// <summary> /// Deletes the specified subnet. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='subnetName'> /// The name of the subnet. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string subnetName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (virtualNetworkName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName"); } if (subnetName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subnetName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("virtualNetworkName", virtualNetworkName); tracingParameters.Add("subnetName", subnetName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName)); _url = _url.Replace("{subnetName}", System.Uri.EscapeDataString(subnetName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("DELETE"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 204 && (int)_statusCode != 202) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Creates or updates a subnet in the specified virtual network. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='virtualNetworkName'> /// The name of the virtual network. /// </param> /// <param name='subnetName'> /// The name of the subnet. /// </param> /// <param name='subnetParameters'> /// Parameters supplied to the create or update subnet operation. 
/// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<Subnet>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string subnetName, Subnet subnetParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (virtualNetworkName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkName"); } if (subnetName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subnetName"); } if (subnetParameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "subnetParameters"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("virtualNetworkName", virtualNetworkName); tracingParameters.Add("subnetName", subnetName); tracingParameters.Add("subnetParameters", subnetParameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{virtualNetworkName}", System.Uri.EscapeDataString(virtualNetworkName)); _url = _url.Replace("{subnetName}", System.Uri.EscapeDataString(subnetName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("PUT"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(subnetParameters != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(subnetParameters, Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 201) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<Subnet>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = 
Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Subnet>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } // Deserialize Response if ((int)_statusCode == 201) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Subnet>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets all subnets in a virtual network. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<Subnet>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<Subnet>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<Subnet>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return 
_result; } } }
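// Usage sketch (not generated code): how the operations above are reached once a client has been
// constructed. Only members defined in this file are used (ISubnetsOperations and its
// *WithHttpMessagesAsync methods, which return the response wrapper whose Body holds the
// deserialized model); the resource names are placeholders.
namespace ApplicationGateway
{
    using Models;
    using System.Threading.Tasks;

    internal static class SubnetsOperationsUsageExample
    {
        internal static async Task<Subnet> GetDefaultSubnetAsync(ISubnetsOperations subnets)
        {
            // GetWithHttpMessagesAsync performs the GET and throws CloudException on non-200 responses.
            var response = await subnets.GetWithHttpMessagesAsync("myResourceGroup", "myVNet", "default");
            return response.Body;   // the Subnet model deserialized from the 200 response
        }

        internal static async Task DeleteDefaultSubnetAsync(ISubnetsOperations subnets)
        {
            // DeleteWithHttpMessagesAsync starts the delete and polls the long-running operation to completion.
            await subnets.DeleteWithHttpMessagesAsync("myResourceGroup", "myVNet", "default");
        }
    }
}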
// The MIT License (MIT)
//
// CoreTweet - A .NET Twitter Library supporting Twitter API 1.1
// Copyright (c) 2014 lambdalice
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using CoreTweet.Core;

namespace CoreTweet.Rest
{
    ///<summary>GET/POST blocks</summary>
    public partial class Blocks : ApiProviderBase
    {
        internal Blocks(TokensBase e) : base(e) { }

#if !PCL
        //GET Methods

        /// <summary>
        /// <para>Returns an array of numeric user ids the authenticating user is blocking.</para>
        /// <para>Available parameters: </para>
        /// <para><paramref name="long cursor (semi-optional)"/> : Causes the list of IDs to be broken into pages of no more than 5000 IDs at a time. The number of IDs returned is not guaranteed to be 5000 as suspended users are filtered out after connections are queried. If no cursor is provided, a value of -1 will be assumed, which is the first "page." The response from the API will include a previous_cursor and next_cursor to allow paging back and forth. See Using cursors to navigate collections for more information.</para>
        /// </summary>
        /// <returns>IDs.</returns>
        /// <param name='parameters'>
        /// Parameters.
        /// </param>
        /// <see cref="https://dev.twitter.com/docs/misc/cursoring"/>
        public Cursored<long> Ids(params Expression<Func<string, object>>[] parameters)
        {
            return this.Tokens.AccessApi<Cursored<long>>(MethodType.Get, "blocks/ids", parameters);
        }
        public Cursored<long> Ids(IDictionary<string, object> parameters)
        {
            return this.Tokens.AccessApi<Cursored<long>>(MethodType.Get, "blocks/ids", parameters);
        }
        public Cursored<long> Ids<T>(T parameters)
        {
            return this.Tokens.AccessApi<Cursored<long>, T>(MethodType.Get, "blocks/ids", parameters);
        }

        /// <summary>
        /// <para>Enumerates numeric user ids the authenticating user is blocking.</para>
        /// <para>Available parameters: </para>
        /// <para><paramref name="long cursor (optional)"/> : The first cursor. If not specified, enumerating starts from the first page.</para>
        /// </summary>
        /// <returns>
        /// IDs.
        /// </returns>
        /// <see cref="https://dev.twitter.com/docs/misc/cursoring"/>
        /// <param name='mode'>
        /// <para> Specify whether enumerating goes to the next page or the previous.</para>
        /// </param>
        /// <param name='parameters'>
        /// Parameters.
        /// </param>
        public IEnumerable<long> EnumerateIds(EnumerateMode mode, params Expression<Func<string, object>>[] parameters)
        {
            return Cursored<long>.Enumerate(this.Tokens, "blocks/ids", mode, parameters);
        }
        public IEnumerable<long> EnumerateIds(EnumerateMode mode, IDictionary<string, object> parameters)
        {
            return Cursored<long>.Enumerate(this.Tokens, "blocks/ids", mode, parameters);
        }
        public IEnumerable<long> EnumerateIds<T>(EnumerateMode mode, T parameters)
        {
            return Cursored<long>.Enumerate<T>(this.Tokens, "blocks/ids", mode, parameters);
        }

        /// <summary>
        /// <para>Returns a collection of user objects that the authenticating user is blocking.</para>
        /// <para>Available parameters: </para>
        /// <para><paramref name="bool include_entities (optional)"/> : The entities node will not be included when set to false.</para>
        /// <para><paramref name="bool skip_status (optional)"/> : When set to true, statuses will not be included in the returned user objects.</para>
        /// <para><paramref name="long cursor (semi-optional)"/> : Causes the list of blocked users to be broken into pages of no more than 5000 IDs at a time. The number of IDs returned is not guaranteed to be 5000 as suspended users are filtered out after connections are queried. If no cursor is provided, a value of -1 will be assumed, which is the first "page." The response from the API will include a previous_cursor and next_cursor to allow paging back and forth. See Using cursors to navigate collections for more information.</para>
        /// </summary>
        /// <returns>Users.</returns>
        /// <param name='parameters'>
        /// Parameters.
        /// </param>
        /// <see cref="https://dev.twitter.com/docs/misc/cursoring"/>
        public Cursored<User> List(params Expression<Func<string, object>>[] parameters)
        {
            return this.Tokens.AccessApi<Cursored<User>>(MethodType.Get, "blocks/list", parameters);
        }
        public Cursored<User> List(IDictionary<string, object> parameters)
        {
            return this.Tokens.AccessApi<Cursored<User>>(MethodType.Get, "blocks/list", parameters);
        }
        public Cursored<User> List<T>(T parameters)
        {
            return this.Tokens.AccessApi<Cursored<User>, T>(MethodType.Get, "blocks/list", parameters);
        }

        /// <summary>
        /// <para>Enumerates the user objects the authenticating user is blocking.</para>
        /// <para>Available parameters: </para>
        /// <para><paramref name="bool include_entities (optional)"/> : The entities node will not be included when set to false.</para>
        /// <para><paramref name="bool skip_status (optional)"/> : When set to true, statuses will not be included in the returned user objects.</para>
        /// <para><paramref name="long cursor (optional)"/> : The first cursor. If not specified, enumerating starts from the first page.</para>
        /// </summary>
        /// <returns>
        /// Users.
        /// </returns>
        /// <see cref="https://dev.twitter.com/docs/misc/cursoring"/>
        /// <param name='mode'>
        /// <para> Specify whether enumerating goes to the next page or the previous.</para>
        /// </param>
        /// <param name='parameters'>
        /// Parameters.
/// </param> public IEnumerable<User> EnumerateList(EnumerateMode mode, params Expression<Func<string, object>>[] parameters) { return Cursored<User>.Enumerate(this.Tokens, "blocks/list", mode, parameters); } public IEnumerable<User> EnumerateList(EnumerateMode mode, IDictionary<string, object> parameters) { return Cursored<User>.Enumerate(this.Tokens, "blocks/list", mode, parameters); } public IEnumerable<User> EnumerateList<T>(EnumerateMode mode, T parameters) { return Cursored<User>.Enumerate<T>(this.Tokens, "blocks/list", mode, parameters); } //POST Methods /// <summary> /// <para>Blocks the specified user from following the authenticating user. In addition the blocked user will not show in the authenticating users mentions or timeline (unless retweeted by another user). If a follow or friend relationship exists it is destroyed.</para> /// <para>Note: Either screen_name or user_id must be provided.</para> /// <para>Avaliable parameters: </para> /// <para><paramref name="string screen_name (optional)"/> : The screen name of the potentially blocked user. Helpful for disambiguating when a valid screen name is also a user ID.</para> /// <para><paramref name="long user_id (optional)"/> : The ID of the potentially blocked user. Helpful for disambiguating when a valid user ID is also a valid screen name.</para> /// <para><paramref name="bool include_entities (optional)"/> : The entities node will not be included when set to false.</para> /// <para><paramref name="bool skip_status (optional)"/> : When set to true, statuses will not be included in the returned user objects.</para> /// </summary> /// <returns>The user.</returns> /// <param name='parameters'> /// Parameters. /// </param> public User Create(params Expression<Func<string, object>>[] parameters) { return this.Tokens.AccessApi<User>(MethodType.Post, "blocks/create", parameters); } public User Create(IDictionary<string, object> parameters) { return this.Tokens.AccessApi<User>(MethodType.Post, "blocks/create", parameters); } public User Create<T>(T parameters) { return this.Tokens.AccessApi<User, T>(MethodType.Post, "blocks/create", parameters); } /// <summary> /// <para>Un-blocks the user specified in the ID parameter for the authenticating user. Returns the un-blocked user in the requested format when successful. If relationships existed before the block was instated, they will not be restored.</para> /// <para>Note: Either screen_name or user_id must be provided.</para> /// <para>Avaliable parameters: </para> /// <para><paramref name="string screen_name (optional)"/> : The screen name of the potentially blocked user. Helpful for disambiguating when a valid screen name is also a user ID.</para> /// <para><paramref name="long user_id (optional)"/> : The ID of the potentially blocked user. Helpful for disambiguating when a valid user ID is also a valid screen name.</para> /// <para><paramref name="bool include_entities (optional)"/> : The entities node will not be included when set to false.</para> /// <para><paramref name="bool skip_status (optional)"/> : When set to either true, t or 1 statuses will not be included in the returned user objects.</para> /// </summary> /// <returns>The user.</returns> /// <param name='parameters'> /// Parameters. 
/// </param> public User Destroy(params Expression<Func<string, object>>[] parameters) { return this.Tokens.AccessApi<User>(MethodType.Post, "blocks/destroy", parameters); } public User Destroy(IDictionary<string, object> parameters) { return this.Tokens.AccessApi<User>(MethodType.Post, "blocks/destroy", parameters); } public User Destroy<T>(T parameters) { return this.Tokens.AccessApi<User, T>(MethodType.Post, "blocks/destroy", parameters); } #endif } }
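// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the library above): how the cursored
// blocks endpoints might be called. The credentials are placeholders, and the
// members of Cursored<long> used below (enumeration, NextCursor) are assumed
// from the documentation comments above rather than verified against a
// specific CoreTweet version.
// ---------------------------------------------------------------------------
using System;
using CoreTweet;
using CoreTweet.Core;

static class BlocksUsageSketch
{
    static void Demo()
    {
        // Hypothetical OAuth credentials; replace with real keys and tokens.
        var tokens = Tokens.Create("CONSUMER_KEY", "CONSUMER_SECRET", "ACCESS_TOKEN", "ACCESS_SECRET");

        // One page of blocked user ids; cursor -1 asks for the first page, as documented above.
        var page = tokens.Blocks.Ids(cursor => -1);
        foreach (var id in page)            // Cursored<long> is assumed to enumerate the ids of the page
            Console.WriteLine(id);
        Console.WriteLine(page.NextCursor); // assumed property; pass it back as cursor to fetch the next page

        // Or let EnumerateIds follow next_cursor page by page until the list is exhausted.
        foreach (var id in tokens.Blocks.EnumerateIds(EnumerateMode.Next))
            Console.WriteLine(id);

        // Blocking and un-blocking return the affected user object.
        var blocked = tokens.Blocks.Create(screen_name => "example_user");
        tokens.Blocks.Destroy(user_id => blocked.Id);
    }
}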
using System; using System.Collections.Generic; using System.IO; using System.Text; using System.Threading; using System.Threading.Tasks; using FluentAssertions; using Xunit; using Xunit.Abstractions; namespace MagicOnion.Generator.Tests { public class GenerateServiceTest { private readonly ITestOutputHelper _testOutputHelper; public GenerateServiceTest(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; } [Fact] public async Task Return_UnaryResultOfT() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using System.Threading.Tasks; using MessagePack; using MagicOnion; namespace TempProject { public interface IMyService : IService<IMyService> { UnaryResult<int> A(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); var compilation = tempWorkspace.GetOutputCompilation(); compilation.GetCompilationErrors().Should().BeEmpty(); } [Fact] public async Task Return_TaskOfUnaryResultOfT() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using System.Threading.Tasks; using MessagePack; using MagicOnion; namespace TempProject { public interface IMyService : IService<IMyService> { Task<UnaryResult<int>> A(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); var compilation = tempWorkspace.GetOutputCompilation(); compilation.GetCompilationErrors().Should().BeEmpty(); } [Fact] public async Task Return_StreamingResult() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using MessagePack; using MagicOnion; using System.Threading.Tasks; namespace TempProject { public interface IMyService : IService<IMyService> { Task<ClientStreamingResult<string, string>> ClientStreamingAsync(); Task<ServerStreamingResult<string>> ServerStreamingAsync(); Task<DuplexStreamingResult<string, string>> DuplexStreamingAsync(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); var compilation = tempWorkspace.GetOutputCompilation(); compilation.GetCompilationErrors().Should().BeEmpty(); } [Fact] public async Task Invalid_Return_NonGenerics() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using System.Threading.Tasks; using MessagePack; using MagicOnion; namespace TempProject { public interface IMyService : IService<IMyService> { int A(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await Assert.ThrowsAsync<InvalidOperationException>(async () => { await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); }); } [Fact] 
public async Task Invalid_Return_NonSupportedUnaryResultOfT() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using System.Threading.Tasks; using MessagePack; using MagicOnion; namespace TempProject { public interface IMyService : IService<IMyService> { UnaryResult<ServerStreamingResult<int>> A(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await Assert.ThrowsAsync<InvalidOperationException>(async () => { await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); }); } [Fact] public async Task Invalid_Return_RawStreaming_NonTask() { using var tempWorkspace = TemporaryProjectWorkarea.Create(); tempWorkspace.AddFileToProject("IMyService.cs", @" using System; using System.Threading.Tasks; using MessagePack; using MagicOnion; namespace TempProject { public interface IMyService : IService<IMyService> { ClientStreamingResult<string, string> ClientStreamingAsync(); ServerStreamingResult<string> ServerStreamingAsync(); DuplexStreamingResult<string, string> DuplexStreamingAsync(); } } "); var compiler = new MagicOnionCompiler(_testOutputHelper.WriteLine, CancellationToken.None); await Assert.ThrowsAsync<InvalidOperationException>(async () => { await compiler.GenerateFileAsync( tempWorkspace.CsProjectPath, Path.Combine(tempWorkspace.OutputDirectory, "Generated.cs"), true, "TempProject.Generated", "", "MessagePack.Formatters" ); }); } } }
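// ---------------------------------------------------------------------------
// Quick-reference sketch (not part of the test suite above): the service-method
// shapes the generator tests accept, collected into one hypothetical interface,
// with the rejected shapes noted in comments. The namespace and interface name
// are illustrative only.
// ---------------------------------------------------------------------------
using System.Threading.Tasks;
using MagicOnion;

namespace TempProject.Sketch
{
    public interface IShapeReference : IService<IShapeReference>
    {
        // Accepted (see Return_UnaryResultOfT, Return_TaskOfUnaryResultOfT, Return_StreamingResult above):
        UnaryResult<int> Unary();
        Task<UnaryResult<int>> TaskOfUnary();
        Task<ClientStreamingResult<string, string>> ClientStreaming();
        Task<ServerStreamingResult<string>> ServerStreaming();
        Task<DuplexStreamingResult<string, string>> DuplexStreaming();

        // Rejected by the generator with InvalidOperationException (see the Invalid_* tests above):
        //   int Plain();                                       // plain, non-generic return type
        //   UnaryResult<ServerStreamingResult<int>> Nested();  // streaming result wrapped in UnaryResult
        //   ServerStreamingResult<string> RawStreaming();      // streaming result not wrapped in Task
    }
}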
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gax = Google.Api.Gax; using sys = System; namespace Google.Ads.GoogleAds.V10.Resources { /// <summary>Resource name for the <c>AdGroupAudienceView</c> resource.</summary> public sealed partial class AdGroupAudienceViewName : gax::IResourceName, sys::IEquatable<AdGroupAudienceViewName> { /// <summary>The possible contents of <see cref="AdGroupAudienceViewName"/>.</summary> public enum ResourceNameType { /// <summary>An unparsed resource name.</summary> Unparsed = 0, /// <summary> /// A resource name with pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </summary> CustomerAdGroupCriterion = 1, } private static gax::PathTemplate s_customerAdGroupCriterion = new gax::PathTemplate("customers/{customer_id}/adGroupAudienceViews/{ad_group_id_criterion_id}"); /// <summary>Creates a <see cref="AdGroupAudienceViewName"/> containing an unparsed resource name.</summary> /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param> /// <returns> /// A new instance of <see cref="AdGroupAudienceViewName"/> containing the provided /// <paramref name="unparsedResourceName"/>. /// </returns> public static AdGroupAudienceViewName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) => new AdGroupAudienceViewName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName))); /// <summary> /// Creates a <see cref="AdGroupAudienceViewName"/> with the pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </summary> /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// A new instance of <see cref="AdGroupAudienceViewName"/> constructed from the provided ids. /// </returns> public static AdGroupAudienceViewName FromCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) => new AdGroupAudienceViewName(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId))); /// <summary> /// Formats the IDs into the string representation of this <see cref="AdGroupAudienceViewName"/> with pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </summary> /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="adGroupId">The <c>AdGroup</c> ID. 
Must not be <c>null</c> or empty.</param> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="AdGroupAudienceViewName"/> with pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </returns> public static string Format(string customerId, string adGroupId, string criterionId) => FormatCustomerAdGroupCriterion(customerId, adGroupId, criterionId); /// <summary> /// Formats the IDs into the string representation of this <see cref="AdGroupAudienceViewName"/> with pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </summary> /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> /// <returns> /// The string representation of this <see cref="AdGroupAudienceViewName"/> with pattern /// <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c>. /// </returns> public static string FormatCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) => s_customerAdGroupCriterion.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)))}"); /// <summary> /// Parses the given resource name string into a new <see cref="AdGroupAudienceViewName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c></description> /// </item> /// </list> /// </remarks> /// <param name="adGroupAudienceViewName">The resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="AdGroupAudienceViewName"/> if successful.</returns> public static AdGroupAudienceViewName Parse(string adGroupAudienceViewName) => Parse(adGroupAudienceViewName, false); /// <summary> /// Parses the given resource name string into a new <see cref="AdGroupAudienceViewName"/> instance; optionally /// allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="adGroupAudienceViewName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <returns>The parsed <see cref="AdGroupAudienceViewName"/> if successful.</returns> public static AdGroupAudienceViewName Parse(string adGroupAudienceViewName, bool allowUnparsed) => TryParse(adGroupAudienceViewName, allowUnparsed, out AdGroupAudienceViewName result) ? 
result : throw new sys::ArgumentException("The given resource-name matches no pattern."); /// <summary> /// Tries to parse the given resource name string into a new <see cref="AdGroupAudienceViewName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c></description> /// </item> /// </list> /// </remarks> /// <param name="adGroupAudienceViewName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="result"> /// When this method returns, the parsed <see cref="AdGroupAudienceViewName"/>, or <c>null</c> if parsing /// failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string adGroupAudienceViewName, out AdGroupAudienceViewName result) => TryParse(adGroupAudienceViewName, false, out result); /// <summary> /// Tries to parse the given resource name string into a new <see cref="AdGroupAudienceViewName"/> instance; /// optionally allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="adGroupAudienceViewName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <param name="result"> /// When this method returns, the parsed <see cref="AdGroupAudienceViewName"/>, or <c>null</c> if parsing /// failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string adGroupAudienceViewName, bool allowUnparsed, out AdGroupAudienceViewName result) { gax::GaxPreconditions.CheckNotNull(adGroupAudienceViewName, nameof(adGroupAudienceViewName)); gax::TemplatedResourceName resourceName; if (s_customerAdGroupCriterion.TryParseName(adGroupAudienceViewName, out resourceName)) { string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', }); if (split1 == null) { result = null; return false; } result = FromCustomerAdGroupCriterion(resourceName[0], split1[0], split1[1]); return true; } if (allowUnparsed) { if (gax::UnparsedResourceName.TryParse(adGroupAudienceViewName, out gax::UnparsedResourceName unparsedResourceName)) { result = FromUnparsed(unparsedResourceName); return true; } } result = null; return false; } private static string[] ParseSplitHelper(string s, char[] separators) { string[] result = new string[separators.Length + 1]; int i0 = 0; for (int i = 0; i <= separators.Length; i++) { int i1 = i < separators.Length ? 
s.IndexOf(separators[i], i0) : s.Length; if (i1 < 0 || i1 == i0) { return null; } result[i] = s.Substring(i0, i1 - i0); i0 = i1 + 1; } return result; } private AdGroupAudienceViewName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string adGroupId = null, string criterionId = null, string customerId = null) { Type = type; UnparsedResource = unparsedResourceName; AdGroupId = adGroupId; CriterionId = criterionId; CustomerId = customerId; } /// <summary> /// Constructs a new instance of a <see cref="AdGroupAudienceViewName"/> class from the component parts of /// pattern <c>customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}</c> /// </summary> /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param> public AdGroupAudienceViewName(string customerId, string adGroupId, string criterionId) : this(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId))) { } /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary> public ResourceNameType Type { get; } /// <summary> /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an /// unparsed resource name. /// </summary> public gax::UnparsedResourceName UnparsedResource { get; } /// <summary> /// The <c>AdGroup</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. /// </summary> public string AdGroupId { get; } /// <summary> /// The <c>Criterion</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. /// </summary> public string CriterionId { get; } /// <summary> /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. /// </summary> public string CustomerId { get; } /// <summary>Whether this instance contains a resource name with a known pattern.</summary> public bool IsKnownPattern => Type != ResourceNameType.Unparsed; /// <summary>The string representation of the resource name.</summary> /// <returns>The string representation of the resource name.</returns> public override string ToString() { switch (Type) { case ResourceNameType.Unparsed: return UnparsedResource.ToString(); case ResourceNameType.CustomerAdGroupCriterion: return s_customerAdGroupCriterion.Expand(CustomerId, $"{AdGroupId}~{CriterionId}"); default: throw new sys::InvalidOperationException("Unrecognized resource-type."); } } /// <summary>Returns a hash code for this resource name.</summary> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc/> public override bool Equals(object obj) => Equals(obj as AdGroupAudienceViewName); /// <inheritdoc/> public bool Equals(AdGroupAudienceViewName other) => ToString() == other?.ToString(); /// <inheritdoc/> public static bool operator ==(AdGroupAudienceViewName a, AdGroupAudienceViewName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? 
false); /// <inheritdoc/> public static bool operator !=(AdGroupAudienceViewName a, AdGroupAudienceViewName b) => !(a == b); } public partial class AdGroupAudienceView { /// <summary> /// <see cref="AdGroupAudienceViewName"/>-typed view over the <see cref="ResourceName"/> resource name property. /// </summary> internal AdGroupAudienceViewName ResourceNameAsAdGroupAudienceViewName { get => string.IsNullOrEmpty(ResourceName) ? null : AdGroupAudienceViewName.Parse(ResourceName, allowUnparsed: true); set => ResourceName = value?.ToString() ?? ""; } } }
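// ---------------------------------------------------------------------------
// Illustrative sketch (not generated code): composing, formatting and parsing
// an AdGroupAudienceViewName with the members defined above. The numeric ids
// are placeholders.
// ---------------------------------------------------------------------------
using System;
using Google.Ads.GoogleAds.V10.Resources;

static class AdGroupAudienceViewNameSketch
{
    static void Demo()
    {
        // Compose: customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}
        var name = AdGroupAudienceViewName.FromCustomerAdGroupCriterion("1234567890", "111", "222");
        string resourceName = name.ToString(); // "customers/1234567890/adGroupAudienceViews/111~222"

        // Parse throws on an unrecognized pattern; TryParse reports failure instead.
        var parsed = AdGroupAudienceViewName.Parse(resourceName);
        Console.WriteLine(parsed.CustomerId);  // "1234567890"
        Console.WriteLine(parsed.AdGroupId);   // "111"
        Console.WriteLine(parsed.CriterionId); // "222"

        // With allowUnparsed: true, arbitrary strings are kept as UnparsedResource instead of failing.
        if (!AdGroupAudienceViewName.TryParse("some/other/name", allowUnparsed: false, out _))
        {
            Console.WriteLine("does not match the customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id} pattern");
        }
    }
}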
// Copyright 2008-2009 Louis DeJardin - http://whereslou.com // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Collections.Generic; using System.IO; using NUnit.Framework; using Spark.Compiler; using Spark.Parser; using Spark.Ruby.Compiler; using Spark.Tests.Models; using Spark.Tests.Stubs; namespace Spark.Ruby.Tests { [TestFixture] public class RubyViewCompilerTests { private RubyViewCompiler _compiler; private RubyLanguageFactory _languageFactory; [SetUp] public void Init() { _compiler = new RubyViewCompiler { BaseClass = typeof(StubSparkView).FullName,Debug = true }; _languageFactory = new RubyLanguageFactory(); //load assemblies global::IronRuby.Ruby.CreateEngine(); } static IList<IList<Chunk>> Chunks(params Chunk[] chunks) { return new[] { (IList<Chunk>)chunks }; } private string ExecuteView() { return ExecuteView(new StubViewData()); } private string ExecuteView(StubViewData viewData) { var view = FastActivator<StubSparkView>.New(_compiler.CompiledType); _languageFactory.InstanceCreated(_compiler, view); view.ViewData = viewData; var contents = new StringWriter(); view.RenderView(contents); _languageFactory.InstanceReleased(_compiler, view); return contents.ToString(); } [Test] public void CreatedViewHasScriptProperty() { var chunks = Chunks(new SendLiteralChunk { Text = "Hello" }); _compiler.CompileView(chunks, chunks); var view = FastActivator<IScriptingSparkView>.New(_compiler.CompiledType); var source = _compiler.SourceCode; var script = view.ScriptSource; Assert.IsNotNull(source); Assert.IsNotEmpty(source); Assert.IsNotNull(script); Assert.IsNotEmpty(script); } [Test] public void CodeInheritsBaseClass() { var chunks = Chunks(); _compiler.BaseClass = "ThisIsTheBaseClass"; _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains(": ThisIsTheBaseClass")); } [Test] public void CodeInheritsBaseClassWithTModel() { var chunks = Chunks(new ViewDataModelChunk { TModel = "ThisIsTheModelClass" }); _compiler.BaseClass = "ThisIsTheBaseClass"; _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains(": ThisIsTheBaseClass<ThisIsTheModelClass>")); } [Test] public void LayeredTemplates() { var chunks0 = Chunks(new SendLiteralChunk { Text = "2" }); var chunks1 = Chunks( new SendLiteralChunk { Text = "4" }, new UseContentChunk { Name = "view" }, new SendLiteralChunk { Text = "0" }); var chunks = new[] { chunks0[0], chunks1[0] }; _compiler.CompileView(chunks, chunks); var content = ExecuteView(); Assert.AreEqual("420", content); } [/*Test,*/ Ignore("Not really sure if namespaces play a role in a dlr based spark view")] public void UsingNamespaces() { var chunks = Chunks(new UseNamespaceChunk { Namespace = "AnotherNamespace.ToBe.Used" }); _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("Imports AnotherNamespace.ToBe.Used")); } [Test] public void TargetNamespace() { var chunks = Chunks(new SendLiteralChunk { Text = "blah" }); _compiler.Descriptor = new SparkViewDescriptor { 
TargetNamespace = "TargetNamespace.ForThe.GeneratedView" }; _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("namespace TargetNamespace.ForThe.GeneratedView")); } [/*Test,*/ Ignore("Not really sure if namespaces play a role in a dlr based spark view")] public void TargetNamespaceWithUsingNamespaces() { var chunks = Chunks(new UseNamespaceChunk { Namespace = "AnotherNamespace.ToBe.Used" }); _compiler.Descriptor = new SparkViewDescriptor { TargetNamespace = "TargetNamespace.ForThe.GeneratedView" }; _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("Namespace TargetNamespace.ForThe.GeneratedView")); Assert.That(_compiler.SourceCode.Contains("Imports AnotherNamespace.ToBe.Used")); } [Test] public void ViewDescriptorAttribute() { var chunks = Chunks(); _compiler.Descriptor = new SparkViewDescriptor { TargetNamespace = "TargetNamespace.ForThe.GeneratedView", Templates = new[] { "one", "two", "three" } }; _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("SparkViewAttribute")); Assert.That(_compiler.SourceCode.Contains("one")); Assert.That(_compiler.SourceCode.Contains("two")); Assert.That(_compiler.SourceCode.Contains("three")); } [Test] public void ContainsGeneratedViewIdProperty() { var chunks = Chunks(); _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("GeneratedViewId")); Assert.That(_compiler.SourceCode.Contains("\"" + _compiler.GeneratedViewId + "\"")); } [Test] public void SendingLiteralOutput() { var chunks = Chunks(new SendLiteralChunk { Text = "Hello World" }); _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("Hello World")); } [Test] public void SendingIndentedExpressionOutput() { var chunks = Chunks(new SendExpressionChunk { Code = "5 + 3", Position = new Position(null, 100, 0, 50, 3, null) }); _compiler.GenerateSourceCode(chunks, chunks); Assert.That(_compiler.SourceCode.Contains("5 + 3")); } [Test] public void RenderingSimpleView() { var chunks = Chunks(new SendLiteralChunk { Text = "Hello World" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("Hello World", contents); } [Test] public void CompilingSimpleView() { var chunks = Chunks(new SendLiteralChunk { Text = "Hello World" }); _compiler.CompileView(chunks, chunks); Assert.That(typeof(StubSparkView).IsAssignableFrom(_compiler.CompiledType)); } [Test] public void SettingLocalVariable() { var chunks = Chunks( new LocalVariableChunk { Name = "x", Value = "4" }, new SendExpressionChunk { Code = "x" }, new AssignVariableChunk { Name = "x", Value = "2" }, new SendExpressionChunk { Code = "x" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("42", contents); } [Test] public void SettingGlobalVariable() { var chunks = Chunks( new GlobalVariableChunk { Type = "int", Name = "x", Value = "4" }, new SendExpressionChunk { Code = "x" }, new AssignVariableChunk { Name = "x", Value = "2" }, new SendExpressionChunk { Code = "x" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("42", contents); } [Test] public void UsingViewData() { var chunks = Chunks( new SendExpressionChunk { Code = "@hello" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(new StubViewData { { "hello", 42 } }); Assert.AreEqual("42", contents); } [/*Test,*/ Ignore("Not really sure if namespaces play a role in a dlr based spark view")] public void 
UsingViewDataDefault() { var chunks = Chunks( new ViewDataChunk { Type = "int", Name = "HelloNumber", Key = "hello", Default = "55" }, new SendExpressionChunk { Code = "HelloNumber" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(new StubViewData { { "hello", 42 } }); Assert.AreEqual("42", contents); var contents2 = ExecuteView(); Assert.AreEqual("55", contents2); } [Test] public void InlineCodeStatements() { var chunks = Chunks( new CodeStatementChunk { Code = "x = 20" }, new SendExpressionChunk { Code = "x + 22" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual(contents, "42"); } [Test] public void ScopeTest() { var scope1 = new ScopeChunk(); scope1.Body.Add(new LocalVariableChunk { Name = "x", Value = "4" }); scope1.Body.Add(new SendExpressionChunk { Code = "x" }); var scope2 = new ScopeChunk(); scope2.Body.Add(new LocalVariableChunk { Name = "x", Value = "2" }); scope2.Body.Add(new SendExpressionChunk { Code = "x" }); var chunks = Chunks(scope1, scope2); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual(contents, "42"); } [Test] public void ForEachLoopOverArray() { var loop = new ForEachChunk { Code = "number in @numbers" }; loop.Body.Add(new SendExpressionChunk { Code = "number" }); var chunks = Chunks( loop); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(new StubViewData { { "numbers", new[] { 1, 2, 3, 4, 5 } } }); Assert.AreEqual("12345", contents); } [Test] public void MacroAddsFunction() { var macro = new MacroChunk { Name = "foo", Parameters = new[] { new MacroParameter { Name = "x", Type = "string" } } }; macro.Body.Add(new SendExpressionChunk { Code = "x" }); var chunks = Chunks( new SendExpressionChunk { Code = "foo(\"hello\")" }, macro); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("hello", contents); } [Test] public void ConditionalChunkControlsExecution() { var condition1 = new ConditionalChunk { Condition = "x != 12" }; condition1.Body.Add(new SendLiteralChunk { Text = "fail" }); var condition2 = new ConditionalChunk { Condition = "x == 12" }; condition2.Body.Add(new SendLiteralChunk { Text = "ok1" }); var chunks = Chunks( new LocalVariableChunk { Name = "x", Value = "12" }, new SendLiteralChunk { Text = "a" }, condition1, condition2, new SendLiteralChunk { Text = "b" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("aok1b", contents); } [Test] public void ElseBlockFollowsIf() { var condition1 = new ConditionalChunk { Condition = "x != 12" }; condition1.Body.Add(new SendLiteralChunk { Text = "fail" }); var else1 = new ConditionalChunk { Type = ConditionalType.Else }; else1.Body.Add(new SendLiteralChunk { Text = "ok1" }); var condition2 = new ConditionalChunk { Condition = "x == 12" }; condition2.Body.Add(new SendLiteralChunk { Text = "ok2" }); var else2 = new ConditionalChunk { Type = ConditionalType.Else }; else2.Body.Add(new SendLiteralChunk { Text = "fail" }); var chunks = Chunks( new LocalVariableChunk { Name = "x", Value = "12" }, new SendLiteralChunk { Text = "a" }, condition1, else1, condition2, else2, new SendLiteralChunk { Text = "b" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("aok1ok2b", contents); } [Test] public void ConditionalChunkUnlessNegates() { var condition1 = new ConditionalChunk { Condition = "x != 12", Type = ConditionalType.Unless }; condition1.Body.Add(new SendLiteralChunk { Text = "ok1" }); var condition2 = new 
ConditionalChunk { Condition = "x == 12", Type = ConditionalType.Unless }; condition2.Body.Add(new SendLiteralChunk { Text = "fail" }); var chunks = Chunks( new LocalVariableChunk { Name = "x", Value = "12" }, new SendLiteralChunk { Text = "a" }, condition1, condition2, new SendLiteralChunk { Text = "b" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("aok1b", contents); } [Test] public void ChainingElseIfNestsProperly() { var condition1 = new ConditionalChunk { Type = ConditionalType.If, Condition = "x == 1" }; condition1.Body.Add(new SendLiteralChunk { Text = "a" }); var condition2 = new ConditionalChunk { Type = ConditionalType.ElseIf, Condition = "x == 2" }; condition2.Body.Add(new SendLiteralChunk { Text = "b" }); var condition3 = new ConditionalChunk { Type = ConditionalType.ElseIf, Condition = "x == 3" }; condition3.Body.Add(new SendLiteralChunk { Text = "c" }); var condition4 = new ConditionalChunk { Type = ConditionalType.Else }; condition4.Body.Add(new SendLiteralChunk { Text = "d" }); var loop = new ForEachChunk { Code = "x in @numbers" }; loop.Body.Add(condition1); loop.Body.Add(condition2); loop.Body.Add(condition3); loop.Body.Add(condition4); var chunks = Chunks( loop); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(new StubViewData { { "numbers", new[] { 0, 1, 2, 3, 4, 7, 2, -4 } } }); Assert.AreEqual("dabcddbd", contents); } [Test] public void RenderPartial() { var partial = Chunks( new SendLiteralChunk { Text = "Hello world" }); var chunks = Chunks( new SendLiteralChunk { Text = "(" }, new RenderPartialChunk { FileContext = new FileContext { Contents = partial[0] } }, new SendLiteralChunk { Text = ")" }); _compiler.CompileView(chunks, new[] { chunks[0], partial[0] }); var contents = ExecuteView(); Assert.AreEqual("(Hello world)", contents); } [Test] public void RenderPartialWithContainedContent() { var partial = Chunks( new SendLiteralChunk { Text = "[" }, new RenderSectionChunk(), new SendLiteralChunk { Text = "]" }); var renderPartial = new RenderPartialChunk { FileContext = new FileContext { Contents = partial[0] } }; var chunks = Chunks( new SendLiteralChunk { Text = "(" }, renderPartial, new SendLiteralChunk { Text = ")" }); renderPartial.Body.Add(new SendLiteralChunk { Text = "From inside caller" }); _compiler.CompileView(chunks, new[] { chunks[0], partial[0] }); var contents = ExecuteView(); Assert.AreEqual("([From inside caller])", contents); } [Test] public void RenderPartialWithSectionContent() { var partial = Chunks( new SendLiteralChunk { Text = "[" }, new RenderSectionChunk { Name = "foo" }, new SendLiteralChunk { Text = "]" }); var renderPartial = new RenderPartialChunk { FileContext = new FileContext { Contents = partial[0] } }; renderPartial.Sections.Add("foo", new[] { (Chunk) new SendLiteralChunk {Text = "From inside caller"} }); var chunks = Chunks( new SendLiteralChunk { Text = "(" }, renderPartial, new SendLiteralChunk { Text = ")" }); _compiler.CompileView(chunks, new[] { chunks[0], partial[0] }); var contents = ExecuteView(); Assert.AreEqual("([From inside caller])", contents); } [Test] public void CaptureContentToVariable() { var text1 = (Chunk)new SendLiteralChunk { Text = "a" }; var text2 = (Chunk)new SendLiteralChunk { Text = "b" }; var text3 = (Chunk)new SendLiteralChunk { Text = "c" }; var chunks = Chunks( new LocalVariableChunk { Type = "string", Name = "foo" }, new ContentSetChunk { AddType = ContentAddType.Replace, Variable = "foo", Body = new[] { text1 } }, new ContentSetChunk { 
AddType = ContentAddType.InsertBefore, Variable = "foo", Body = new[] { text2 } }, new ContentSetChunk { AddType = ContentAddType.AppendAfter, Variable = "foo", Body = new[] { text3 } }, new SendExpressionChunk { Code = "foo" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("bac", contents); } [Test] public void CaptureContentToNamedSpool() { var chunks = Chunks( new ContentChunk { Name = "foo", Body = new[] { (Chunk)new SendLiteralChunk { Text = "b" } } }, new SendLiteralChunk { Text = "a" }, new UseContentChunk { Name = "foo" }, new SendLiteralChunk { Text = "c" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("abc", contents); } [Test] public void DefaultVariablesWorking() { var chunks = Chunks( new DefaultVariableChunk { Name = "x1", Value = "\"a\"" }, new LocalVariableChunk { Name = "x2", Value = "\"b\"" }, new DefaultVariableChunk { Name = "x1", Value = "\"c\"" }, new DefaultVariableChunk { Name = "x2", Value = "\"d\"" }, new DefaultVariableChunk { Name = "x3", Value = "\"e\"" }, new SendExpressionChunk { Code = "x1" }, new SendExpressionChunk { Code = "x2" }, new SendExpressionChunk { Code = "x3" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("abe", contents); } [Test] public void OnceTestSendsThingsOnce() { var chunks = Chunks( new ConditionalChunk { Type = ConditionalType.Once, Condition = "\"foo\"", Body = new[] { (Chunk)new SendLiteralChunk { Text = "4" } } }, new ConditionalChunk { Type = ConditionalType.Once, Condition = "\"foo\"", Body = new[] { (Chunk)new SendLiteralChunk { Text = "3" } } }, new ConditionalChunk { Type = ConditionalType.Once, Condition = "\"bar\"", Body = new[] { (Chunk)new SendLiteralChunk { Text = "2" } } }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("42", contents); } [Test] public void HandleNullReferences() { var chunks = Chunks( new LocalVariableChunk { Name = "user", Type = typeof(UserInfo).FullName }, new SendLiteralChunk { Text = "1" }, new SendExpressionChunk { Code = "user.Name", SilentNulls = false }, new SendLiteralChunk { Text = "2" }, new SendExpressionChunk { Code = "user.Name", SilentNulls = true }, new SendLiteralChunk { Text = "3" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("1${user.Name => undefined method `Name' for nil:NilClass}23", contents); } [Test] public void ForLoopAutovariables() { var loop = Chunks( new SendExpressionChunk { Code = "fooIndex" }, new SendExpressionChunk { Code = "fooCount" }, new SendExpressionChunk { Code = "fooIsFirst" }, new SendExpressionChunk { Code = "fooIsLast" }); var chunks = Chunks( new ForEachChunk { Code = "foo in @stuff", Body = loop[0] }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(new StubViewData { {"stuff", new[] {6, 2, 7, 4}} }); Assert.AreEqual("04TrueFalse14FalseFalse24FalseFalse34FalseTrue", contents); } [Test] public void CallingMacro() { var macroBody = Chunks( new SendLiteralChunk { Text = "1" }); var chunks = Chunks( new MacroChunk { Name = "Foo", Body = macroBody[0] }, new LocalVariableChunk { Name = "x", Value = "Foo()" }, new SendLiteralChunk { Text = "2" }, new SendExpressionChunk { Code = "x" }, new SendLiteralChunk { Text = "3" }); _compiler.CompileView(chunks, chunks); var contents = ExecuteView(); Assert.AreEqual("213", contents); } } }
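// ---------------------------------------------------------------------------
// Minimal sketch (not a test): the compile-and-render flow that the fixture
// above wraps in Init()/ExecuteView(), collapsed into one method. It reuses the
// same Spark.Ruby and stub types the tests use; only the class and method names
// here are invented.
// ---------------------------------------------------------------------------
using System.Collections.Generic;
using System.IO;
using Spark.Compiler;
using Spark.Ruby.Compiler;
using Spark.Tests.Stubs;

namespace Spark.Ruby.Tests
{
    internal static class RubyViewCompilerSketch
    {
        internal static string CompileAndRender()
        {
            var compiler = new RubyViewCompiler { BaseClass = typeof(StubSparkView).FullName, Debug = true };
            var languageFactory = new RubyLanguageFactory();
            global::IronRuby.Ruby.CreateEngine(); // load the IronRuby assemblies, as Init() does

            // A single template consisting of one literal chunk.
            var chunks = new[] { (IList<Chunk>)new Chunk[] { new SendLiteralChunk { Text = "Hello World" } } };
            compiler.CompileView(chunks, chunks);

            // Instantiate the generated view type and render it, mirroring ExecuteView().
            var view = FastActivator<StubSparkView>.New(compiler.CompiledType);
            languageFactory.InstanceCreated(compiler, view);
            view.ViewData = new StubViewData();
            var contents = new StringWriter();
            view.RenderView(contents);
            languageFactory.InstanceReleased(compiler, view);
            return contents.ToString(); // "Hello World"
        }
    }
}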
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeGeneration; using Roslyn.Test.Utilities; using Roslyn.Utilities; using Xunit; using CS = Microsoft.CodeAnalysis.CSharp; using VB = Microsoft.CodeAnalysis.VisualBasic; namespace Microsoft.CodeAnalysis.Editor.UnitTests.MetadataAsSource { public partial class MetadataAsSourceTests : AbstractMetadataAsSourceTests { [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestClass() { var metadataSource = "public class C {}"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class [|C|] {{ public C(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class [|C|] Public Sub New() End Class"); } [WorkItem(546241, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546241")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestInterface() { var metadataSource = "public interface I {}"; var symbolName = "I"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public interface [|I|] {{ }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Interface [|I|] End Interface"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestConstructor() { var metadataSource = "public class C {}"; var symbolName = "C..ctor"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public [|C|](); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Sub [|New|]() End Class"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestMethod() { var metadataSource = "public class C { public void Foo() {} }"; var symbolName = "C.Foo"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public C(); public void [|Foo|](); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region 
""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Sub New() Public Sub [|Foo|]() End Class"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestField() { var metadataSource = "public class C { public string S; }"; var symbolName = "C.S"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public string [|S|]; public C(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public [|S|] As String Public Sub New() End Class"); } [WorkItem(546240, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546240")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestProperty() { var metadataSource = "public class C { public string S { get; protected set; } }"; var symbolName = "C.S"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public C(); public string [|S|] {{ get; protected set; }} }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Sub New() Public Property [|S|] As String End Class"); } [WorkItem(546194, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546194")] [WorkItem(546291, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546291")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEvent() { var metadataSource = "using System; public class C { public event Action E; }"; var symbolName = "C.E"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System; public class C {{ public C(); public event Action [|E|]; }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System Public Class C Public Sub New() Public Event [|E|] As Action End Class"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNestedType() { var metadataSource = "public class C { protected class D { } }"; var symbolName = "C+D"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public C(); 
protected class [|D|] {{ public D(); }} }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Sub New() Protected Class [|D|] Public Sub New() End Class End Class"); } [WorkItem(546195, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546195"), WorkItem(546269, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546269")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEnum() { var metadataSource = "public enum E { A, B, C }"; var symbolName = "E"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public enum [|E|] {{ A = 0, B = 1, C = 2 }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Enum [|E|] A = 0 B = 1 C = 2 End Enum"); } [WorkItem(546195, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546195"), WorkItem(546269, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546269")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEnumFromField() { var metadataSource = "public enum E { A, B, C }"; var symbolName = "E.C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public enum E {{ A = 0, B = 1, [|C|] = 2 }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Enum E A = 0 B = 1 [|C|] = 2 End Enum"); } [WorkItem(546273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546273")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEnumWithUnderlyingType() { var metadataSource = "public enum E : short { A = 0, B = 1, C = 2 }"; var symbolName = "E.C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public enum E : short {{ A = 0, B = 1, [|C|] = 2 }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Enum E As Short A = 0 B = 1 [|C|] = 2 End Enum"); } [WorkItem(650741, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/650741")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEnumWithOverflowingUnderlyingType() { var metadataSource = "public enum E : ulong { A = 9223372036854775808 }"; var symbolName = "E.A"; await 
GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public enum E : ulong {{ [|A|] = 9223372036854775808 }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Enum E As ULong [|A|] = 9223372036854775808UL End Enum"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEnumWithDifferentValues() { var metadataSource = "public enum E : short { A = 1, B = 2, C = 3 }"; var symbolName = "E.C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public enum E : short {{ A = 1, B = 2, [|C|] = 3 }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Enum E As Short A = 1 B = 2 [|C|] = 3 End Enum"); } [WorkItem(546198, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546198")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestTypeInNamespace() { var metadataSource = "namespace N { public class C {} }"; var symbolName = "N.C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion namespace N {{ public class [|C|] {{ public C(); }} }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Namespace N Public Class [|C|] Public Sub New() End Class End Namespace"); } [WorkItem(546223, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546223")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestInlineConstant() { var metadataSource = @"public class C { public const string S = ""Hello mas""; }"; var symbolName = "C.S"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public const string [|S|] = ""Hello mas""; public C(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Const [|S|] As String = ""Hello mas"" Public Sub New() End Class"); } [WorkItem(546221, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546221")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task 
TestInlineTypeOf() { var metadataSource = @" using System; public class MyTypeAttribute : Attribute { public MyTypeAttribute(Type type) {} } [MyType(typeof(string))] public class C {}"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion [MyType(typeof(string))] public class [|C|] {{ public C(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region <MyType(GetType(String))> Public Class [|C|] Public Sub New() End Class"); } [WorkItem(546231, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546231")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNoDefaultConstructorInStructs() { var metadataSource = "public struct S {}"; var symbolName = "S"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public struct [|S|] {{ }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Structure [|S|] End Structure"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestReferenceDefinedType() { var metadataSource = "public class C { public static C Create() { return new C(); } }"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class [|C|] {{ public C(); public static C Create(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class [|C|] Public Sub New() Public Shared Function Create() As C End Class"); } [WorkItem(546227, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546227")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestGenericType() { var metadataSource = "public class G<SomeType> { public SomeType S; }"; var symbolName = "G`1"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class [|G|]<SomeType> {{ public SomeType S; public G(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class [|G|](Of SomeType) Public S As SomeType Public Sub 
New() End Class"); } [WorkItem(546227, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546227")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestGenericDelegate() { var metadataSource = "public class C { public delegate void D<SomeType>(SomeType s); }"; var symbolName = "C+D`1"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public C(); public delegate void [|D|]<SomeType>(SomeType s); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Public Class C Public Sub New() Public Delegate Sub [|D|](Of SomeType)(s As SomeType) End Class"); } [WorkItem(546200, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546200")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestAttribute() { var metadataSource = @" using System; namespace N { public class WorkingAttribute : Attribute { public WorkingAttribute(bool working) {} } } [N.Working(true)] public class C {}"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using N; [Working(true)] public class [|C|] {{ public C(); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports N <Working(True)> Public Class [|C|] Public Sub New() End Class"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestSymbolIdMatchesMetadata() { await TestSymbolIdMatchesMetadataAsync(LanguageNames.CSharp); await TestSymbolIdMatchesMetadataAsync(LanguageNames.VisualBasic); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNotReusedOnAssemblyDiffers() { await TestNotReusedOnAssemblyDiffersAsync(LanguageNames.CSharp); await TestNotReusedOnAssemblyDiffersAsync(LanguageNames.VisualBasic); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestThrowsOnGenerateNamespace() { var namespaceSymbol = CodeGenerationSymbolFactory.CreateNamespaceSymbol("Outerspace"); using (var context = TestContext.Create()) { await Assert.ThrowsAsync<ArgumentException>(async () => { await context.GenerateSourceAsync(namespaceSymbol); }); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestReuseGenerateMemberOfGeneratedType() { var metadataSource = "public class C { public bool Is; }"; using (var context = TestContext.Create(LanguageNames.CSharp, SpecializedCollections.SingletonEnumerable(metadataSource))) { var a = await context.GenerateSourceAsync("C"); var b = await context.GenerateSourceAsync("C.Is"); context.VerifyDocumentReused(a, b); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestReuseRepeatGeneration() { using (var context = TestContext.Create()) { var a = 
await context.GenerateSourceAsync(); var b = await context.GenerateSourceAsync(); context.VerifyDocumentReused(a, b); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestWorkspaceContextHasReasonableProjectName() { using (var context = TestContext.Create()) { var compilation = await context.DefaultProject.GetCompilationAsync(); var result = await context.GenerateSourceAsync(compilation.ObjectType); var openedDocument = context.GetDocument(result); Assert.Equal(openedDocument.Project.AssemblyName, "mscorlib"); Assert.Equal(openedDocument.Project.Name, "mscorlib"); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestReuseGenerateFromDifferentProject() { using (var context = TestContext.Create()) { var projectId = ProjectId.CreateNewId(); var project = context.CurrentSolution.AddProject(projectId, "ProjectB", "ProjectB", LanguageNames.CSharp).GetProject(projectId) .WithMetadataReferences(context.DefaultProject.MetadataReferences) .WithCompilationOptions(new CS.CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); var a = await context.GenerateSourceAsync(project: context.DefaultProject); var b = await context.GenerateSourceAsync(project: project); context.VerifyDocumentReused(a, b); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNotReusedGeneratingForDifferentLanguage() { using (var context = TestContext.Create(LanguageNames.CSharp)) { var projectId = ProjectId.CreateNewId(); var project = context.CurrentSolution.AddProject(projectId, "ProjectB", "ProjectB", LanguageNames.VisualBasic).GetProject(projectId) .WithMetadataReferences(context.DefaultProject.MetadataReferences) .WithCompilationOptions(new VB.VisualBasicCompilationOptions(OutputKind.DynamicallyLinkedLibrary)); var a = await context.GenerateSourceAsync(project: context.DefaultProject); var b = await context.GenerateSourceAsync(project: project); context.VerifyDocumentNotReused(a, b); } } [WorkItem(546311, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546311")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task FormatMetadataAsSource() { using (var context = TestContext.Create(LanguageNames.CSharp)) { var file = await context.GenerateSourceAsync("System.Console", project: context.DefaultProject); var document = context.GetDocument(file); await Formatting.Formatter.FormatAsync(document); } } [WorkItem(530829, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/530829")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task IndexedProperty() { var metadataSource = @" Public Class C Public Property IndexProp(ByVal p1 As Integer) As String Get Return Nothing End Get Set(ByVal value As String) End Set End Property End Class"; var expected = $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public class C {{ public C(); public string [|get_IndexProp|](int p1); public void set_IndexProp(int p1, string value); }}"; var symbolName = "C.get_IndexProp"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, expected); } [WorkItem(566688, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/566688")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task AttributeReferencingInternalNestedType() { var metadataSource = @"using System; [My(typeof(D))] public class C { public 
C() { } internal class D { } } public class MyAttribute : Attribute { public MyAttribute(Type t) { } }"; var expected = $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion [My(typeof(D))] public class [|C|] {{ public C(); }}"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, expected); } [WorkItem(530978, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/530978")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestAttributesOnMembers() { var metadataSource = @"using System; [Obsolete] public class C { [Obsolete] [ThreadStatic] public int field1; [Obsolete] public int prop1 { get; set; } [Obsolete] public int prop2 { get { return 10; } set {} } [Obsolete] public void method1() {} [Obsolete] public C() {} [Obsolete] ~C() {} [Obsolete] public int this[int x] { get { return 10; } set {} } [Obsolete] public event Action event1; [Obsolete] public event Action event2 { add {} remove {}} public void method2([System.Runtime.CompilerServices.CallerMemberName] string name = """") {} [Obsolete] public static C operator + (C c1, C c2) { return new C(); } } "; var expectedCS = $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System; using System.Reflection; using System.Runtime.CompilerServices; [DefaultMember(""Item"")] [Obsolete] public class [|C|] {{ [Obsolete] [ThreadStatic] public int field1; [Obsolete] public C(); [Obsolete] ~C(); [Obsolete] public int this[int x] {{ get; set; }} [Obsolete] public int prop1 {{ get; set; }} [Obsolete] public int prop2 {{ get; set; }} [Obsolete] public event Action event1; [Obsolete] public event Action event2; [Obsolete] public void method1(); public void method2([CallerMemberName] string name = """"); [Obsolete] public static C operator +(C c1, C c2); }} "; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, expectedCS); var expectedVB = $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System Imports System.Reflection Imports System.Runtime.CompilerServices <DefaultMember(""Item"")> <Obsolete> Public Class [|C|] <Obsolete> <ThreadStatic> Public field1 As Integer <Obsolete> Public Sub New() <Obsolete> Public Property prop1 As Integer <Obsolete> Public Property prop2 As Integer <Obsolete> Default Public Property Item(x As Integer) As Integer <Obsolete> Public Event event1 As Action <Obsolete> Public Event event2 As Action <Obsolete> Public Sub method1() Public Sub method2(<CallerMemberName> Optional name As String = """") <Obsolete> Protected Overrides Sub Finalize() <Obsolete> Public Shared Operator +(c1 As C, c2 As C) As C End Class"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, expectedVB); } [WorkItem(530923, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/530923")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEmptyLineBetweenMembers() { var metadataSource = @"using System; public class C { public int field1; public int prop1 { get; set; } public int field2; public int prop2 { get { return 10; } set {} } public void method1() {} public C() {} public 
void method2([System.Runtime.CompilerServices.CallerMemberName] string name = """") {} ~C() {} public int this[int x] { get { return 10; } set {} } public event Action event1; public static C operator + (C c1, C c2) { return new C(); } public event Action event2 { add {} remove {}} public static C operator - (C c1, C c2) { return new C(); } } "; var expectedCS = $@"#region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System; using System.Reflection; using System.Runtime.CompilerServices; [DefaultMember(""Item"")] public class [|C|] {{ public int field1; public int field2; public C(); ~C(); public int this[int x] {{ get; set; }} public int prop1 {{ get; set; }} public int prop2 {{ get; set; }} public event Action event1; public event Action event2; public void method1(); public void method2([CallerMemberName] string name = """"); public static C operator +(C c1, C c2); public static C operator -(C c1, C c2); }} "; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, expectedCS, ignoreTrivia: false); var expectedVB = $@"#Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System Imports System.Reflection Imports System.Runtime.CompilerServices <DefaultMember(""Item"")> Public Class [|C|] Public field1 As Integer Public field2 As Integer Public Sub New() Public Property prop1 As Integer Public Property prop2 As Integer Default Public Property Item(x As Integer) As Integer Public Event event1 As Action Public Event event2 As Action Public Sub method1() Public Sub method2(<CallerMemberName> Optional name As String = """") Protected Overrides Sub Finalize() Public Shared Operator +(c1 As C, c2 As C) As C Public Shared Operator -(c1 As C, c2 As C) As C End Class"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, expectedVB, ignoreTrivia: false); } [WorkItem(728644, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/728644")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestEmptyLineBetweenMembers2() { var source = @" using System; /// <summary>T:IFoo</summary> public interface IFoo { /// <summary>P:IFoo.Prop1</summary> Uri Prop1 { get; set; } /// <summary>M:IFoo.Method1</summary> Uri Method1(); } "; var symbolName = "IFoo"; var expectedCS = $@"#region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System; // // {FeaturesResources.Summary_colon} // T:IFoo public interface [|IFoo|] {{ // // {FeaturesResources.Summary_colon} // P:IFoo.Prop1 Uri Prop1 {{ get; set; }} // // {FeaturesResources.Summary_colon} // M:IFoo.Method1 Uri Method1(); }} "; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.CSharp, expectedCS, ignoreTrivia: false, includeXmlDocComments: true); var expectedVB = $@"#Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System ' ' {FeaturesResources.Summary_colon} ' T:IFoo Public Interface [|IFoo|] ' ' {FeaturesResources.Summary_colon} ' P:IFoo.Prop1 Property Prop1 As Uri ' ' {FeaturesResources.Summary_colon} ' M:IFoo.Method1 Function Method1() As Uri End 
Interface "; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.VisualBasic, expectedVB, ignoreTrivia: false, includeXmlDocComments: true); } [WorkItem(679114, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/679114"), WorkItem(715013, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/715013")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestDefaultValueEnum() { var source = @" using System.IO; public class Test { public void foo(FileOptions options = 0) {} } "; var symbolName = "Test"; var expectedCS = $@"#region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System.IO; public class [|Test|] {{ public Test(); public void foo(FileOptions options = FileOptions.None); }} "; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.CSharp, expectedCS); var expectedVB = $@"#Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System.IO Public Class [|Test|] Public Sub New() Public Sub foo(Optional options As FileOptions = FileOptions.None) End Class"; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.VisualBasic, expectedVB); } [WorkItem(651261, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/651261")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNullAttribute() { var source = @" using System; [Test(null)] public class TestAttribute : Attribute { public TestAttribute(int[] i) { } }"; var symbolName = "TestAttribute"; var expectedCS = $@"#region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System; [Test(null)] public class [|TestAttribute|] : Attribute {{ public TestAttribute(int[] i); }} "; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.CSharp, expectedCS); var expectedVB = $@"#Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System <Test(Nothing)> Public Class [|TestAttribute|] Inherits Attribute Public Sub New(i() As Integer) End Class"; await GenerateAndVerifySourceAsync(source, symbolName, LanguageNames.VisualBasic, expectedVB); } [WorkItem(897006, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/897006")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNavigationViaReducedExtensionMethodCS() { var metadata = @"using System; public static class ObjectExtensions { public static void M(this object o, int x) { } }"; var sourceWithSymbolReference = @" class C { void M() { new object().[|M|](5); } }"; var expected = $@"#region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion public static class ObjectExtensions {{ public static void [|M|](this object o, int x); }} "; using (var context = TestContext.Create( LanguageNames.CSharp, SpecializedCollections.SingletonEnumerable(metadata), includeXmlDocComments: false, sourceWithSymbolReference: sourceWithSymbolReference)) { var navigationSymbol = await context.GetNavigationSymbolAsync(); var metadataAsSourceFile = await 
context.GenerateSourceAsync(navigationSymbol); context.VerifyResult(metadataAsSourceFile, expected); } } [WorkItem(897006, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/897006")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestNavigationViaReducedExtensionMethodVB() { var metadata = @"Imports System.Runtime.CompilerServices Namespace NS Public Module StringExtensions <Extension()> Public Sub M(ByVal o As String, x As Integer) End Sub End Module End Namespace"; var sourceWithSymbolReference = @" Imports NS.StringExtensions Public Module C Sub M() Dim s = ""Yay"" s.[|M|](1) End Sub End Module"; var expected = $@"#Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System.Runtime.CompilerServices Namespace NS <Extension> Public Module StringExtensions <Extension> Public Sub [|M|](o As String, x As Integer) End Module End Namespace"; using (var context = TestContext.Create( LanguageNames.VisualBasic, SpecializedCollections.SingletonEnumerable(metadata), includeXmlDocComments: false, sourceWithSymbolReference: sourceWithSymbolReference)) { var navigationSymbol = await context.GetNavigationSymbolAsync(); var metadataAsSourceFile = await context.GenerateSourceAsync(navigationSymbol); context.VerifyResult(metadataAsSourceFile, expected); } } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestIndexersAndOperators() { var metadataSource = @"public class Program { public int this[int x] { get { return 0; } set { } } public static Program operator + (Program p1, Program p2) { return new Program(); } }"; var symbolName = "Program"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System.Reflection; [DefaultMember(""Item"")] public class [|Program|] {{ public Program(); public int this[int x] {{ get; set; }} public static Program operator +(Program p1, Program p2); }}"); await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.VisualBasic, $@" #Region ""{FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null"" ' {CodeAnalysisResources.InMemoryAssembly} #End Region Imports System.Reflection <DefaultMember(""Item"")> Public Class [|Program|] Public Sub New() Default Public Property Item(x As Integer) As Integer Public Shared Operator +(p1 As Program, p2 As Program) As Program End Class"); } [WorkItem(15387, "https://github.com/dotnet/roslyn/issues/15387")] [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestComImport1() { var metadataSource = @" using System.Runtime.InteropServices; [ComImport] [Guid(""666A175D-2448-447A-B786-CCC82CBEF156"")] public interface IComImport { void MOverload(); void X(); void MOverload(int i); int Prop { get; } }"; var symbolName = "IComImport"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System.Runtime.InteropServices; [Guid(""666A175D-2448-447A-B786-CCC82CBEF156"")] public interface [|IComImport|] {{ void MOverload(); void X(); void MOverload(int i); 
int Prop {{ get; }} }}"); } [Fact, Trait(Traits.Feature, Traits.Features.MetadataAsSource)] public async Task TestOptionalParameterWithDefaultLiteral() { var metadataSource = @" using System.Threading; public class C { public void M(CancellationToken cancellationToken = default(CancellationToken)) { } }"; var symbolName = "C"; await GenerateAndVerifySourceAsync(metadataSource, symbolName, LanguageNames.CSharp, $@" #region {FeaturesResources.Assembly} ReferencedAssembly, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null // {CodeAnalysisResources.InMemoryAssembly} #endregion using System.Threading; public class [|C|] {{ public C(); public void M(CancellationToken cancellationToken = default); }}", languageVersion: "CSharp7_1"); } } }
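// --------------------------------------------------------------------------
// Illustrative sketch (not part of the test suite above): the symbolName
// strings passed to GenerateAndVerifySourceAsync ("G`1", "C+D`1",
// "C.get_IndexProp") follow CLR metadata naming, where "`n" encodes generic
// arity and "+" separates a nested type from its container. The snippet below
// shows how such names resolve through Roslyn's public API; the source text,
// assembly name, and class/method names here are assumptions chosen only to
// mirror the tests.
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;

internal static class MetadataNameSample
{
    internal static void Resolve()
    {
        var tree = CSharpSyntaxTree.ParseText(
            "public class G<SomeType> { public SomeType S; } " +
            "public class C { public delegate void D<SomeType>(SomeType s); }");

        var compilation = CSharpCompilation.Create(
            "ReferencedAssembly",
            new[] { tree },
            new[] { MetadataReference.CreateFromFile(typeof(object).Assembly.Location) });

        // "G`1"   -> the open generic type G<SomeType>; the backtick suffix is the generic arity.
        INamedTypeSymbol g = compilation.GetTypeByMetadataName("G`1");

        // "C+D`1" -> the nested delegate D<SomeType> declared inside C; '+' separates nesting levels.
        INamedTypeSymbol d = compilation.GetTypeByMetadataName("C+D`1");

        System.Console.WriteLine($"{g?.Name}, {d?.Name}");
    }
}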
//------------------------------------------------------------------------------ // <copyright file="HtmlObjetListAdapter.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ using System; using System.Collections; using System.Globalization; using System.Drawing; using System.IO; using System.Web; using System.Web.UI; using System.Web.UI.HtmlControls; using System.Web.UI.MobileControls; using System.Diagnostics; using System.Security.Permissions; using SR=System.Web.UI.MobileControls.Adapters.SR; #if COMPILING_FOR_SHIPPED_SOURCE namespace System.Web.UI.MobileControls.ShippedAdapterSource #else namespace System.Web.UI.MobileControls.Adapters #endif { /* * HtmlObjectListAdapter provides HTML rendering of Object List control. * * Copyright (c) 2000 Microsoft Corporation */ /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter"]/*' /> [AspNetHostingPermission(SecurityAction.LinkDemand, Level=AspNetHostingPermissionLevel.Minimal)] [AspNetHostingPermission(SecurityAction.InheritanceDemand, Level=AspNetHostingPermissionLevel.Minimal)] [Obsolete("The System.Web.Mobile.dll assembly has been deprecated and should no longer be used. For information about how to develop ASP.NET mobile applications, see http://go.microsoft.com/fwlink/?LinkId=157231.")] public class HtmlObjectListAdapter : HtmlControlAdapter { /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.BackToList"]/*' /> internal protected static readonly String BackToList = "__back"; /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.ShowMoreFormat"]/*' /> internal protected static readonly String ShowMoreFormat = "__more{0}"; /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.ShowMore"]/*' /> internal protected static readonly String ShowMore = "__more"; private const int _modeDetails = 1; /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.Control"]/*' /> protected new ObjectList Control { get { return (ObjectList)base.Control; } } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.OnInit"]/*' /> public override void OnInit(EventArgs e) { } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.OnPreRender"]/*' /> public override void OnPreRender(EventArgs e) { base.OnPreRender(e); if(Control.MobilePage.ActiveForm == Control.Form && Control.Visible && (Control.ViewMode == ObjectListViewMode.Commands || Control.ViewMode == ObjectListViewMode.Details)) { SecondaryUIMode = _modeDetails; if (Control.Items.Count > 0) { int itemIndex = Control.SelectedIndex; Debug.Assert(itemIndex >= 0, "itemIndex is negative"); Control.PreShowItemCommands(itemIndex); } } else { SecondaryUIMode = NotSecondaryUI; } } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.Render"]/*' /> public override void Render(HtmlMobileTextWriter writer) { if (Control.ViewMode == ObjectListViewMode.List) { if (Control.HasControls()) { RenderChildren(writer); } else { RenderItemsList(writer); } } else { if (Control.Selection.HasControls()) { Control.Selection.RenderChildren(writer); } else { RenderItemDetails(writer, Control.Selection); } FormAdapter.DisablePager(); } } /// <include file='doc\HtmlObjectListAdapter.uex' 
path='docs/doc[@for="HtmlObjectListAdapter.CreateTemplatedUI"]/*' /> public override void CreateTemplatedUI(bool doDataBind) { if (Control.ViewMode == ObjectListViewMode.List) { Control.CreateTemplatedItemsList(doDataBind); } else { Control.CreateTemplatedItemDetails(doDataBind); } } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.RenderItemsList"]/*' /> protected virtual void RenderItemsList(HtmlMobileTextWriter writer) { Debug.Assert (Control.VisibleItemCount <= Control.Items.Count); if (Control.VisibleItemCount == 0) { return; } Debug.Assert (Control.AllFields != null && Control.AllFields.Count > 0, "Should never have items but no fields."); if (Device.Tables) { RenderItemsListWithTableTags(writer); } else { RenderItemsListWithoutTableTags(writer); } } private void RenderItemsListWithTableTags(HtmlMobileTextWriter writer) { int pageStart = Control.FirstVisibleItemIndex; int pageSize = Control.VisibleItemCount; ObjectListItemCollection items = Control.Items; // Determine how to render. bool shouldRenderAsTable = ShouldRenderAsTable(); bool hasDefaultCommand = HasDefaultCommand(); bool onlyHasDefaultCommand = OnlyHasDefaultCommand(); bool requiresDetailsScreen = HasItemDetails() || (!onlyHasDefaultCommand && HasCommands()); bool itemRequiresHyperlink = requiresDetailsScreen || hasDefaultCommand; bool itemRequiresMoreButton = requiresDetailsScreen && hasDefaultCommand; int fieldCount; int[] fieldIndices = new int[]{}; if (shouldRenderAsTable) { fieldIndices = Control.TableFieldIndices; } Debug.Assert(fieldIndices != null, "fieldIndices is null"); fieldCount = fieldIndices.Length; if(fieldCount == 0) { fieldIndices = new int[1]; fieldIndices[0] = Control.LabelFieldIndex; fieldCount = 1; } Style style = this.Style; Style subCommandStyle = Control.CommandStyle; Style labelStyle = Control.LabelStyle; Color foreColor = (Color)style[Style.ForeColorKey, true]; writer.BeginStyleContext(); writer.WriteLine("<table border=0 width=\"100%\">\r\n<tr>"); for (int field = 0; field < fieldCount; field++) { writer.Write("<td>"); writer.BeginStyleContext(); writer.EnterStyle(labelStyle); writer.WriteText(Control.AllFields[fieldIndices[field]].Title, true); writer.ExitStyle(labelStyle); writer.EndStyleContext(); writer.Write("</td>"); } if (itemRequiresMoreButton) { writer.WriteLine("<td/>"); } writer.WriteLine("\r\n</tr>"); RenderRule(writer, foreColor, fieldCount + 1); for (int i = 0; i < pageSize; i++) { ObjectListItem item = items[pageStart + i]; writer.WriteLine("<tr>"); for (int field = 0; field < fieldCount; field++) { writer.Write("<td>"); if (field == 0 && itemRequiresHyperlink) { writer.BeginStyleContext(); writer.EnterStyle(style); String eventArgument = hasDefaultCommand ? item.Index.ToString(CultureInfo.InvariantCulture) : String.Format(CultureInfo.InvariantCulture, ShowMoreFormat, item.Index.ToString(CultureInfo.InvariantCulture)); RenderPostBackEventAsAnchor(writer, eventArgument, item[fieldIndices[0]]); writer.ExitStyle(style); writer.EndStyleContext(); } else { writer.BeginStyleContext(); writer.EnterStyle(style); writer.WriteText(item[fieldIndices[field]], true); writer.ExitStyle(style); writer.EndStyleContext(); } writer.WriteLine("</td>"); } if (itemRequiresMoreButton) { writer.Write("<td align=right>"); writer.BeginStyleContext(); writer.EnterFormat(subCommandStyle); String moreText = Control.MoreText.Length == 0 ? 
GetDefaultLabel(MoreLabel) : Control.MoreText; RenderPostBackEventAsAnchor(writer, String.Format(CultureInfo.InvariantCulture, ShowMoreFormat, item.Index), moreText, subCommandStyle); writer.ExitFormat(subCommandStyle); writer.EndStyleContext(); writer.Write("</td>\r\n"); } writer.WriteLine("</tr>"); } RenderRule(writer, foreColor, fieldCount + 1); writer.Write("</table>\r\n"); writer.EndStyleContext(); } private void RenderItemsListWithoutTableTags(HtmlMobileTextWriter writer) { int startIndex = Control.FirstVisibleItemIndex; int pageSize = Control.VisibleItemCount; ObjectListItemCollection items = Control.Items; IObjectListFieldCollection allFields = Control.AllFields; int count = allFields.Count; int nextStartIndex = startIndex + pageSize; int labelFieldIndex = Control.LabelFieldIndex; Style style = this.Style; Style labelStyle = Control.LabelStyle; writer.EnterStyle(labelStyle); writer.WriteText(Control.AllFields[labelFieldIndex].Title, true); writer.ExitStyle(labelStyle, true); bool hasDefaultCommand = HasDefaultCommand(); bool onlyHasDefaultCommand = OnlyHasDefaultCommand(); bool requiresDetailsScreen = !onlyHasDefaultCommand && HasCommands(); // if there is > 1 visible field, need a details screen for (int visibleFields = 0, i = 0; !requiresDetailsScreen && i < count; i++) { visibleFields += allFields[i].Visible ? 1 : 0; requiresDetailsScreen = requiresDetailsScreen || visibleFields > 1; } bool itemRequiresHyperlink = requiresDetailsScreen || hasDefaultCommand; bool itemRequiresMoreButton = requiresDetailsScreen && hasDefaultCommand; Style subCommandStyle = Control.CommandStyle; subCommandStyle.Alignment = style.Alignment; subCommandStyle.Wrapping = style.Wrapping; writer.EnterStyle(style); for (int i = startIndex; i < nextStartIndex; i++) { ObjectListItem item = items[i]; if (itemRequiresHyperlink) { RenderPostBackEventAsAnchor(writer, hasDefaultCommand ? item.Index.ToString(CultureInfo.InvariantCulture) : String.Format(CultureInfo.InvariantCulture, ShowMoreFormat, item.Index), item[labelFieldIndex]); } else { writer.WriteText(item[labelFieldIndex], true); } if (itemRequiresMoreButton) { BooleanOption cachedItalic = subCommandStyle.Font.Italic; subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterFormat(subCommandStyle); writer.Write(" ["); writer.ExitFormat(subCommandStyle); subCommandStyle.Font.Italic = cachedItalic; writer.EnterFormat(subCommandStyle); String moreText = Control.MoreText.Length == 0 ? 
GetDefaultLabel(MoreLabel) : Control.MoreText; writer.WriteBeginTag("a"); RenderPostBackEventAsAttribute(writer, "href", String.Format(CultureInfo.InvariantCulture, ShowMoreFormat, item.Index)); writer.Write(">"); writer.WriteText(moreText, true); writer.WriteEndTag("a"); writer.ExitFormat(subCommandStyle); subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterFormat(subCommandStyle); writer.Write("]"); writer.ExitFormat(subCommandStyle); subCommandStyle.Font.Italic = cachedItalic; } if(i < (nextStartIndex - 1)) { writer.WriteBreak(); } } writer.ExitStyle(style, Control.BreakAfter); } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.RenderItemDetails"]/*' /> protected virtual void RenderItemDetails(HtmlMobileTextWriter writer, ObjectListItem item) { if (Control.AllFields.Count == 0) { return; } if(Device.Tables) { RenderItemDetailsWithTableTags(writer, item); } else { RenderItemDetailsWithoutTableTags(writer, item); } } private void RenderItemDetailsWithTableTags(HtmlMobileTextWriter writer, ObjectListItem item) { Style style = this.Style; Style labelStyle = Control.LabelStyle; Style subCommandStyle = Control.CommandStyle; Color foreColor = (Color)style[Style.ForeColorKey, true]; writer.Write("<table border=0 width=\"100%\">\r\n<tr><td colspan=2>"); writer.BeginStyleContext(); writer.EnterStyle(labelStyle); writer.WriteText(item[Control.LabelFieldIndex], true); writer.ExitStyle(labelStyle); writer.EndStyleContext(); writer.Write("</td></tr>\r\n<tr>"); RenderRule(writer, foreColor, 2); IObjectListFieldCollection fields = Control.AllFields; int fieldIndex = 0; foreach (ObjectListField field in fields) { if (field.Visible) { writer.Write("<tr><td>"); writer.BeginStyleContext(); writer.EnterStyle(Style); writer.WriteText(field.Title, true); writer.ExitStyle(Style); writer.EndStyleContext(); writer.Write("</td><td>"); writer.BeginStyleContext(); writer.EnterStyle(style); writer.WriteText(item[fieldIndex], true); writer.ExitStyle(style); writer.EndStyleContext(); writer.Write("</td></tr>\r\n"); } fieldIndex++; } RenderRule(writer, foreColor, 2); writer.Write("<tr><td colspan=2>"); writer.BeginStyleContext(); BooleanOption cachedItalic = subCommandStyle.Font.Italic; subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterStyle(subCommandStyle); writer.Write("[&nbsp;"); writer.ExitStyle(subCommandStyle); subCommandStyle.Font.Italic = cachedItalic; writer.EnterStyle(subCommandStyle); ObjectListCommandCollection commands = Control.Commands; foreach (ObjectListCommand command in commands) { RenderPostBackEventAsAnchor(writer, command.Name, command.Text, subCommandStyle); writer.Write("&nbsp;|&nbsp;"); } String backCommandText = Control.BackCommandText.Length == 0 ? 
GetDefaultLabel(BackLabel) : Control.BackCommandText; RenderPostBackEventAsAnchor(writer, BackToList, backCommandText, subCommandStyle); writer.ExitStyle(subCommandStyle); subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterStyle(subCommandStyle); writer.Write("&nbsp;]"); writer.ExitStyle(subCommandStyle); subCommandStyle.Font.Italic = cachedItalic; writer.EndStyleContext(); writer.Write("</td></tr></table>"); } private void RenderItemDetailsWithoutTableTags(HtmlMobileTextWriter writer, ObjectListItem item) { Style style = this.Style; Style labelStyle = Control.LabelStyle; Style subCommandStyle = Control.CommandStyle; writer.EnterStyle(labelStyle); writer.WriteText(item[Control.LabelFieldIndex], true); writer.ExitStyle(labelStyle, true); IObjectListFieldCollection fields = Control.AllFields; int fieldIndex = 0; bool boldInStyle = (style.Font.Bold == BooleanOption.True) ? true : false; writer.EnterStyle(style); foreach (ObjectListField field in fields) { if (field.Visible) { if (!boldInStyle) { writer.Write("<b>"); } writer.WriteText(field.Title + ":", true); if (!boldInStyle) { writer.Write("</b>"); } writer.Write("&nbsp;"); writer.WriteText(item[fieldIndex], true); writer.WriteBreak(); } fieldIndex++; } writer.ExitStyle(style); BooleanOption cachedItalic = subCommandStyle.Font.Italic; subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterStyle(subCommandStyle); writer.Write("[&nbsp;"); writer.ExitStyle(subCommandStyle); subCommandStyle.Font.Italic = cachedItalic; writer.EnterStyle(subCommandStyle); ObjectListCommandCollection commands = Control.Commands; foreach (ObjectListCommand command in commands) { RenderPostBackEventAsAnchor(writer, command.Name, command.Text, subCommandStyle); writer.Write("&nbsp;|&nbsp;"); } String backCommandText = Control.BackCommandText.Length == 0 ? GetDefaultLabel(BackLabel) : Control.BackCommandText; RenderPostBackEventAsAnchor(writer, BackToList, backCommandText, subCommandStyle); writer.ExitStyle(subCommandStyle); subCommandStyle.Font.Italic = BooleanOption.False; writer.EnterStyle(subCommandStyle); writer.Write("&nbsp;]"); writer.ExitStyle(subCommandStyle, Control.BreakAfter); subCommandStyle.Font.Italic = cachedItalic; } // Private overload for use with subcommands. // Style, Enter/ExitFormat included only for completeness because style // is already set for subcommands. private void RenderPostBackEventAsAnchor( HtmlMobileTextWriter writer, String argument, String linkText, Style style) { writer.EnterFormat(style); writer.WriteBeginTag("a"); RenderPostBackEventAsAttribute(writer, "href", argument); writer.Write(">"); writer.WriteText(linkText, true); writer.WriteEndTag("a"); writer.ExitFormat(style); } private void RenderRule(HtmlMobileTextWriter writer, Color foreColor, int columnSpan) { writer.Write("<tr><td colspan="); writer.Write(columnSpan.ToString(CultureInfo.InvariantCulture)); writer.Write(" bgcolor=\""); writer.Write((foreColor != Color.Empty) ? ColorTranslator.ToHtml(foreColor) : "#000000"); writer.Write("\"></td></tr>"); } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.HandlePostBackEvent"]/*' /> public override bool HandlePostBackEvent(String eventArgument) { switch (Control.ViewMode) { case ObjectListViewMode.List: // DCR 2493 - raise a selection event, and only continue // handling if asked to. 
if (eventArgument.StartsWith(ShowMore, StringComparison.Ordinal)) { int itemIndex = ParseItemArg(eventArgument); if (Control.SelectListItem(itemIndex, true)) { if (Control.SelectedIndex > -1) { // ObjectListViewMode.Commands and .Details same for HTML, // but cannot access ObjLst.Details in Commands mode. Control.ViewMode = ObjectListViewMode.Details; } } } else { int itemIndex = -1; try { itemIndex = Int32.Parse(eventArgument, CultureInfo.InvariantCulture); } catch (System.FormatException) { throw new Exception (SR.GetString(SR.ObjectListAdapter_InvalidPostedData)); } if (Control.SelectListItem(itemIndex, false)) { Control.RaiseDefaultItemEvent(itemIndex); } } return true; case ObjectListViewMode.Commands: case ObjectListViewMode.Details: if (eventArgument == BackToList) { Control.ViewMode = ObjectListViewMode.List; return true; } break; } return false; } private static int ParseItemArg(String arg) { return Int32.Parse(arg.Substring(ShowMore.Length), CultureInfo.InvariantCulture); } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.ShouldRenderAsTable"]/*' /> protected virtual bool ShouldRenderAsTable() { return true; } private BooleanOption _hasItemDetails = BooleanOption.NotSet; /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.HasItemDetails"]/*' /> protected bool HasItemDetails() { if (_hasItemDetails == BooleanOption.NotSet) { // Calculate how many visible fields are shown in list view. int visibleFieldsInListView; int[] tableFieldIndices = Control.TableFieldIndices; if (ShouldRenderAsTable() && tableFieldIndices.Length != 0) { visibleFieldsInListView = VisibleTableFieldsCount; Debug.Assert (visibleFieldsInListView >= 0, "visibleFieldsInListView is negative"); } else { visibleFieldsInListView = Control.AllFields[Control.LabelFieldIndex].Visible ? 1 : 0; } // Calculate the number of visible fields. _hasItemDetails = BooleanOption.False; int visibleFieldCount = 0; foreach (ObjectListField field in Control.AllFields) { if (field.Visible) { visibleFieldCount++; if (visibleFieldCount > visibleFieldsInListView) { _hasItemDetails = BooleanOption.True; break; } } } } return _hasItemDetails == BooleanOption.True; } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.HasCommands"]/*' /> protected bool HasCommands() { return Control.Commands.Count > 0; } /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.HasDefaultCommand"]/*' /> protected bool HasDefaultCommand() { return Control.DefaultCommand.Length > 0; } private BooleanOption _onlyHasDefaultCommand = BooleanOption.NotSet; /// <include file='doc\HtmlObjectListAdapter.uex' path='docs/doc[@for="HtmlObjectListAdapter.OnlyHasDefaultCommand"]/*' /> protected bool OnlyHasDefaultCommand() { if (_onlyHasDefaultCommand == BooleanOption.NotSet) { String defaultCommand = Control.DefaultCommand; if (defaultCommand.Length > 0) { int commandCount = Control.Commands.Count; if (commandCount == 0 || (commandCount == 1 && String.Compare(defaultCommand, Control.Commands[0].Name, StringComparison.OrdinalIgnoreCase) == 0)) { _onlyHasDefaultCommand = BooleanOption.True; } else { _onlyHasDefaultCommand = BooleanOption.False; } } else { _onlyHasDefaultCommand = BooleanOption.False; } } return _onlyHasDefaultCommand == BooleanOption.True; } // This appears in both Html and Wml adapters, is used in // ShouldRenderAsTable(). In adapters rather than control // because specialized rendering method. 
private int _visibleTableFieldsCount = -1; private int VisibleTableFieldsCount { get { if (_visibleTableFieldsCount == -1) { int[] tableFieldIndices = Control.TableFieldIndices; _visibleTableFieldsCount = 0; for (int i = 0; i < tableFieldIndices.Length; i++) { if (Control.AllFields[tableFieldIndices[i]].Visible) { _visibleTableFieldsCount++; } } } return _visibleTableFieldsCount; } } } }
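// --------------------------------------------------------------------------
// Illustrative sketch (not part of the adapter above): HtmlObjectListAdapter
// round-trips its list/details state through postback arguments -- "__more{0}"
// carries the index of the item whose details view should open, "__back"
// returns to the list, and a bare integer raises the default command. The
// standalone snippet below mirrors that encoding/decoding; the class and
// method names are hypothetical.
using System;
using System.Globalization;

internal static class ObjectListPostBackArgs
{
    private const string BackToList = "__back";
    private const string ShowMore = "__more";
    private const string ShowMoreFormat = "__more{0}";

    // Builds the argument rendered into an item's "More" anchor.
    internal static string ForItem(int itemIndex) =>
        string.Format(CultureInfo.InvariantCulture, ShowMoreFormat, itemIndex);

    // Decodes a posted-back argument the same way HandlePostBackEvent does.
    internal static string Describe(string eventArgument)
    {
        if (eventArgument == BackToList)
        {
            return "return to list view";
        }
        if (eventArgument.StartsWith(ShowMore, StringComparison.Ordinal))
        {
            int index = int.Parse(eventArgument.Substring(ShowMore.Length), CultureInfo.InvariantCulture);
            return $"show details for item {index}";
        }
        int defaultIndex = int.Parse(eventArgument, CultureInfo.InvariantCulture);
        return $"raise default command for item {defaultIndex}";
    }
}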
using System; using Eto.Drawing; using System.ComponentModel; namespace Eto.Forms { /// <summary> /// Specifies the text alignment for a <see cref="Label"/> /// </summary> public enum TextAlignment { /// <summary> /// Text will be aligned to the left /// </summary> Left, /// <summary> /// Text will be aligned in the center of the label /// </summary> Center, /// <summary> /// Text will be aligned to the right /// </summary> Right } /// <summary> /// Specifies the wrapping mode for the text of a <see cref="Label"/> /// </summary> /// <remarks> /// Regardless of the mode, you can always add hard wraps by inserting newline characters. /// </remarks> public enum WrapMode { /// <summary> /// No wrapping, the text will clip when smaller than the required space for the text. /// </summary> None, /// <summary> /// Text will wrap by word to fit the horizontal space available /// </summary> Word, /// <summary> /// Text will wrap by character to fit the horizontal space available /// </summary> Character } /// <summary> /// Displays a string of text on the form /// </summary> [Handler(typeof(Label.IHandler))] public class Label : TextControl { new IHandler Handler { get { return (IHandler)base.Handler; } } /// <summary> /// Gets or sets the wrap mode for the text /// </summary> /// <remarks> /// This defines the soft wrapping for the label's text. /// Hard wraps can be placed in the text with newline characters. /// /// Wrapping will only occur if the label's width is smaller than the space needed for the text. /// If you are autosizing your control, it may get autosized to the width so you will have to add constraints /// to the container or explicitly set the size. /// </remarks> /// <value>The wrapping mode for the text.</value> [DefaultValue(WrapMode.Word)] public WrapMode Wrap { get { return Handler.Wrap; } set { Handler.Wrap = value; } } /// <summary> /// Gets or sets the horizontal alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. /// </remarks> /// <value>The horizontal alignment.</value> public TextAlignment TextAlignment { get { return Handler.TextAlignment; } set { Handler.TextAlignment = value; } } /// <summary> /// Gets or sets the horizontal alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. /// </remarks> /// <value>The horizontal alignment.</value> [Obsolete("Since 2.1: Use TextAlignment instead")] public HorizontalAlign HorizontalAlign { get { return Handler.TextAlignment; } set { Handler.TextAlignment = value; } } /// <summary> /// Gets or sets the vertical alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. /// </remarks> /// <value>The vertical alignment.</value> public VerticalAlignment VerticalAlignment { get { return Handler.VerticalAlignment; } set { Handler.VerticalAlignment = value; } } /// <summary> /// Gets or sets the vertical alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. 
/// </remarks> /// <value>The vertical alignment.</value> [Obsolete("Since 2.1: Use VerticalAlignment instead")] public VerticalAlign VerticalAlign { get { return Handler.VerticalAlignment; } set { Handler.VerticalAlignment = value; } } /// <summary> /// Handler interface for the <see cref="Label"/> /// </summary> public new interface IHandler : TextControl.IHandler { /// <summary> /// Gets or sets the horizontal alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. /// </remarks> /// <value>The horizontal alignment.</value> TextAlignment TextAlignment { get; set; } /// <summary> /// Gets or sets the vertical alignment of the text. /// </summary> /// <remarks> /// When auto sizing the label, this won't have an affect unless the label's container is larger than the text. /// </remarks> /// <value>The vertical alignment.</value> VerticalAlignment VerticalAlignment { get; set; } /// <summary> /// Gets or sets the wrap mode for the text /// </summary> /// <remarks> /// This defines the soft wrapping for the label's text. /// Hard wraps can be placed in the text with newline characters. /// /// Wrapping will only occur if the label's width is smaller than the space needed for the text. /// If you are autosizing your control, it may get autosized to the width so you will have to add constraints /// to the container or explicitly set the size. /// </remarks> /// <value>The wrapping mode for the text.</value> WrapMode Wrap { get; set; } } } #region Obsolete /// <summary> /// Specifies the horizontal alignment for a <see cref="Label"/> /// </summary> [Obsolete("Since 2.1: Use TextAlignment instead")] public struct HorizontalAlign { readonly TextAlignment value; HorizontalAlign(TextAlignment value) { this.value = value; } /// <summary> /// Text will be aligned in the center of the label /// </summary> public static HorizontalAlign Center { get { return TextAlignment.Center; } } /// <summary> /// Text will be aligned to the left /// </summary> public static HorizontalAlign Left { get { return TextAlignment.Left; } } /// <summary> /// Text will be aligned to the right /// </summary> public static HorizontalAlign Right { get { return TextAlignment.Right; } } /// <summary>Converts to a TextAlignment</summary> public static implicit operator TextAlignment(HorizontalAlign value) { return value.value; } /// <summary>Converts an TextAlignment to a HorizontalAlign</summary> public static implicit operator HorizontalAlign(TextAlignment value) { return new HorizontalAlign(value); } /// <summary>Compares for equality</summary> /// <param name="value1">Value1.</param> /// <param name="value2">Value2.</param> public static bool operator ==(TextAlignment value1, HorizontalAlign value2) { return value1 == value2.value; } /// <summary>Compares for inequality</summary> /// <param name="value1">Value1.</param> /// <param name="value2">Value2.</param> public static bool operator !=(TextAlignment value1, HorizontalAlign value2) { return value1 != value2.value; } /// <summary>Convert from string to vertical align (for json/xaml compat)</summary> /// <param name="value">Value.</param> public static implicit operator HorizontalAlign(string value) { switch (value.ToLowerInvariant()) { case "Center": return HorizontalAlign.Center; case "Bottom": return HorizontalAlign.Right; default: case "Top": return HorizontalAlign.Left; } } /// <summary> /// Determines whether the specified <see cref="System.Object"/> is equal to 
the current <see cref="Eto.Forms.HorizontalAlign"/>. /// </summary> /// <param name="obj">The <see cref="System.Object"/> to compare with the current <see cref="Eto.Forms.HorizontalAlign"/>.</param> /// <returns><c>true</c> if the specified <see cref="System.Object"/> is equal to the current /// <see cref="Eto.Forms.HorizontalAlign"/>; otherwise, <c>false</c>.</returns> public override bool Equals(object obj) { return (obj is HorizontalAlign && (this == (HorizontalAlign)obj)) || (obj is TextAlignment && (this == (TextAlignment)obj)); } /// <summary> /// Serves as a hash function for a <see cref="Eto.Forms.HorizontalAlign"/> object. /// </summary> /// <returns>A hash code for this instance that is suitable for use in hashing algorithms and data structures such as a hash table.</returns> public override int GetHashCode() { return value.GetHashCode(); } } /// <summary> /// Specifies the horizontal alignment for a <see cref="Label"/> /// </summary> [Obsolete("Since 2.1: Use VerticalAlignment instead")] public struct VerticalAlign { readonly VerticalAlignment value; VerticalAlign(VerticalAlignment value) { this.value = value; } /// <summary> /// Text will be aligned to the middle of the label /// </summary> public static VerticalAlign Middle { get { return VerticalAlignment.Center; } } /// <summary> /// Text will be aligned to the top of the label /// </summary> public static VerticalAlign Top { get { return VerticalAlignment.Top; } } /// <summary> /// Text will be aligned to the bottom of the label /// </summary> public static VerticalAlign Bottom { get { return VerticalAlignment.Bottom; } } /// <summary>Converts to an VerticalAlignment</summary> public static implicit operator VerticalAlignment(VerticalAlign value) { return value.value; } /// <summary>Converts an VerticalAlignment to a VerticalAlign</summary> public static implicit operator VerticalAlign(VerticalAlignment value) { return new VerticalAlign(value); } /// <summary>Compares for equality</summary> /// <param name="value1">Value1.</param> /// <param name="value2">Value2.</param> public static bool operator ==(VerticalAlignment value1, VerticalAlign value2) { return value1 == value2.value; } /// <summary>Compares for inequality</summary> /// <param name="value1">Value1.</param> /// <param name="value2">Value2.</param> public static bool operator !=(VerticalAlignment value1, VerticalAlign value2) { return value1 != value2.value; } /// <summary>Convert from string to vertical align (for json/xaml compat)</summary> /// <param name="value">Value.</param> public static implicit operator VerticalAlign(string value) { switch (value.ToLowerInvariant()) { case "Middle": return VerticalAlign.Middle; case "Bottom": return VerticalAlign.Bottom; default: case "Top": return VerticalAlign.Top; } } /// <summary> /// Determines whether the specified <see cref="System.Object"/> is equal to the current <see cref="Eto.Forms.VerticalAlign"/>. /// </summary> /// <param name="obj">The <see cref="System.Object"/> to compare with the current <see cref="Eto.Forms.VerticalAlign"/>.</param> /// <returns><c>true</c> if the specified <see cref="System.Object"/> is equal to the current /// <see cref="Eto.Forms.VerticalAlign"/>; otherwise, <c>false</c>.</returns> public override bool Equals(object obj) { return (obj is VerticalAlign && (this == (VerticalAlign)obj)) || (obj is VerticalAlignment && (this == (VerticalAlignment)obj)); } /// <summary> /// Serves as a hash function for a <see cref="Eto.Forms.VerticalAlign"/> object. 
/// </summary> /// <returns>A hash code for this instance that is suitable for use in hashing algorithms and data structures such as a hash table.</returns> public override int GetHashCode() { return value.GetHashCode(); } } #endregion }
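// --------------------------------------------------------------------------
// Illustrative sketch (not part of the Eto.Forms source above): a minimal use
// of the Label members declared here -- Wrap, TextAlignment and
// VerticalAlignment. The Text property inherited from TextControl and the
// Form host with Title/Content are assumed from the same library; the form
// class name is hypothetical.
using Eto.Forms;

public class LabelSampleForm : Form
{
    public LabelSampleForm()
    {
        Title = "Label sample";
        Content = new Label
        {
            Text = "A long line of text that soft-wraps by word when the window is narrow.\n" +
                   "This second line starts after a hard wrap.",
            Wrap = WrapMode.Word,                   // soft wrapping; hard wraps via '\n' always apply
            TextAlignment = TextAlignment.Center,   // horizontal alignment within the label's width
            VerticalAlignment = VerticalAlignment.Center
        };
    }
}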
// The following code was generated by JFlex 1.5.1 using Lucene.Net.Analysis.TokenAttributes; using System; using System.IO; namespace Lucene.Net.Analysis.Standard { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// This class implements Word Break rules from the Unicode Text Segmentation /// algorithm, as specified in /// <a href="http://unicode.org/reports/tr29/">Unicode Standard Annex #29</a>. /// <para/> /// Tokens produced are of the following types: /// <list type="bullet"> /// <item><description>&lt;ALPHANUM&gt;: A sequence of alphabetic and numeric characters</description></item> /// <item><description>&lt;NUM&gt;: A number</description></item> /// <item><description>&lt;SOUTHEAST_ASIAN&gt;: A sequence of characters from South and Southeast /// Asian languages, including Thai, Lao, Myanmar, and Khmer</description></item> /// <item><description>&lt;IDEOGRAPHIC&gt;: A single CJKV ideographic character</description></item> /// <item><description>&lt;HIRAGANA&gt;: A single hiragana character</description></item> /// <item><description>&lt;KATAKANA&gt;: A sequence of katakana characters</description></item> /// <item><description>&lt;HANGUL&gt;: A sequence of Hangul characters</description></item> /// </list> /// </summary> public sealed class StandardTokenizerImpl : IStandardTokenizerInterface { /// <summary> /// This character denotes the end of file </summary> public static readonly int YYEOF = -1; /// <summary> /// initial size of the lookahead buffer </summary> private static readonly int ZZ_BUFFERSIZE = 4096; /// <summary> /// lexical states </summary> public const int YYINITIAL = 0; /// <summary> /// ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l /// ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l /// at the beginning of a line /// l is of the form l = 2*k, k a non negative integer /// </summary> private static readonly int[] ZZ_LEXSTATE = { 0, 0 }; /// <summary> /// Translates characters to character classes /// </summary> private const string ZZ_CMAP_PACKED = "\x0022\x0000\x0001\x008B\x0004\x0000\x0001\x008A\x0004\x0000\x0001\x0083\x0001\x0000\x0001\x0084\x0001\x0000\x000A\x0080" + "\x0001\x0082\x0001\x0083\x0005\x0000\x001A\x007E\x0004\x0000\x0001\x0085\x0001\x0000\x001A\x007E\x002F\x0000\x0001\x007E" + "\x0002\x0000\x0001\x007F\x0007\x0000\x0001\x007E\x0001\x0000\x0001\x0082\x0002\x0000\x0001\x007E\x0005\x0000\x0017\x007E" + "\x0001\x0000\x001F\x007E\x0001\x0000\u01ca\x007E\x0004\x0000\x000C\x007E\x0005\x0000\x0001\x0082\x0008\x0000\x0005\x007E" + "\x0007\x0000\x0001\x007E\x0001\x0000\x0001\x007E\x0011\x0000\x0070\x007F\x0005\x007E\x0001\x0000\x0002\x007E\x0002\x0000" + "\x0004\x007E\x0001\x0083\x0007\x0000\x0001\x007E\x0001\x0082\x0003\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x0014\x007E" + 
"\x0001\x0000\x0053\x007E\x0001\x0000\x008B\x007E\x0001\x0000\x0007\x007F\x009E\x007E\x0009\x0000\x0026\x007E\x0002\x0000" + "\x0001\x007E\x0007\x0000\x0027\x007E\x0001\x0000\x0001\x0083\x0007\x0000\x002D\x007F\x0001\x0000\x0001\x007F\x0001\x0000" + "\x0002\x007F\x0001\x0000\x0002\x007F\x0001\x0000\x0001\x007F\x0008\x0000\x001B\x008C\x0005\x0000\x0003\x008C\x0001\x007E" + "\x0001\x0082\x000B\x0000\x0005\x007F\x0007\x0000\x0002\x0083\x0002\x0000\x000B\x007F\x0001\x0000\x0001\x007F\x0003\x0000" + "\x002B\x007E\x0015\x007F\x000A\x0080\x0001\x0000\x0001\x0080\x0001\x0083\x0001\x0000\x0002\x007E\x0001\x007F\x0063\x007E" + "\x0001\x0000\x0001\x007E\x0007\x007F\x0001\x007F\x0001\x0000\x0006\x007F\x0002\x007E\x0002\x007F\x0001\x0000\x0004\x007F" + "\x0002\x007E\x000A\x0080\x0003\x007E\x0002\x0000\x0001\x007E\x000F\x0000\x0001\x007F\x0001\x007E\x0001\x007F\x001E\x007E" + "\x001B\x007F\x0002\x0000\x0059\x007E\x000B\x007F\x0001\x007E\x000E\x0000\x000A\x0080\x0021\x007E\x0009\x007F\x0002\x007E" + "\x0002\x0000\x0001\x0083\x0001\x0000\x0001\x007E\x0005\x0000\x0016\x007E\x0004\x007F\x0001\x007E\x0009\x007F\x0001\x007E" + "\x0003\x007F\x0001\x007E\x0005\x007F\x0012\x0000\x0019\x007E\x0003\x007F\x0044\x0000\x0001\x007E\x0001\x0000\x000B\x007E" + "\x0037\x0000\x001B\x007F\x0001\x0000\x0004\x007F\x0036\x007E\x0003\x007F\x0001\x007E\x0012\x007F\x0001\x007E\x0007\x007F" + "\x000A\x007E\x0002\x007F\x0002\x0000\x000A\x0080\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0003\x007F" + "\x0001\x0000\x0008\x007E\x0002\x0000\x0002\x007E\x0002\x0000\x0016\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0001\x007E" + "\x0003\x0000\x0004\x007E\x0002\x0000\x0001\x007F\x0001\x007E\x0007\x007F\x0002\x0000\x0002\x007F\x0002\x0000\x0003\x007F" + "\x0001\x007E\x0008\x0000\x0001\x007F\x0004\x0000\x0002\x007E\x0001\x0000\x0003\x007E\x0002\x007F\x0002\x0000\x000A\x0080" + "\x0002\x007E\x000F\x0000\x0003\x007F\x0001\x0000\x0006\x007E\x0004\x0000\x0002\x007E\x0002\x0000\x0016\x007E\x0001\x0000" + "\x0007\x007E\x0001\x0000\x0002\x007E\x0001\x0000\x0002\x007E\x0001\x0000\x0002\x007E\x0002\x0000\x0001\x007F\x0001\x0000" + "\x0005\x007F\x0004\x0000\x0002\x007F\x0002\x0000\x0003\x007F\x0003\x0000\x0001\x007F\x0007\x0000\x0004\x007E\x0001\x0000" + "\x0001\x007E\x0007\x0000\x000A\x0080\x0002\x007F\x0003\x007E\x0001\x007F\x000B\x0000\x0003\x007F\x0001\x0000\x0009\x007E" + "\x0001\x0000\x0003\x007E\x0001\x0000\x0016\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0002\x007E\x0001\x0000\x0005\x007E" + "\x0002\x0000\x0001\x007F\x0001\x007E\x0008\x007F\x0001\x0000\x0003\x007F\x0001\x0000\x0003\x007F\x0002\x0000\x0001\x007E" + "\x000F\x0000\x0002\x007E\x0002\x007F\x0002\x0000\x000A\x0080\x0011\x0000\x0003\x007F\x0001\x0000\x0008\x007E\x0002\x0000" + "\x0002\x007E\x0002\x0000\x0016\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0002\x007E\x0001\x0000\x0005\x007E\x0002\x0000" + "\x0001\x007F\x0001\x007E\x0007\x007F\x0002\x0000\x0002\x007F\x0002\x0000\x0003\x007F\x0008\x0000\x0002\x007F\x0004\x0000" + "\x0002\x007E\x0001\x0000\x0003\x007E\x0002\x007F\x0002\x0000\x000A\x0080\x0001\x0000\x0001\x007E\x0010\x0000\x0001\x007F" + "\x0001\x007E\x0001\x0000\x0006\x007E\x0003\x0000\x0003\x007E\x0001\x0000\x0004\x007E\x0003\x0000\x0002\x007E\x0001\x0000" + "\x0001\x007E\x0001\x0000\x0002\x007E\x0003\x0000\x0002\x007E\x0003\x0000\x0003\x007E\x0003\x0000\x000C\x007E\x0004\x0000" + "\x0005\x007F\x0003\x0000\x0003\x007F\x0001\x0000\x0004\x007F\x0002\x0000\x0001\x007E\x0006\x0000\x0001\x007F\x000E\x0000" + 
"\x000A\x0080\x0011\x0000\x0003\x007F\x0001\x0000\x0008\x007E\x0001\x0000\x0003\x007E\x0001\x0000\x0017\x007E\x0001\x0000" + "\x000A\x007E\x0001\x0000\x0005\x007E\x0003\x0000\x0001\x007E\x0007\x007F\x0001\x0000\x0003\x007F\x0001\x0000\x0004\x007F" + "\x0007\x0000\x0002\x007F\x0001\x0000\x0002\x007E\x0006\x0000\x0002\x007E\x0002\x007F\x0002\x0000\x000A\x0080\x0012\x0000" + "\x0002\x007F\x0001\x0000\x0008\x007E\x0001\x0000\x0003\x007E\x0001\x0000\x0017\x007E\x0001\x0000\x000A\x007E\x0001\x0000" + "\x0005\x007E\x0002\x0000\x0001\x007F\x0001\x007E\x0007\x007F\x0001\x0000\x0003\x007F\x0001\x0000\x0004\x007F\x0007\x0000" + "\x0002\x007F\x0007\x0000\x0001\x007E\x0001\x0000\x0002\x007E\x0002\x007F\x0002\x0000\x000A\x0080\x0001\x0000\x0002\x007E" + "\x000F\x0000\x0002\x007F\x0001\x0000\x0008\x007E\x0001\x0000\x0003\x007E\x0001\x0000\x0029\x007E\x0002\x0000\x0001\x007E" + "\x0007\x007F\x0001\x0000\x0003\x007F\x0001\x0000\x0004\x007F\x0001\x007E\x0008\x0000\x0001\x007F\x0008\x0000\x0002\x007E" + "\x0002\x007F\x0002\x0000\x000A\x0080\x000A\x0000\x0006\x007E\x0002\x0000\x0002\x007F\x0001\x0000\x0012\x007E\x0003\x0000" + "\x0018\x007E\x0001\x0000\x0009\x007E\x0001\x0000\x0001\x007E\x0002\x0000\x0007\x007E\x0003\x0000\x0001\x007F\x0004\x0000" + "\x0006\x007F\x0001\x0000\x0001\x007F\x0001\x0000\x0008\x007F\x0012\x0000\x0002\x007F\x000D\x0000\x0030\x0086\x0001\x0087" + "\x0002\x0086\x0007\x0087\x0005\x0000\x0007\x0086\x0008\x0087\x0001\x0000\x000A\x0080\x0027\x0000\x0002\x0086\x0001\x0000" + "\x0001\x0086\x0002\x0000\x0002\x0086\x0001\x0000\x0001\x0086\x0002\x0000\x0001\x0086\x0006\x0000\x0004\x0086\x0001\x0000" + "\x0007\x0086\x0001\x0000\x0003\x0086\x0001\x0000\x0001\x0086\x0001\x0000\x0001\x0086\x0002\x0000\x0002\x0086\x0001\x0000" + "\x0004\x0086\x0001\x0087\x0002\x0086\x0006\x0087\x0001\x0000\x0002\x0087\x0001\x0086\x0002\x0000\x0005\x0086\x0001\x0000" + "\x0001\x0086\x0001\x0000\x0006\x0087\x0002\x0000\x000A\x0080\x0002\x0000\x0004\x0086\x0020\x0000\x0001\x007E\x0017\x0000" + "\x0002\x007F\x0006\x0000\x000A\x0080\x000B\x0000\x0001\x007F\x0001\x0000\x0001\x007F\x0001\x0000\x0001\x007F\x0004\x0000" + "\x0002\x007F\x0008\x007E\x0001\x0000\x0024\x007E\x0004\x0000\x0014\x007F\x0001\x0000\x0002\x007F\x0005\x007E\x000B\x007F" + "\x0001\x0000\x0024\x007F\x0009\x0000\x0001\x007F\x0039\x0000\x002B\x0086\x0014\x0087\x0001\x0086\x000A\x0080\x0006\x0000" + "\x0006\x0086\x0004\x0087\x0004\x0086\x0003\x0087\x0001\x0086\x0003\x0087\x0002\x0086\x0007\x0087\x0003\x0086\x0004\x0087" + "\x000D\x0086\x000C\x0087\x0001\x0086\x0001\x0087\x000A\x0080\x0004\x0087\x0002\x0086\x0026\x007E\x0001\x0000\x0001\x007E" + "\x0005\x0000\x0001\x007E\x0002\x0000\x002B\x007E\x0001\x0000\x0004\x007E\u0100\x008D\x0049\x007E\x0001\x0000\x0004\x007E" + "\x0002\x0000\x0007\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x0004\x007E\x0002\x0000\x0029\x007E\x0001\x0000\x0004\x007E" + "\x0002\x0000\x0021\x007E\x0001\x0000\x0004\x007E\x0002\x0000\x0007\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x0004\x007E" + "\x0002\x0000\x000F\x007E\x0001\x0000\x0039\x007E\x0001\x0000\x0004\x007E\x0002\x0000\x0043\x007E\x0002\x0000\x0003\x007F" + "\x0020\x0000\x0010\x007E\x0010\x0000\x0055\x007E\x000C\x0000\u026c\x007E\x0002\x0000\x0011\x007E\x0001\x0000\x001A\x007E" + "\x0005\x0000\x004B\x007E\x0003\x0000\x0003\x007E\x000F\x0000\x000D\x007E\x0001\x0000\x0004\x007E\x0003\x007F\x000B\x0000" + "\x0012\x007E\x0003\x007F\x000B\x0000\x0012\x007E\x0002\x007F\x000C\x0000\x000D\x007E\x0001\x0000\x0003\x007E\x0001\x0000" + 
"\x0002\x007F\x000C\x0000\x0034\x0086\x0020\x0087\x0003\x0000\x0001\x0086\x0004\x0000\x0001\x0086\x0001\x0087\x0002\x0000" + "\x000A\x0080\x0021\x0000\x0003\x007F\x0001\x007F\x0001\x0000\x000A\x0080\x0006\x0000\x0058\x007E\x0008\x0000\x0029\x007E" + "\x0001\x007F\x0001\x007E\x0005\x0000\x0046\x007E\x000A\x0000\x001D\x007E\x0003\x0000\x000C\x007F\x0004\x0000\x000C\x007F" + "\x000A\x0000\x000A\x0080\x001E\x0086\x0002\x0000\x0005\x0086\x000B\x0000\x002C\x0086\x0004\x0000\x0011\x0087\x0007\x0086" + "\x0002\x0087\x0006\x0000\x000A\x0080\x0001\x0086\x0003\x0000\x0002\x0086\x0020\x0000\x0017\x007E\x0005\x007F\x0004\x0000" + "\x0035\x0086\x000A\x0087\x0001\x0000\x001D\x0087\x0002\x0000\x0001\x007F\x000A\x0080\x0006\x0000\x000A\x0080\x0006\x0000" + "\x000E\x0086\x0052\x0000\x0005\x007F\x002F\x007E\x0011\x007F\x0007\x007E\x0004\x0000\x000A\x0080\x0011\x0000\x0009\x007F" + "\x000C\x0000\x0003\x007F\x001E\x007E\x000D\x007F\x0002\x007E\x000A\x0080\x002C\x007E\x000E\x007F\x000C\x0000\x0024\x007E" + "\x0014\x007F\x0008\x0000\x000A\x0080\x0003\x0000\x0003\x007E\x000A\x0080\x0024\x007E\x0052\x0000\x0003\x007F\x0001\x0000" + "\x0015\x007F\x0004\x007E\x0001\x007F\x0004\x007E\x0003\x007F\x0002\x007E\x0009\x0000\x00C0\x007E\x0027\x007F\x0015\x0000" + "\x0004\x007F\u0116\x007E\x0002\x0000\x0006\x007E\x0002\x0000\x0026\x007E\x0002\x0000\x0006\x007E\x0002\x0000\x0008\x007E" + "\x0001\x0000\x0001\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x001F\x007E\x0002\x0000\x0035\x007E" + "\x0001\x0000\x0007\x007E\x0001\x0000\x0001\x007E\x0003\x0000\x0003\x007E\x0001\x0000\x0007\x007E\x0003\x0000\x0004\x007E" + "\x0002\x0000\x0006\x007E\x0004\x0000\x000D\x007E\x0005\x0000\x0003\x007E\x0001\x0000\x0007\x007E\x000F\x0000\x0002\x007F" + "\x0002\x007F\x0008\x0000\x0002\x0084\x000A\x0000\x0001\x0084\x0002\x0000\x0001\x0082\x0002\x0000\x0005\x007F\x0010\x0000" + "\x0002\x0085\x0003\x0000\x0001\x0083\x000F\x0000\x0001\x0085\x000B\x0000\x0005\x007F\x0001\x0000\x000A\x007F\x0001\x0000" + "\x0001\x007E\x000D\x0000\x0001\x007E\x0010\x0000\x000D\x007E\x0033\x0000\x0021\x007F\x0011\x0000\x0001\x007E\x0004\x0000" + "\x0001\x007E\x0002\x0000\x000A\x007E\x0001\x0000\x0001\x007E\x0003\x0000\x0005\x007E\x0006\x0000\x0001\x007E\x0001\x0000" + "\x0001\x007E\x0001\x0000\x0001\x007E\x0001\x0000\x0004\x007E\x0001\x0000\x000B\x007E\x0002\x0000\x0004\x007E\x0005\x0000" + "\x0005\x007E\x0004\x0000\x0001\x007E\x0011\x0000\x0029\x007E\u032d\x0000\x0034\x007E\u0716\x0000\x002F\x007E\x0001\x0000" + "\x002F\x007E\x0001\x0000\x0085\x007E\x0006\x0000\x0004\x007E\x0003\x007F\x0002\x007E\x000C\x0000\x0026\x007E\x0001\x0000" + "\x0001\x007E\x0005\x0000\x0001\x007E\x0002\x0000\x0038\x007E\x0007\x0000\x0001\x007E\x000F\x0000\x0001\x007F\x0017\x007E" + "\x0009\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E" + "\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0001\x0000\x0020\x007F\x002F\x0000\x0001\x007E" + "\x0050\x0000\x001A\x0088\x0001\x0000\x0059\x0088\x000C\x0000\x00D6\x0088\x002F\x0000\x0001\x007E\x0001\x0000\x0001\x0088" + "\x0019\x0000\x0009\x0088\x0006\x007F\x0001\x0000\x0005\x0081\x0002\x0000\x0003\x0088\x0001\x007E\x0001\x007E\x0004\x0000" + "\x0056\x0089\x0002\x0000\x0002\x007F\x0002\x0081\x0003\x0089\x005B\x0081\x0001\x0000\x0004\x0081\x0005\x0000\x0029\x007E" + "\x0003\x0000\x005E\x008D\x0011\x0000\x001B\x007E\x0035\x0000\x0010\x0081\x00D0\x0000\x002F\x0081\x0001\x0000\x0058\x0081" + 
"\x00A8\x0000\u19b6\x0088\x004A\x0000\u51cd\x0088\x0033\x0000\u048d\x007E\x0043\x0000\x002E\x007E\x0002\x0000\u010d\x007E" + "\x0003\x0000\x0010\x007E\x000A\x0080\x0002\x007E\x0014\x0000\x002F\x007E\x0004\x007F\x0001\x0000\x000A\x007F\x0001\x0000" + "\x0019\x007E\x0007\x0000\x0001\x007F\x0050\x007E\x0002\x007F\x0025\x0000\x0009\x007E\x0002\x0000\x0067\x007E\x0002\x0000" + "\x0004\x007E\x0001\x0000\x0004\x007E\x000C\x0000\x000B\x007E\x004D\x0000\x000A\x007E\x0001\x007F\x0003\x007E\x0001\x007F" + "\x0004\x007E\x0001\x007F\x0017\x007E\x0005\x007F\x0018\x0000\x0034\x007E\x000C\x0000\x0002\x007F\x0032\x007E\x0011\x007F" + "\x000B\x0000\x000A\x0080\x0006\x0000\x0012\x007F\x0006\x007E\x0003\x0000\x0001\x007E\x0004\x0000\x000A\x0080\x001C\x007E" + "\x0008\x007F\x0002\x0000\x0017\x007E\x000D\x007F\x000C\x0000\x001D\x008D\x0003\x0000\x0004\x007F\x002F\x007E\x000E\x007F" + "\x000E\x0000\x0001\x007E\x000A\x0080\x0026\x0000\x0029\x007E\x000E\x007F\x0009\x0000\x0003\x007E\x0001\x007F\x0008\x007E" + "\x0002\x007F\x0002\x0000\x000A\x0080\x0006\x0000\x001B\x0086\x0001\x0087\x0004\x0000\x0030\x0086\x0001\x0087\x0001\x0086" + "\x0003\x0087\x0002\x0086\x0002\x0087\x0005\x0086\x0002\x0087\x0001\x0086\x0001\x0087\x0001\x0086\x0018\x0000\x0005\x0086" + "\x000B\x007E\x0005\x007F\x0002\x0000\x0003\x007E\x0002\x007F\x000A\x0000\x0006\x007E\x0002\x0000\x0006\x007E\x0002\x0000" + "\x0006\x007E\x0009\x0000\x0007\x007E\x0001\x0000\x0007\x007E\x0091\x0000\x0023\x007E\x0008\x007F\x0001\x0000\x0002\x007F" + "\x0002\x0000\x000A\x0080\x0006\x0000\u2ba4\x008D\x000C\x0000\x0017\x008D\x0004\x0000\x0031\x008D\x0004\x0000\x0001\x0024" + "\x0001\x0020\x0001\x0037\x0001\x0034\x0001\x001B\x0001\x0018\x0002\x0000\x0001\x0014\x0001\x0011\x0002\x0000\x0001\x000F" + "\x0001\x000D\x000C\x0000\x0001\x0003\x0001\x0006\x0010\x0000\x0001\x006E\x0007\x0000\x0001\x0049\x0001\x0008\x0005\x0000" + "\x0001\x0001\x0001\x007A\x0003\x0000\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073" + "\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073" + "\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073" + "\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073" + "\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0074\x0001\x0073\x0001\x0073\x0001\x0073\x0001\x0078\x0001\x0076" + "\x000F\x0000\x0001\x0070\u02c1\x0000\x0001\x004C\x00BF\x0000\x0001\x006F\x0001\x004D\x0001\x000E\x0003\x0077\x0002\x0032" + "\x0001\x0077\x0001\x0032\x0002\x0077\x0001\x001E\x0011\x0077\x0002\x0046\x0007\x004F\x0001\x004E\x0007\x004F\x0007\x0042" + "\x0001\x001F\x0001\x0042\x0001\x0060\x0002\x0036\x0001\x0035\x0001\x0060\x0001\x0036\x0001\x0035\x0008\x0060\x0002\x0047" + "\x0005\x0043\x0002\x003D\x0005\x0043\x0001\x0012\x0008\x002B\x0005\x0013\x0003\x0021\x000A\x0052\x0010\x0021\x0003\x0033" + "\x001A\x0023\x0001\x0022\x0002\x0031\x0002\x0056\x0001\x0057\x0002\x0056\x0002\x0057\x0002\x0056\x0001\x0057\x0003\x0031" + "\x0001\x0030\x0002\x0031\x000A\x0048\x0001\x005E\x0001\x0028\x0001\x0025\x0001\x0048\x0006\x0028\x0001\x0025\x000B\x0028" + "\x0019\x0031\x0007\x0028\x000A\x0053\x0001\x0028\x0005\x000B\x0003\x005F\x0003\x0041\x0001\x0040\x0004\x0041\x0002\x0040" + "\x0008\x0041\x0001\x0040\x0007\x001D\x0001\x001C\x0002\x001D\x0007\x0041\x000E\x005F\x0001\x0069\x0004\x0054\x0001\x0004" + 
"\x0004\x0051\x0001\x0004\x0005\x0068\x0001\x0067\x0001\x0068\x0003\x0067\x0007\x0068\x0001\x0067\x0013\x0068\x0005\x004B" + "\x0003\x0068\x0006\x004B\x0002\x004B\x0006\x004A\x0005\x004A\x0003\x0064\x0002\x0041\x0007\x0063\x001E\x0041\x0004\x0063" + "\x0005\x0041\x0005\x005F\x0006\x005D\x0002\x005F\x0001\x005D\x0004\x001D\x000B\x0066\x000A\x0051\x000C\x0066\x000A\x007D" + "\x000D\x007C\x0001\x0065\x0002\x007C\x0001\x007B\x0003\x006A\x0001\x000B\x0002\x006A\x0005\x0071\x0004\x006A\x0004\x0072" + "\x0001\x0071\x0003\x0072\x0001\x0071\x0005\x0072\x0002\x0038\x0001\x003B\x0002\x0038\x0001\x003B\x0001\x0038\x0002\x003B" + "\x0001\x0038\x0001\x003B\x000A\x0038\x0001\x003B\x0004\x0005\x0001\x006C\x0001\x006B\x0001\x006D\x0001\x000A\x0003\x0075" + "\x0001\x006D\x0002\x0075\x0001\x0061\x0002\x0062\x0002\x0075\x0001\x000A\x0001\x0075\x0001\x000A\x0001\x0075\x0001\x000A" + "\x0001\x0075\x0003\x000A\x0001\x0075\x0002\x000A\x0001\x0075\x0001\x000A\x0002\x0075\x0001\x000A\x0001\x0075\x0001\x000A" + "\x0001\x0075\x0001\x000A\x0001\x0075\x0001\x000A\x0001\x0075\x0001\x000A\x0001\x003E\x0002\x003A\x0001\x003E\x0001\x003A" + "\x0002\x003E\x0004\x003A\x0001\x003E\x0007\x003A\x0001\x003E\x0004\x003A\x0001\x003E\x0004\x003A\x0001\x0075\x0001\x000A" + "\x0001\x0075\x000A\x0019\x0001\x002F\x0011\x0019\x0001\x002F\x0003\x001A\x0001\x002F\x0003\x0019\x0001\x002F\x0001\x0019" + "\x0002\x0002\x0002\x0019\x0001\x002F\x000D\x005C\x0004\x0027\x0004\x002C\x0001\x0050\x0001\x002E\x0008\x0050\x0007\x002C" + "\x0006\x0075\x0004\x0015\x0001\x0017\x001F\x0015\x0001\x0017\x0004\x0015\x0015\x0045\x0001\x0079\x0009\x0045\x0011\x0016" + "\x0005\x0045\x0001\x0007\x000A\x002D\x0005\x0045\x0006\x0044\x0004\x003E\x0001\x003F\x0001\x0016\x0005\x005B\x000A\x0059" + "\x000F\x005B\x0001\x003C\x0003\x0039\x000C\x0058\x0001\x0009\x0009\x0026\x0001\x002A\x0005\x0026\x0004\x005A\x000B\x0029" + "\x0002\x000C\x0009\x0026\x0001\x002A\x0019\x0026\x0001\x002A\x0004\x0009\x0004\x0026\x0002\x002A\x0002\x0055\x0001\x0010" + "\x0005\x0055\x002A\x0010\u1900\x0000\u016e\x0088\x0002\x0000\x006A\x0088\x0026\x0000\x0007\x007E\x000C\x0000\x0005\x007E" + "\x0005\x0000\x0001\x008C\x0001\x007F\x000A\x008C\x0001\x0000\x000D\x008C\x0001\x0000\x0005\x008C\x0001\x0000\x0001\x008C" + "\x0001\x0000\x0002\x008C\x0001\x0000\x0002\x008C\x0001\x0000\x000A\x008C\x0062\x007E\x0021\x0000\u016b\x007E\x0012\x0000" + "\x0040\x007E\x0002\x0000\x0036\x007E\x0028\x0000\x000C\x007E\x0004\x0000\x0010\x007F\x0001\x0083\x0002\x0000\x0001\x0082" + "\x0001\x0083\x000B\x0000\x0007\x007F\x000C\x0000\x0002\x0085\x0018\x0000\x0003\x0085\x0001\x0083\x0001\x0000\x0001\x0084" + "\x0001\x0000\x0001\x0083\x0001\x0082\x001A\x0000\x0005\x007E\x0001\x0000\x0087\x007E\x0002\x0000\x0001\x007F\x0007\x0000" + "\x0001\x0084\x0004\x0000\x0001\x0083\x0001\x0000\x0001\x0084\x0001\x0000\x000A\x0080\x0001\x0082\x0001\x0083\x0005\x0000" + "\x001A\x007E\x0004\x0000\x0001\x0085\x0001\x0000\x001A\x007E\x000B\x0000\x0038\x0081\x0002\x007F\x001F\x008D\x0003\x0000" + "\x0006\x008D\x0002\x0000\x0006\x008D\x0002\x0000\x0006\x008D\x0002\x0000\x0003\x008D\x001C\x0000\x0003\x007F\x0004\x0000"; /// <summary> /// Translates characters to character classes /// </summary> private static readonly char[] ZZ_CMAP = ZzUnpackCMap(ZZ_CMAP_PACKED); /// <summary> /// Translates DFA states to action switch labels. 
/// </summary> private static readonly int[] ZZ_ACTION = ZzUnpackAction(); private const string ZZ_ACTION_PACKED_0 = "\x0001\x0000\x0016\x0001\x0001\x0002\x0001\x0003\x0001\x0004\x0001\x0001\x0001\x0005\x0001\x0006" + "\x0001\x0007\x0001\x0002\x0001\x0008\x0011\x0000\x0001\x0002\x0001\x0000\x0001\x0002\x000A\x0000" + "\x0001\x0003\x0011\x0000\x0001\x0002\x0015\x0000\x0001\x0002\x004D\x0000\x0001\x0001\x0010\x0000"; private static int[] ZzUnpackAction() { int[] result = new int[197]; int offset = 0; offset = ZzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); return result; } private static int ZzUnpackAction(string packed, int offset, int[] result) { int i = 0; // index in packed string int j = offset; // index in unpacked array int l = packed.Length; while (i < l) { int count = packed[i++]; int value = packed[i++]; do { result[j++] = value; } while (--count > 0); } return j; } /// <summary> /// Translates a state to a row index in the transition table /// </summary> private static readonly int[] ZZ_ROWMAP = ZzUnpackRowMap(); private const string ZZ_ROWMAP_PACKED_0 = "\x0000\x0000\x0000\x008E\x0000\u011c\x0000\u01aa\x0000\u0238\x0000\u02c6\x0000\u0354\x0000\u03e2" + "\x0000\u0470\x0000\u04fe\x0000\u058c\x0000\u061a\x0000\u06a8\x0000\u0736\x0000\u07c4\x0000\u0852" + "\x0000\u08e0\x0000\u096e\x0000\u09fc\x0000\u0a8a\x0000\u0b18\x0000\u0ba6\x0000\u0c34\x0000\u0cc2" + "\x0000\u0d50\x0000\u0dde\x0000\u0e6c\x0000\u0efa\x0000\u0f88\x0000\u1016\x0000\u10a4\x0000\u1132" + "\x0000\u11c0\x0000\u011c\x0000\u01aa\x0000\u124e\x0000\u12dc\x0000\u0354\x0000\u03e2\x0000\u0470" + "\x0000\u04fe\x0000\u136a\x0000\u13f8\x0000\u1486\x0000\u1514\x0000\u07c4\x0000\u15a2\x0000\u1630" + "\x0000\u16be\x0000\u174c\x0000\u17da\x0000\u1868\x0000\u18f6\x0000\u02c6\x0000\u1984\x0000\u1a12" + "\x0000\u06a8\x0000\u1aa0\x0000\u1b2e\x0000\u1bbc\x0000\u1c4a\x0000\u1cd8\x0000\u1d66\x0000\u1df4" + "\x0000\u1e82\x0000\u1f10\x0000\u1f9e\x0000\u202c\x0000\u20ba\x0000\u2148\x0000\u21d6\x0000\u2264" + "\x0000\u22f2\x0000\u2380\x0000\u240e\x0000\u249c\x0000\u252a\x0000\u25b8\x0000\u2646\x0000\u0e6c" + "\x0000\u26d4\x0000\u2762\x0000\u27f0\x0000\u287e\x0000\u290c\x0000\u299a\x0000\u2a28\x0000\u2ab6" + "\x0000\u2b44\x0000\u2bd2\x0000\u2c60\x0000\u2cee\x0000\u2d7c\x0000\u2e0a\x0000\u2e98\x0000\u2f26" + "\x0000\u2fb4\x0000\u3042\x0000\u30d0\x0000\u315e\x0000\u31ec\x0000\u327a\x0000\u3308\x0000\u3396" + "\x0000\u3424\x0000\u34b2\x0000\u3540\x0000\u35ce\x0000\u365c\x0000\u36ea\x0000\u3778\x0000\u3806" + "\x0000\u3894\x0000\u3922\x0000\u39b0\x0000\u3a3e\x0000\u3acc\x0000\u3b5a\x0000\u3be8\x0000\u3c76" + "\x0000\u3d04\x0000\u3d92\x0000\u3e20\x0000\u3eae\x0000\u3f3c\x0000\u3fca\x0000\u4058\x0000\u40e6" + "\x0000\u4174\x0000\u4202\x0000\u4290\x0000\u431e\x0000\u43ac\x0000\u443a\x0000\u44c8\x0000\u4556" + "\x0000\u45e4\x0000\u4672\x0000\u4700\x0000\u478e\x0000\u481c\x0000\u48aa\x0000\u4938\x0000\u49c6" + "\x0000\u4a54\x0000\u4ae2\x0000\u4b70\x0000\u4bfe\x0000\u4c8c\x0000\u4d1a\x0000\u4da8\x0000\u4e36" + "\x0000\u4ec4\x0000\u4f52\x0000\u4fe0\x0000\u506e\x0000\u50fc\x0000\u518a\x0000\u5218\x0000\u52a6" + "\x0000\u5334\x0000\u53c2\x0000\u5450\x0000\u54de\x0000\u556c\x0000\u55fa\x0000\u5688\x0000\u5716" + "\x0000\u57a4\x0000\u5832\x0000\u58c0\x0000\u594e\x0000\u59dc\x0000\u5a6a\x0000\u5af8\x0000\u5b86" + "\x0000\u5c14\x0000\u5ca2\x0000\u5d30\x0000\u5dbe\x0000\u5e4c\x0000\u5eda\x0000\u5f68\x0000\u5ff6" + "\x0000\u6084\x0000\u6112\x0000\u61a0\x0000\u622e\x0000\u62bc\x0000\u634a\x0000\u63d8\x0000\u6466" + 
"\x0000\u64f4\x0000\u6582\x0000\u6610\x0000\u669e\x0000\u672c"; private static int[] ZzUnpackRowMap() { int[] result = new int[197]; int offset = 0; offset = ZzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); return result; } private static int ZzUnpackRowMap(string packed, int offset, int[] result) { int i = 0; // index in packed string int j = offset; // index in unpacked array int l = packed.Length; while (i < l) { int high = packed[i++] << 16; result[j++] = high | packed[i++]; } return j; } /// <summary> /// The transition table of the DFA /// </summary> private static readonly int[] ZZ_TRANS = ZzUnpackTrans(); private const string ZZ_TRANS_PACKED_0 = "\x0001\x0002\x0001\x0003\x0001\x0002\x0001\x0004\x0002\x0002\x0001\x0005\x0001\x0002\x0001\x0006" + "\x0004\x0002\x0001\x0007\x0001\x0002\x0001\x0008\x0001\x0002\x0001\x0009\x0002\x0002\x0001\x000A" + "\x0003\x0002\x0001\x000B\x0002\x0002\x0001\x000C\x0004\x0002\x0001\x000D\x0003\x0002\x0001\x000E" + "\x000F\x0002\x0001\x000F\x0002\x0002\x0001\x0010\x0036\x0002\x0001\x0011\x0001\x0002\x0001\x0012" + "\x0002\x0002\x0001\x0013\x0001\x0014\x0001\x0002\x0001\x0015\x0001\x0002\x0001\x0016\x0001\x0002" + "\x0001\x0017\x0003\x0002\x0001\x0018\x0001\x0002\x0001\x0019\x0001\x001A\x0003\x0002\x0001\x001B" + "\x0002\x001C\x0001\x001D\x0001\x001E\x0002\x0002\x0001\x001F\x0001\x0020\x0090\x0000\x0001\x0018" + "\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x000E\x0000\x0001\x0018\x000D\x0000\x0001\x0018" + "\x0010\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0021\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0008\x0000\x0002\x0018\x0005\x0000\x0002\x0018\x0008\x0000\x0001\x0018\x0016\x0000\x0002\x0018" + "\x0005\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0008\x0000\x0004\x0018" + "\x0001\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0002\x0000\x0001\x0018" + "\x0004\x0000\x0004\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0002\x0000\x0004\x0018\x0002\x0000\x0003\x0018\x0001\x0000\x0002\x0018" + "\x0001\x0000\x0003\x0018\x0001\x0000\x0004\x0018\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x0018" + "\x0002\x0000\x0008\x0018\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0002\x0018" + "\x0004\x0000\x0001\x0018\x0003\x0000\x0003\x0018\x0017\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0017\x0000\x0001\x0018" + "\x0033\x0000\x0001\x0018\x0019\x0000\x0001\x0018\x0003\x0000\x0004\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0019\x0002\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0002\x0018" + "\x0002\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0002\x0000\x0004\x0018" + "\x0001\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0002\x0018" + "\x0001\x0000\x0004\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0008\x0018\x0001\x0000\x0002\x0018" + "\x0001\x0000\x0008\x0018\x0001\x0019\x0001\x0000\x0007\x0018\x0001\x0000\x0008\x0018\x0001\x0000" + "\x0006\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0003\x0000\x0003\x0018\x001E\x0000\x0001\x0018\x000F\x0000\x0001\x0018\x0013\x0000" + "\x0001\x0018\x0013\x0000\x0001\x0018\x0006\x0000\x0003\x0018\x001F\x0000\x0001\x0018\x0007\x0000" + "\x0001\x0018\x0018\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + 
"\x0004\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x0003\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0004\x0018\x0001\x0000\x0003\x0018\x0001\x0000" + "\x000F\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0011\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x0021\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x001E\x0000\x0001\x0018\x0003\x0000" + "\x0002\x0018\x000A\x0000\x0002\x0018\x000B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0002\x0000" + "\x0002\x0018\x0006\x0000\x0001\x0018\x0004\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0005\x0000" + "\x0003\x0018\x0010\x0000\x0001\x0018\x000E\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0018\x0000" + "\x0001\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0002\x0000" + "\x0001\x0018\x0003\x0000\x0002\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x0004\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000" + "\x0009\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x000C\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0008\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0013\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0012\x0000" + "\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0019\x0001\x0018\x0020\x0000\x0001\x0019" + "\x0041\x0000\x0001\x0019\x0017\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018" + "\x000D\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0007\x0000\x0002\x0018\x0001\x0000\x0004\x0019" + "\x0001\x0000\x0002\x0018\x000B\x0000\x0001\x0018\x0013\x0000\x0001\x0018\x0024\x0000\x0001\x0018" + "\x0003\x0000\x0002\x0018\x000A\x0000\x0002\x0018\x0001\x0000\x0003\x0018\x0007\x0000\x0001\x0018" + "\x0006\x0000\x0002\x0018\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0004\x0000\x0002\x0018" + "\x0002\x0000\x0002\x0018\x0005\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0003\x0000\x0002\x0019" + "\x0008\x0000\x0001\x0018\x000E\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0018\x0000\x0001\x0018" + "\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000\x0001\x0018" + "\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000\x0001\x0018" + "\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018" + "\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0006\x0000\x0002\x0018\x0006\x0000\x0001\x0018" + "\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0022\x0000\x0001\x0018\x000F\x0000\x0002\x0018" + "\x0012\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x000B\x0000\x0001\x0018\x0003\x0000\x0002\x0018" + "\x0005\x0000\x0003\x0018\x0010\x0000\x0001\x0018\x000E\x0000\x0001\x0018\x0007\x0000\x0001\x0018" + "\x001D\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018" + "\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018" + "\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018" + "\x0005\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + 
"\x0001\x0000\x0001\x0018\x005F\x0000\x0001\x001E\x0021\x0000\x0001\x001A\x0022\x0000\x0001\x001D" + "\x0006\x0000\x0001\x001D\x0002\x0000\x0001\x001D\x0003\x0000\x0002\x001D\x0008\x0000\x0004\x001D" + "\x0001\x0000\x0003\x001D\x0001\x0000\x0001\x001D\x0002\x0000\x0001\x001D\x0002\x0000\x0001\x001D" + "\x0004\x0000\x0004\x001D\x0001\x0000\x0002\x001D\x0006\x0000\x0001\x001D\x0002\x0000\x0004\x001D" + "\x0002\x0000\x0003\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0003\x001D\x0001\x0000\x0004\x001D" + "\x0001\x0000\x0002\x001D\x0005\x0000\x0004\x001D\x0002\x0000\x0008\x001D\x0004\x0000\x0001\x001D" + "\x0001\x0000\x0002\x001D\x0004\x0000\x0001\x001D\x0003\x0000\x0003\x001D\x0012\x0000\x0001\x001D" + "\x0001\x0000\x0002\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0004\x001D\x0001\x0000\x0001\x001D" + "\x0001\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0003\x001D\x0001\x0000\x0002\x001D" + "\x0001\x0000\x0004\x001D\x0001\x0000\x0003\x001D\x0001\x0000\x000F\x001D\x0001\x0000\x0002\x001D" + "\x0001\x0000\x0011\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0021\x001D\x0001\x0000\x0001\x001D" + "\x0001\x0000\x0002\x001D\x0002\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D" + "\x0001\x0000\x0003\x001D\x0012\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0001\x001D" + "\x0001\x0000\x0004\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0002\x001D" + "\x0002\x0000\x0001\x001D\x0002\x0000\x0002\x001D\x0001\x0000\x0004\x001D\x0001\x0000\x0003\x001D" + "\x0001\x0000\x000F\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0011\x001D\x0001\x0000\x0002\x001D" + "\x0001\x0000\x0021\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0002\x0000\x0001\x001D" + "\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0003\x001D\x001E\x0000\x0001\x001D" + "\x000F\x0000\x0001\x001D\x0013\x0000\x0001\x001D\x001A\x0000\x0001\x001D\x0021\x0000\x0001\x001D" + "\x0007\x0000\x0001\x001D\x0018\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0003\x0000\x0004\x001D" + "\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0003\x001D" + "\x0001\x0000\x0002\x001D\x0001\x0000\x0004\x001D\x0001\x0000\x0003\x001D\x0001\x0000\x0008\x001D" + "\x0001\x0000\x0006\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0011\x001D\x0001\x0000\x0002\x001D" + "\x0001\x0000\x0021\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0002\x001D\x0002\x0000\x0001\x001D" + "\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0001\x0000\x0003\x001D\x0075\x0000\x0001\x0021" + "\x0015\x0000\x0001\x001E\x0002\x0021\x0011\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000" + "\x0001\x0024\x0001\x0000\x0001\x0025\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000" + "\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x002A\x0002\x0000\x0001\x002B\x0004\x0000" + "\x0001\x002C\x0003\x0000\x0001\x002D\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x002F\x0011\x0000" + "\x0001\x0030\x0002\x0000\x0001\x0031\x0031\x0000\x0002\x0018\x0001\x0032\x0001\x0000\x0001\x0033" + "\x0001\x0000\x0001\x0033\x0001\x0034\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0033\x0001\x0000" + "\x0001\x001F\x0001\x0018\x0001\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000\x0001\x0035" + "\x0001\x0000\x0001\x0036\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000\x0001\x0028" + "\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x0037\x0002\x0000\x0001\x0038\x0004\x0000\x0001\x0039" + "\x0003\x0000\x0001\x003A\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x003B\x0011\x0000\x0001\x003C" + 
"\x0002\x0000\x0001\x003D\x0031\x0000\x0001\x0018\x0002\x0019\x0002\x0000\x0002\x003E\x0001\x003F" + "\x0001\x0000\x0001\x0019\x0002\x0000\x0001\x003E\x0001\x0000\x0001\x001F\x0001\x0018\x0006\x0000" + "\x0001\x0040\x0011\x0000\x0001\x0041\x0002\x0000\x0001\x0042\x0008\x0000\x0001\x0043\x0012\x0000" + "\x0001\x0044\x0011\x0000\x0001\x0045\x0002\x0000\x0001\x0046\x0021\x0000\x0001\x0047\x0010\x0000" + "\x0001\x001A\x0001\x0000\x0001\x001A\x0003\x0000\x0001\x0034\x0001\x0000\x0001\x001A\x0007\x0000" + "\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000\x0001\x0048\x0001\x0000\x0001\x0036\x0004\x0000" + "\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000" + "\x0001\x0049\x0002\x0000\x0001\x004A\x0004\x0000\x0001\x0039\x0003\x0000\x0001\x004B\x000F\x0000" + "\x0001\x002E\x0002\x0000\x0001\x004C\x0011\x0000\x0001\x004D\x0002\x0000\x0001\x004E\x0021\x0000" + "\x0001\x004F\x000F\x0000\x0001\x0018\x0001\x0050\x0001\x0019\x0001\x0051\x0003\x0000\x0001\x0050" + "\x0001\x0000\x0001\x0050\x0004\x0000\x0001\x001F\x0001\x0018\x0086\x0000\x0002\x001C\x000C\x0000" + "\x0001\x0052\x0011\x0000\x0001\x0053\x0002\x0000\x0001\x0054\x0008\x0000\x0001\x0055\x0012\x0000" + "\x0001\x0056\x0011\x0000\x0001\x0057\x0002\x0000\x0001\x0058\x0032\x0000\x0001\x001D\x0007\x0000" + "\x0001\x001D\x000C\x0000\x0001\x0059\x0011\x0000\x0001\x005A\x0002\x0000\x0001\x005B\x0008\x0000" + "\x0001\x005C\x0012\x0000\x0001\x005D\x0011\x0000\x0001\x005E\x0002\x0000\x0001\x005F\x0032\x0000" + "\x0001\x001E\x0007\x0000\x0001\x001E\x0007\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000" + "\x0001\x0060\x0001\x0000\x0001\x0025\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000" + "\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x0061\x0002\x0000\x0001\x0062\x0004\x0000" + "\x0001\x002C\x0003\x0000\x0001\x0063\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x0064\x0011\x0000" + "\x0001\x0065\x0002\x0000\x0001\x0066\x0031\x0000\x0001\x0018\x0001\x001F\x0001\x0032\x0001\x0000" + "\x0001\x0033\x0001\x0000\x0001\x0033\x0001\x0034\x0001\x0000\x0001\x001F\x0002\x0000\x0001\x0067" + "\x0001\x0068\x0001\x001F\x0001\x0018\x0001\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000" + "\x0001\x0069\x0001\x0000\x0001\x0025\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000" + "\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x006A\x0002\x0000\x0001\x006B\x0004\x0000" + "\x0001\x002C\x0003\x0000\x0001\x006C\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x006D\x0011\x0000" + "\x0001\x006E\x0002\x0000\x0001\x006F\x0031\x0000\x0001\x0018\x0001\x0020\x0001\x0032\x0001\x0000" + "\x0001\x0033\x0001\x0000\x0001\x0033\x0001\x0034\x0001\x0000\x0001\x0020\x0002\x0000\x0001\x0033" + "\x0001\x0000\x0001\x001F\x0001\x0020\x0006\x0000\x0001\x0070\x0011\x0000\x0001\x0071\x0002\x0000" + "\x0001\x0072\x0008\x0000\x0001\x0073\x0012\x0000\x0001\x0074\x0011\x0000\x0001\x0075\x0002\x0000" + "\x0001\x0076\x002D\x0000\x0001\x0077\x0004\x0000\x0001\x0021\x0007\x0000\x0001\x0021\x000D\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x000B\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0008\x0000\x0001\x0018\x0012\x0000" + "\x0004\x0018\x001D\x0000\x0001\x0018\x0019\x0000\x0001\x0018\x0003\x0000\x0004\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0032\x0002\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000" + "\x0002\x0018\x0002\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0002\x0000" + 
"\x0004\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0001\x0000" + "\x0002\x0018\x0001\x0000\x0004\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0008\x0018\x0001\x0000" + "\x0002\x0018\x0001\x0000\x0008\x0018\x0001\x0032\x0001\x0000\x0007\x0018\x0001\x0000\x0008\x0018" + "\x0001\x0000\x0006\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0003\x0000\x0003\x0018\x0012\x0000\x0001\x0018\x0016\x0000\x0002\x0018" + "\x0013\x0000\x0001\x0032\x0001\x0018\x0020\x0000\x0001\x0032\x000B\x0000\x0001\x0018\x0035\x0000" + "\x0001\x0032\x0009\x0000\x0001\x0018\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000" + "\x0004\x0018\x0001\x0000\x0002\x0018\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0004\x0000\x0003\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018\x0005\x0000" + "\x0004\x0018\x0002\x0000\x0002\x0018\x000A\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0024\x0000" + "\x0001\x0018\x0003\x0000\x0002\x0018\x000A\x0000\x0002\x0018\x0001\x0000\x0003\x0018\x0007\x0000" + "\x0001\x0018\x0006\x0000\x0002\x0018\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0004\x0000" + "\x0002\x0018\x0002\x0000\x0002\x0018\x0005\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0003\x0000" + "\x0002\x0032\x0008\x0000\x0001\x0018\x000E\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0018\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000" + "\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000" + "\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000" + "\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0005\x0018\x0001\x0000" + "\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x005C\x0000\x0002\x0018\x0015\x0000\x0004\x0018\x002D\x0000\x0001\x0018\x000D\x0000" + "\x0002\x0018\x0008\x0000\x0002\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0009\x0000" + "\x0001\x0018\x0009\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0002\x0000\x0004\x0018\x0003\x0000" + "\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0002\x0018\x0001\x0000" + "\x0001\x0018\x0008\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0001\x0000" + "\x0004\x0018\x0013\x0000\x0001\x0018\x0011\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000" + "\x0001\x0078\x0001\x0000\x0001\x0025\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000" + "\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x0079\x0002\x0000\x0001\x007A\x0004\x0000" + "\x0001\x002C\x0003\x0000\x0001\x007B\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x007C\x0011\x0000" + "\x0001\x007D\x0002\x0000\x0001\x007E\x0031\x0000\x0001\x0018\x0002\x0032\x0002\x0000\x0002\x007F" + "\x0001\x0034\x0001\x0000\x0001\x0032\x0002\x0000\x0001\x007F\x0001\x0000\x0001\x001F\x0001\x0018" + 
"\x0001\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000\x0001\x0080\x0001\x0000\x0001\x0081" + "\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000\x0001\x0028\x0002\x0000\x0001\x0029" + "\x0003\x0000\x0001\x0082\x0002\x0000\x0001\x0083\x0004\x0000\x0001\x0084\x0003\x0000\x0001\x0085" + "\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x0086\x0011\x0000\x0001\x0087\x0002\x0000\x0001\x0088" + "\x0031\x0000\x0001\x0018\x0001\x0033\x0007\x0000\x0001\x0033\x0004\x0000\x0002\x0018\x0001\x0000" + "\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000\x0001\x0089\x0001\x0000\x0001\x0025\x0004\x0000" + "\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000" + "\x0001\x008A\x0002\x0000\x0001\x008B\x0004\x0000\x0001\x002C\x0003\x0000\x0001\x008C\x000F\x0000" + "\x0001\x002E\x0002\x0000\x0001\x008D\x0011\x0000\x0001\x008E\x0002\x0000\x0001\x008F\x0021\x0000" + "\x0001\x004F\x000F\x0000\x0001\x0018\x0001\x0034\x0001\x0032\x0001\x0051\x0003\x0000\x0001\x0034" + "\x0001\x0000\x0001\x0034\x0004\x0000\x0001\x001F\x0001\x0018\x0007\x0000\x0001\x0018\x0004\x0000" + "\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000" + "\x0001\x0019\x0002\x0000\x0001\x0019\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x0019\x001D\x0000" + "\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0019\x0001\x0018" + "\x0020\x0000\x0001\x0019\x000B\x0000\x0001\x0019\x0035\x0000\x0001\x0019\x0009\x0000\x0001\x0019" + "\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x0019\x0001\x0000" + "\x0002\x0019\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x0019\x0004\x0000" + "\x0001\x0019\x0002\x0018\x0001\x0000\x0004\x0019\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x0019" + "\x0002\x0000\x0001\x0018\x0001\x0019\x000A\x0000\x0001\x0019\x0007\x0000\x0001\x0018\x0018\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000" + "\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x0019\x0001\x0000" + "\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000" + "\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018\x0004\x0019" + "\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0019\x0015\x0000\x0004\x0019\x002D\x0000\x0001\x0019" + "\x000D\x0000\x0002\x0019\x0008\x0000\x0002\x0019\x0001\x0000\x0001\x0019\x0001\x0000\x0001\x0019" + "\x0009\x0000\x0001\x0019\x0009\x0000\x0002\x0019\x0006\x0000\x0001\x0019\x0002\x0000\x0004\x0019" + "\x0003\x0000\x0001\x0019\x0002\x0000\x0002\x0019\x0001\x0000\x0003\x0019\x0001\x0000\x0002\x0019" + "\x0001\x0000\x0001\x0019\x0008\x0000\x0001\x0019\x0001\x0000\x0002\x0019\x0002\x0000\x0002\x0019" + "\x0001\x0000\x0004\x0019\x0013\x0000\x0001\x0019\x0016\x0000\x0001\x0090\x0001\x0000\x0001\x0091" + 
"\x000F\x0000\x0001\x0092\x0002\x0000\x0001\x0093\x0004\x0000\x0001\x0094\x0003\x0000\x0001\x0095" + "\x0012\x0000\x0001\x0096\x0011\x0000\x0001\x0097\x0002\x0000\x0001\x0098\x0032\x0000\x0001\x003E" + "\x0001\x0019\x0006\x0000\x0001\x003E\x0007\x0000\x0001\x0022\x0001\x0000\x0001\x0023\x0002\x0000" + "\x0001\x0099\x0001\x0000\x0001\x0036\x0004\x0000\x0001\x0026\x0001\x0000\x0001\x0027\x0001\x0000" + "\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x009A\x0002\x0000\x0001\x009B\x0004\x0000" + "\x0001\x0039\x0003\x0000\x0001\x009C\x000F\x0000\x0001\x002E\x0002\x0000\x0001\x009D\x0011\x0000" + "\x0001\x009E\x0002\x0000\x0001\x009F\x0021\x0000\x0001\x004F\x000F\x0000\x0001\x0018\x0001\x003F" + "\x0001\x0019\x0001\x0051\x0003\x0000\x0001\x003F\x0001\x0000\x0001\x003F\x0004\x0000\x0001\x001F" + "\x0001\x0018\x0039\x0000\x0001\x001A\x0002\x0000\x0001\x001A\x001B\x0000\x0004\x001A\x008E\x0000" + "\x0001\x001A\x003F\x0000\x0001\x001A\x0024\x0000\x0001\x001A\x0001\x0000\x0002\x001A\x0011\x0000" + "\x0001\x001A\x0004\x0000\x0001\x001A\x000F\x0000\x0004\x001A\x0003\x0000\x0001\x001A\x000A\x0000" + "\x0001\x001A\x0083\x0000\x0001\x001A\x0092\x0000\x0004\x001A\x006A\x0000\x0002\x001A\x0015\x0000" + "\x0004\x001A\x002D\x0000\x0001\x001A\x000D\x0000\x0002\x001A\x0008\x0000\x0002\x001A\x0001\x0000" + "\x0001\x001A\x0001\x0000\x0001\x001A\x0009\x0000\x0001\x001A\x0009\x0000\x0002\x001A\x0006\x0000" + "\x0001\x001A\x0002\x0000\x0004\x001A\x0003\x0000\x0001\x001A\x0002\x0000\x0002\x001A\x0001\x0000" + "\x0003\x001A\x0001\x0000\x0002\x001A\x0001\x0000\x0001\x001A\x0008\x0000\x0001\x001A\x0001\x0000" + "\x0002\x001A\x0002\x0000\x0002\x001A\x0001\x0000\x0004\x001A\x0013\x0000\x0001\x001A\x007F\x0000" + "\x0001\x001A\x0025\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000" + "\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000\x0001\x0050\x0002\x0000\x0001\x0050\x0008\x0000" + "\x0001\x0018\x0012\x0000\x0004\x0050\x001D\x0000\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000" + "\x0002\x0018\x0013\x0000\x0001\x0019\x0001\x0018\x0020\x0000\x0001\x0019\x000B\x0000\x0001\x0050" + "\x0035\x0000\x0001\x0019\x0009\x0000\x0001\x0050\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018" + "\x000C\x0000\x0003\x0018\x0001\x0050\x0001\x0000\x0002\x0050\x0009\x0000\x0003\x0018\x0003\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0050\x0004\x0000\x0001\x0050\x0002\x0018\x0001\x0000\x0004\x0019" + "\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x0050\x0002\x0000\x0001\x0018\x0001\x0050\x000A\x0000" + "\x0001\x0050\x0007\x0000\x0001\x0018\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000" + "\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0050\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000" + "\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000" + "\x0002\x0018\x0004\x0000\x0001\x0018\x0004\x0050\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018" + 
"\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0050" + "\x0015\x0000\x0004\x0050\x002D\x0000\x0001\x0050\x000D\x0000\x0002\x0050\x0008\x0000\x0002\x0050" + "\x0001\x0000\x0001\x0050\x0001\x0000\x0001\x0050\x0009\x0000\x0001\x0050\x0009\x0000\x0002\x0050" + "\x0006\x0000\x0001\x0050\x0002\x0000\x0004\x0050\x0003\x0000\x0001\x0050\x0002\x0000\x0002\x0050" + "\x0001\x0000\x0003\x0050\x0001\x0000\x0002\x0050\x0001\x0000\x0001\x0050\x0008\x0000\x0001\x0050" + "\x0001\x0000\x0002\x0050\x0002\x0000\x0002\x0050\x0001\x0000\x0004\x0050\x0013\x0000\x0001\x0050" + "\x007F\x0000\x0001\x0051\x0024\x0000\x0001\x00A0\x0011\x0000\x0001\x00A1\x0002\x0000\x0001\x00A2" + "\x0008\x0000\x0001\x00A3\x0012\x0000\x0001\x00A4\x0011\x0000\x0001\x00A5\x0002\x0000\x0001\x00A6" + "\x0021\x0000\x0001\x004F\x0010\x0000\x0001\x0051\x0001\x0000\x0001\x0051\x0003\x0000\x0001\x0034" + "\x0001\x0000\x0001\x0051\x003F\x0000\x0001\x001D\x0002\x0000\x0001\x001D\x001B\x0000\x0004\x001D" + "\x008E\x0000\x0001\x001D\x003F\x0000\x0001\x001D\x0024\x0000\x0001\x001D\x0001\x0000\x0002\x001D" + "\x0011\x0000\x0001\x001D\x0004\x0000\x0001\x001D\x000F\x0000\x0004\x001D\x0003\x0000\x0001\x001D" + "\x000A\x0000\x0001\x001D\x0083\x0000\x0001\x001D\x0092\x0000\x0004\x001D\x006A\x0000\x0002\x001D" + "\x0015\x0000\x0004\x001D\x002D\x0000\x0001\x001D\x000D\x0000\x0002\x001D\x0008\x0000\x0002\x001D" + "\x0001\x0000\x0001\x001D\x0001\x0000\x0001\x001D\x0009\x0000\x0001\x001D\x0009\x0000\x0002\x001D" + "\x0006\x0000\x0001\x001D\x0002\x0000\x0004\x001D\x0003\x0000\x0001\x001D\x0002\x0000\x0002\x001D" + "\x0001\x0000\x0003\x001D\x0001\x0000\x0002\x001D\x0001\x0000\x0001\x001D\x0008\x0000\x0001\x001D" + "\x0001\x0000\x0002\x001D\x0002\x0000\x0002\x001D\x0001\x0000\x0004\x001D\x0013\x0000\x0001\x001D" + "\x0049\x0000\x0001\x001E\x0002\x0000\x0001\x001E\x001B\x0000\x0004\x001E\x008E\x0000\x0001\x001E" + "\x003F\x0000\x0001\x001E\x0024\x0000\x0001\x001E\x0001\x0000\x0002\x001E\x0011\x0000\x0001\x001E" + "\x0004\x0000\x0001\x001E\x000F\x0000\x0004\x001E\x0003\x0000\x0001\x001E\x000A\x0000\x0001\x001E" + "\x0083\x0000\x0001\x001E\x0092\x0000\x0004\x001E\x006A\x0000\x0002\x001E\x0015\x0000\x0004\x001E" + "\x002D\x0000\x0001\x001E\x000D\x0000\x0002\x001E\x0008\x0000\x0002\x001E\x0001\x0000\x0001\x001E" + "\x0001\x0000\x0001\x001E\x0009\x0000\x0001\x001E\x0009\x0000\x0002\x001E\x0006\x0000\x0001\x001E" + "\x0002\x0000\x0004\x001E\x0003\x0000\x0001\x001E\x0002\x0000\x0002\x001E\x0001\x0000\x0003\x001E" + "\x0001\x0000\x0002\x001E\x0001\x0000\x0001\x001E\x0008\x0000\x0001\x001E\x0001\x0000\x0002\x001E" + "\x0002\x0000\x0002\x001E\x0001\x0000\x0004\x001E\x0013\x0000\x0001\x001E\x0017\x0000\x0001\x0018" + "\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018" + "\x000B\x0000\x0001\x001F\x0002\x0000\x0001\x001F\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x001F" + "\x001D\x0000\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0032" + "\x0001\x0018\x0020\x0000\x0001\x0032\x000B\x0000\x0001\x001F\x0035\x0000\x0001\x0032\x0009\x0000" + "\x0001\x001F\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x001F" + "\x0001\x0000\x0002\x001F\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x001F" + "\x0004\x0000\x0001\x001F\x0002\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018\x0005\x0000" + "\x0004\x001F\x0002\x0000\x0001\x0018\x0001\x001F\x000A\x0000\x0001\x001F\x0007\x0000\x0001\x0018" + 
"\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018" + "\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018" + "\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x001F" + "\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018" + "\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018" + "\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018" + "\x0004\x001F\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x001F\x0015\x0000\x0004\x001F\x002D\x0000" + "\x0001\x001F\x000D\x0000\x0002\x001F\x0008\x0000\x0002\x001F\x0001\x0000\x0001\x001F\x0001\x0000" + "\x0001\x001F\x0009\x0000\x0001\x001F\x0009\x0000\x0002\x001F\x0006\x0000\x0001\x001F\x0002\x0000" + "\x0004\x001F\x0003\x0000\x0001\x001F\x0002\x0000\x0002\x001F\x0001\x0000\x0003\x001F\x0001\x0000" + "\x0002\x001F\x0001\x0000\x0001\x001F\x0008\x0000\x0001\x001F\x0001\x0000\x0002\x001F\x0002\x0000" + "\x0002\x001F\x0001\x0000\x0004\x001F\x0013\x0000\x0001\x001F\x0011\x0000\x0001\x0022\x0001\x0000" + "\x0001\x0023\x0002\x0000\x0001\x00A7\x0001\x0000\x0001\x0025\x0004\x0000\x0001\x0026\x0001\x0000" + "\x0001\x0027\x0001\x0000\x0001\x0028\x0002\x0000\x0001\x0029\x0003\x0000\x0001\x00A8\x0002\x0000" + "\x0001\x00A9\x0004\x0000\x0001\x002C\x0003\x0000\x0001\x00AA\x000F\x0000\x0001\x002E\x0002\x0000" + "\x0001\x00AB\x0011\x0000\x0001\x00AC\x0002\x0000\x0001\x00AD\x0031\x0000\x0001\x0018\x0001\x0067" + "\x0001\x0032\x0004\x0000\x0001\x0034\x0001\x0000\x0001\x0067\x0004\x0000\x0001\x001F\x0001\x0018" + "\x0006\x0000\x0001\x00AE\x0011\x0000\x0001\x00AF\x0002\x0000\x0001\x00B0\x0008\x0000\x0001\x00B1" + "\x0012\x0000\x0001\x00B2\x0011\x0000\x0001\x00B3\x0002\x0000\x0001\x00B4\x0032\x0000\x0001\x0068" + "\x0007\x0000\x0001\x0068\x0004\x0000\x0001\x0067\x0008\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000\x0001\x0020" + "\x0002\x0000\x0001\x0020\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x0020\x001D\x0000\x0001\x0018" + "\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0032\x0001\x0018\x0020\x0000" + "\x0001\x0032\x000B\x0000\x0001\x0020\x0035\x0000\x0001\x0032\x0009\x0000\x0001\x0020\x000D\x0000" + "\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x0020\x0001\x0000\x0002\x0020" + "\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x0020\x0004\x0000\x0001\x0020" + "\x0002\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x0020\x0002\x0000" + "\x0001\x0018\x0001\x0020\x000A\x0000\x0001\x0020\x0007\x0000\x0001\x0018\x0018\x0000\x0001\x0018" + "\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000\x0001\x0018" + "\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000\x0001\x0018" + "\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018" + 
"\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x0020\x0001\x0000\x0002\x0018" + "\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000\x0001\x0018" + "\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000\x0001\x0018" + "\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018" + "\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018\x0004\x0020\x0001\x0000" + "\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x005C\x0000\x0002\x0020\x0015\x0000\x0004\x0020\x002D\x0000\x0001\x0020\x000D\x0000" + "\x0002\x0020\x0008\x0000\x0002\x0020\x0001\x0000\x0001\x0020\x0001\x0000\x0001\x0020\x0009\x0000" + "\x0001\x0020\x0009\x0000\x0002\x0020\x0006\x0000\x0001\x0020\x0002\x0000\x0004\x0020\x0003\x0000" + "\x0001\x0020\x0002\x0000\x0002\x0020\x0001\x0000\x0003\x0020\x0001\x0000\x0002\x0020\x0001\x0000" + "\x0001\x0020\x0008\x0000\x0001\x0020\x0001\x0000\x0002\x0020\x0002\x0000\x0002\x0020\x0001\x0000" + "\x0004\x0020\x0013\x0000\x0001\x0020\x0049\x0000\x0001\x0021\x0002\x0000\x0001\x0021\x001B\x0000" + "\x0004\x0021\x008E\x0000\x0001\x0021\x003F\x0000\x0001\x0021\x0024\x0000\x0001\x0021\x0001\x0000" + "\x0002\x0021\x0011\x0000\x0001\x0021\x0004\x0000\x0001\x0021\x000F\x0000\x0004\x0021\x0003\x0000" + "\x0001\x0021\x000A\x0000\x0001\x0021\x0083\x0000\x0001\x0021\x0092\x0000\x0004\x0021\x006A\x0000" + "\x0002\x0021\x0015\x0000\x0004\x0021\x002D\x0000\x0001\x0021\x000D\x0000\x0002\x0021\x0008\x0000" + "\x0002\x0021\x0001\x0000\x0001\x0021\x0001\x0000\x0001\x0021\x0009\x0000\x0001\x0021\x0009\x0000" + "\x0002\x0021\x0006\x0000\x0001\x0021\x0002\x0000\x0004\x0021\x0003\x0000\x0001\x0021\x0002\x0000" + "\x0002\x0021\x0001\x0000\x0003\x0021\x0001\x0000\x0002\x0021\x0001\x0000\x0001\x0021\x0008\x0000" + "\x0001\x0021\x0001\x0000\x0002\x0021\x0002\x0000\x0002\x0021\x0001\x0000\x0004\x0021\x0013\x0000" + "\x0001\x0021\x0075\x0000\x0001\x00B5\x0016\x0000\x0002\x00B5\x0017\x0000\x0001\x0018\x0004\x0000" + "\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000" + "\x0001\x0032\x0002\x0000\x0001\x0032\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x0032\x001D\x0000" + "\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0032\x0001\x0018" + "\x0020\x0000\x0001\x0032\x000B\x0000\x0001\x0032\x0035\x0000\x0001\x0032\x0009\x0000\x0001\x0032" + "\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x0032\x0001\x0000" + "\x0002\x0032\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x0032\x0004\x0000" + "\x0001\x0032\x0002\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x0032" + "\x0002\x0000\x0001\x0018\x0001\x0032\x000A\x0000\x0001\x0032\x0007\x0000\x0001\x0018\x0018\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000" + "\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x0032\x0001\x0000" + 
"\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000" + "\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000" + "\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018\x0004\x0032" + "\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0032\x0015\x0000\x0004\x0032\x002D\x0000\x0001\x0032" + "\x000D\x0000\x0002\x0032\x0008\x0000\x0002\x0032\x0001\x0000\x0001\x0032\x0001\x0000\x0001\x0032" + "\x0009\x0000\x0001\x0032\x0009\x0000\x0002\x0032\x0006\x0000\x0001\x0032\x0002\x0000\x0004\x0032" + "\x0003\x0000\x0001\x0032\x0002\x0000\x0002\x0032\x0001\x0000\x0003\x0032\x0001\x0000\x0002\x0032" + "\x0001\x0000\x0001\x0032\x0008\x0000\x0001\x0032\x0001\x0000\x0002\x0032\x0002\x0000\x0002\x0032" + "\x0001\x0000\x0004\x0032\x0013\x0000\x0001\x0032\x0016\x0000\x0001\x00B6\x0001\x0000\x0001\x00B7" + "\x000F\x0000\x0001\x00B8\x0002\x0000\x0001\x00B9\x0004\x0000\x0001\x00BA\x0003\x0000\x0001\x00BB" + "\x0012\x0000\x0001\x00BC\x0011\x0000\x0001\x00BD\x0002\x0000\x0001\x00BE\x0032\x0000\x0001\x007F" + "\x0001\x0032\x0006\x0000\x0001\x007F\x000D\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000" + "\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000\x0001\x0033\x0002\x0000" + "\x0001\x0033\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x0033\x001D\x0000\x0001\x0018\x0019\x0000" + "\x0001\x0018\x0003\x0000\x0004\x0018\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000" + "\x0002\x0018\x0002\x0000\x0002\x0018\x0002\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0002\x0000\x0004\x0018\x0001\x0000\x0003\x0018\x0001\x0000\x0001\x0018\x0001\x0000" + "\x0003\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0004\x0018\x0001\x0000\x0002\x0018\x0002\x0000" + "\x0008\x0018\x0001\x0000\x0002\x0018\x0001\x0000\x0008\x0018\x0002\x0000\x0007\x0018\x0001\x0000" + "\x0008\x0018\x0001\x0000\x0006\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0003\x0000\x0003\x0018\x0012\x0000\x0001\x0018\x0016\x0000" + "\x0002\x0018\x0014\x0000\x0001\x0018\x002C\x0000\x0001\x0033\x003F\x0000\x0001\x0033\x000D\x0000" + "\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x0033\x0001\x0000\x0002\x0033" + "\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x0033\x0004\x0000\x0001\x0033" + "\x0002\x0018\x0006\x0000\x0002\x0018\x0005\x0000\x0004\x0033\x0002\x0000\x0001\x0018\x0001\x0033" + "\x000A\x0000\x0001\x0033\x0007\x0000\x0001\x0018\x0024\x0000\x0001\x0018\x0003\x0000\x0002\x0018" + "\x000A\x0000\x0002\x0018\x0001\x0000\x0003\x0018\x0007\x0000\x0001\x0018\x0006\x0000\x0002\x0018" + "\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0004\x0000\x0002\x0018\x0002\x0000\x0002\x0018" + "\x0005\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x000D\x0000\x0001\x0018\x000E\x0000\x0001\x0018" + "\x0007\x0000\x0001\x0018\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018" + "\x0003\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018" + "\x0001\x0000\x000F\x0018\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018" + 
"\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0004\x0000\x0001\x0033\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018" + "\x0003\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018" + "\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018" + "\x0004\x0000\x0001\x0018\x0004\x0033\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0033\x0015\x0000" + "\x0004\x0033\x002D\x0000\x0001\x0033\x000D\x0000\x0002\x0033\x0008\x0000\x0002\x0033\x0001\x0000" + "\x0001\x0033\x0001\x0000\x0001\x0033\x0009\x0000\x0001\x0033\x0009\x0000\x0002\x0033\x0006\x0000" + "\x0001\x0033\x0002\x0000\x0004\x0033\x0003\x0000\x0001\x0033\x0002\x0000\x0002\x0033\x0001\x0000" + "\x0003\x0033\x0001\x0000\x0002\x0033\x0001\x0000\x0001\x0033\x0008\x0000\x0001\x0033\x0001\x0000" + "\x0002\x0033\x0002\x0000\x0002\x0033\x0001\x0000\x0004\x0033\x0013\x0000\x0001\x0033\x0017\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x000B\x0000\x0001\x0034\x0002\x0000\x0001\x0034\x0008\x0000\x0001\x0018\x0012\x0000" + "\x0004\x0034\x001D\x0000\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000" + "\x0001\x0032\x0001\x0018\x0020\x0000\x0001\x0032\x000B\x0000\x0001\x0034\x0035\x0000\x0001\x0032" + "\x0009\x0000\x0001\x0034\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018" + "\x0001\x0034\x0001\x0000\x0002\x0034\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0034\x0004\x0000\x0001\x0034\x0002\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018" + "\x0005\x0000\x0004\x0034\x0002\x0000\x0001\x0018\x0001\x0034\x000A\x0000\x0001\x0034\x0007\x0000" + "\x0001\x0018\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000" + "\x000F\x0018\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000" + "\x0001\x0034\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000" + "\x0001\x0018\x0004\x0034\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0034\x0015\x0000\x0004\x0034" + "\x002D\x0000\x0001\x0034\x000D\x0000\x0002\x0034\x0008\x0000\x0002\x0034\x0001\x0000\x0001\x0034" + "\x0001\x0000\x0001\x0034\x0009\x0000\x0001\x0034\x0009\x0000\x0002\x0034\x0006\x0000\x0001\x0034" + "\x0002\x0000\x0004\x0034\x0003\x0000\x0001\x0034\x0002\x0000\x0002\x0034\x0001\x0000\x0003\x0034" + "\x0001\x0000\x0002\x0034\x0001\x0000\x0001\x0034\x0008\x0000\x0001\x0034\x0001\x0000\x0002\x0034" + 
"\x0002\x0000\x0002\x0034\x0001\x0000\x0004\x0034\x0013\x0000\x0001\x0034\x0049\x0000\x0001\x003E" + "\x0002\x0000\x0001\x003E\x001B\x0000\x0004\x003E\x0042\x0000\x0001\x0019\x0044\x0000\x0001\x0019" + "\x0066\x0000\x0001\x0019\x0021\x0000\x0001\x0019\x000B\x0000\x0001\x003E\x0035\x0000\x0001\x0019" + "\x0009\x0000\x0001\x003E\x0024\x0000\x0001\x003E\x0001\x0000\x0002\x003E\x0011\x0000\x0001\x003E" + "\x0004\x0000\x0001\x003E\x0003\x0000\x0004\x0019\x0008\x0000\x0004\x003E\x0003\x0000\x0001\x003E" + "\x000A\x0000\x0001\x003E\x0074\x0000\x0002\x0019\x009B\x0000\x0001\x003E\x0092\x0000\x0004\x003E" + "\x006A\x0000\x0002\x003E\x0015\x0000\x0004\x003E\x002D\x0000\x0001\x003E\x000D\x0000\x0002\x003E" + "\x0008\x0000\x0002\x003E\x0001\x0000\x0001\x003E\x0001\x0000\x0001\x003E\x0009\x0000\x0001\x003E" + "\x0009\x0000\x0002\x003E\x0006\x0000\x0001\x003E\x0002\x0000\x0004\x003E\x0003\x0000\x0001\x003E" + "\x0002\x0000\x0002\x003E\x0001\x0000\x0003\x003E\x0001\x0000\x0002\x003E\x0001\x0000\x0001\x003E" + "\x0008\x0000\x0001\x003E\x0001\x0000\x0002\x003E\x0002\x0000\x0002\x003E\x0001\x0000\x0004\x003E" + "\x0013\x0000\x0001\x003E\x0017\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018" + "\x0012\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x000B\x0000\x0001\x003F\x0002\x0000\x0001\x003F" + "\x0008\x0000\x0001\x0018\x0012\x0000\x0004\x003F\x001D\x0000\x0001\x0018\x0016\x0000\x0001\x0018" + "\x0016\x0000\x0002\x0018\x0013\x0000\x0001\x0019\x0001\x0018\x0020\x0000\x0001\x0019\x000B\x0000" + "\x0001\x003F\x0035\x0000\x0001\x0019\x0009\x0000\x0001\x003F\x000D\x0000\x0004\x0018\x0002\x0000" + "\x0002\x0018\x000C\x0000\x0003\x0018\x0001\x003F\x0001\x0000\x0002\x003F\x0009\x0000\x0003\x0018" + "\x0003\x0000\x0001\x0018\x0001\x0000\x0001\x003F\x0004\x0000\x0001\x003F\x0002\x0018\x0001\x0000" + "\x0004\x0019\x0001\x0000\x0002\x0018\x0005\x0000\x0004\x003F\x0002\x0000\x0001\x0018\x0001\x003F" + "\x000A\x0000\x0001\x003F\x0007\x0000\x0001\x0018\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018" + "\x0002\x0000\x0002\x0018\x0001\x0000\x000F\x0018\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018" + "\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018" + "\x0001\x0000\x0003\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0004\x0000\x0001\x003F\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018" + "\x0007\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018" + "\x0003\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018" + "\x0005\x0000\x0003\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018" + "\x0002\x0000\x0002\x0018\x0004\x0000\x0001\x0018\x0004\x003F\x0001\x0000\x0001\x0018\x0002\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000" + "\x0002\x003F\x0015\x0000\x0004\x003F\x002D\x0000\x0001\x003F\x000D\x0000\x0002\x003F\x0008\x0000" + "\x0002\x003F\x0001\x0000\x0001\x003F\x0001\x0000\x0001\x003F\x0009\x0000\x0001\x003F\x0009\x0000" + "\x0002\x003F\x0006\x0000\x0001\x003F\x0002\x0000\x0004\x003F\x0003\x0000\x0001\x003F\x0002\x0000" + "\x0002\x003F\x0001\x0000\x0003\x003F\x0001\x0000\x0002\x003F\x0001\x0000\x0001\x003F\x0008\x0000" + "\x0001\x003F\x0001\x0000\x0002\x003F\x0002\x0000\x0002\x003F\x0001\x0000\x0004\x003F\x0013\x0000" + 
"\x0001\x003F\x0049\x0000\x0001\x0051\x0002\x0000\x0001\x0051\x001B\x0000\x0004\x0051\x008E\x0000" + "\x0001\x0051\x003F\x0000\x0001\x0051\x0024\x0000\x0001\x0051\x0001\x0000\x0002\x0051\x0011\x0000" + "\x0001\x0051\x0004\x0000\x0001\x0051\x000F\x0000\x0004\x0051\x0003\x0000\x0001\x0051\x000A\x0000" + "\x0001\x0051\x0083\x0000\x0001\x0051\x0092\x0000\x0004\x0051\x006A\x0000\x0002\x0051\x0015\x0000" + "\x0004\x0051\x002D\x0000\x0001\x0051\x000D\x0000\x0002\x0051\x0008\x0000\x0002\x0051\x0001\x0000" + "\x0001\x0051\x0001\x0000\x0001\x0051\x0009\x0000\x0001\x0051\x0009\x0000\x0002\x0051\x0006\x0000" + "\x0001\x0051\x0002\x0000\x0004\x0051\x0003\x0000\x0001\x0051\x0002\x0000\x0002\x0051\x0001\x0000" + "\x0003\x0051\x0001\x0000\x0002\x0051\x0001\x0000\x0001\x0051\x0008\x0000\x0001\x0051\x0001\x0000" + "\x0002\x0051\x0002\x0000\x0002\x0051\x0001\x0000\x0004\x0051\x0013\x0000\x0001\x0051\x0017\x0000" + "\x0001\x0018\x0004\x0000\x0001\x0018\x0009\x0000\x0001\x0018\x0012\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x000B\x0000\x0001\x0067\x0002\x0000\x0001\x0067\x0008\x0000\x0001\x0018\x0012\x0000" + "\x0004\x0067\x001D\x0000\x0001\x0018\x0016\x0000\x0001\x0018\x0016\x0000\x0002\x0018\x0013\x0000" + "\x0001\x0032\x0001\x0018\x0020\x0000\x0001\x0032\x000B\x0000\x0001\x0067\x0035\x0000\x0001\x0032" + "\x0009\x0000\x0001\x0067\x000D\x0000\x0004\x0018\x0002\x0000\x0002\x0018\x000C\x0000\x0003\x0018" + "\x0001\x0067\x0001\x0000\x0002\x0067\x0009\x0000\x0003\x0018\x0003\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0067\x0004\x0000\x0001\x0067\x0002\x0018\x0001\x0000\x0004\x0032\x0001\x0000\x0002\x0018" + "\x0005\x0000\x0004\x0067\x0002\x0000\x0001\x0018\x0001\x0067\x000A\x0000\x0001\x0067\x0007\x0000" + "\x0001\x0018\x0018\x0000\x0001\x0018\x0004\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0005\x0000\x0001\x0018\x0002\x0000\x0002\x0018\x0001\x0000" + "\x000F\x0018\x0002\x0000\x0001\x0018\x000B\x0000\x0007\x0018\x0002\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x0001\x0000\x0002\x0018\x0002\x0000\x0001\x0018\x0001\x0000\x0003\x0018\x0002\x0000" + "\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x0004\x0000" + "\x0001\x0067\x0001\x0000\x0002\x0018\x0006\x0000\x0001\x0018\x0007\x0000\x0001\x0018\x0001\x0000" + "\x0001\x0018\x001B\x0000\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0001\x0018\x0003\x0000" + "\x0001\x0018\x0007\x0000\x0001\x0018\x0019\x0000\x0010\x0018\x0005\x0000\x0003\x0018\x0004\x0000" + "\x0001\x0018\x0006\x0000\x0001\x0018\x0003\x0000\x0002\x0018\x0002\x0000\x0002\x0018\x0004\x0000" + "\x0001\x0018\x0004\x0067\x0001\x0000\x0001\x0018\x0002\x0000\x0001\x0018\x0004\x0000\x0001\x0018" + "\x0001\x0000\x0001\x0018\x0001\x0000\x0001\x0018\x005C\x0000\x0002\x0067\x0015\x0000\x0004\x0067" + "\x002D\x0000\x0001\x0067\x000D\x0000\x0002\x0067\x0008\x0000\x0002\x0067\x0001\x0000\x0001\x0067" + "\x0001\x0000\x0001\x0067\x0009\x0000\x0001\x0067\x0009\x0000\x0002\x0067\x0006\x0000\x0001\x0067" + "\x0002\x0000\x0004\x0067\x0003\x0000\x0001\x0067\x0002\x0000\x0002\x0067\x0001\x0000\x0003\x0067" + "\x0001\x0000\x0002\x0067\x0001\x0000\x0001\x0067\x0008\x0000\x0001\x0067\x0001\x0000\x0002\x0067" + "\x0002\x0000\x0002\x0067\x0001\x0000\x0004\x0067\x0013\x0000\x0001\x0067\x0049\x0000\x0001\x0068" + "\x0002\x0000\x0001\x0068\x001B\x0000\x0004\x0068\x008E\x0000\x0001\x0068\x003F\x0000\x0001\x0068" + "\x0024\x0000\x0001\x0068\x0001\x0000\x0002\x0068\x0011\x0000\x0001\x0068\x0004\x0000\x0001\x0068" + 
"\x000F\x0000\x0004\x0068\x0003\x0000\x0001\x0068\x000A\x0000\x0001\x0068\x0083\x0000\x0001\x0068" + "\x0092\x0000\x0004\x0068\x006A\x0000\x0002\x0068\x0015\x0000\x0004\x0068\x002D\x0000\x0001\x0068" + "\x000D\x0000\x0002\x0068\x0008\x0000\x0002\x0068\x0001\x0000\x0001\x0068\x0001\x0000\x0001\x0068" + "\x0009\x0000\x0001\x0068\x0009\x0000\x0002\x0068\x0006\x0000\x0001\x0068\x0002\x0000\x0004\x0068" + "\x0003\x0000\x0001\x0068\x0002\x0000\x0002\x0068\x0001\x0000\x0003\x0068\x0001\x0000\x0002\x0068" + "\x0001\x0000\x0001\x0068\x0008\x0000\x0001\x0068\x0001\x0000\x0002\x0068\x0002\x0000\x0002\x0068" + "\x0001\x0000\x0004\x0068\x0013\x0000\x0001\x0068\x0016\x0000\x0001\x00BF\x0011\x0000\x0001\x00C0" + "\x0002\x0000\x0001\x00C1\x0008\x0000\x0001\x00C2\x0012\x0000\x0001\x00C3\x0011\x0000\x0001\x00C4" + "\x0002\x0000\x0001\x00C5\x002D\x0000\x0001\x0077\x0004\x0000\x0001\x00B5\x0007\x0000\x0001\x00B5" + "\x003F\x0000\x0001\x007F\x0002\x0000\x0001\x007F\x001B\x0000\x0004\x007F\x0042\x0000\x0001\x0032" + "\x0044\x0000\x0001\x0032\x0066\x0000\x0001\x0032\x0021\x0000\x0001\x0032\x000B\x0000\x0001\x007F" + "\x0035\x0000\x0001\x0032\x0009\x0000\x0001\x007F\x0024\x0000\x0001\x007F\x0001\x0000\x0002\x007F" + "\x0011\x0000\x0001\x007F\x0004\x0000\x0001\x007F\x0003\x0000\x0004\x0032\x0008\x0000\x0004\x007F" + "\x0003\x0000\x0001\x007F\x000A\x0000\x0001\x007F\x0074\x0000\x0002\x0032\x009B\x0000\x0001\x007F" + "\x0092\x0000\x0004\x007F\x006A\x0000\x0002\x007F\x0015\x0000\x0004\x007F\x002D\x0000\x0001\x007F" + "\x000D\x0000\x0002\x007F\x0008\x0000\x0002\x007F\x0001\x0000\x0001\x007F\x0001\x0000\x0001\x007F" + "\x0009\x0000\x0001\x007F\x0009\x0000\x0002\x007F\x0006\x0000\x0001\x007F\x0002\x0000\x0004\x007F" + "\x0003\x0000\x0001\x007F\x0002\x0000\x0002\x007F\x0001\x0000\x0003\x007F\x0001\x0000\x0002\x007F" + "\x0001\x0000\x0001\x007F\x0008\x0000\x0001\x007F\x0001\x0000\x0002\x007F\x0002\x0000\x0002\x007F" + "\x0001\x0000\x0004\x007F\x0013\x0000\x0001\x007F\x0049\x0000\x0001\x00B5\x0002\x0000\x0001\x00B5" + "\x001B\x0000\x0004\x00B5\x008E\x0000\x0001\x00B5\x003F\x0000\x0001\x00B5\x0024\x0000\x0001\x00B5" + "\x0001\x0000\x0002\x00B5\x0011\x0000\x0001\x00B5\x0004\x0000\x0001\x00B5\x000F\x0000\x0004\x00B5" + "\x0003\x0000\x0001\x00B5\x000A\x0000\x0001\x00B5\x0083\x0000\x0001\x00B5\x0092\x0000\x0004\x00B5" + "\x006A\x0000\x0002\x00B5\x0015\x0000\x0004\x00B5\x002D\x0000\x0001\x00B5\x000D\x0000\x0002\x00B5" + "\x0008\x0000\x0002\x00B5\x0001\x0000\x0001\x00B5\x0001\x0000\x0001\x00B5\x0009\x0000\x0001\x00B5" + "\x0009\x0000\x0002\x00B5\x0006\x0000\x0001\x00B5\x0002\x0000\x0004\x00B5\x0003\x0000\x0001\x00B5" + "\x0002\x0000\x0002\x00B5\x0001\x0000\x0003\x00B5\x0001\x0000\x0002\x00B5\x0001\x0000\x0001\x00B5" + "\x0008\x0000\x0001\x00B5\x0001\x0000\x0002\x00B5\x0002\x0000\x0002\x00B5\x0001\x0000\x0004\x00B5" + "\x0013\x0000\x0001\x00B5\x0010\x0000"; private static int[] ZzUnpackTrans() { int[] result = new int[26554]; int offset = 0; offset = ZzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result); return result; } private static int ZzUnpackTrans(string packed, int offset, int[] result) { int i = 0; // index in packed string int j = offset; // index in unpacked array int l = packed.Length; while (i < l) { int count = packed[i++]; int value = packed[i++]; value--; do { result[j++] = value; } while (--count > 0); } return j; } /* error codes */ private const int ZZ_UNKNOWN_ERROR = 0; private const int ZZ_NO_MATCH = 1; private const int ZZ_PUSHBACK_2BIG = 2; /* error messages for the codes above */ private static readonly string[] 
ZZ_ERROR_MSG = { "Unkown internal scanner error", "Error: could not match input", "Error: pushback value was too large" }; /// <summary> /// ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code> /// </summary> private static readonly int[] ZZ_ATTRIBUTE = ZzUnpackAttribute(); private const string ZZ_ATTRIBUTE_PACKED_0 = "\x0001\x0000\x0001\x0009\x001E\x0001\x0011\x0000\x0001\x0001\x0001\x0000\x0001\x0001\x000A\x0000" + "\x0001\x0001\x0011\x0000\x0001\x0001\x0015\x0000\x0001\x0001\x004D\x0000\x0001\x0001\x0010\x0000"; private static int[] ZzUnpackAttribute() { int[] result = new int[197]; int offset = 0; offset = ZzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result); return result; } private static int ZzUnpackAttribute(string packed, int offset, int[] result) { int i = 0; // index in packed string int j = offset; // index in unpacked array int l = packed.Length; while (i < l) { int count = packed[i++]; int value = packed[i++]; do { result[j++] = value; } while (--count > 0); } return j; } /// <summary> /// the input device </summary> private TextReader zzReader; /// <summary> /// the current state of the DFA </summary> private int zzState; /// <summary> /// the current lexical state </summary> private int zzLexicalState = YYINITIAL; /// <summary> /// this buffer contains the current text to be matched and is /// the source of the YyText() string /// </summary> private char[] zzBuffer = new char[ZZ_BUFFERSIZE]; /// <summary> /// the textposition at the last accepting state </summary> private int zzMarkedPos; /// <summary> /// the current text position in the buffer </summary> private int zzCurrentPos; /// <summary> /// startRead marks the beginning of the YyText() string in the buffer </summary> private int zzStartRead; /// <summary> /// endRead marks the last character in the buffer, that has been read /// from input /// </summary> private int zzEndRead; /// <summary> /// number of newlines encountered up to the start of the matched text </summary> private int yyline; /// <summary> /// the number of characters up to the start of the matched text </summary> private int yyChar; #pragma warning disable 169, 414 /// <summary> /// the number of characters from the last newline up to the start of the /// matched text /// </summary> private int yycolumn; /// <summary> /// zzAtBOL == true &lt;=&gt; the scanner is currently at the beginning of a line /// </summary> private bool zzAtBOL = true; /// <summary> /// zzAtEOF == true &lt;=&gt; the scanner is at the EOF </summary> private bool zzAtEOF; /// <summary> /// denotes if the user-EOF-code has already been executed </summary> private bool zzEOFDone; #pragma warning restore 169, 414 /* user code: */ /// <summary> /// Alphanumeric sequences </summary> public static readonly int WORD_TYPE = StandardTokenizer.ALPHANUM; /// <summary> /// Numbers </summary> public static readonly int NUMERIC_TYPE = StandardTokenizer.NUM; /// <summary> /// Chars in class \p{Line_Break = Complex_Context} are from South East Asian /// scripts (Thai, Lao, Myanmar, Khmer, etc.). Sequences of these are kept /// together as as a single token rather than broken up, because the logic /// required to break them at word boundaries is too complex for UAX#29. 
/// <para> /// See Unicode Line Breaking Algorithm: http://www.unicode.org/reports/tr14/#SA /// </para> /// </summary> public static readonly int SOUTH_EAST_ASIAN_TYPE = StandardTokenizer.SOUTHEAST_ASIAN; public static readonly int IDEOGRAPHIC_TYPE = StandardTokenizer.IDEOGRAPHIC; public static readonly int HIRAGANA_TYPE = StandardTokenizer.HIRAGANA; public static readonly int KATAKANA_TYPE = StandardTokenizer.KATAKANA; public static readonly int HANGUL_TYPE = StandardTokenizer.HANGUL; public int YyChar => yyChar; /// <summary> /// Fills CharTermAttribute with the current token text. /// </summary> public void GetText(ICharTermAttribute t) { t.CopyBuffer(zzBuffer, zzStartRead, zzMarkedPos - zzStartRead); } /// <summary> /// Creates a new scanner /// </summary> /// <param name="in"> the TextReader to read input from. </param> public StandardTokenizerImpl(TextReader @in) { this.zzReader = @in; } /// <summary> /// Unpacks the compressed character translation table. /// </summary> /// <param name="packed"> the packed character translation table </param> /// <returns> the unpacked character translation table </returns> private static char[] ZzUnpackCMap(string packed) { char[] map = new char[0x10000]; int i = 0; // index in packed string int j = 0; // index in unpacked array while (i < 2860) { int count = packed[i++]; char value = packed[i++]; do { map[j++] = value; } while (--count > 0); } return map; } /// <summary> /// Refills the input buffer. /// </summary> /// <returns> <code>false</code>, iff there was new input. /// </returns> /// <exception cref="IOException"> if any I/O-Error occurs </exception> private bool ZzRefill() { /* first: make room (if you can) */ if (zzStartRead > 0) { Array.Copy(zzBuffer, zzStartRead, zzBuffer, 0, zzEndRead - zzStartRead); /* translate stored positions */ zzEndRead -= zzStartRead; zzCurrentPos -= zzStartRead; zzMarkedPos -= zzStartRead; zzStartRead = 0; } /* is the buffer big enough? */ if (zzCurrentPos >= zzBuffer.Length) { /* if not: blow it up */ char[] newBuffer = new char[zzCurrentPos * 2]; Array.Copy(zzBuffer, 0, newBuffer, 0, zzBuffer.Length); zzBuffer = newBuffer; } /* finally: fill the buffer with new input */ int numRead = zzReader.Read(zzBuffer, zzEndRead, zzBuffer.Length - zzEndRead); if (numRead > 0) { zzEndRead += numRead; return false; } // unlikely but not impossible: read 0 characters, but not at end of stream if (numRead == 0) { int c = zzReader.Read(); if (c <= 0) { return true; } else { zzBuffer[zzEndRead++] = (char)c; return false; } } // numRead < 0 return true; } /// <summary> /// Disposes the input stream. /// </summary> public void YyClose() { zzAtEOF = true; // indicate end of file zzEndRead = zzStartRead; // invalidate buffer if (zzReader != null) { zzReader.Dispose(); } } /// <summary> /// Resets the scanner to read from a new input stream. /// Does not close the old reader. /// /// All internal variables are reset, the old input stream /// <b>cannot</b> be reused (internal buffer is discarded and lost). /// Lexical state is set to <see cref="YYINITIAL"/>. /// /// Internal scan buffer is resized down to its initial length, if it has grown. 
/// </summary> /// <param name="reader"> the new input stream </param> public void YyReset(TextReader reader) { zzReader = reader; zzAtBOL = true; zzAtEOF = false; zzEOFDone = false; zzEndRead = zzStartRead = 0; zzCurrentPos = zzMarkedPos = 0; yyline = yyChar = yycolumn = 0; zzLexicalState = YYINITIAL; if (zzBuffer.Length > ZZ_BUFFERSIZE) { zzBuffer = new char[ZZ_BUFFERSIZE]; } } /// <summary> /// Returns the current lexical state. /// </summary> public int YyState => zzLexicalState; /// <summary> /// Enters a new lexical state /// </summary> /// <param name="newState"> the new lexical state </param> public void YyBegin(int newState) { zzLexicalState = newState; } /// <summary> /// Returns the text matched by the current regular expression. /// </summary> public string YyText => new string(zzBuffer, zzStartRead, zzMarkedPos - zzStartRead); /// <summary> /// Returns the character at position <paramref name="pos"/> from the /// matched text. /// /// It is equivalent to YyText[pos], but faster /// </summary> /// <param name="pos"> the position of the character to fetch. /// A value from 0 to YyLength-1. /// </param> /// <returns> the character at position pos </returns> public char YyCharAt(int pos) { return zzBuffer[zzStartRead + pos]; } /// <summary> /// Returns the length of the matched text region. /// </summary> public int YyLength => zzMarkedPos - zzStartRead; /// <summary> /// Reports an error that occured while scanning. /// <para/> /// In a wellformed scanner (no or only correct usage of /// YyPushBack(int) and a match-all fallback rule) this method /// will only be called with things that "Can't Possibly Happen". /// If this method is called, something is seriously wrong /// (e.g. a JFlex bug producing a faulty scanner etc.). /// <para/> /// Usual syntax/scanner level error handling should be done /// in error fallback rules. /// </summary> /// <param name="errorCode"> the code of the errormessage to display </param> private void ZzScanError(int errorCode) { string message; try { message = ZZ_ERROR_MSG[errorCode]; } catch (IndexOutOfRangeException) { message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR]; } throw new Exception(message); } /// <summary> /// Pushes the specified amount of characters back into the input stream. /// /// They will be read again by then next call of the scanning method /// </summary> /// <param name="number"> the number of characters to be read again. /// This number must not be greater than YyLength! </param> public void YyPushBack(int number) { if (number > YyLength) { ZzScanError(ZZ_PUSHBACK_2BIG); } zzMarkedPos -= number; } /// <summary> /// Resumes scanning until the next regular expression is matched, /// the end of input is encountered or an I/O-Error occurs. 
/// </summary> /// <returns> the next token </returns> /// <exception cref="IOException"> if any I/O-Error occurs </exception> public int GetNextToken() { int zzInput; int zzAction; // cached fields: int zzCurrentPosL; int zzMarkedPosL; int zzEndReadL = zzEndRead; char[] zzBufferL = zzBuffer; char[] zzCMapL = ZZ_CMAP; int[] zzTransL = ZZ_TRANS; int[] zzRowMapL = ZZ_ROWMAP; int[] zzAttrL = ZZ_ATTRIBUTE; while (true) { zzMarkedPosL = zzMarkedPos; yyChar += zzMarkedPosL - zzStartRead; zzAction = -1; zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL; zzState = ZZ_LEXSTATE[zzLexicalState]; // set up zzAction for empty match case: int zzAttributes = zzAttrL[zzState]; if ((zzAttributes & 1) == 1) { zzAction = zzState; } { while (true) { if (zzCurrentPosL < zzEndReadL) { zzInput = zzBufferL[zzCurrentPosL++]; } else if (zzAtEOF) { zzInput = StandardTokenizerInterface.YYEOF; goto zzForActionBreak; } else { // store back cached positions zzCurrentPos = zzCurrentPosL; zzMarkedPos = zzMarkedPosL; bool eof = ZzRefill(); // get translated positions and possibly new buffer zzCurrentPosL = zzCurrentPos; zzMarkedPosL = zzMarkedPos; zzBufferL = zzBuffer; zzEndReadL = zzEndRead; if (eof) { zzInput = StandardTokenizerInterface.YYEOF; goto zzForActionBreak; } else { zzInput = zzBufferL[zzCurrentPosL++]; } } int zzNext = zzTransL[zzRowMapL[zzState] + zzCMapL[zzInput]]; if (zzNext == -1) { goto zzForActionBreak; } zzState = zzNext; zzAttributes = zzAttrL[zzState]; if ((zzAttributes & 1) == 1) { zzAction = zzState; zzMarkedPosL = zzCurrentPosL; if ((zzAttributes & 8) == 8) { goto zzForActionBreak; } } } } zzForActionBreak: // store back cached position zzMarkedPos = zzMarkedPosL; switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) { case 1: { // Break so we don't hit fall-through warning: break; // Not numeric, word, ideographic, hiragana, or SE Asian -- ignore it. } // goto case 9; // unreachable case 9: break; case 2: { return WORD_TYPE; } case 10: break; case 3: { return NUMERIC_TYPE; } case 11: break; case 4: { return KATAKANA_TYPE; } case 12: break; case 5: { return SOUTH_EAST_ASIAN_TYPE; } case 13: break; case 6: { return IDEOGRAPHIC_TYPE; } case 14: break; case 7: { return HIRAGANA_TYPE; } case 15: break; case 8: { return HANGUL_TYPE; } case 16: break; default: if (zzInput == StandardTokenizerInterface.YYEOF && zzStartRead == zzCurrentPos) { zzAtEOF = true; { return StandardTokenizerInterface.YYEOF; } } else { ZzScanError(ZZ_NO_MATCH); } break; } } } } }
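// NOTE: illustrative sketch only, not part of the generated scanner above; the names
// PackedTableSketch, Unpack and Demo are made up for this example. The packed ZZ_* tables
// are run-length encoded: each pair of characters in the string is (count, value), and the
// transition table additionally stores value + 1 so that -1 ("no transition") fits in a char.
// The decoder below mirrors ZzUnpackTrans for a small hand-made input.
internal static class PackedTableSketch
{
    // Expands a (count, value) packed string into 'size' ints, optionally undoing the +1 shift.
    public static int[] Unpack(string packed, int size, bool shiftByOne)
    {
        int[] result = new int[size];
        int i = 0; // index in packed string
        int j = 0; // index in unpacked array
        while (i < packed.Length)
        {
            int count = packed[i++];
            int value = packed[i++];
            if (shiftByOne)
            {
                value--; // the transition table stores value + 1
            }
            do
            {
                result[j++] = value;
            } while (--count > 0);
        }
        return result;
    }

    public static void Demo()
    {
        // "\x0003\x0005" decodes to three entries of 4 once the transition shift is applied.
        int[] decoded = Unpack("\x0003\x0005", 3, shiftByOne: true);
        System.Diagnostics.Debug.Assert(decoded[0] == 4 && decoded[2] == 4);
    }
}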
using Microsoft.TeamFoundation.DistributedTask.WebApi; using Microsoft.VisualStudio.Services.Agent.Listener.Capabilities; using Microsoft.VisualStudio.Services.Agent.Util; using Microsoft.VisualStudio.Services.Common; using Microsoft.VisualStudio.Services.OAuth; using Microsoft.VisualStudio.Services.WebApi; using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Security.Cryptography; using System.Security.Principal; using System.Threading; using System.Threading.Tasks; namespace Microsoft.VisualStudio.Services.Agent.Listener.Configuration { [ServiceLocator(Default = typeof(ConfigurationManager))] public interface IConfigurationManager : IAgentService { bool IsConfigured(); Task ConfigureAsync(CommandSettings command); Task UnconfigureAsync(CommandSettings command); AgentSettings LoadSettings(); } public sealed class ConfigurationManager : AgentService, IConfigurationManager { private IConfigurationStore _store; private IAgentServer _agentServer; private ITerminal _term; public override void Initialize(IHostContext hostContext) { base.Initialize(hostContext); Trace.Verbose("Creating _store"); _store = hostContext.GetService<IConfigurationStore>(); Trace.Verbose("store created"); _term = hostContext.GetService<ITerminal>(); } public bool IsConfigured() { bool result = _store.IsConfigured(); Trace.Info($"Is configured: {result}"); return result; } public AgentSettings LoadSettings() { Trace.Info(nameof(LoadSettings)); if (!IsConfigured()) { throw new InvalidOperationException("Not configured"); } AgentSettings settings = _store.GetSettings(); Trace.Info("Settings Loaded"); return settings; } public async Task ConfigureAsync(CommandSettings command) { ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); Trace.Info(nameof(ConfigureAsync)); if (IsConfigured()) { throw new InvalidOperationException(StringUtil.Loc("AlreadyConfiguredError")); } AgentSettings agentSettings = new AgentSettings(); // TEE EULA agentSettings.AcceptTeeEula = false; switch (Constants.Agent.Platform) { case Constants.OSPlatform.OSX: case Constants.OSPlatform.Linux: // Write the section header. WriteSection(StringUtil.Loc("EulasSectionHeader")); // Verify the EULA exists on disk in the expected location. string eulaFile = Path.Combine(IOUtil.GetExternalsPath(), Constants.Path.TeeDirectory, "license.html"); ArgUtil.File(eulaFile, nameof(eulaFile)); // Write elaborate verbiage about the TEE EULA. _term.WriteLine(StringUtil.Loc("TeeEula", eulaFile)); _term.WriteLine(); // Prompt to acccept the TEE EULA. agentSettings.AcceptTeeEula = command.GetAcceptTeeEula(); break; case Constants.OSPlatform.Windows: // Warn and continue if .NET 4.6 is not installed. var netFrameworkUtil = HostContext.GetService<INetFrameworkUtil>(); if (!netFrameworkUtil.Test(new Version(4, 6))) { WriteSection(StringUtil.Loc("PrerequisitesSectionHeader")); // Section header. _term.WriteLine(StringUtil.Loc("MinimumNetFrameworkTfvc")); // Warning. } break; default: throw new NotSupportedException(); } // Create the configuration provider as per agent type. string agentType = command.DeploymentGroup ? 
Constants.Agent.AgentConfigurationProvider.DeploymentAgentConfiguration : Constants.Agent.AgentConfigurationProvider.BuildReleasesAgentConfiguration; var extensionManager = HostContext.GetService<IExtensionManager>(); IConfigurationProvider agentProvider = (extensionManager.GetExtensions<IConfigurationProvider>()) .FirstOrDefault(x => x.ConfigurationProviderType == agentType); ArgUtil.NotNull(agentProvider, agentType); // TODO: Check if its running with elevated permission and stop early if its not // Loop getting url and creds until you can connect ICredentialProvider credProvider = null; VssCredentials creds = null; WriteSection(StringUtil.Loc("ConnectSectionHeader")); while (true) { // Get the URL agentProvider.GetServerUrl(agentSettings, command); // Get the credentials credProvider = GetCredentialProvider(command, agentSettings.ServerUrl); creds = credProvider.GetVssCredentials(HostContext); Trace.Info("cred retrieved"); try { // Validate can connect. await agentProvider.TestConnectionAsync(agentSettings, creds); Trace.Info("Test Connection complete."); break; } catch (Exception e) when (!command.Unattended) { _term.WriteError(e); _term.WriteError(StringUtil.Loc("FailedToConnect")); } } _agentServer = HostContext.GetService<IAgentServer>(); // We want to use the native CSP of the platform for storage, so we use the RSACSP directly RSAParameters publicKey; var keyManager = HostContext.GetService<IRSAKeyManager>(); using (var rsa = keyManager.CreateKey()) { publicKey = rsa.ExportParameters(false); } // Loop getting agent name and pool name WriteSection(StringUtil.Loc("RegisterAgentSectionHeader")); while (true) { try { await agentProvider.GetPoolId(agentSettings, command); break; } catch (Exception e) when (!command.Unattended) { _term.WriteError(e); _term.WriteError(agentProvider.GetFailedToFindPoolErrorString()); } } TaskAgent agent; while (true) { agentSettings.AgentName = command.GetAgentName(); // Get the system capabilities. // TODO: Hook up to ctrl+c cancellation token. _term.WriteLine(StringUtil.Loc("ScanToolCapabilities")); Dictionary<string, string> systemCapabilities = await HostContext.GetService<ICapabilitiesManager>().GetCapabilitiesAsync(agentSettings, CancellationToken.None); _term.WriteLine(StringUtil.Loc("ConnectToServer")); agent = await agentProvider.GetAgentAsync(agentSettings); if (agent != null) { if (command.GetReplace()) { // Update existing agent with new PublicKey, agent version and SystemCapabilities. agent = UpdateExistingAgent(agent, publicKey, systemCapabilities); try { agent = await agentProvider.UpdateAgentAsync(agentSettings, agent, command); _term.WriteLine(StringUtil.Loc("AgentReplaced")); break; } catch (Exception e) when (!command.Unattended) { _term.WriteError(e); _term.WriteError(StringUtil.Loc("FailedToReplaceAgent")); } } else if (command.Unattended) { // if not replace and it is unattended config. agentProvider.ThrowTaskAgentExistException(agentSettings); } } else { // Create a new agent. agent = CreateNewAgent(agentSettings.AgentName, publicKey, systemCapabilities); try { agent = await agentProvider.AddAgentAsync(agentSettings, agent, command); _term.WriteLine(StringUtil.Loc("AgentAddedSuccessfully")); break; } catch (Exception e) when (!command.Unattended) { _term.WriteError(e); _term.WriteError(StringUtil.Loc("AddAgentFailed")); } } } // Add Agent Id to settings agentSettings.AgentId = agent.Id; // respect the serverUrl resolve by server. // in case of agent configured using collection url instead of account url. 
string agentServerUrl; if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) && !string.IsNullOrEmpty(agentServerUrl)) { Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'."); // we need to make sure the Scheme/Host/Port component of the url remains the same. UriBuilder inputServerUrl = new UriBuilder(agentSettings.ServerUrl); UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl); if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0) { inputServerUrl.Path = serverReturnedServerUrl.Path; Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'."); agentSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri; } else { agentSettings.ServerUrl = agentServerUrl; } } // See if the server supports our OAuth key exchange for credentials if (agent.Authorization != null && agent.Authorization.ClientId != Guid.Empty && agent.Authorization.AuthorizationUrl != null) { var credentialData = new CredentialData { Scheme = Constants.Configuration.OAuth, Data = { { "clientId", agent.Authorization.ClientId.ToString("D") }, { "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri }, }, }; // Save the negotiated OAuth credential data _store.SaveCredential(credentialData); } else { switch (Constants.Agent.Platform) { case Constants.OSPlatform.OSX: case Constants.OSPlatform.Linux: // Save the provided admin cred for compat with previous agent. _store.SaveCredential(credProvider.CredentialData); break; case Constants.OSPlatform.Windows: // Not supported against TFS 2015. _term.WriteError(StringUtil.Loc("Tfs2015NotSupported")); return; default: throw new NotSupportedException(); } } // Test the agent connection to detect any potential connection issue, like local clock skew that causes OAuth token expiration. _term.WriteLine(StringUtil.Loc("TestAgentConnection")); var credMgr = HostContext.GetService<ICredentialManager>(); VssCredentials credential = credMgr.LoadCredentials(); VssConnection conn = ApiUtil.CreateConnection(new Uri(agentSettings.ServerUrl), credential); var agentSvr = HostContext.GetService<IAgentServer>(); try { await agentSvr.ConnectAsync(conn); } catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is")) { // there are two exception messages the server sends that indicate clock skew. // 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}. // 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}. Trace.Error("Catch exception during test agent connection."); Trace.Error(ex); throw new Exception(StringUtil.Loc("LocalClockSkewed")); } // We will Combine() what's stored with root. Defaults to a relative path string agentSettings.WorkFolder = command.GetWork(); // notificationPipeName for Hosted agent provisioner. 
agentSettings.NotificationPipeName = command.GetNotificationPipeName(); agentSettings.NotificationSocketAddress = command.GetNotificationSocketAddress(); _store.SaveSettings(agentSettings); _term.WriteLine(StringUtil.Loc("SavedSettings", DateTime.UtcNow)); #if OS_WINDOWS // config windows service as part of configuration bool runAsService = command.GetRunAsService(); if (runAsService) { Trace.Info("Configuring to run the agent as service"); var serviceControlManager = HostContext.GetService<IWindowsServiceControlManager>(); serviceControlManager.ConfigureService(agentSettings, command); } //This will be enabled once the AutoLogon code changes are tested else if (command.GetEnableAutoLogon()) { Trace.Info("Agent is going to run as process setting up the 'AutoLogon' capability for the agent."); var autoLogonConfigManager = HostContext.GetService<IAutoLogonManager>(); await autoLogonConfigManager.ConfigureAsync(command); //Important: The machine may restart if the autologon user is not the same as the current user //if you are adding code after this, keep that in mind } #elif OS_LINUX || OS_OSX // generate service config script for OSX and Linux, GenerateScripts() will no-op on Windows. var serviceControlManager = HostContext.GetService<ILinuxServiceControlManager>(); serviceControlManager.GenerateScripts(agentSettings); #endif } public async Task UnconfigureAsync(CommandSettings command) { ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); string currentAction = string.Empty; try { //stop, uninstall service and remove service config file if (_store.IsServiceConfigured()) { currentAction = StringUtil.Loc("UninstallingService"); _term.WriteLine(currentAction); #if OS_WINDOWS var serviceControlManager = HostContext.GetService<IWindowsServiceControlManager>(); serviceControlManager.UnconfigureService(); _term.WriteLine(StringUtil.Loc("Success") + currentAction); #elif OS_LINUX // unconfigure the systemd service first throw new Exception(StringUtil.Loc("UnconfigureServiceDService")); #elif OS_OSX // unconfigure the OSX service first throw new Exception(StringUtil.Loc("UnconfigureOSXService")); #endif } else { #if OS_WINDOWS //running as process, unconfigure autologon if it was configured if (_store.IsAutoLogonConfigured()) { currentAction = StringUtil.Loc("UnconfigAutologon"); _term.WriteLine(currentAction); var autoLogonConfigManager = HostContext.GetService<IAutoLogonManager>(); autoLogonConfigManager.Unconfigure(); _term.WriteLine(StringUtil.Loc("Success") + currentAction); } else { Trace.Info("AutoLogon was not configured on the agent."); } #endif } //delete agent from the server currentAction = StringUtil.Loc("UnregisteringAgent"); _term.WriteLine(currentAction); bool isConfigured = _store.IsConfigured(); bool hasCredentials = _store.HasCredentials(); if (isConfigured && hasCredentials) { AgentSettings settings = _store.GetSettings(); var credentialManager = HostContext.GetService<ICredentialManager>(); // Get the credentials var credProvider = GetCredentialProvider(command, settings.ServerUrl); VssCredentials creds = credProvider.GetVssCredentials(HostContext); Trace.Info("cred retrieved"); bool isDeploymentGroup = (settings.MachineGroupId > 0) || (settings.DeploymentGroupId > 0); Trace.Info("Agent configured for deploymentGroup : {0}", isDeploymentGroup.ToString()); string agentType = isDeploymentGroup ? 
Constants.Agent.AgentConfigurationProvider.DeploymentAgentConfiguration : Constants.Agent.AgentConfigurationProvider.BuildReleasesAgentConfiguration; var extensionManager = HostContext.GetService<IExtensionManager>(); IConfigurationProvider agentProvider = (extensionManager.GetExtensions<IConfigurationProvider>()).FirstOrDefault(x => x.ConfigurationProviderType == agentType); ArgUtil.NotNull(agentProvider, agentType); await agentProvider.TestConnectionAsync(settings, creds); TaskAgent agent = await agentProvider.GetAgentAsync(settings); if (agent == null) { _term.WriteLine(StringUtil.Loc("Skipping") + currentAction); } else { await agentProvider.DeleteAgentAsync(settings); _term.WriteLine(StringUtil.Loc("Success") + currentAction); } } else { _term.WriteLine(StringUtil.Loc("MissingConfig")); } //delete credential config files currentAction = StringUtil.Loc("DeletingCredentials"); _term.WriteLine(currentAction); if (hasCredentials) { _store.DeleteCredential(); var keyManager = HostContext.GetService<IRSAKeyManager>(); keyManager.DeleteKey(); _term.WriteLine(StringUtil.Loc("Success") + currentAction); } else { _term.WriteLine(StringUtil.Loc("Skipping") + currentAction); } //delete settings config file currentAction = StringUtil.Loc("DeletingSettings"); _term.WriteLine(currentAction); if (isConfigured) { _store.DeleteSettings(); _term.WriteLine(StringUtil.Loc("Success") + currentAction); } else { _term.WriteLine(StringUtil.Loc("Skipping") + currentAction); } } catch (Exception) { _term.WriteLine(StringUtil.Loc("Failed") + currentAction); throw; } } private ICredentialProvider GetCredentialProvider(CommandSettings command, string serverUrl) { Trace.Info(nameof(GetCredentialProvider)); var credentialManager = HostContext.GetService<ICredentialManager>(); // Get the auth type. On premise defaults to negotiate (Kerberos with fallback to NTLM). // Hosted defaults to PAT authentication. string defaultAuth = UrlUtil.IsHosted(serverUrl) ? Constants.Configuration.PAT : (Constants.Agent.Platform == Constants.OSPlatform.Windows ? Constants.Configuration.Integrated : Constants.Configuration.Negotiate); string authType = command.GetAuth(defaultValue: defaultAuth); // Create the credential. Trace.Info("Creating credential for auth: {0}", authType); var provider = credentialManager.GetCredentialProvider(authType); provider.EnsureCredential(HostContext, command, serverUrl); return provider; } private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, Dictionary<string, string> systemCapabilities) { ArgUtil.NotNull(agent, nameof(agent)); agent.Authorization = new TaskAgentAuthorization { PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus), }; // update - update instead of delete so we don't lose user capabilities etc... agent.Version = Constants.Agent.Version; foreach (KeyValuePair<string, string> capability in systemCapabilities) { agent.SystemCapabilities[capability.Key] = capability.Value ?? string.Empty; } return agent; } private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, Dictionary<string, string> systemCapabilities) { TaskAgent agent = new TaskAgent(agentName) { Authorization = new TaskAgentAuthorization { PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus), }, MaxParallelism = 1, Version = Constants.Agent.Version }; foreach (KeyValuePair<string, string> capability in systemCapabilities) { agent.SystemCapabilities[capability.Key] = capability.Value ?? 
string.Empty; } return agent; } private void WriteSection(string message) { _term.WriteLine(); _term.WriteLine($">> {message}:"); _term.WriteLine(); } } }
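// NOTE: illustrative sketch only, not part of the agent source above; the type and method
// names below are assumptions. It shows the key-exchange idea used in ConfigureAsync: only
// the public RSA parameters (ExportParameters(false)) are sent to the server when the agent
// registers, while the private key stays in the local key store managed by IRSAKeyManager.
internal static class PublicKeyExportSketch
{
    public static System.Security.Cryptography.RSAParameters ExportPublicOnly()
    {
        // The agent uses the platform CSP via RSACryptoServiceProvider for key storage.
        using (var rsa = new System.Security.Cryptography.RSACryptoServiceProvider(2048))
        {
            // 'false' = exclude the private parameters (D, P, Q, DP, DQ, InverseQ).
            return rsa.ExportParameters(false);
        }
    }
}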
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Text; using System.Threading; using System.Threading.Tasks; using System.Diagnostics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Buffers; namespace System.IO { // This abstract base class represents a reader that can read a sequential // stream of characters. This is not intended for reading bytes - // there are methods on the Stream class to read bytes. // A subclass must minimally implement the Peek() and Read() methods. // // This class is intended for character input, not bytes. // There are methods on the Stream class for reading bytes. public abstract partial class TextReader : MarshalByRefObject, IDisposable { public static readonly TextReader Null = new NullTextReader(); protected TextReader() { } public virtual void Close() { Dispose(true); GC.SuppressFinalize(this); } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { } // Returns the next available character without actually reading it from // the input stream. The current position of the TextReader is not changed by // this operation. The returned value is -1 if no further characters are // available. // // This default method simply returns -1. // public virtual int Peek() { return -1; } // Reads the next character from the input stream. The returned value is // -1 if no further characters are available. // // This default method simply returns -1. // public virtual int Read() { return -1; } // Reads a block of characters. This method will read up to // count characters from this TextReader into the // buffer character array starting at position // index. Returns the actual number of characters read. // public virtual int Read(char[] buffer, int index, int count) { if (buffer == null) { throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer); } if (index < 0) { throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum); } if (count < 0) { throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum); } if (buffer.Length - index < count) { throw new ArgumentException(SR.Argument_InvalidOffLen); } int n; for (n = 0; n < count; n++) { int ch = Read(); if (ch == -1) break; buffer[index + n] = (char)ch; } return n; } // Reads a span of characters. This method will read up to // count characters from this TextReader into the // span of characters Returns the actual number of characters read. // public virtual int Read(Span<char> buffer) { char[] array = ArrayPool<char>.Shared.Rent(buffer.Length); try { int numRead = Read(array, 0, buffer.Length); if ((uint)numRead > (uint)buffer.Length) { throw new IOException(SR.IO_InvalidReadLength); } new Span<char>(array, 0, numRead).CopyTo(buffer); return numRead; } finally { ArrayPool<char>.Shared.Return(array); } } // Reads all characters from the current position to the end of the // TextReader, and returns them as one string. public virtual string ReadToEnd() { char[] chars = new char[4096]; int len; StringBuilder sb = new StringBuilder(4096); while ((len = Read(chars, 0, chars.Length)) != 0) { sb.Append(chars, 0, len); } return sb.ToString(); } // Blocking version of read. Returns only when count // characters have been read or the end of the file was reached. 
// public virtual int ReadBlock(char[] buffer, int index, int count) { int i, n = 0; do { n += (i = Read(buffer, index + n, count - n)); } while (i > 0 && n < count); return n; } // Blocking version of read for span of characters. Returns only when count // characters have been read or the end of the file was reached. // public virtual int ReadBlock(Span<char> buffer) { char[] array = ArrayPool<char>.Shared.Rent(buffer.Length); try { int numRead = ReadBlock(array, 0, buffer.Length); if ((uint)numRead > (uint)buffer.Length) { throw new IOException(SR.IO_InvalidReadLength); } new Span<char>(array, 0, numRead).CopyTo(buffer); return numRead; } finally { ArrayPool<char>.Shared.Return(array); } } // Reads a line. A line is defined as a sequence of characters followed by // a carriage return ('\r'), a line feed ('\n'), or a carriage return // immediately followed by a line feed. The resulting string does not // contain the terminating carriage return and/or line feed. The returned // value is null if the end of the input stream has been reached. // public virtual string? ReadLine() { StringBuilder sb = new StringBuilder(); while (true) { int ch = Read(); if (ch == -1) break; if (ch == '\r' || ch == '\n') { if (ch == '\r' && Peek() == '\n') { Read(); } return sb.ToString(); } sb.Append((char)ch); } if (sb.Length > 0) { return sb.ToString(); } return null; } #region Task based Async APIs public virtual Task<string?> ReadLineAsync() { return Task<string?>.Factory.StartNew(state => { return ((TextReader)state!).ReadLine(); }, this, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default); } public async virtual Task<string> ReadToEndAsync() { var sb = new StringBuilder(4096); char[] chars = ArrayPool<char>.Shared.Rent(4096); try { int len; while ((len = await ReadAsyncInternal(chars, default).ConfigureAwait(false)) != 0) { sb.Append(chars, 0, len); } } finally { ArrayPool<char>.Shared.Return(chars); } return sb.ToString(); } public virtual Task<int> ReadAsync(char[] buffer, int index, int count) { if (buffer == null) { throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer); } if (index < 0 || count < 0) { throw new ArgumentOutOfRangeException((index < 0 ? nameof(index): nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum); } if (buffer.Length - index < count) { throw new ArgumentException(SR.Argument_InvalidOffLen); } return ReadAsyncInternal(new Memory<char>(buffer, index, count), default).AsTask(); } public virtual ValueTask<int> ReadAsync(Memory<char> buffer, CancellationToken cancellationToken = default) => new ValueTask<int>(MemoryMarshal.TryGetArray(buffer, out ArraySegment<char> array) ? 
ReadAsync(array.Array!, array.Offset, array.Count) : Task<int>.Factory.StartNew(state => { var t = (Tuple<TextReader, Memory<char>>)state!; return t.Item1.Read(t.Item2.Span); }, Tuple.Create(this, buffer), cancellationToken, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default)); internal virtual ValueTask<int> ReadAsyncInternal(Memory<char> buffer, CancellationToken cancellationToken) { var tuple = new Tuple<TextReader, Memory<char>>(this, buffer); return new ValueTask<int>(Task<int>.Factory.StartNew(state => { var t = (Tuple<TextReader, Memory<char>>)state!; return t.Item1.Read(t.Item2.Span); }, tuple, cancellationToken, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default)); } public virtual Task<int> ReadBlockAsync(char[] buffer, int index, int count) { if (buffer == null) { throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer); } if (index < 0 || count < 0) { throw new ArgumentOutOfRangeException((index < 0 ? nameof(index): nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum); } if (buffer.Length - index < count) { throw new ArgumentException(SR.Argument_InvalidOffLen); } return ReadBlockAsyncInternal(new Memory<char>(buffer, index, count), default).AsTask(); } public virtual ValueTask<int> ReadBlockAsync(Memory<char> buffer, CancellationToken cancellationToken = default) => new ValueTask<int>(MemoryMarshal.TryGetArray(buffer, out ArraySegment<char> array) ? ReadBlockAsync(array.Array!, array.Offset, array.Count) : Task<int>.Factory.StartNew(state => { var t = (Tuple<TextReader, Memory<char>>)state!; return t.Item1.ReadBlock(t.Item2.Span); }, Tuple.Create(this, buffer), cancellationToken, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default)); internal async ValueTask<int> ReadBlockAsyncInternal(Memory<char> buffer, CancellationToken cancellationToken) { int n = 0, i; do { i = await ReadAsyncInternal(buffer.Slice(n), cancellationToken).ConfigureAwait(false); n += i; } while (i > 0 && n < buffer.Length); return n; } #endregion private sealed class NullTextReader : TextReader { public NullTextReader() { } public override int Read(char[] buffer, int index, int count) { return 0; } public override string? ReadLine() { return null; } } public static TextReader Synchronized(TextReader reader) { if (reader == null) throw new ArgumentNullException(nameof(reader)); return reader is SyncTextReader ? reader : new SyncTextReader(reader); } internal sealed class SyncTextReader : TextReader { internal readonly TextReader _in; internal SyncTextReader(TextReader t) { _in = t; } [MethodImpl(MethodImplOptions.Synchronized)] public override void Close() => _in.Close(); [MethodImpl(MethodImplOptions.Synchronized)] protected override void Dispose(bool disposing) { // Explicitly pick up a potentially methodimpl'ed Dispose if (disposing) ((IDisposable)_in).Dispose(); } [MethodImpl(MethodImplOptions.Synchronized)] public override int Peek() => _in.Peek(); [MethodImpl(MethodImplOptions.Synchronized)] public override int Read() => _in.Read(); [MethodImpl(MethodImplOptions.Synchronized)] public override int Read(char[] buffer, int index, int count) => _in.Read(buffer, index, count); [MethodImpl(MethodImplOptions.Synchronized)] public override int ReadBlock(char[] buffer, int index, int count) => _in.ReadBlock(buffer, index, count); [MethodImpl(MethodImplOptions.Synchronized)] public override string? 
ReadLine() => _in.ReadLine(); [MethodImpl(MethodImplOptions.Synchronized)] public override string ReadToEnd() => _in.ReadToEnd(); // // On SyncTextReader all APIs should run synchronously, even the async ones. // [MethodImpl(MethodImplOptions.Synchronized)] public override Task<string?> ReadLineAsync() => Task.FromResult(ReadLine()); [MethodImpl(MethodImplOptions.Synchronized)] public override Task<string> ReadToEndAsync() => Task.FromResult(ReadToEnd()); [MethodImpl(MethodImplOptions.Synchronized)] public override Task<int> ReadBlockAsync(char[] buffer, int index, int count) { if (buffer == null) throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer); if (index < 0 || count < 0) throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum); if (buffer.Length - index < count) throw new ArgumentException(SR.Argument_InvalidOffLen); return Task.FromResult(ReadBlock(buffer, index, count)); } [MethodImpl(MethodImplOptions.Synchronized)] public override Task<int> ReadAsync(char[] buffer, int index, int count) { if (buffer == null) throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer); if (index < 0 || count < 0) throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum); if (buffer.Length - index < count) throw new ArgumentException(SR.Argument_InvalidOffLen); return Task.FromResult(Read(buffer, index, count)); } } } }
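// NOTE: illustrative sketch only, not part of the framework source above; SingleStringReader
// is a made-up name. It demonstrates the minimal contract described in the comments: a
// subclass only has to override Peek() and Read(), and the block, line, and async helpers
// defined by TextReader are layered on top of those two methods.
internal sealed class SingleStringReader : System.IO.TextReader
{
    private readonly string _s;
    private int _pos;

    public SingleStringReader(string s)
    {
        _s = s ?? string.Empty;
    }

    // Look at the next character without consuming it; -1 signals end of input.
    public override int Peek() => _pos < _s.Length ? _s[_pos] : -1;

    // Consume and return the next character; -1 signals end of input.
    public override int Read() => _pos < _s.Length ? _s[_pos++] : -1;
}
// For example, new SingleStringReader("a\r\nb").ReadLine() returns "a" purely through the
// base class ReadLine() implementation, which is built on Read() and Peek().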
using System; using System.IO; using System.Linq.Expressions; using System.Web.UI; using Glass.Mapper.Sc.Web.Ui; using Sitecore.Data.Items; namespace Glass.Mapper.Sc.Web.WebForms.Ui { /// <summary> /// Class AbstractGlassUserControl /// </summary> [Obsolete("This class will be removed in future releases")] public abstract class AbstractGlassUserControl : UserControl { private TextWriter _writer; protected AbstractGlassUserControl(IWebFormsContext context) { WebContext = context; } protected TextWriter Output { get { return _writer ?? this.Response.Output; } } /// <summary> /// Gets a value indicating whether this instance is in editing mode. /// </summary> /// <value><c>true</c> if this instance is in editing mode; otherwise, <c>false</c>.</value> public bool IsInEditingMode { get { return Sc.GlassHtml.IsInEditingMode; } } /// <summary> /// Represents the current Sitecore context /// </summary> /// <value>The sitecore context.</value> public virtual IWebFormsContext WebContext { get; set; } /// <summary> /// Access to rendering helpers /// </summary> /// <value>The glass HTML.</value> public virtual IGlassHtml GlassHtml { get { return WebContext.GlassHtml; } } /// <summary> /// Returns either the item specified by the DataSource or the current context item /// </summary> /// <value>The layout item.</value> public Item LayoutItem { get { return DataSourceItem ?? ContextItem; } } /// <summary> /// Returns the current context item /// </summary> /// <value>The context item.</value> public Item ContextItem { get { return WebContext.ContextItem; } } /// <summary> /// Returns the item specified by the data source only. Returns null if no datasource is set /// </summary> public Item DataSourceItem { get { return WebContext.GetDataSourceItem(this); } } /// <summary> /// Returns the Context Item as strongly typed /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> public T GetContext<T>(GetKnownOptions options = null) where T : class { options = options == null ? new GetKnownOptions() : options; return WebContext.GetContextItem<T>(options); } /// <summary> /// Returns the Data Source Item as strongly typed /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> public T GetDataSource<T>(GetKnownOptions options = null) where T : class { options = options == null ? new GetKnownOptions() : options; return WebContext.GetDataSourceItem<T>(this, options); } /// <summary> /// Returns the DataSource item or the Context Item as strongly typed /// </summary> /// <typeparam name="T"></typeparam> /// <returns></returns> public T GetLayoutItem<T>(GetKnownOptions options = null) where T : class { var dataSource = WebContext.GetDataSourceItem(this); if (dataSource != null) { return GetDataSource<T>(options); } else { return GetContext<T>(options); } } /// <summary> /// Makes a field editable via the Page Editor. Use the Model property as the target item, e.g. model =&gt; model.Title where Title is the field name. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="model">The model.</param> /// <param name="field">The field.</param> /// <param name="parameters">The parameters.</param> /// <returns>System.String.</returns> public string Editable<T>(T model, Expression<Func<T, object>> field, object parameters = null) { return GlassHtml.Editable(model, field, parameters); } /// <summary> /// Makes a field editable via the Page Editor. Use the Model property as the target item, e.g. model =&gt; model.Title where Title is the field name. 
/// </summary> /// <typeparam name="T"></typeparam> /// <param name="model">The model.</param> /// <param name="field">The field.</param> /// <param name="standardOutput">The standard output.</param> /// <param name="parameters">The parameters.</param> /// <returns>System.String.</returns> public string Editable<T>(T model, Expression<Func<T, object>> field, Expression<Func<T, string>> standardOutput, object parameters = null) { return GlassHtml.Editable(model, field, standardOutput, parameters); } /// <summary> /// Renders an image allowing simple page editor support /// </summary> /// <typeparam name="T">The model type</typeparam> /// <param name="model">The model that contains the image field</param> /// <param name="field">A lambda expression to the image field, should be of type Glass.Mapper.Sc.Fields.Image</param> /// <param name="parameters">Image parameters, e.g. width, height</param> /// <param name="isEditable">Indicates if the field should be editable</param> /// <param name="outputHeightWidth">Indicates if the height and width attributes should be outputted when rendering the image</param> /// <returns></returns> public virtual string RenderImage<T>(T model, Expression<Func<T, object>> field, object parameters = null, bool isEditable = false, bool outputHeightWidth = true) { return GlassHtml.RenderImage(model, field, parameters, isEditable, outputHeightWidth); } /// <summary> /// Render HTML for a link with contents /// </summary> /// <typeparam name="T">The model type</typeparam> /// <param name="model">The model</param> /// <param name="field">The link field to user</param> /// <param name="attributes">Any additional link attributes</param> /// <param name="isEditable">Make the link editable</param> /// <returns></returns> public virtual RenderingResult BeginRenderLink<T>(T model, Expression<Func<T, object>> field, object attributes = null, bool isEditable = false) { return GlassHtml.BeginRenderLink(model, field, this.Output, attributes, isEditable); } /// <summary> /// Render HTML for a link /// </summary> /// <typeparam name="T">The model type</typeparam> /// <param name="model">The model</param> /// <param name="field">The link field to user</param> /// <param name="attributes">Any additional link attributes</param> /// <param name="isEditable">Make the link editable</param> /// <param name="contents">Content to override the default decription or item name</param> /// <returns></returns> public virtual string RenderLink<T>(T model, Expression<Func<T, object>> field, object attributes = null, bool isEditable = false, string contents=null) { return GlassHtml.RenderLink(model, field, attributes, isEditable, contents); } /// <summary> /// Returns an Sitecore Edit Frame /// </summary> /// <returns> /// GlassEditFrame. /// </returns> public GlassEditFrame BeginEditFrame<T>(T model, string title = null, params Expression<Func<T, object>>[] fields) where T : class { return GlassHtml.EditFrame(model, title, this.Output, fields); } public override void RenderControl(HtmlTextWriter writer) { this._writer = writer; base.RenderControl(writer); } public virtual string RenderingParameters { get { return WebContext.GetRenderingParameters(this); } } public virtual TParam GetRenderingParameters<TParam>() where TParam : class { return RenderingParameters.HasValue() ? GlassHtml.GetRenderingParameters<TParam>(RenderingParameters) : default(TParam); } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. namespace Microsoft.Azure.Management.AppService.Fluent { /// <summary> /// /// </summary> /// <typeparam name="FluentT"></typeparam> /// <typeparam name="FluentImplT"></typeparam> /// <typeparam name="DefAfterRegionT"></typeparam> /// <typeparam name="DefAfterGroupT"></typeparam> /// <typeparam name="UpdateT"></typeparam> internal partial class WebAppSourceControlImpl<FluentT, FluentImplT, DefAfterRegionT, DefAfterGroupT, UpdateT> { /// <summary> /// Specifies the GitHub personal access token. You can acquire one from /// https://github.com/settings/tokens. /// </summary> /// <param name="personalAccessToken">The personal access token from GitHub.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IGitHubWithAttach<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithGitHubAccessToken<WebAppBase.Update.IUpdate<FluentT>>.WithGitHubAccessToken(string personalAccessToken) { return this.WithGitHubAccessToken(personalAccessToken); } /// <summary> /// Specifies the GitHub personal access token. You can acquire one from /// https://github.com/settings/tokens. /// </summary> /// <param name="personalAccessToken">The personal access token from GitHub.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IGitHubWithAttach<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithGitHubAccessToken<WebAppBase.Definition.IWithCreate<FluentT>>.WithGitHubAccessToken(string personalAccessToken) { return this.WithGitHubAccessToken(personalAccessToken); } /// <summary> /// Gets the name of the resource. /// </summary> string Microsoft.Azure.Management.ResourceManager.Fluent.Core.IHasName.Name { get { return this.Name(); } } /// <summary> /// Gets the parent of this child object. /// </summary> Microsoft.Azure.Management.AppService.Fluent.IWebAppBase Microsoft.Azure.Management.ResourceManager.Fluent.Core.IHasParent<Microsoft.Azure.Management.AppService.Fluent.IWebAppBase>.Parent { get { return this.Parent(); } } /// <summary> /// Attaches the child definition to the parent resource update. /// </summary> /// <return>The next stage of the parent definition.</return> WebAppBase.Update.IUpdate<FluentT> Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Update.IInUpdate<WebAppBase.Update.IUpdate<FluentT>>.Attach() { return this.Attach(); } /// <summary> /// Attaches the child definition to the parent resource definition. /// </summary> /// <return>The next stage of the parent definition.</return> WebAppBase.Definition.IWithCreate<FluentT> Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition.IInDefinition<WebAppBase.Definition.IWithCreate<FluentT>>.Attach() { return this.Attach(); } /// <summary> /// Specifies the branch in the repository to use. /// </summary> /// <param name="branch">The branch to use.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IGitHubWithAttach<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithGitHubBranch<WebAppBase.Update.IUpdate<FluentT>>.WithBranch(string branch) { return this.WithBranch(branch); } /// <summary> /// Specifies the branch in the repository to use. 
/// </summary> /// <param name="branch">The branch to use.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IGitHubWithAttach<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithGitHubBranch<WebAppBase.Definition.IWithCreate<FluentT>>.WithBranch(string branch) { return this.WithBranch(branch); } /// <summary> /// Gets the name of the branch to use for deployment. /// </summary> string Microsoft.Azure.Management.AppService.Fluent.IWebAppSourceControl.Branch { get { return this.Branch(); } } /// <summary> /// Gets whether to do manual or continuous integration. /// </summary> bool Microsoft.Azure.Management.AppService.Fluent.IWebAppSourceControl.IsManualIntegration { get { return this.IsManualIntegration(); } } /// <summary> /// Gets the repository or source control url. /// </summary> string Microsoft.Azure.Management.AppService.Fluent.IWebAppSourceControl.RepositoryUrl { get { return this.RepositoryUrl(); } } /// <summary> /// Gets whether deployment rollback is enabled. /// </summary> bool Microsoft.Azure.Management.AppService.Fluent.IWebAppSourceControl.DeploymentRollbackEnabled { get { return this.DeploymentRollbackEnabled(); } } /// <summary> /// Gets mercurial or Git repository type. /// </summary> Microsoft.Azure.Management.AppService.Fluent.RepositoryType? Microsoft.Azure.Management.AppService.Fluent.IWebAppSourceControl.RepositoryType { get { return this.RepositoryType(); } } /// <summary> /// Specifies the branch in the repository to use. /// </summary> /// <param name="branch">The branch to use.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IWithAttach<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithBranch<WebAppBase.Update.IUpdate<FluentT>>.WithBranch(string branch) { return this.WithBranch(branch); } /// <summary> /// Specifies the branch in the repository to use. /// </summary> /// <param name="branch">The branch to use.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IWithAttach<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithBranch<WebAppBase.Definition.IWithCreate<FluentT>>.WithBranch(string branch) { return this.WithBranch(branch); } /// <summary> /// Specifies the repository to be a public external repository, either Git or Mercurial. /// Continuous integration will not be turned on. /// </summary> /// <param name="url">The url of the Mercurial repository.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IWithBranch<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithRepositoryType<WebAppBase.Update.IUpdate<FluentT>>.WithPublicMercurialRepository(string url) { return this.WithPublicMercurialRepository(url); } /// <summary> /// Specifies the repository to be a GitHub repository. Continuous integration /// will be turned on. /// This repository can be either public or private, but your GitHub access token /// must have enough privileges to add a webhook to the repository. /// </summary> /// <param name="organization">The user name or organization name the GitHub repository belongs to, e.g. Azure.</param> /// <param name="repository">The name of the repository, e.g. 
azure-sdk-for-java.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IWithGitHubBranch<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithRepositoryType<WebAppBase.Update.IUpdate<FluentT>>.WithContinuouslyIntegratedGitHubRepository(string organization, string repository) { return this.WithContinuouslyIntegratedGitHubRepository(organization, repository); } /// <summary> /// Specifies the repository to be a GitHub repository. Continuous integration /// will be turned on. /// This repository can be either public or private, but your GitHub access token /// must have enough privileges to add a webhook to the repository. /// </summary> /// <param name="url">The URL pointing to the repository, e.g. https://github.com/Azure/azure-sdk-for-java.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IWithGitHubBranch<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithRepositoryType<WebAppBase.Update.IUpdate<FluentT>>.WithContinuouslyIntegratedGitHubRepository(string url) { return this.WithContinuouslyIntegratedGitHubRepository(url); } /// <summary> /// Specifies the repository to be a public external repository, either Git or Mercurial. /// Continuous integration will not be turned on. /// </summary> /// <param name="url">The url of the Git repository.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.UpdateDefinition.IWithBranch<WebAppBase.Update.IUpdate<FluentT>> WebAppSourceControl.UpdateDefinition.IWithRepositoryType<WebAppBase.Update.IUpdate<FluentT>>.WithPublicGitRepository(string url) { return this.WithPublicGitRepository(url); } /// <summary> /// Specifies the repository to be a public external repository, either Git or Mercurial. /// Continuous integration will not be turned on. /// </summary> /// <param name="url">The url of the Mercurial repository.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IWithBranch<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithRepositoryType<WebAppBase.Definition.IWithCreate<FluentT>>.WithPublicMercurialRepository(string url) { return this.WithPublicMercurialRepository(url); } /// <summary> /// Specifies the repository to be a GitHub repository. Continuous integration /// will be turned on. /// This repository can be either public or private, but your GitHub access token /// must have enough privileges to add a webhook to the repository. /// </summary> /// <param name="organization">The user name or organization name the GitHub repository belongs to, e.g. Azure.</param> /// <param name="repository">The name of the repository, e.g. azure-sdk-for-java.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IWithGitHubBranch<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithRepositoryType<WebAppBase.Definition.IWithCreate<FluentT>>.WithContinuouslyIntegratedGitHubRepository(string organization, string repository) { return this.WithContinuouslyIntegratedGitHubRepository(organization, repository); } /// <summary> /// Specifies the repository to be a GitHub repository. Continuous integration /// will be turned on. /// This repository can be either public or private, but your GitHub access token /// must have enough privileges to add a webhook to the repository. /// </summary> /// <param name="url">The URL pointing to the repository, e.g. 
https://github.com/Azure/azure-sdk-for-java.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IWithGitHubBranch<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithRepositoryType<WebAppBase.Definition.IWithCreate<FluentT>>.WithContinuouslyIntegratedGitHubRepository(string url) { return this.WithContinuouslyIntegratedGitHubRepository(url); } /// <summary> /// Specifies the repository to be a public external repository, either Git or Mercurial. /// Continuous integration will not be turned on. /// </summary> /// <param name="url">The url of the Git repository.</param> /// <return>The next stage of the definition.</return> WebAppSourceControl.Definition.IWithBranch<WebAppBase.Definition.IWithCreate<FluentT>> WebAppSourceControl.Definition.IWithRepositoryType<WebAppBase.Definition.IWithCreate<FluentT>>.WithPublicGitRepository(string url) { return this.WithPublicGitRepository(url); } } }
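// --------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the generated Azure SDK file above): how the explicit
// interface implementations in WebAppSourceControlImpl are normally reached through the fluent
// chain. The Update()/DefineSourceControl()/Apply() entry points are assumptions about the
// surrounding fluent API and should be checked against the actual IWebApp interfaces; the
// organization, repository, branch and token values are placeholders.
// --------------------------------------------------------------------------------------------
using Microsoft.Azure.Management.AppService.Fluent;

namespace Microsoft.Azure.Management.AppService.Fluent.Samples
{
    public static class SourceControlSample
    {
        public static IWebApp ConfigureGitHubDeployment(IWebApp webApp, string personalAccessToken)
        {
            // Each call returns the "next stage" interface implemented above, ending with
            // Attach() to return to the parent update, then Apply() to send the change.
            return webApp.Update()
                .DefineSourceControl()                                            // assumed entry point
                    .WithContinuouslyIntegratedGitHubRepository("Azure", "azure-sdk-for-java")
                    .WithBranch("master")
                    .WithGitHubAccessToken(personalAccessToken)
                    .Attach()
                .Apply();
        }
    }
}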
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Runtime.InteropServices; using Microsoft.CodeAnalysis.ExpressionEvaluator; using Microsoft.VisualStudio.Debugger; using Microsoft.VisualStudio.Debugger.Clr; using Microsoft.VisualStudio.Debugger.Evaluation; using Roslyn.Test.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.CSharp.UnitTests { public class DebuggerDisplayAttributeTests : CSharpResultProviderTestBase { [Fact] public void WithoutExpressionHoles() { var source = @" using System.Diagnostics; class C0 { } [DebuggerDisplay(""Value"")] class C1 { } [DebuggerDisplay(""Value"", Name=""Name"")] class C2 { } [DebuggerDisplay(""Value"", Type=""Type"")] class C3 { } [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] class C4 { } class Wrapper { C0 c0 = new C0(); C1 c1 = new C1(); C2 c2 = new C2(); C3 c3 = new C3(); C4 c4 = new C4(); } "; var assembly = GetAssembly(source); var type = assembly.GetType("Wrapper"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(GetChildren(FormatResult("w", value)), EvalResult("c0", "{C0}", "C0", "w.c0", DkmEvaluationResultFlags.None), EvalResult("c1", "Value", "C1", "w.c1", DkmEvaluationResultFlags.None), EvalResult("Name", "Value", "C2", "w.c2", DkmEvaluationResultFlags.None), EvalResult("c3", "Value", "Type", "w.c3", DkmEvaluationResultFlags.None), EvalResult("Name", "Value", "Type", "w.c4", DkmEvaluationResultFlags.None)); } [Fact] public void OnlyExpressionHoles() { var source = @" using System.Diagnostics; [DebuggerDisplay(""{value}"", Name=""{name}"", Type=""{type}"")] class C { string name = ""Name""; string value = ""Value""; string type = ""Type""; } class Wrapper { C c = new C(); } "; var assembly = GetAssembly(source); var type = assembly.GetType("Wrapper"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(GetChildren(FormatResult("c", value)), EvalResult("\"Name\"", "\"Value\"", "\"Type\"", "c.c", DkmEvaluationResultFlags.Expandable)); } [Fact] public void FormatStrings() { var source = @" using System.Diagnostics; [DebuggerDisplay(""<{value}>"", Name=""<{name}>"", Type=""<{type}>"")] class C { string name = ""Name""; string value = ""Value""; string type = ""Type""; } class Wrapper { C c = new C(); } "; var assembly = GetAssembly(source); var type = assembly.GetType("Wrapper"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(GetChildren(FormatResult("w", value)), EvalResult("<\"Name\">", "<\"Value\">", "<\"Type\">", "w.c", DkmEvaluationResultFlags.Expandable)); } [Fact] public void BindingError() { var source = @" using System.Diagnostics; [DebuggerDisplay(""<{missing}>"")] class C { } "; const string rootExpr = "c"; // Note that this is the full name in all cases - DebuggerDisplayAttribute does not affect it. var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(FormatResult(rootExpr, value), EvalResult(rootExpr, "<Problem evaluating expression>", "C", rootExpr, DkmEvaluationResultFlags.None)); // Message inlined without quotation marks. 
} [Fact] public void RecursiveDebuggerDisplay() { var source = @" using System.Diagnostics; [DebuggerDisplay(""{value}"")] class C { C value; C() { this.value = this; } } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); // No stack overflow, since attribute on computed value is ignored. Verify(FormatResult(rootExpr, value), EvalResult(rootExpr, "{C}", "C", rootExpr, DkmEvaluationResultFlags.Expandable)); } [Fact] public void MultipleAttributes() { var source = @" using System.Diagnostics; [DebuggerDisplay(""V1"")] [DebuggerDisplay(""V2"")] class C { } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); // First attribute wins, as in dev12. Verify(FormatResult(rootExpr, value), EvalResult(rootExpr, "V1", "C", rootExpr)); } [Fact] public void NullValues() { var source = @" using System.Diagnostics; [DebuggerDisplay(null, Name=null, Type=null)] class C { } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(FormatResult(rootExpr, value), EvalResult(rootExpr, "{C}", "C", rootExpr)); } [Fact] public void EmptyStringValues() { var source = @" using System.Diagnostics; [DebuggerDisplay("""", Name="""", Type="""")] class C { } class Wrapper { C c = new C(); } "; var assembly = GetAssembly(source); var type = assembly.GetType("Wrapper"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(GetChildren(FormatResult("w", value)), EvalResult("", "", "", "w.c")); } [Fact] public void ConstructedGenericType() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Name"")] class C<T> { } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C`1").MakeGenericType(typeof(int)); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); Verify(FormatResult(rootExpr, value), EvalResult("c", "Name", "C<int>", rootExpr)); } [Fact] public void MemberExpansion() { var source = @" using System.Diagnostics; interface I { D P { get; } } class C : I { D I.P { get { return new D(); } } D Q { get { return new D(); } } } [DebuggerDisplay(""Value"", Name=""Name"")] class D { } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); var root = FormatResult(rootExpr, value); Verify(root, EvalResult(rootExpr, "{C}", "C", rootExpr, DkmEvaluationResultFlags.Expandable)); Verify(GetChildren(root), EvalResult("Name", "Value", "D", "((I)c).P", DkmEvaluationResultFlags.ReadOnly), // Not "I.Name". 
EvalResult("Name", "Value", "D", "c.Q", DkmEvaluationResultFlags.ReadOnly)); } [Fact] public void PointerDereferenceExpansion_Null() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] unsafe struct Display { Display* DisplayPointer; NoDisplay* NoDisplayPointer; } unsafe struct NoDisplay { Display* DisplayPointer; NoDisplay* NoDisplayPointer; } class Wrapper { Display display = new Display(); } "; var assembly = GetUnsafeAssembly(source); var type = assembly.GetType("Wrapper"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); var root = FormatResult("wrapper", value); Verify(DepthFirstSearch(GetChildren(root).Single(), maxDepth: 3), EvalResult("Name", "Value", "Type", "wrapper.display", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayPointer", PointerToString(IntPtr.Zero), "Display*", "wrapper.display.DisplayPointer"), EvalResult("NoDisplayPointer", PointerToString(IntPtr.Zero), "NoDisplay*", "wrapper.display.NoDisplayPointer")); } [WorkItem(321, "https://github.com/dotnet/roslyn/issues/321")] [Fact(Skip = "https://github.com/dotnet/roslyn/issues/321")] public void PointerDereferenceExpansion_NonNull() { var source = @" using System; using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] unsafe struct Display { public Display* DisplayPointer; public NoDisplay* NoDisplayPointer; } unsafe struct NoDisplay { public Display* DisplayPointer; public NoDisplay* NoDisplayPointer; } unsafe class C { Display* DisplayPointer; NoDisplay* NoDisplayPointer; public C(IntPtr d, IntPtr nd) { this.DisplayPointer = (Display*)d; this.NoDisplayPointer = (NoDisplay*)nd; this.DisplayPointer->DisplayPointer = this.DisplayPointer; this.DisplayPointer->NoDisplayPointer = this.NoDisplayPointer; this.NoDisplayPointer->DisplayPointer = this.DisplayPointer; this.NoDisplayPointer->NoDisplayPointer = this.NoDisplayPointer; } } "; var assembly = GetUnsafeAssembly(source); unsafe { var displayType = assembly.GetType("Display"); var displayInstance = displayType.Instantiate(); var displayHandle = GCHandle.Alloc(displayInstance, GCHandleType.Pinned); var displayPtr = displayHandle.AddrOfPinnedObject(); var noDisplayType = assembly.GetType("NoDisplay"); var noDisplayInstance = noDisplayType.Instantiate(); var noDisplayHandle = GCHandle.Alloc(noDisplayInstance, GCHandleType.Pinned); var noDisplayPtr = noDisplayHandle.AddrOfPinnedObject(); var testType = assembly.GetType("C"); var testInstance = ReflectionUtilities.Instantiate(testType, displayPtr, noDisplayPtr); var testValue = CreateDkmClrValue(testInstance, testType, evalFlags: DkmEvaluationResultFlags.None); var displayPtrString = PointerToString(displayPtr); var noDisplayPtrString = PointerToString(noDisplayPtr); Verify(DepthFirstSearch(FormatResult("c", testValue), maxDepth: 3), EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayPointer", displayPtrString, "Display*", "c.DisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("Name", "Value", "Type", "*c.DisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayPointer", displayPtrString, "Display*", "(*c.DisplayPointer).DisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "(*c.DisplayPointer).NoDisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "c.NoDisplayPointer", 
DkmEvaluationResultFlags.Expandable), EvalResult("*c.NoDisplayPointer", "{NoDisplay}", "NoDisplay", "*c.NoDisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayPointer", displayPtrString, "Display*", "(*c.NoDisplayPointer).DisplayPointer", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "(*c.NoDisplayPointer).NoDisplayPointer", DkmEvaluationResultFlags.Expandable)); displayHandle.Free(); noDisplayHandle.Free(); } } [Fact] public void ArrayExpansion() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] struct Display { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; } struct NoDisplay { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; } class C { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; public C() { this.DisplayArray = new[] { new Display() }; this.NoDisplayArray = new[] { new NoDisplay() }; this.DisplayArray[0].DisplayArray = this.DisplayArray; this.DisplayArray[0].NoDisplayArray = this.NoDisplayArray; this.NoDisplayArray[0].DisplayArray = this.DisplayArray; this.NoDisplayArray[0].NoDisplayArray = this.NoDisplayArray; } } "; var assembly = GetUnsafeAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); var root = FormatResult("c", value); Verify(DepthFirstSearch(root, maxDepth: 4), EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.DisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("Name", "Value", "Type", "c.DisplayArray[0]", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.DisplayArray[0].DisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("Name", "Value", "Type", "c.DisplayArray[0].DisplayArray[0]", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.DisplayArray[0].NoDisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.DisplayArray[0].NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.NoDisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable), EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.NoDisplayArray[0].DisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("Name", "Value", "Type", "c.NoDisplayArray[0].DisplayArray[0]", DkmEvaluationResultFlags.Expandable), EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.NoDisplayArray[0].NoDisplayArray", DkmEvaluationResultFlags.Expandable), EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.NoDisplayArray[0].NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable)); } [Fact] public void DebuggerTypeProxyExpansion() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] public struct Display { } public struct NoDisplay { } [DebuggerTypeProxy(typeof(P))] public class C { public Display DisplayC = new Display(); public NoDisplay NoDisplayC = new NoDisplay(); } public class P { public Display DisplayP = new Display(); public NoDisplay NoDisplayP = new NoDisplay(); public P(C c) { } } "; var assembly = GetUnsafeAssembly(source); var type = assembly.GetType("C"); var value = 
CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); var root = FormatResult("c", value); Verify(DepthFirstSearch(root, maxDepth: 4), EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable), EvalResult("Name", "Value", "Type", "new P(c).DisplayP"), EvalResult("NoDisplayP", "{NoDisplay}", "NoDisplay", "new P(c).NoDisplayP"), EvalResult("Raw View", null, "", "c, raw", DkmEvaluationResultFlags.Expandable | DkmEvaluationResultFlags.ReadOnly, DkmEvaluationResultCategory.Data), EvalResult("Name", "Value", "Type", "c.DisplayC"), EvalResult("NoDisplayC", "{NoDisplay}", "NoDisplay", "c.NoDisplayC")); } [Fact] public void NullInstance() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Hello"")] class C { } "; const string rootExpr = "c"; var assembly = GetAssembly(source); var type = assembly.GetType("C"); var value = CreateDkmClrValue(null, type, evalFlags: DkmEvaluationResultFlags.None); Verify(FormatResult(rootExpr, value), EvalResult(rootExpr, "null", "C", rootExpr)); } [Fact] public void NonGenericDisplayAttributeOnGenericBase() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Type={GetType()}"")] class A<T> { } class B : A<int> { } "; var assembly = GetAssembly(source); var type = assembly.GetType("B"); var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None); var result = FormatResult("b", value); Verify(result, EvalResult("b", "Type={B}", "B", "b", DkmEvaluationResultFlags.None)); } [WorkItem(1016895)] [Fact] public void RootVersusInternal() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name = ""Name"")] class A { } class B { A a; public B(A a) { this.a = a; } } "; var assembly = GetAssembly(source); var typeA = assembly.GetType("A"); var typeB = assembly.GetType("B"); var instanceA = typeA.Instantiate(); var instanceB = typeB.Instantiate(instanceA); var result = FormatResult("a", CreateDkmClrValue(instanceA)); Verify(result, EvalResult("a", "Value", "A", "a", DkmEvaluationResultFlags.None)); result = FormatResult("b", CreateDkmClrValue(instanceB)); Verify(GetChildren(result), EvalResult("Name", "Value", "A", "b.a", DkmEvaluationResultFlags.None)); } [Fact] public void Error() { var source = @"using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] class A { } class B { bool f; internal A P { get { return new A(); } } internal A Q { get { while(f) { } return new A(); } } } "; DkmClrRuntimeInstance runtime = null; GetMemberValueDelegate getMemberValue = (v, m) => (m == "Q") ? 
CreateErrorValue(runtime.GetType("A"), "Function evaluation timed out") : null; runtime = new DkmClrRuntimeInstance(ReflectionUtilities.GetMscorlibAndSystemCore(GetAssembly(source)), getMemberValue: getMemberValue); using (runtime.Load()) { var type = runtime.GetType("B"); var value = CreateDkmClrValue(type.Instantiate(), type: type); var evalResult = FormatResult("o", value); var children = GetChildren(evalResult); Verify(children, EvalResult("Name", "Value", "Type", "o.P", DkmEvaluationResultFlags.ReadOnly), EvalFailedResult("Q", "Function evaluation timed out", "A", "o.Q"), EvalResult("f", "false", "bool", "o.f", DkmEvaluationResultFlags.Boolean)); } } [Fact] public void UnhandledException() { var source = @"using System.Diagnostics; [DebuggerDisplay(""Value}"")] class A { internal int Value; } "; var assembly = GetAssembly(source); var typeA = assembly.GetType("A"); var instanceA = typeA.Instantiate(); var result = FormatResult("a", CreateDkmClrValue(instanceA)); Verify(result, EvalFailedResult("a", "Unmatched closing brace in 'Value}'", null, null, DkmEvaluationResultFlags.None)); } [Fact, WorkItem(171123, "https://devdiv.visualstudio.com/DefaultCollection/DevDiv")] public void ExceptionDuringEvaluate() { var source = @" using System.Diagnostics; [DebuggerDisplay(""Make it throw."")] public class Picard { } "; var assembly = GetAssembly(source); var picard = assembly.GetType("Picard"); var jeanLuc = picard.Instantiate(); var result = FormatResult("says", CreateDkmClrValue(jeanLuc), declaredType: new BadType(picard)); Verify(result, EvalFailedResult("says", BadType.Exception.Message, null, null, DkmEvaluationResultFlags.None)); } private class BadType : DkmClrType { public static readonly Exception Exception = new TargetInvocationException(new DkmException(DkmExceptionCode.E_PROCESS_DESTROYED)); public BadType(System.Type innerType) : base((TypeImpl)innerType) { } public override VisualStudio.Debugger.Metadata.Type GetLmrType() { if (Environment.StackTrace.Contains("Microsoft.CodeAnalysis.ExpressionEvaluator.ResultProvider.GetTypeName")) { throw Exception; } return base.GetLmrType(); } } private IReadOnlyList<DkmEvaluationResult> DepthFirstSearch(DkmEvaluationResult root, int maxDepth) { var builder = ArrayBuilder<DkmEvaluationResult>.GetInstance(); DepthFirstSearchInternal(builder, root, 0, maxDepth); return builder.ToImmutableAndFree(); } private void DepthFirstSearchInternal(ArrayBuilder<DkmEvaluationResult> builder, DkmEvaluationResult curr, int depth, int maxDepth) { Assert.InRange(depth, 0, maxDepth); builder.Add(curr); var childDepth = depth + 1; if (childDepth <= maxDepth) { foreach (var child in GetChildren(curr)) { DepthFirstSearchInternal(builder, child, childDepth, maxDepth); } } } } }
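// --------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the Roslyn test file above): the attribute shapes these
// tests exercise. Name/Type override the watch-window columns, expression holes like {_name}
// are evaluated against the instance, and the ",nq" specifier suppresses the quotation marks
// around string values. The Bucket type is invented for this example.
// --------------------------------------------------------------------------------------------
using System.Diagnostics;

namespace DebuggerDisplaySamples
{
    [DebuggerDisplay("Count = {_items.Length}", Name = "{_name,nq}", Type = "Bucket")]
    public class Bucket
    {
        private readonly string _name;
        private readonly int[] _items;

        public Bucket(string name, int[] items)
        {
            _name = name;
            _items = items;
        }

        public string Name { get { return _name; } }
        public int Count { get { return _items.Length; } }
    }
}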
using ATL.Logging; using System; using System.IO; using static ATL.AudioData.AudioDataManager; using Commons; using System.Collections.Generic; using System.Text; using static ATL.ChannelsArrangements; namespace ATL.AudioData.IO { /// <summary> /// Class for TwinVQ files manipulation (extension : .VQF) /// </summary> class TwinVQ : MetaDataIO, IAudioDataIO { // Twin VQ header ID private const string TWIN_ID = "TWIN"; private static IDictionary<string, byte> frameMapping; // Mapping between TwinVQ frame codes and ATL frame codes // Private declarations private int sampleRate; private double bitrate; private double duration; private ChannelsArrangement channelsArrangement; private bool isValid; private SizeInfo sizeInfo; private readonly string filePath; public bool Corrupted // True if file corrupted { get { return this.isCorrupted(); } } protected override byte getFrameMapping(string zone, string ID, byte tagVersion) { byte supportedMetaId = 255; // Finds the ATL field identifier according to the ID3v2 version if (frameMapping.ContainsKey(ID)) supportedMetaId = frameMapping[ID]; return supportedMetaId; } // TwinVQ chunk header private class ChunkHeader { public string ID; public uint Size; // Chunk size public void Reset() { Size = 0; } } // File header data - for internal use private class HeaderInfo { // Real structure of TwinVQ file header public char[] ID = new char[4]; // Always "TWIN" public char[] Version = new char[8]; // Version ID public uint Size; // Header size public ChunkHeader Common = new ChunkHeader(); // Common chunk header public uint ChannelMode; // Channel mode: 0 - mono, 1 - stereo public uint BitRate; // Total bit rate public uint SampleRate; // Sample rate (khz) public uint SecurityLevel; // Always 0 } // ---------- INFORMATIVE INTERFACE IMPLEMENTATIONS & MANDATORY OVERRIDES // IAudioDataIO public int SampleRate // Sample rate (hz) { get { return this.sampleRate; } } public bool IsVBR { get { return false; } } public int CodecFamily { get { return AudioDataIOFactory.CF_LOSSY; } } public string FileName { get { return filePath; } } public double BitRate { get { return bitrate; } } public double Duration { get { return duration; } } public ChannelsArrangement ChannelsArrangement { get { return channelsArrangement; } } public bool IsMetaSupported(int metaDataType) { return (metaDataType == MetaDataIOFactory.TAG_NATIVE) || (metaDataType == MetaDataIOFactory.TAG_ID3V1); } // IMetaDataIO protected override int getDefaultTagOffset() { return TO_BUILTIN; } protected override int getImplementedTagType() { return MetaDataIOFactory.TAG_NATIVE; } public override byte FieldCodeFixedLength { get { return 4; } } protected override bool isLittleEndian { get { return false; } } // ---------- CONSTRUCTORS & INITIALIZERS static TwinVQ() { frameMapping = new Dictionary<string, byte> { { "NAME", TagData.TAG_FIELD_TITLE }, { "ALBM", TagData.TAG_FIELD_ALBUM }, { "AUTH", TagData.TAG_FIELD_ARTIST }, { "(c) ", TagData.TAG_FIELD_COPYRIGHT }, { "MUSC", TagData.TAG_FIELD_COMPOSER }, { "CDCT", TagData.TAG_FIELD_CONDUCTOR }, { "TRCK", TagData.TAG_FIELD_TRACK_NUMBER }, // Unofficial; found in sample files { "DATE", TagData.TAG_FIELD_RECORDING_DATE }, // Unofficial; found in sample files { "GENR", TagData.TAG_FIELD_GENRE }, // Unofficial; found in sample files { "COMT", TagData.TAG_FIELD_COMMENT } // TODO - handle integer extension sub-chunks : YEAR, TRAC }; } private void resetData() { duration = 0; bitrate = 0; isValid = false; sampleRate = 0; ResetData(); } public TwinVQ(string filePath) { 
this.filePath = filePath; resetData(); } // ---------- SUPPORT METHODS private static bool readHeader(BinaryReader source, ref HeaderInfo Header) { bool result = true; // Read header and get file size Header.ID = source.ReadChars(4); Header.Version = source.ReadChars(8); Header.Size = StreamUtils.ReverseUInt32(source.ReadUInt32()); Header.Common.ID = Utils.Latin1Encoding.GetString(source.ReadBytes(4)); Header.Common.Size = StreamUtils.ReverseUInt32(source.ReadUInt32()); Header.ChannelMode = StreamUtils.ReverseUInt32(source.ReadUInt32()); Header.BitRate = StreamUtils.ReverseUInt32(source.ReadUInt32()); Header.SampleRate = StreamUtils.ReverseUInt32(source.ReadUInt32()); Header.SecurityLevel = StreamUtils.ReverseUInt32(source.ReadUInt32()); return result; } private static ChannelsArrangement getChannelArrangement(HeaderInfo Header) { switch (Header.ChannelMode) { case 0: return MONO; case 1: return STEREO; default: return new ChannelsArrangement((int)Header.ChannelMode); } } private static uint getBitRate(HeaderInfo Header) { return Header.BitRate; } private int GetSampleRate(HeaderInfo Header) { int result = (int)Header.SampleRate; switch (result) { case 11: result = 11025; break; case 22: result = 22050; break; case 44: result = 44100; break; default: result = (ushort)(result * 1000); break; } return result; } // Get duration from header private double getDuration(HeaderInfo Header) { return Math.Abs(sizeInfo.FileSize - Header.Size - 20) * 1000.0 / 125.0 / (double)Header.BitRate; } private static bool headerEndReached(ChunkHeader Chunk) { // Check for header end return (((byte)(Chunk.ID[0]) < 32) || ((byte)(Chunk.ID[1]) < 32) || ((byte)(Chunk.ID[2]) < 32) || ((byte)(Chunk.ID[3]) < 32) || "DSIZ".Equals(Chunk.ID)); } private bool readTag(BinaryReader source, HeaderInfo Header, ReadTagParams readTagParams) { ChunkHeader chunk = new ChunkHeader(); string data; bool result = false; bool first = true; long tagStart = -1; source.BaseStream.Seek(40, SeekOrigin.Begin); do { // Read chunk header (length : 8 bytes) chunk.ID = Utils.Latin1Encoding.GetString(source.ReadBytes(4)); chunk.Size = StreamUtils.ReverseUInt32(source.ReadUInt32()); // Read chunk data and set tag item if chunk header valid if (headerEndReached(chunk)) break; if (first) { tagStart = source.BaseStream.Position - 8; first = false; } tagExists = true; // If something else than mandatory info is stored, we can consider metadata is present data = Encoding.UTF8.GetString(source.ReadBytes((int)chunk.Size)).Trim(); SetMetaField(chunk.ID, data, readTagParams.ReadAllMetaFrames); result = true; } while (source.BaseStream.Position < source.BaseStream.Length); if (readTagParams.PrepareForWriting) { // Zone goes from the first field after COMM to the last field before DSIZ if (-1 == tagStart) structureHelper.AddZone(source.BaseStream.Position - 8, 0); else structureHelper.AddZone(tagStart, (int)(source.BaseStream.Position - tagStart - 8)); structureHelper.AddSize(12, (uint)Header.Size); } return result; } private bool isCorrupted() { // Check for file corruption return ((isValid) && ((0 == channelsArrangement.NbChannels) || (bitrate < 8000) || (bitrate > 192000) || (sampleRate < 8000) || (sampleRate > 44100) || (duration < 0.1) || (duration > 10000))); } public bool Read(BinaryReader source, AudioDataManager.SizeInfo sizeInfo, MetaDataIO.ReadTagParams readTagParams) { this.sizeInfo = sizeInfo; return read(source, readTagParams); } protected override bool read(BinaryReader source, MetaDataIO.ReadTagParams readTagParams) { HeaderInfo Header = new 
HeaderInfo(); resetData(); source.BaseStream.Seek(sizeInfo.ID3v2Size, SeekOrigin.Begin); bool result = readHeader(source, ref Header); // Process data if loaded and header valid if ((result) && StreamUtils.StringEqualsArr(TWIN_ID, Header.ID)) { isValid = true; // Fill properties with header data channelsArrangement = getChannelArrangement(Header); bitrate = getBitRate(Header); sampleRate = GetSampleRate(Header); duration = getDuration(Header); // Get tag information and fill properties readTag(source, Header, readTagParams); } return result; } protected override int write(TagData tag, BinaryWriter w, string zone) { int result = 0; string recordingYear = ""; IDictionary<byte, string> map = tag.ToMap(); // 1st pass to gather date information foreach (byte frameType in map.Keys) { if (map[frameType].Length > 0) // No frame with empty value { if (TagData.TAG_FIELD_RECORDING_YEAR == frameType) { recordingYear = map[frameType]; } } } if (recordingYear.Length > 0) { string recordingDate = Utils.ProtectValue(tag.RecordingDate); if (0 == recordingDate.Length || !recordingDate.StartsWith(recordingYear)) map[TagData.TAG_FIELD_RECORDING_DATE] = recordingYear; } // Supported textual fields foreach (byte frameType in map.Keys) { foreach (string s in frameMapping.Keys) { if (frameType == frameMapping[s]) { if (map[frameType].Length > 0) // No frame with empty value { string value = formatBeforeWriting(frameType, tag, map); writeTextFrame(w, s, value); result++; } break; } } } // Other textual fields foreach (MetaFieldInfo fieldInfo in tag.AdditionalFields) { if ((fieldInfo.TagType.Equals(MetaDataIOFactory.TAG_ANY) || fieldInfo.TagType.Equals(getImplementedTagType())) && !fieldInfo.MarkedForDeletion && fieldInfo.NativeFieldCode.Length > 0) { writeTextFrame(w, fieldInfo.NativeFieldCode, fieldInfo.Value); result++; } } return result; } private void writeTextFrame(BinaryWriter writer, string frameCode, string text) { writer.Write(Utils.Latin1Encoding.GetBytes(frameCode)); byte[] textBytes = Encoding.UTF8.GetBytes(text); writer.Write(StreamUtils.ReverseUInt32((uint)textBytes.Length)); writer.Write(textBytes); } // Specific implementation for conservation of fields that are required for playback public override bool Remove(BinaryWriter w) { TagData tag = new TagData(); foreach (byte b in frameMapping.Values) { tag.IntegrateValue(b, ""); } string fieldCode; foreach (MetaFieldInfo fieldInfo in GetAdditionalFields()) { fieldCode = fieldInfo.NativeFieldCode.ToLower(); if (!fieldCode.StartsWith("_") && !fieldCode.Equals("DSIZ") && !fieldCode.Equals("COMM")) { MetaFieldInfo emptyFieldInfo = new MetaFieldInfo(fieldInfo); emptyFieldInfo.MarkedForDeletion = true; tag.AdditionalFields.Add(emptyFieldInfo); } } BinaryReader r = new BinaryReader(w.BaseStream); return Write(r, w, tag); } } }
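// --------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the ATL source above): TwinVQ stores a 4-character ASCII
// chunk ID followed by a big-endian 32-bit size, while BinaryReader reads little-endian.
// The standalone helper below shows the byte swap that StreamUtils.ReverseUInt32 is used for
// in readHeader/readTag; the names and types here are invented for the example.
// --------------------------------------------------------------------------------------------
using System.IO;
using System.Text;

namespace TwinVQSamples
{
    public static class ChunkReaderSketch
    {
        // Reads an (ID, size) chunk header from the current stream position.
        public static void ReadChunkHeader(BinaryReader source, out string id, out uint size)
        {
            id = Encoding.ASCII.GetString(source.ReadBytes(4));
            size = SwapUInt32(source.ReadUInt32()); // size is stored big-endian on disk
        }

        // Reverses the byte order of a 32-bit value (big-endian <-> little-endian).
        public static uint SwapUInt32(uint value)
        {
            return ((value & 0x000000FFu) << 24)
                 | ((value & 0x0000FF00u) << 8)
                 | ((value & 0x00FF0000u) >> 8)
                 | ((value & 0xFF000000u) >> 24);
        }
    }
}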
namespace PokerTell.Statistics.Analyzation { using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using log4net; using PokerTell.Infrastructure.Enumerations.PokerHand; using PokerTell.Infrastructure.Interfaces.PokerHand; using PokerTell.Statistics.Interfaces; using Tools; /// <summary> /// Uses the information of the given analyzation preparer to further inspect the Sequence in order /// to determine if and by how much the opponent raised, how the hero reacted, whether the situation is standard, /// and whether a valid reaction was found at all. /// </summary> public class RaiseReactionAnalyzer : IRaiseReactionAnalyzer { static readonly ILog Log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); IReactionAnalyzationPreparer _analyzationPreparer; IConvertedPokerActionWithId _heroReaction; bool _considerOpponentsRaiseSize; public IAnalyzablePokerPlayer AnalyzablePokerPlayer { get; set; } /// <summary> /// Indicates how the hero reacted to a raise by the opponent (e.g. fold) /// </summary> public ActionTypes HeroReactionType { get { return _heroReaction.What; } } /// <summary> /// The situation is considered standard if no more raises occurred after the opponent's original raise, or if /// the hero's reaction occurred before these additional raises and thus they didn't influence his decision. /// </summary> public bool IsStandardSituation { get; protected set; } /// <summary> /// Only true if an opponent's raise after the hero's action and the hero's reaction to this /// raise were both found. /// </summary> public bool IsValidResult { get; protected set; } /// <summary> /// Indicates the ratio of the opponent's raise to which the hero reacted /// </summary> public int ConsideredRaiseSize { get; private set; } public double[] RaiseSizeKeys { get; protected set; } public override string ToString() { return string.Format( "ConsideredRaiseSize: {0}, IsStandardSituation: {1}, IsValidResult: {2}, HeroReactionType: {3}", ConsideredRaiseSize, IsStandardSituation, IsValidResult, _heroReaction); } /// <summary> /// Analyzes the data given by the analyzation preparer. /// </summary> /// <param name="analyzablePokerPlayer"> /// Player whose data is examined /// </param> /// <param name="analyzationPreparer"> /// Provides StartingIndex (Hero's original action) and Sequence /// </param> /// <param name="considerOpponentsRaiseSize">If true, the opponent's (re)raise size is determined and the considered raise size is set to it. /// So far this is only done for PostFlopHero acts. 
In all other cases set it to false and the raise size of the hero is used.</param> /// <param name="raiseSizeKeys"> /// Raise sizes to which the Opponent Raise size should be normalized to /// </param> public IRaiseReactionAnalyzer AnalyzeUsingDataFrom( IAnalyzablePokerPlayer analyzablePokerPlayer, IReactionAnalyzationPreparer analyzationPreparer, bool considerOpponentsRaiseSize, double[] raiseSizeKeys) { _considerOpponentsRaiseSize = considerOpponentsRaiseSize; AnalyzablePokerPlayer = analyzablePokerPlayer; _analyzationPreparer = analyzationPreparer; RaiseSizeKeys = raiseSizeKeys; try { IsValidResult = AnalyzeReaction(); } catch (Exception excep) { Log.Error(ToString(), excep); IsValidResult = false; } return this; } bool AdditionalRaisesHappenedAfterHerosReactionToFirstRaise( IList<IConvertedPokerActionWithId> remainingActions, IEnumerable<IConvertedPokerActionWithId> foundRaises) { int herosReactionIndex = remainingActions.IndexOf(_heroReaction); int count = 0; foreach ( IConvertedPokerActionWithId _ in foundRaises.Where(raise => remainingActions.IndexOf(raise) < herosReactionIndex)) { count++; if (count > 1) { return false; } } return true; } bool AnalyzeReaction() { IEnumerable<IConvertedPokerActionWithId> foundRaises = SetConsideredRaiseSize(); if (foundRaises.Count() == 0) { return false; } List<IConvertedPokerActionWithId> actionsAfterHeroRaise = GetActionsAfterHeroRaise(); SetHerosReaction(actionsAfterHeroRaise); if (_heroReaction == null || !ActionTypesUtility.Reactions.Contains(HeroReactionType)) { return false; } DetermineIfSituationIsStandard(actionsAfterHeroRaise, foundRaises); return true; } void DetermineIfSituationIsStandard( IList<IConvertedPokerActionWithId> actionsAfterHeroRaise, IEnumerable<IConvertedPokerActionWithId> foundRaises) { IsStandardSituation = foundRaises.Count() == 1 || AdditionalRaisesHappenedAfterHerosReactionToFirstRaise( actionsAfterHeroRaise, foundRaises); } List<IConvertedPokerActionWithId> GetActionsAfterHeroRaise() { var actionsAfterHeroRaise = new List<IConvertedPokerActionWithId>(); for (int i = _analyzationPreparer.StartingActionIndex + 1; i < _analyzationPreparer.Sequence.Count; i++) { actionsAfterHeroRaise.Add((IConvertedPokerActionWithId)_analyzationPreparer.Sequence[i]); } return actionsAfterHeroRaise; } void SetHerosReaction(IEnumerable<IConvertedPokerActionWithId> actionsAfterHeroRaise) { IEnumerable<IConvertedPokerActionWithId> herosReaction = from IConvertedPokerActionWithId action in actionsAfterHeroRaise where action.Id.Equals(_analyzationPreparer.HeroPosition) select action; _heroReaction = herosReaction.Count() > 0 ? 
herosReaction.First() : null; } IEnumerable<IConvertedPokerActionWithId> SetConsideredRaiseSize() { const ActionTypes actionToLookFor = ActionTypes.R; var actionsAfterHerosInitialAction = _analyzationPreparer.Sequence.Actions.Skip(_analyzationPreparer.StartingActionIndex); IEnumerable<IConvertedPokerActionWithId> opponentsRaises = from IConvertedPokerActionWithId action in actionsAfterHerosInitialAction where action.What.Equals(actionToLookFor) && !action.Id.Equals(_analyzationPreparer.HeroPosition) select action; if (_considerOpponentsRaiseSize && opponentsRaises.Count() > 0) { ConsideredRaiseSize = (int)Normalizer.NormalizeToKeyValues(RaiseSizeKeys, opponentsRaises.First().Ratio); } else if (!_considerOpponentsRaiseSize) { IEnumerable<IConvertedPokerActionWithId> herosRaises = from IConvertedPokerActionWithId action in actionsAfterHerosInitialAction where action.What.Equals(actionToLookFor) && action.Id.Equals(_analyzationPreparer.HeroPosition) select action; if (herosRaises.Count() > 0) { ConsideredRaiseSize = (int)Normalizer.NormalizeToKeyValues(RaiseSizeKeys, herosRaises.First().Ratio); } else { // Returning empty list will let caller know that the appropriate reaction was not found return herosRaises; } } return opponentsRaises; } } }
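// --------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the PokerTell source above): SetConsideredRaiseSize maps an
// observed raise ratio onto one of the configured RaiseSizeKeys via
// Normalizer.NormalizeToKeyValues, which lives in the Tools library. The nearest-key mapping
// below is only one plausible reading of that call, written for illustration; the exact rule
// may differ.
// --------------------------------------------------------------------------------------------
using System;
using System.Linq;

namespace PokerTellSamples
{
    public static class RaiseSizeNormalizationSketch
    {
        // Returns the key value closest to the observed raise ratio,
        // e.g. keys { 2, 3, 5, 9 } and ratio 4.2 give 5.
        public static double NormalizeToNearestKey(double[] keys, double ratio)
        {
            if (keys == null || keys.Length == 0)
                throw new ArgumentException("At least one key is required.", "keys");

            return keys.OrderBy(key => Math.Abs(key - ratio)).First();
        }
    }
}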
using Lucene.Net.Documents; using Lucene.Net.Index; using Lucene.Net.Index.Extensions; using NUnit.Framework; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Search.Spans { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using Directory = Lucene.Net.Store.Directory; using Document = Documents.Document; using Field = Field; using IndexReader = Lucene.Net.Index.IndexReader; using IndexReaderContext = Lucene.Net.Index.IndexReaderContext; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; using Term = Lucene.Net.Index.Term; [TestFixture] public class TestNearSpansOrdered : LuceneTestCase { protected internal IndexSearcher searcher; protected internal Directory directory; protected internal IndexReader reader; public const string FIELD = "field"; [TearDown] public override void TearDown() { reader.Dispose(); directory.Dispose(); base.TearDown(); } [SetUp] public override void SetUp() { base.SetUp(); directory = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy())); for (int i = 0; i < docFields.Length; i++) { Document doc = new Document(); doc.Add(NewTextField(FIELD, docFields[i], Field.Store.NO)); writer.AddDocument(doc); } reader = writer.GetReader(); writer.Dispose(); searcher = NewSearcher(reader); } protected internal string[] docFields = new string[] { "w1 w2 w3 w4 w5", "w1 w3 w2 w3 zz", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3 zz" }; protected internal virtual SpanNearQuery MakeQuery(string s1, string s2, string s3, int slop, bool inOrder) { return new SpanNearQuery(new SpanQuery[] { new SpanTermQuery(new Term(FIELD, s1)), new SpanTermQuery(new Term(FIELD, s2)), new SpanTermQuery(new Term(FIELD, s3)) }, slop, inOrder); } protected internal virtual SpanNearQuery MakeQuery() { return MakeQuery("w1", "w2", "w3", 1, true); } [Test] public virtual void TestSpanNearQuery() { SpanNearQuery q = MakeQuery(); CheckHits.DoCheckHits( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, q, FIELD, searcher, new int[] { 0, 1 }); } public virtual string s(Spans span) { return s(span.Doc, span.Start, span.End); } public virtual string s(int doc, int start, int end) { return "s(" + doc + "," + start + "," + end + ")"; } [Test] public virtual void TestNearSpansNext() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.MoveNext()); Assert.AreEqual(s(0, 0, 3), s(span)); Assert.AreEqual(true, span.MoveNext()); Assert.AreEqual(s(1, 0, 4), s(span)); Assert.AreEqual(false, 
span.MoveNext()); } /// <summary> /// test does not imply that skipTo(doc+1) should work exactly the /// same as next -- it's only applicable in this case since we know doc /// does not contain more than one span /// </summary> [Test] public virtual void TestNearSpansSkipToLikeNext() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.SkipTo(0)); Assert.AreEqual(s(0, 0, 3), s(span)); Assert.AreEqual(true, span.SkipTo(1)); Assert.AreEqual(s(1, 0, 4), s(span)); Assert.AreEqual(false, span.SkipTo(2)); } [Test] public virtual void TestNearSpansNextThenSkipTo() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.MoveNext()); Assert.AreEqual(s(0, 0, 3), s(span)); Assert.AreEqual(true, span.SkipTo(1)); Assert.AreEqual(s(1, 0, 4), s(span)); Assert.AreEqual(false, span.MoveNext()); } [Test] public virtual void TestNearSpansNextThenSkipPast() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.MoveNext()); Assert.AreEqual(s(0, 0, 3), s(span)); Assert.AreEqual(false, span.SkipTo(2)); } [Test] public virtual void TestNearSpansSkipPast() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(false, span.SkipTo(2)); } [Test] public virtual void TestNearSpansSkipTo0() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.SkipTo(0)); Assert.AreEqual(s(0, 0, 3), s(span)); } [Test] public virtual void TestNearSpansSkipTo1() { SpanNearQuery q = MakeQuery(); Spans span = MultiSpansWrapper.Wrap(searcher.TopReaderContext, q); Assert.AreEqual(true, span.SkipTo(1)); Assert.AreEqual(s(1, 0, 4), s(span)); } /// <summary> /// not a direct test of NearSpans, but a demonstration of how/when /// this causes problems /// </summary> [Test] public virtual void TestSpanNearScorerSkipTo1() { SpanNearQuery q = MakeQuery(); Weight w = searcher.CreateNormalizedWeight(q); IndexReaderContext topReaderContext = searcher.TopReaderContext; AtomicReaderContext leave = topReaderContext.Leaves[0]; Scorer s = w.GetScorer(leave, ((AtomicReader)leave.Reader).LiveDocs); Assert.AreEqual(1, s.Advance(1)); } /// <summary> /// not a direct test of NearSpans, but a demonstration of how/when /// this causes problems /// </summary> [Test] public virtual void TestSpanNearScorerExplain() { SpanNearQuery q = MakeQuery(); Explanation e = searcher.Explain(q, 1); Assert.IsTrue(0.0f < e.Value, "Scorer explanation value for doc#1 isn't positive: " + e.ToString()); } } }
//----------------------------------------------------------------------------- // Copyright (c) Microsoft Corporation. All rights reserved. //----------------------------------------------------------------------------- // // Presharp uses the c# pragma mechanism to suppress its warnings. // These are not recognised by the base compiler so we need to explicitly // disable the following warnings. See http://winweb/cse/Tools/PREsharp/userguide/default.asp // for details. // #pragma warning disable 1634, 1691 // unknown message, unknown pragma namespace System.IdentityModel.Selectors { using System; using System.IO; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Runtime.InteropServices; using System.IdentityModel.Claims; using System.Text; using System.Xml; using System.IdentityModel.Tokens; using System.ServiceProcess; using System.Globalization; using System.Runtime.ConstrainedExecution; using System.Runtime.CompilerServices; using Microsoft.InfoCards.Diagnostics; using Microsoft.Win32; using IDT = Microsoft.InfoCards.Diagnostics.InfoCardTrace; // // For common & resources // using Microsoft.InfoCards; // // Summary // This structure is the native version of the GenericXmlSecurityToken // // Remark // When adding new fields to this structure, add pointers to the end to make // sure that alignment is done correctly // [StructLayout(LayoutKind.Sequential)] struct RpcGenericXmlToken { public Int64 createDate; // Date the token was created on public Int64 expiryDate; // Date the token will expire on [MarshalAs(UnmanagedType.LPWStr)] public string xmlToken; // Token [MarshalAs(UnmanagedType.LPWStr)] public string internalTokenReference; // Internal Token reference [MarshalAs(UnmanagedType.LPWStr)] public string externalTokenReference; // External Token reference } // // Summary // This class implements the client API for the Infocard system // public static class CardSpaceSelector { static CardSpaceShim s_cardSpaceShim = new CardSpaceShim(); // // The default quotas we apply to incoming xml messages // private static XmlDictionaryReaderQuotas DefaultQuotas = new XmlDictionaryReaderQuotas(); // // Used by infocard.exe as well. // internal const int MaxPolicyChainLength = 50; static CardSpaceSelector() { // // Quotas for xml readers // DefaultQuotas.MaxDepth = 32; // max depth of elements DefaultQuotas.MaxStringContentLength = 8192; // maximum string read DefaultQuotas.MaxArrayLength = 20 * 1024 * 1024; // maximum byte array DefaultQuotas.MaxBytesPerRead = 4096; // max start element tag DefaultQuotas.MaxNameTableCharCount = 16384; // max size of name table } // Summary // Request a security token from the infocard system // // Parameters // endpoint - The token recipient end point. // policy - Policy stating the requirements for the token. // requiredRemoteTokenIssuer - The returned token should be issued by this // specific issuer. 
// public static GenericXmlSecurityToken GetToken(XmlElement endpoint, IEnumerable<XmlElement> policy, XmlElement requiredRemoteTokenIssuer, SecurityTokenSerializer tokenSerializer) { if (null == endpoint) { throw IDT.ThrowHelperArgumentNull("endpoint"); } if (null == policy) { throw IDT.ThrowHelperArgumentNull("policy"); } if (null == tokenSerializer) { throw IDT.ThrowHelperArgumentNull("tokenSerializer"); } Collection<XmlElement> policyCollection = new Collection<XmlElement>(); foreach (XmlElement element in policy) { policyCollection.Add(element); } return GetToken(new CardSpacePolicyElement[] { new CardSpacePolicyElement(endpoint, requiredRemoteTokenIssuer, policyCollection, null, 0, false) }, tokenSerializer); } // Summary // Request a security token from the infocard system // // Parameters // policyChain - an array of PolicyElements that describe the federated security chain that the client // needs a final token to unwind. // public static GenericXmlSecurityToken GetToken(CardSpacePolicyElement[] policyChain, SecurityTokenSerializer tokenSerializer) { if (null == policyChain || 0 == policyChain.Length) { throw IDT.ThrowHelperArgumentNull("policyChain"); } if (null == tokenSerializer) { throw IDT.ThrowHelperArgumentNull("tokenSerializer"); } IDT.TraceDebug("ICARDCLIENT: GetToken called with a policy chain of length {0}", policyChain.Length); InfoCardProofToken proofToken = null; InternalRefCountedHandle nativeCryptoHandle = null; GenericXmlSecurityToken token = null; RpcGenericXmlToken infocardToken = new RpcGenericXmlToken(); SafeTokenHandle nativeToken = null; Int32 result = 0; try { RuntimeHelpers.PrepareConstrainedRegions(); bool mustRelease = false; try { } finally { // // The PolicyChain class will do the marshalling and native buffer management for us. 
// try { using (PolicyChain tmpChain = new PolicyChain(policyChain)) { IDT.TraceDebug("ICARDCLIENT: PInvoking the native GetToken call"); result = GetShim().m_csShimGetToken( tmpChain.Length, tmpChain.DoMarshal(), out nativeToken, out nativeCryptoHandle); } if (0 == result) { IDT.TraceDebug("ICARDCLIENT: The PInvoke of GetToken succeeded"); nativeToken.DangerousAddRef(ref mustRelease); infocardToken = (RpcGenericXmlToken)Marshal.PtrToStructure( nativeToken.DangerousGetHandle(), typeof(RpcGenericXmlToken)); } } finally { if (mustRelease) { nativeToken.DangerousRelease(); } } } if (0 == result) { using (ProofTokenCryptoHandle crypto = (ProofTokenCryptoHandle)CryptoHandle.Create(nativeCryptoHandle)) { proofToken = crypto.CreateProofToken(); } XmlDocument xmlDoc = new XmlDocument(); xmlDoc.LoadXml(infocardToken.xmlToken); SecurityKeyIdentifierClause internalTokenReference = null; if (null != infocardToken.internalTokenReference) { internalTokenReference = tokenSerializer.ReadKeyIdentifierClause( CreateReaderWithQuotas(infocardToken.internalTokenReference)); } SecurityKeyIdentifierClause externalTokenReference = null; if (null != infocardToken.externalTokenReference) { externalTokenReference = tokenSerializer.ReadKeyIdentifierClause( CreateReaderWithQuotas(infocardToken.externalTokenReference)); } IDT.TraceDebug("ICARDCLIENT: Constructing a new GenericXmlSecurityToken"); token = new GenericXmlSecurityToken( xmlDoc.DocumentElement, proofToken, DateTime.FromFileTimeUtc(infocardToken.createDate), DateTime.FromFileTimeUtc(infocardToken.expiryDate), internalTokenReference, externalTokenReference, null); } else { IDT.TraceDebug("ICARDCLIENT: The PInvoke of GetToken failed with a return code of {0}", result); // // Convert the HRESULTS to exceptions // ExceptionHelper.ThrowIfCardSpaceException((int)result); throw IDT.ThrowHelperError(new CardSpaceException(SR.GetString(SR.ClientAPIInfocardError))); } } catch { if (null != nativeCryptoHandle) { nativeCryptoHandle.Dispose(); } if (null != proofToken) { proofToken.Dispose(); } throw; } finally { if (null != nativeToken) { nativeToken.Dispose(); } } return token; } // // Summary // Start the management user interface // public static void Manage() { Int32 result = CardSpaceSelector.GetShim().m_csShimManageCardSpace(); // // Convert HRESULTS to errors // if (0 != result) { // // Convert the HRESULTS to exceptions // ExceptionHelper.ThrowIfCardSpaceException((int)result); throw IDT.ThrowHelperError(new CardSpaceException(SR.GetString(SR.ClientAPIInfocardError))); } } // // Summary // Start the import card user interface // public static void Import(string fileName) { if (String.IsNullOrEmpty(fileName)) { throw IDT.ThrowHelperArgumentNull("fileName"); } IDT.TraceDebug("Import Infocard has been called"); Int32 result = CardSpaceSelector.GetShim().m_csShimImportInformationCard(fileName); // // Convert HRESULTS to errors // if (0 != result) { // // Convert the HRESULTS to exceptions // ExceptionHelper.ThrowIfCardSpaceException((int)result); throw IDT.ThrowHelperError(new CardSpaceException(SR.GetString(SR.ClientAPIInfocardError))); } } internal static CardSpaceShim GetShim() { s_cardSpaceShim.InitializeIfNecessary(); return s_cardSpaceShim; } // // Summary // Convert the XML data to a string // // Parameter // xml - The xml data to be converted into a string // // Returns // A string format of the XML // internal static string XmlToString(IEnumerable<XmlElement> xml) { StringBuilder builder = new StringBuilder(); foreach (XmlElement element in xml) { if (null 
== element) { throw IDT.ThrowHelperError(new ArgumentException(SR.GetString(SR.ClientAPIInvalidPolicy))); } builder.Append(element.OuterXml); } return builder.ToString(); } private static XmlDictionaryReader CreateReaderWithQuotas(string root) { UTF8Encoding utf8 = new UTF8Encoding(); byte[] rootbytes = utf8.GetBytes(root); return XmlDictionaryReader.CreateTextReader( rootbytes, 0, rootbytes.GetLength(0), null, DefaultQuotas, null); } } }
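//
// The GetToken overloads above are easiest to understand with a caller in view.
// The stand-alone sketch below shows how a client might request a token for a
// single endpoint; it is illustrative only. The WS-Addressing/WS-SecurityPolicy
// XmlElements ("endpoint", "policyElements", "issuer") are assumed to come from
// the caller's own channel configuration, and the use of
// WSSecurityTokenSerializer.DefaultInstance as the SecurityTokenSerializer is an
// assumption for the example, not something mandated by CardSpaceSelector.
//
namespace Microsoft.InfoCards.Samples
{
    using System;
    using System.Collections.Generic;
    using System.IdentityModel.Selectors;
    using System.IdentityModel.Tokens;
    using System.ServiceModel.Security;
    using System.Xml;

    internal static class CardSpaceSelectorUsageSketch
    {
        // Requests a token from the CardSpace selector and returns it, or null
        // if the selector reported a CardSpace error.
        internal static GenericXmlSecurityToken TryGetToken(
            XmlElement endpoint,
            IEnumerable<XmlElement> policyElements,
            XmlElement issuer)
        {
            try
            {
                return CardSpaceSelector.GetToken(
                    endpoint,
                    policyElements,
                    issuer,
                    WSSecurityTokenSerializer.DefaultInstance);
            }
            catch (CardSpaceException)
            {
                // The selector turned a failure HRESULT into a CardSpaceException
                // (see ExceptionHelper.ThrowIfCardSpaceException above).
                return null;
            }
        }
    }
}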
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.ComponentModel;
using DotSpatial.Serialization;

namespace DotSpatial.Symbology
{
    /// <summary>
    /// Category.
    /// </summary>
    public class Category : LegendItem
    {
        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="Category"/> class.
        /// </summary>
        public Category()
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Category"/> class and tailors the range to the specified values.
        /// </summary>
        /// <param name="startValue">The start value.</param>
        /// <param name="endValue">The end value.</param>
        public Category(double? startValue, double? endValue)
        {
            Range = new Range(startValue, endValue);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Category"/> class that has the same value for both minimum and maximum.
        /// </summary>
        /// <param name="value">The value to use.</param>
        public Category(double value)
        {
            Range = new Range(value);
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the maximum value for this category using the scheme field. This is a convenient caching tool only, and doesn't control the filter expression at all.
        /// Use ApplyMinMax after setting this to update the filter expression.
        /// </summary>
        [Description("Gets or sets the maximum value for this category using the scheme field.")]
        public double? Maximum
        {
            get
            {
                return Range?.Maximum;
            }

            set
            {
                if (Range == null)
                {
                    Range = new Range(null, value);
                    return;
                }

                Range.Maximum = value;
            }
        }

        /// <summary>
        /// Gets or sets the minimum value for this category using the scheme field. This is a convenient caching tool only, and doesn't control the filter expression at all.
        /// Use ApplyMinMax after setting this to update the filter expression.
        /// </summary>
        [Description("Gets or sets a minimum value for this category using the scheme field.")]
        public double? Minimum
        {
            get
            {
                return Range?.Minimum;
            }

            set
            {
                if (Range == null)
                {
                    Range = new Range(value, null);
                    return;
                }

                Range.Minimum = value;
            }
        }

        /// <summary>
        /// Gets or sets the numeric range for this color break.
        /// </summary>
        [Serialize("Range")]
        public Range Range { get; set; }

        /// <summary>
        /// Gets or sets a status message for this category.
        /// </summary>
        public string Status { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether items of this category get selected by the parent layers Select methods.
        /// If the legend is used for selection this gets set if either the whole layer is selected or this category.
        /// If the legend is not used for selection this has to be set by code to be able to select only features of this category.
        /// By default selection is allowed.
        /// </summary>
        public bool SelectionEnabled { get; set; } = true;

        /// <summary>
        /// Gets or sets the tag. This is not used by DotSpatial, but is provided for convenient linking for this object
        /// in plugins or other applications.
        /// </summary>
        public object Tag { get; set; }

        #endregion

        #region Methods

        /// <summary>
        /// Since rasters are numeric and don't rely on an SQL expression, this only sets the legend text,
        /// using the method and digits to help with formatting.
/// </summary> /// <param name="settings">An EditorSettings from either a feature scheme or color scheme.</param> public virtual void ApplyMinMax(EditorSettings settings) { LegendText = Range.ToString(settings.IntervalSnapMethod, settings.IntervalRoundingDigits); } /// <summary> /// Applies the snapping rule directly to the categories, instead of the breaks. /// </summary> /// <param name="method">Snapping method that should be applied.</param> /// <param name="numDigits">Number of significant digits.</param> /// <param name="values">Values to get the min and max from when using IntervalSnapMethod.DataValue as method.</param> public void ApplySnapping(IntervalSnapMethod method, int numDigits, List<double> values) { switch (method) { case IntervalSnapMethod.None: break; case IntervalSnapMethod.SignificantFigures: if (Maximum != null) { Maximum = Utils.SigFig(Maximum.Value, numDigits); } if (Minimum != null) { Minimum = Utils.SigFig(Minimum.Value, numDigits); } break; case IntervalSnapMethod.Rounding: if (Maximum != null) { Maximum = Math.Round((double)Maximum, numDigits); } if (Minimum != null) { Minimum = Math.Round((double)Minimum, numDigits); } break; case IntervalSnapMethod.DataValue: if (Maximum != null) { Maximum = Utils.GetNearestValue((double)Maximum, values); } if (Minimum != null) { Minimum = Utils.GetNearestValue((double)Minimum, values); } break; } } /// <summary> /// Tests to see if the specified value falls in the range specified by this ColorCategory. /// </summary> /// <param name="value">The value of type int to test.</param> /// <returns>Boolean, true if the value was found in the range.</returns> public bool Contains(double value) { return Range == null || Range.Contains(value); } /// <summary> /// Returns this Number as a string. This uses the DotSpatial.Globalization.CulturePreferences and /// Controls the number type using the NumberFormat enumeration plus the DecimalCount to create /// a number format. /// </summary> /// <returns>The string created using the specified number format and precision.</returns> public override string ToString() { return Range.ToString(); } /// <summary> /// Returns this Number as a string. /// </summary> /// <param name="method">Specifies how the numbers are modified so that the numeric text can be cleaned up.</param> /// <param name="digits">An integer clarifying digits for rounding or significant figure situations.</param> /// <returns>A string with the formatted number.</returns> public virtual string ToString(IntervalSnapMethod method, int digits) { return Range.ToString(method, digits); } #endregion } }
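//
// A brief, illustrative sketch of how the Category members above interact:
// a range is set through the constructor, snapped with ApplySnapping, and then
// queried with Contains. The concrete values and the choice of
// IntervalSnapMethod.Rounding are assumptions for the example only.
//
namespace DotSpatial.Symbology.Examples
{
    using System.Collections.Generic;

    internal static class CategoryUsageSketch
    {
        internal static bool Demo()
        {
            // Category covering roughly 0.1234 .. 9.8765.
            var category = new Category(0.1234, 9.8765);

            // Round both ends of the range to two digits (0.12 .. 9.88).
            // The values list is only consulted for IntervalSnapMethod.DataValue.
            category.ApplySnapping(IntervalSnapMethod.Rounding, 2, new List<double>());

            // Contains() delegates to the underlying Range.
            return category.Contains(5.0) && !category.Contains(42.0);
        }
    }
}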
using System; using System.Collections; using System.ComponentModel; using System.Drawing; using System.IO; using System.Data; using System.Windows.Forms; using Epi; using Epi.Windows; using Epi.Windows.Dialogs; using System.Reflection; using Epi.Windows.Menu.Dialogs; namespace Epi.Windows.Menu { partial class MenuMainForm { private System.Windows.Forms.MenuStrip mnuMainMenu; //private System.Windows.Forms.ToolStripMenuItem mniMenuItem; //private System.Windows.Forms.ToolStripMenuItem mniSubmenuItem; private System.Windows.Forms.ImageList imlImageList; //private System.Windows.Forms.PictureBox pbxPictureBox; private System.Windows.Forms.PictureBox pbxBackground; //private System.Windows.Forms.StatusBar sbStatusBar; //private System.Windows.Forms.StatusBarPanel sbpStatusBarPanel; //private System.Windows.Forms.ToolBar tbrToolBar; //private System.Windows.Forms.ToolBarButton tbbToolBarButton; //private System.Windows.Forms.ListBox lbxListBox; private System.Windows.Forms.ToolTip tltToolTip; private System.Windows.Forms.Button btnMakeView; private System.Windows.Forms.Button btnAnalyze; private System.Windows.Forms.Button btnEnterData; private System.Windows.Forms.Button btnExit; private System.Windows.Forms.Button btnCreateMaps; private System.Windows.Forms.Button btnCreateReports; private System.Windows.Forms.Button btnWebsite; private System.ComponentModel.IContainer components = null; #region Designer generated code /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose(bool disposing) { if (disposing) { if (components != null) { components.Dispose(); } } base.Dispose(disposing); } /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// </summary> private void InitializeComponent() { this.components = new System.ComponentModel.Container(); System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(MenuMainForm)); this.pbxBackground = new System.Windows.Forms.PictureBox(); this.btnWebsite = new System.Windows.Forms.Button(); this.btnCreateReports = new System.Windows.Forms.Button(); this.btnCreateMaps = new System.Windows.Forms.Button(); this.btnExit = new System.Windows.Forms.Button(); this.btnEnterData = new System.Windows.Forms.Button(); this.btnAnalyze = new System.Windows.Forms.Button(); this.btnMakeView = new System.Windows.Forms.Button(); this.tltToolTip = new System.Windows.Forms.ToolTip(this.components); this.imlImageList = new System.Windows.Forms.ImageList(this.components); this.mnuMainMenu = new System.Windows.Forms.MenuStrip(); this.fileToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.exitToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.viewToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.toolStripMenuItemStatusBar = new System.Windows.Forms.ToolStripMenuItem(); this.epiInfoLogsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.toolsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.makeViewToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.enterDataToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.analyzeDataToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.classicToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.dashboardToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.createMapsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator(); this.optionsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.statCalcToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.sampleSizeAndPowerToolStripMenuItem1 = new System.Windows.Forms.ToolStripMenuItem(); this.populationSurveyToolStripMenuItem1 = new System.Windows.Forms.ToolStripMenuItem(); this.cohortOrCrossSectionalToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.unmatchedCasecontrolToolStripMenuItem1 = new System.Windows.Forms.ToolStripMenuItem(); this.chiSquareForTrendToolStripMenuItem1 = new System.Windows.Forms.ToolStripMenuItem(); this.tables2X22XNToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.mnuPoisson = new System.Windows.Forms.ToolStripMenuItem(); this.mnuBinomial = new System.Windows.Forms.ToolStripMenuItem(); this.mnuMatchedPairCaseControl = new System.Windows.Forms.ToolStripMenuItem(); this.helpToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.contentsToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.videosToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.communityMessageBoardToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.contactHelpdeskToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.otherEpiResourcesToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.activEpicomToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.openEpicomToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.toolStripSeparator2 = new System.Windows.Forms.ToolStripSeparator(); 
this.aboutEpiInfoToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem(); this.groupBox1 = new System.Windows.Forms.GroupBox(); this.btnDashboard = new System.Windows.Forms.Button(); ((System.ComponentModel.ISupportInitialize)(this.pbxBackground)).BeginInit(); this.mnuMainMenu.SuspendLayout(); this.groupBox1.SuspendLayout(); this.SuspendLayout(); // // baseImageList // this.baseImageList.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("baseImageList.ImageStream"))); this.baseImageList.Images.SetKeyName(0, ""); this.baseImageList.Images.SetKeyName(1, ""); this.baseImageList.Images.SetKeyName(2, ""); this.baseImageList.Images.SetKeyName(3, ""); this.baseImageList.Images.SetKeyName(4, ""); this.baseImageList.Images.SetKeyName(5, ""); this.baseImageList.Images.SetKeyName(6, ""); this.baseImageList.Images.SetKeyName(7, ""); this.baseImageList.Images.SetKeyName(8, ""); this.baseImageList.Images.SetKeyName(9, ""); this.baseImageList.Images.SetKeyName(10, ""); this.baseImageList.Images.SetKeyName(11, ""); this.baseImageList.Images.SetKeyName(12, ""); this.baseImageList.Images.SetKeyName(13, ""); this.baseImageList.Images.SetKeyName(14, ""); this.baseImageList.Images.SetKeyName(15, ""); this.baseImageList.Images.SetKeyName(16, ""); this.baseImageList.Images.SetKeyName(17, ""); this.baseImageList.Images.SetKeyName(18, ""); this.baseImageList.Images.SetKeyName(19, ""); this.baseImageList.Images.SetKeyName(20, ""); this.baseImageList.Images.SetKeyName(21, ""); this.baseImageList.Images.SetKeyName(22, ""); this.baseImageList.Images.SetKeyName(23, ""); this.baseImageList.Images.SetKeyName(24, ""); this.baseImageList.Images.SetKeyName(25, ""); this.baseImageList.Images.SetKeyName(26, ""); this.baseImageList.Images.SetKeyName(27, ""); this.baseImageList.Images.SetKeyName(28, ""); this.baseImageList.Images.SetKeyName(29, ""); this.baseImageList.Images.SetKeyName(30, ""); this.baseImageList.Images.SetKeyName(31, ""); this.baseImageList.Images.SetKeyName(32, ""); this.baseImageList.Images.SetKeyName(33, ""); this.baseImageList.Images.SetKeyName(34, ""); this.baseImageList.Images.SetKeyName(35, ""); this.baseImageList.Images.SetKeyName(36, ""); this.baseImageList.Images.SetKeyName(37, ""); this.baseImageList.Images.SetKeyName(38, ""); this.baseImageList.Images.SetKeyName(39, ""); this.baseImageList.Images.SetKeyName(40, ""); this.baseImageList.Images.SetKeyName(41, ""); this.baseImageList.Images.SetKeyName(42, ""); this.baseImageList.Images.SetKeyName(43, ""); this.baseImageList.Images.SetKeyName(44, ""); this.baseImageList.Images.SetKeyName(45, ""); this.baseImageList.Images.SetKeyName(46, ""); this.baseImageList.Images.SetKeyName(47, ""); this.baseImageList.Images.SetKeyName(48, ""); this.baseImageList.Images.SetKeyName(49, ""); this.baseImageList.Images.SetKeyName(50, ""); this.baseImageList.Images.SetKeyName(51, ""); this.baseImageList.Images.SetKeyName(52, ""); this.baseImageList.Images.SetKeyName(53, ""); this.baseImageList.Images.SetKeyName(54, ""); this.baseImageList.Images.SetKeyName(55, ""); this.baseImageList.Images.SetKeyName(56, ""); this.baseImageList.Images.SetKeyName(57, ""); this.baseImageList.Images.SetKeyName(58, ""); this.baseImageList.Images.SetKeyName(59, ""); this.baseImageList.Images.SetKeyName(60, ""); this.baseImageList.Images.SetKeyName(61, ""); this.baseImageList.Images.SetKeyName(62, ""); this.baseImageList.Images.SetKeyName(63, ""); this.baseImageList.Images.SetKeyName(64, ""); this.baseImageList.Images.SetKeyName(65, ""); 
this.baseImageList.Images.SetKeyName(66, ""); this.baseImageList.Images.SetKeyName(67, ""); // // pbxBackground // this.pbxBackground.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D; resources.ApplyResources(this.pbxBackground, "pbxBackground"); this.pbxBackground.Name = "pbxBackground"; this.pbxBackground.TabStop = false; this.pbxBackground.Click += new System.EventHandler(this.pbxBackground_Click); // // btnWebsite // this.btnWebsite.BackColor = System.Drawing.SystemColors.Control; this.btnWebsite.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnWebsite.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnWebsite.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnWebsite.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnWebsite, "btnWebsite"); this.btnWebsite.ForeColor = System.Drawing.SystemColors.ControlText; this.btnWebsite.Name = "btnWebsite"; this.btnWebsite.UseVisualStyleBackColor = false; this.btnWebsite.Click += new System.EventHandler(this.Website_Activate); // // btnCreateReports // this.btnCreateReports.BackColor = System.Drawing.SystemColors.Control; this.btnCreateReports.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnCreateReports.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnCreateReports.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnCreateReports.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnCreateReports, "btnCreateReports"); this.btnCreateReports.ForeColor = System.Drawing.SystemColors.ControlText; this.btnCreateReports.Name = "btnCreateReports"; this.btnCreateReports.UseVisualStyleBackColor = false; this.btnCreateReports.Click += new System.EventHandler(this.CreateReports_Activate); // // btnCreateMaps // this.btnCreateMaps.BackColor = System.Drawing.SystemColors.Control; this.btnCreateMaps.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnCreateMaps.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnCreateMaps.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnCreateMaps.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnCreateMaps, "btnCreateMaps"); this.btnCreateMaps.ForeColor = System.Drawing.SystemColors.ControlText; this.btnCreateMaps.Name = "btnCreateMaps"; this.btnCreateMaps.UseVisualStyleBackColor = false; this.btnCreateMaps.Click += new System.EventHandler(this.CreateMaps_Activate); // // btnExit // this.btnExit.BackColor = System.Drawing.SystemColors.Control; this.btnExit.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnExit.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnExit.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnExit.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnExit, "btnExit"); this.btnExit.ForeColor = System.Drawing.SystemColors.ControlText; this.btnExit.Name = "btnExit"; this.btnExit.UseVisualStyleBackColor = false; this.btnExit.Click += new System.EventHandler(this.Exit_Activate); // // btnEnterData // this.btnEnterData.BackColor = System.Drawing.SystemColors.Control; this.btnEnterData.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnEnterData.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; 
this.btnEnterData.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnEnterData.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnEnterData, "btnEnterData"); this.btnEnterData.ForeColor = System.Drawing.SystemColors.ControlText; this.btnEnterData.Name = "btnEnterData"; this.btnEnterData.UseVisualStyleBackColor = false; this.btnEnterData.Click += new System.EventHandler(this.EnterData_Activate); // // btnAnalyze // this.btnAnalyze.BackColor = System.Drawing.SystemColors.Control; this.btnAnalyze.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnAnalyze.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnAnalyze.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnAnalyze.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnAnalyze, "btnAnalyze"); this.btnAnalyze.ForeColor = System.Drawing.SystemColors.ControlText; this.btnAnalyze.Name = "btnAnalyze"; this.btnAnalyze.UseVisualStyleBackColor = false; this.btnAnalyze.Click += new System.EventHandler(this.AnalyzeData_Activate); // // btnMakeView // this.btnMakeView.BackColor = System.Drawing.SystemColors.Control; this.btnMakeView.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnMakeView.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnMakeView.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnMakeView.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnMakeView, "btnMakeView"); this.btnMakeView.ForeColor = System.Drawing.SystemColors.ControlText; this.btnMakeView.Name = "btnMakeView"; this.btnMakeView.UseVisualStyleBackColor = false; this.btnMakeView.Click += new System.EventHandler(this.MakeView_Activate); // // imlImageList // this.imlImageList.ColorDepth = System.Windows.Forms.ColorDepth.Depth8Bit; resources.ApplyResources(this.imlImageList, "imlImageList"); this.imlImageList.TransparentColor = System.Drawing.Color.Transparent; // // mnuMainMenu // this.mnuMainMenu.ImageScalingSize = new System.Drawing.Size(20, 20); this.mnuMainMenu.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { this.fileToolStripMenuItem, this.viewToolStripMenuItem, this.toolsToolStripMenuItem, this.statCalcToolStripMenuItem, this.helpToolStripMenuItem}); resources.ApplyResources(this.mnuMainMenu, "mnuMainMenu"); this.mnuMainMenu.Name = "mnuMainMenu"; this.mnuMainMenu.ItemClicked += new System.Windows.Forms.ToolStripItemClickedEventHandler(this.mnuMainMenu_ItemClicked); // // fileToolStripMenuItem // this.fileToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.exitToolStripMenuItem}); this.fileToolStripMenuItem.Name = "fileToolStripMenuItem"; resources.ApplyResources(this.fileToolStripMenuItem, "fileToolStripMenuItem"); // // exitToolStripMenuItem // this.exitToolStripMenuItem.Name = "exitToolStripMenuItem"; resources.ApplyResources(this.exitToolStripMenuItem, "exitToolStripMenuItem"); this.exitToolStripMenuItem.Click += new System.EventHandler(this.Exit_Activate); // // viewToolStripMenuItem // this.viewToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.toolStripMenuItemStatusBar, this.epiInfoLogsToolStripMenuItem}); this.viewToolStripMenuItem.Name = "viewToolStripMenuItem"; resources.ApplyResources(this.viewToolStripMenuItem, "viewToolStripMenuItem"); // // toolStripMenuItemStatusBar // 
this.toolStripMenuItemStatusBar.Checked = true; this.toolStripMenuItemStatusBar.CheckState = System.Windows.Forms.CheckState.Checked; this.toolStripMenuItemStatusBar.Name = "toolStripMenuItemStatusBar"; resources.ApplyResources(this.toolStripMenuItemStatusBar, "toolStripMenuItemStatusBar"); this.toolStripMenuItemStatusBar.Click += new System.EventHandler(this.toolStripMenuItemStatusBar_Click); // // epiInfoLogsToolStripMenuItem // this.epiInfoLogsToolStripMenuItem.Name = "epiInfoLogsToolStripMenuItem"; resources.ApplyResources(this.epiInfoLogsToolStripMenuItem, "epiInfoLogsToolStripMenuItem"); this.epiInfoLogsToolStripMenuItem.Click += new System.EventHandler(this.epiInfoLogsToolStripMenuItem_Click); // // toolsToolStripMenuItem // this.toolsToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.makeViewToolStripMenuItem, this.enterDataToolStripMenuItem, this.analyzeDataToolStripMenuItem, this.createMapsToolStripMenuItem, this.toolStripSeparator1, this.optionsToolStripMenuItem}); this.toolsToolStripMenuItem.Name = "toolsToolStripMenuItem"; resources.ApplyResources(this.toolsToolStripMenuItem, "toolsToolStripMenuItem"); // // makeViewToolStripMenuItem // this.makeViewToolStripMenuItem.Name = "makeViewToolStripMenuItem"; resources.ApplyResources(this.makeViewToolStripMenuItem, "makeViewToolStripMenuItem"); this.makeViewToolStripMenuItem.Click += new System.EventHandler(this.MakeView_Activate); // // enterDataToolStripMenuItem // this.enterDataToolStripMenuItem.Name = "enterDataToolStripMenuItem"; resources.ApplyResources(this.enterDataToolStripMenuItem, "enterDataToolStripMenuItem"); this.enterDataToolStripMenuItem.Click += new System.EventHandler(this.EnterData_Activate); // // analyzeDataToolStripMenuItem // this.analyzeDataToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.classicToolStripMenuItem, this.dashboardToolStripMenuItem}); this.analyzeDataToolStripMenuItem.Name = "analyzeDataToolStripMenuItem"; resources.ApplyResources(this.analyzeDataToolStripMenuItem, "analyzeDataToolStripMenuItem"); // // classicToolStripMenuItem // this.classicToolStripMenuItem.Name = "classicToolStripMenuItem"; resources.ApplyResources(this.classicToolStripMenuItem, "classicToolStripMenuItem"); this.classicToolStripMenuItem.Click += new System.EventHandler(this.AnalyzeData_Activate); // // dashboardToolStripMenuItem // this.dashboardToolStripMenuItem.Name = "dashboardToolStripMenuItem"; resources.ApplyResources(this.dashboardToolStripMenuItem, "dashboardToolStripMenuItem"); this.dashboardToolStripMenuItem.Click += new System.EventHandler(this.btnDashboard_Click); // // createMapsToolStripMenuItem // this.createMapsToolStripMenuItem.Name = "createMapsToolStripMenuItem"; resources.ApplyResources(this.createMapsToolStripMenuItem, "createMapsToolStripMenuItem"); this.createMapsToolStripMenuItem.Click += new System.EventHandler(this.CreateMaps_Activate); // // toolStripSeparator1 // this.toolStripSeparator1.Name = "toolStripSeparator1"; resources.ApplyResources(this.toolStripSeparator1, "toolStripSeparator1"); // // optionsToolStripMenuItem // this.optionsToolStripMenuItem.Name = "optionsToolStripMenuItem"; resources.ApplyResources(this.optionsToolStripMenuItem, "optionsToolStripMenuItem"); this.optionsToolStripMenuItem.Click += new System.EventHandler(this.Options_Activate); // // statCalcToolStripMenuItem // this.statCalcToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { 
this.sampleSizeAndPowerToolStripMenuItem1, this.chiSquareForTrendToolStripMenuItem1, this.tables2X22XNToolStripMenuItem, this.mnuPoisson, this.mnuBinomial, this.mnuMatchedPairCaseControl}); this.statCalcToolStripMenuItem.Name = "statCalcToolStripMenuItem"; resources.ApplyResources(this.statCalcToolStripMenuItem, "statCalcToolStripMenuItem"); // // sampleSizeAndPowerToolStripMenuItem1 // this.sampleSizeAndPowerToolStripMenuItem1.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.populationSurveyToolStripMenuItem1, this.cohortOrCrossSectionalToolStripMenuItem, this.unmatchedCasecontrolToolStripMenuItem1}); this.sampleSizeAndPowerToolStripMenuItem1.Name = "sampleSizeAndPowerToolStripMenuItem1"; resources.ApplyResources(this.sampleSizeAndPowerToolStripMenuItem1, "sampleSizeAndPowerToolStripMenuItem1"); this.sampleSizeAndPowerToolStripMenuItem1.Click += new System.EventHandler(this.sampleSizeAndPowerToolStripMenuItem1_Click); // // populationSurveyToolStripMenuItem1 // this.populationSurveyToolStripMenuItem1.Name = "populationSurveyToolStripMenuItem1"; resources.ApplyResources(this.populationSurveyToolStripMenuItem1, "populationSurveyToolStripMenuItem1"); this.populationSurveyToolStripMenuItem1.Click += new System.EventHandler(this.populationSurveyToolStripMenuItem_Click); // // cohortOrCrossSectionalToolStripMenuItem // this.cohortOrCrossSectionalToolStripMenuItem.Name = "cohortOrCrossSectionalToolStripMenuItem"; resources.ApplyResources(this.cohortOrCrossSectionalToolStripMenuItem, "cohortOrCrossSectionalToolStripMenuItem"); this.cohortOrCrossSectionalToolStripMenuItem.Click += new System.EventHandler(this.cohortOrCrossToolStripMenuItem_Click); // // unmatchedCasecontrolToolStripMenuItem1 // this.unmatchedCasecontrolToolStripMenuItem1.Name = "unmatchedCasecontrolToolStripMenuItem1"; resources.ApplyResources(this.unmatchedCasecontrolToolStripMenuItem1, "unmatchedCasecontrolToolStripMenuItem1"); this.unmatchedCasecontrolToolStripMenuItem1.Click += new System.EventHandler(this.unmatchedCasecontrolToolStripMenuItem_Click); // // chiSquareForTrendToolStripMenuItem1 // this.chiSquareForTrendToolStripMenuItem1.Name = "chiSquareForTrendToolStripMenuItem1"; resources.ApplyResources(this.chiSquareForTrendToolStripMenuItem1, "chiSquareForTrendToolStripMenuItem1"); this.chiSquareForTrendToolStripMenuItem1.Click += new System.EventHandler(this.chiSquareForTrendToolStripMenuItem_Click); // // tables2X22XNToolStripMenuItem // this.tables2X22XNToolStripMenuItem.Name = "tables2X22XNToolStripMenuItem"; resources.ApplyResources(this.tables2X22XNToolStripMenuItem, "tables2X22XNToolStripMenuItem"); this.tables2X22XNToolStripMenuItem.Click += new System.EventHandler(this.tables2x2ToolStripMenuItem_Click); // // mnuPoisson // this.mnuPoisson.Name = "mnuPoisson"; resources.ApplyResources(this.mnuPoisson, "mnuPoisson"); this.mnuPoisson.Click += new System.EventHandler(this.mnuPoisson_Click); // // mnuBinomial // this.mnuBinomial.Name = "mnuBinomial"; resources.ApplyResources(this.mnuBinomial, "mnuBinomial"); this.mnuBinomial.Click += new System.EventHandler(this.mnuBinomial_Click); // // mnuMatchedPairCaseControl // this.mnuMatchedPairCaseControl.Name = "mnuMatchedPairCaseControl"; resources.ApplyResources(this.mnuMatchedPairCaseControl, "mnuMatchedPairCaseControl"); this.mnuMatchedPairCaseControl.Click += new System.EventHandler(this.mnuMatchedPairCaseControl_Click); // // helpToolStripMenuItem // this.helpToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { 
this.contentsToolStripMenuItem, this.videosToolStripMenuItem, this.communityMessageBoardToolStripMenuItem, this.contactHelpdeskToolStripMenuItem, this.otherEpiResourcesToolStripMenuItem, this.toolStripSeparator2, this.aboutEpiInfoToolStripMenuItem}); this.helpToolStripMenuItem.Name = "helpToolStripMenuItem"; resources.ApplyResources(this.helpToolStripMenuItem, "helpToolStripMenuItem"); // // contentsToolStripMenuItem // this.contentsToolStripMenuItem.Name = "contentsToolStripMenuItem"; resources.ApplyResources(this.contentsToolStripMenuItem, "contentsToolStripMenuItem"); this.contentsToolStripMenuItem.Click += new System.EventHandler(this.Contents_Activate); // // videosToolStripMenuItem // this.videosToolStripMenuItem.Name = "videosToolStripMenuItem"; resources.ApplyResources(this.videosToolStripMenuItem, "videosToolStripMenuItem"); this.videosToolStripMenuItem.Click += new System.EventHandler(this.videosToolStripMenuItem_Click); // // communityMessageBoardToolStripMenuItem // this.communityMessageBoardToolStripMenuItem.Name = "communityMessageBoardToolStripMenuItem"; resources.ApplyResources(this.communityMessageBoardToolStripMenuItem, "communityMessageBoardToolStripMenuItem"); this.communityMessageBoardToolStripMenuItem.Click += new System.EventHandler(this.communityMessageBoardToolStripMenuItem_Click); // // contactHelpdeskToolStripMenuItem // this.contactHelpdeskToolStripMenuItem.Name = "contactHelpdeskToolStripMenuItem"; resources.ApplyResources(this.contactHelpdeskToolStripMenuItem, "contactHelpdeskToolStripMenuItem"); this.contactHelpdeskToolStripMenuItem.Click += new System.EventHandler(this.contactHelpdeskToolStripMenuItem_Click); // // otherEpiResourcesToolStripMenuItem // this.otherEpiResourcesToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] { this.activEpicomToolStripMenuItem, this.openEpicomToolStripMenuItem}); this.otherEpiResourcesToolStripMenuItem.Name = "otherEpiResourcesToolStripMenuItem"; resources.ApplyResources(this.otherEpiResourcesToolStripMenuItem, "otherEpiResourcesToolStripMenuItem"); // // activEpicomToolStripMenuItem // this.activEpicomToolStripMenuItem.Name = "activEpicomToolStripMenuItem"; resources.ApplyResources(this.activEpicomToolStripMenuItem, "activEpicomToolStripMenuItem"); this.activEpicomToolStripMenuItem.Click += new System.EventHandler(this.activEpicomToolStripMenuItem_Click); // // openEpicomToolStripMenuItem // this.openEpicomToolStripMenuItem.Name = "openEpicomToolStripMenuItem"; resources.ApplyResources(this.openEpicomToolStripMenuItem, "openEpicomToolStripMenuItem"); this.openEpicomToolStripMenuItem.Click += new System.EventHandler(this.openEpicomToolStripMenuItem_Click); // // toolStripSeparator2 // this.toolStripSeparator2.Name = "toolStripSeparator2"; resources.ApplyResources(this.toolStripSeparator2, "toolStripSeparator2"); // // aboutEpiInfoToolStripMenuItem // this.aboutEpiInfoToolStripMenuItem.Name = "aboutEpiInfoToolStripMenuItem"; resources.ApplyResources(this.aboutEpiInfoToolStripMenuItem, "aboutEpiInfoToolStripMenuItem"); this.aboutEpiInfoToolStripMenuItem.Click += new System.EventHandler(this.AboutEpiInfo_Activate); // // groupBox1 // this.groupBox1.BackColor = System.Drawing.Color.Transparent; this.groupBox1.Controls.Add(this.btnDashboard); this.groupBox1.Controls.Add(this.btnAnalyze); resources.ApplyResources(this.groupBox1, "groupBox1"); this.groupBox1.ForeColor = System.Drawing.Color.White; this.groupBox1.Name = "groupBox1"; this.groupBox1.TabStop = false; this.groupBox1.Enter += new 
System.EventHandler(this.groupBox1_Enter); // // btnDashboard // this.btnDashboard.BackColor = System.Drawing.SystemColors.Control; this.btnDashboard.FlatAppearance.BorderColor = System.Drawing.Color.White; this.btnDashboard.FlatAppearance.CheckedBackColor = System.Drawing.Color.DimGray; this.btnDashboard.FlatAppearance.MouseDownBackColor = System.Drawing.Color.Gainsboro; this.btnDashboard.FlatAppearance.MouseOverBackColor = System.Drawing.Color.DarkSlateGray; resources.ApplyResources(this.btnDashboard, "btnDashboard"); this.btnDashboard.ForeColor = System.Drawing.SystemColors.ControlText; this.btnDashboard.Name = "btnDashboard"; this.btnDashboard.UseVisualStyleBackColor = false; this.btnDashboard.Click += new System.EventHandler(this.btnDashboard_Click); // // MenuMainForm // resources.ApplyResources(this, "$this"); this.Controls.Add(this.groupBox1); this.Controls.Add(this.btnWebsite); this.Controls.Add(this.btnCreateReports); this.Controls.Add(this.btnCreateMaps); this.Controls.Add(this.btnExit); this.Controls.Add(this.btnEnterData); this.Controls.Add(this.btnMakeView); this.Controls.Add(this.pbxBackground); this.Controls.Add(this.mnuMainMenu); this.MainMenuStrip = this.mnuMainMenu; this.Name = "MenuMainForm"; this.Load += new System.EventHandler(this.MenuMainForm_Load); this.Controls.SetChildIndex(this.mnuMainMenu, 0); this.Controls.SetChildIndex(this.pbxBackground, 0); this.Controls.SetChildIndex(this.btnMakeView, 0); this.Controls.SetChildIndex(this.btnEnterData, 0); this.Controls.SetChildIndex(this.btnExit, 0); this.Controls.SetChildIndex(this.btnCreateMaps, 0); this.Controls.SetChildIndex(this.btnCreateReports, 0); this.Controls.SetChildIndex(this.btnWebsite, 0); this.Controls.SetChildIndex(this.groupBox1, 0); ((System.ComponentModel.ISupportInitialize)(this.pbxBackground)).EndInit(); this.mnuMainMenu.ResumeLayout(false); this.mnuMainMenu.PerformLayout(); this.groupBox1.ResumeLayout(false); this.ResumeLayout(false); this.PerformLayout(); } #endregion Designer generated code private ToolStripMenuItem fileToolStripMenuItem; private ToolStripMenuItem exitToolStripMenuItem; private ToolStripMenuItem toolsToolStripMenuItem; private ToolStripMenuItem makeViewToolStripMenuItem; private ToolStripMenuItem enterDataToolStripMenuItem; private ToolStripMenuItem analyzeDataToolStripMenuItem; private ToolStripMenuItem createMapsToolStripMenuItem; private ToolStripSeparator toolStripSeparator1; private ToolStripMenuItem optionsToolStripMenuItem; private ToolStripMenuItem helpToolStripMenuItem; private ToolStripMenuItem contentsToolStripMenuItem; private ToolStripMenuItem aboutEpiInfoToolStripMenuItem; private ToolStripMenuItem viewToolStripMenuItem; private ToolStripMenuItem toolStripMenuItemStatusBar; private ToolStripMenuItem epiInfoLogsToolStripMenuItem; private GroupBox groupBox1; private Button btnDashboard; private ToolStripMenuItem classicToolStripMenuItem; private ToolStripMenuItem dashboardToolStripMenuItem; private ToolStripMenuItem statCalcToolStripMenuItem; private ToolStripMenuItem sampleSizeAndPowerToolStripMenuItem1; private ToolStripMenuItem populationSurveyToolStripMenuItem1; private ToolStripMenuItem cohortOrCrossSectionalToolStripMenuItem; private ToolStripMenuItem unmatchedCasecontrolToolStripMenuItem1; private ToolStripMenuItem chiSquareForTrendToolStripMenuItem1; private ToolStripMenuItem tables2X22XNToolStripMenuItem; private ToolStripMenuItem videosToolStripMenuItem; private ToolStripMenuItem communityMessageBoardToolStripMenuItem; private ToolStripMenuItem 
contactHelpdeskToolStripMenuItem; private ToolStripSeparator toolStripSeparator2; private ToolStripMenuItem mnuPoisson; private ToolStripMenuItem mnuBinomial; private ToolStripMenuItem mnuMatchedPairCaseControl; private ToolStripMenuItem otherEpiResourcesToolStripMenuItem; private ToolStripMenuItem openEpicomToolStripMenuItem; private ToolStripMenuItem activEpicomToolStripMenuItem; } }
using System.Collections.Generic;
using System.Linq;
using JoinRpg.DataModel;
using JoinRpg.DataModel.Mocks;
using JoinRpg.Domain.CharacterFields;
using Shouldly;
using Xunit;

namespace JoinRpg.Domain.Test
{
    public class FieldSaveHelperTest
    {
        private MockedProject _original = null!; // Should be initialized per fact
        private IFieldDefaultValueGenerator _generator = null!; // Should be initialized per fact

        [Fact]
        public void SaveOnAddTest()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            var claim = mock.CreateClaim(mock.Character, mock.Player);

            // ReSharper disable once MustUseReturnValue
            _ = FieldSaveHelper.SaveCharacterFields(
                mock.Player.UserId,
                claim,
                new Dictionary<int, string?>()
                {
                    {mock.CharacterField.ProjectFieldId, "test"},
                },
                _generator);

            mock.Character.JsonData
                .ShouldBe(_original.Character.JsonData,
                    "Adding claim should not modify any character fields");

            // Compare against the untouched copy, not against the mutated mock itself.
            mock.Character.Groups.Select(g => g.CharacterGroupId).ShouldBe(
                _original.Character.Groups.Select(g => g.CharacterGroupId),
                "Adding claim should not modify any character groups");

            claim.JsonData.ShouldBe($"{{\"{mock.CharacterField.ProjectFieldId}\":\"test\"}}");
        }

        [Fact]
        public void TryToChangeMasterOnlyFieldOnAdd()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();

            _ = Should.Throw<NoAccessToProjectException>(() =>
                FieldSaveHelper.SaveCharacterFields(
                    mock.Player.UserId,
                    mock.CreateClaim(mock.Character, mock.Player),
                    new Dictionary<int, string?>()
                    {
                        {mock.MasterOnlyField.ProjectFieldId, "test"},
                    },
                    _generator));
        }

        [Fact]
        public void ApprovedClaimHiddenChangeTest()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            var claim = mock.CreateApprovedClaim(mock.Character, mock.Player);
            var publicField = new FieldWithValue(mock.PublicField, "Public");
            MockedProject.AssignFieldValues(claim, publicField);

            _ = FieldSaveHelper.SaveCharacterFields(
                mock.Player.UserId,
                claim,
                new Dictionary<int, string?>()
                {
                    {mock.HideForUnApprovedClaim.ProjectFieldId, "test"},
                    {mock.CharacterField.ProjectFieldId, null},
                },
                _generator);

            mock.Character.FieldValuesShouldBe(new FieldWithValue(mock.HideForUnApprovedClaim, "test"), publicField);

            ShouldBeTestExtensions.ShouldBe(claim.JsonData, "{}");
        }

        [Fact]
        public void MasterHiddenChangeTest()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            var publicField = new FieldWithValue(mock.PublicField, "Public");
            MockedProject.AssignFieldValues(mock.Character, publicField);

            _ = FieldSaveHelper.SaveCharacterFields(
                mock.Master.UserId,
                mock.Character,
                new Dictionary<int, string?>()
                {
                    {mock.HideForUnApprovedClaim.ProjectFieldId, "test"},
                    {mock.CharacterField.ProjectFieldId, null},
                },
                _generator);

            mock.Character.FieldValuesShouldBe(new FieldWithValue(mock.HideForUnApprovedClaim, "test"), publicField);
        }

        [Fact]
        public void ApprovedClaimChangeTest()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            var claim = mock.CreateApprovedClaim(mock.Character, mock.Player);

            _ = FieldSaveHelper.SaveCharacterFields(
                mock.Player.UserId,
                claim,
                new Dictionary<int, string?>()
                {
                    {mock.CharacterField.ProjectFieldId, "test"},
                },
                _generator);

            mock.Character.FieldValuesShouldBe(new FieldWithValue(mock.CharacterField, "test"));

            ShouldBeTestExtensions.ShouldBe(claim.JsonData, "{}");
        }

        [Fact]
        public void
ConditionalFieldChangeTest() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); var claim = mock.CreateClaim(mock.Character, mock.Player); var conditionalField = mock.CreateConditionalField(); _ = FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, claim, new Dictionary<int, string?>() { {conditionalField.ProjectFieldId, "test"}, }, _generator); ShouldBeTestExtensions.ShouldBe(claim.JsonData, $"{{\"{conditionalField.ProjectFieldId}\":\"test\"}}"); ShouldBeTestExtensions.ShouldBe(mock.Character.JsonData, _original.Character.JsonData, "Adding claim should not modify any character fields"); } [Fact] public void ConditionalFieldChangeTestForGroup() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); var claim = mock.CreateClaim(mock.Group, mock.Player); var conditionalField = mock.CreateConditionalField(); _ = FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, claim, new Dictionary<int, string?>() { {conditionalField.ProjectFieldId, "test"}, }, _generator); ShouldBeTestExtensions.ShouldBe(claim.JsonData, $"{{\"{conditionalField.ProjectFieldId}\":\"test\"}}"); ShouldBeTestExtensions.ShouldBe(mock.Character.JsonData, _original.Character.JsonData, "Adding claim should not modify any character fields"); } [Fact] public void HiddenFieldChangeFailedTest() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); var claim = mock.CreateClaim(mock.Group, mock.Player); _ = Should.Throw<NoAccessToProjectException>(() => FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, claim, new Dictionary<int, string?>() { {mock.HideForUnApprovedClaim.ProjectFieldId, "test"}, }, _generator)); } [Fact] public void DisableUnapprovedClaimToChangeCharacterTest() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); var claim = mock.CreateClaim(mock.Character, mock.Player); _ = FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, claim, new Dictionary<int, string?>() { {mock.CharacterField.ProjectFieldId, "test"}, }, _generator); ShouldBeTestExtensions.ShouldBe(mock.Character.JsonData, _original.Character.JsonData, "Adding claim should not modify any character fields"); mock.Character.Groups.Select(g => g.CharacterGroupId).ToList().ShouldBe( (IEnumerable<int>)_original.Character.Groups.Select(g => g.CharacterGroupId) .ToList(), "Adding claim should not modify any character groups"); ShouldBeTestExtensions.ShouldBe(claim.JsonData, $"{{\"{mock.CharacterField.ProjectFieldId}\":\"test\"}}"); } [Fact] public void TryToChangeAnotherUserCharacter() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); _ = Should.Throw<NoAccessToProjectException>(() => FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, mock.Character, new Dictionary<int, string?>() { {mock.CharacterField.ProjectFieldId, "test"}, }, _generator)); } [Fact] public void TryToSkipMandatoryField() { _original = new MockedProject(); _generator = new MockedFieldDefaultValueGenerator(); var mock = new MockedProject(); mock.CharacterField.MandatoryStatus = MandatoryStatus.Required; var claim = mock.CreateApprovedClaim(mock.Character, mock.Player); var exception = Should.Throw<FieldRequiredException>(() => FieldSaveHelper.SaveCharacterFields( mock.Player.UserId, claim, new Dictionary<int, string?>() { 
{mock.CharacterField.ProjectFieldId, ""},
                    },
                    _generator));

            exception.FieldName.ShouldBe(mock.CharacterField.FieldName);
        }

        [Fact]
        public void SetMandatoryField()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            mock.CharacterField.MandatoryStatus = MandatoryStatus.Required;
            var claim = mock.CreateApprovedClaim(mock.Character, mock.Player);

            var exception = Should.NotThrow(() =>
                FieldSaveHelper.SaveCharacterFields(
                    mock.Player.UserId,
                    claim,
                    new Dictionary<int, string?>()
                    {
                        {mock.CharacterField.ProjectFieldId, "test"},
                    },
                    _generator));

            mock.Character.JsonData.ShouldBe($"{{\"{mock.CharacterField.ProjectFieldId}\":\"test\"}}");
        }

        [Fact]
        public void SkipOptionalField()
        {
            _original = new MockedProject();
            _generator = new MockedFieldDefaultValueGenerator();
            var mock = new MockedProject();
            mock.CharacterField.MandatoryStatus = MandatoryStatus.Optional;
            var claim = mock.CreateApprovedClaim(mock.Character, mock.Player);

            var exception = Should.NotThrow(() =>
                FieldSaveHelper.SaveCharacterFields(
                    mock.Player.UserId,
                    claim,
                    new Dictionary<int, string?>()
                    {
                        {mock.CharacterField.ProjectFieldId, ""},
                    },
                    _generator));

            mock.Character.JsonData.ShouldBe("{}");
        }
    }

    public class MockedFieldDefaultValueGenerator : IFieldDefaultValueGenerator
    {
        public string? CreateDefaultValue(Claim? claim, FieldWithValue field) => null;

        public string? CreateDefaultValue(Character? character, FieldWithValue field) => null;
    }
}
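//
// MockedFieldDefaultValueGenerator above stubs the IFieldDefaultValueGenerator
// extension point by always returning null (no default). The sketch below shows
// what a non-trivial implementation could look like; the "N/A" placeholder and
// the decision to only default claim fields are assumptions for illustration,
// not the behaviour of any real JoinRpg generator.
//
namespace JoinRpg.Domain.Test
{
    using JoinRpg.DataModel;
    using JoinRpg.Domain.CharacterFields;

    public class ConstantFieldDefaultValueGenerator : IFieldDefaultValueGenerator
    {
        // Supply a placeholder value for fields saved as part of a claim.
        public string? CreateDefaultValue(Claim? claim, FieldWithValue field) => claim is null ? null : "N/A";

        // Leave character fields without a generated default.
        public string? CreateDefaultValue(Character? character, FieldWithValue field) => null;
    }
}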
#region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections; using System.Collections.Specialized; using System.Runtime.Serialization; using System.Text; using System.Web; namespace SoftLogik.Web { [Serializable] public class HttpValueCollection : NameValueCollection { public HttpValueCollection() : base(StringComparer.OrdinalIgnoreCase) { } public HttpValueCollection(int capacity) : base(capacity, StringComparer.OrdinalIgnoreCase) { } public HttpValueCollection(SerializationInfo info, StreamingContext context) : base(info, context) { } public HttpValueCollection(string str) : this(str, false, false, Encoding.UTF8) { } public HttpValueCollection(string str, bool urlencoded, Encoding encoding) : this(str, false, urlencoded, encoding) { } public HttpValueCollection(string str, bool readOnly, bool urlencoded, Encoding encoding) : base(StringComparer.OrdinalIgnoreCase) { if (!string.IsNullOrEmpty(str)) FillFromString(str, urlencoded, encoding); IsReadOnly = readOnly; } internal void Add(HttpCookieCollection c) { int num1 = c.Count; for (int num2 = 0; num2 < num1; num2++) { HttpCookie cookie1 = c.Get(num2); base.Add(cookie1.Name, cookie1.Value); } } internal void FillFromEncodedBytes(byte[] bytes, Encoding encoding) { int num1 = (bytes != null) ? 
bytes.Length : 0;

            for (int num2 = 0; num2 < num1; num2++)
            {
                string text1;
                string text2;
                int num3 = num2;
                int num4 = -1;

                while (num2 < num1)
                {
                    byte b = bytes[num2];

                    if (b == 0x3d) // '=' marks the end of the name part
                    {
                        if (num4 < 0)
                        {
                            num4 = num2;
                        }
                    }
                    else if (b == 0x26) // '&' terminates the current name/value pair
                    {
                        break;
                    }

                    num2++;
                }

                if (num4 >= 0)
                {
                    text1 = HttpUtility.UrlDecode(bytes, num3, num4 - num3, encoding);
                    text2 = HttpUtility.UrlDecode(bytes, num4 + 1, (num2 - num4) - 1, encoding);
                }
                else
                {
                    text1 = null;
                    text2 = HttpUtility.UrlDecode(bytes, num3, num2 - num3, encoding);
                }

                base.Add(text1, text2);

                if ((num2 == (num1 - 1)) && (bytes[num2] == 0x26))
                {
                    base.Add(null, string.Empty);
                }
            }
        }

        public void FillFromString(string s)
        {
            FillFromString(s, false, null);
        }

        public void FillFromString(string s, bool urlEncoded, Encoding encoding)
        {
            if (string.IsNullOrEmpty(s))
                return;

            for (int i = 0; i < s.Length; i++)
            {
                int ampIndex = i;
                int equalsIndex = -1;

                while (i < s.Length)
                {
                    if (s[i] == '=')
                    {
                        if (equalsIndex < 0)
                            equalsIndex = i;
                    }
                    else if (s[i] == '&')
                    {
                        break;
                    }

                    i++;
                }

                string name = null;
                string value = null;

                if (equalsIndex >= 0)
                {
                    name = s.Substring(ampIndex, equalsIndex - ampIndex);
                    value = s.Substring(equalsIndex + 1, (i - equalsIndex) - 1);
                }
                else
                {
                    value = s.Substring(ampIndex, i - ampIndex);
                }

                if (urlEncoded)
                    base.Add(HttpUtility.UrlDecode(name, encoding), HttpUtility.UrlDecode(value, encoding));
                else
                    base.Add(name, value);

                if (i == (s.Length - 1) && s[i] == '&')
                {
                    base.Add(null, string.Empty);
                }
            }
        }

        internal void MakeReadOnly()
        {
            base.IsReadOnly = true;
        }

        internal void MakeReadWrite()
        {
            base.IsReadOnly = false;
        }

        internal void Reset()
        {
            base.Clear();
        }

        public override string ToString()
        {
            return ToString(true);
        }

        public virtual string ToString(bool urlencoded)
        {
            return ToString(urlencoded, null);
        }

        public virtual string ToString(bool urlEncoded, IDictionary excludeKeys)
        {
            if (Count == 0)
                return string.Empty;

            StringBuilder sb = new StringBuilder();
            bool excludeContainsViewState = (excludeKeys != null) && (excludeKeys["__VIEWSTATE"] != null);

            for (int i = 0; i < Count; i++)
            {
                string key = GetKey(i);

                if ((!excludeContainsViewState || key == null || !key.StartsWith("__VIEWSTATE", StringComparison.Ordinal))
                    && (excludeKeys == null || key == null || excludeKeys[key] == null))
                {
                    if (urlEncoded)
                        key = HttpUtility.UrlEncodeUnicode(key);

                    key = !string.IsNullOrEmpty(key) ? (key + "=") : string.Empty;

                    ArrayList keyValues = (ArrayList)base.BaseGet(i);

                    if (sb.Length > 0)
                        sb.Append('&');

                    int valuesCount = (keyValues != null) ? keyValues.Count : 0;

                    if (valuesCount == 1)
                    {
                        sb.Append(key);
                        string value = (string)keyValues[0];
                        if (urlEncoded)
                            value = HttpUtility.UrlEncodeUnicode(value);
                        sb.Append(value);
                    }
                    else if (valuesCount == 0)
                    {
                        sb.Append(key);
                    }
                    else
                    {
                        for (int j = 0; j < valuesCount; j++)
                        {
                            if (j > 0)
                                sb.Append('&');
                            sb.Append(key);
                            string value = (string)keyValues[j];
                            if (urlEncoded)
                                value = HttpUtility.UrlEncodeUnicode(value);
                            sb.Append(value);
                        }
                    }
                }
            }

            return sb.ToString();
        }
    }
}
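//
// A short usage sketch for the collection above: parse a query string with
// URL-decoding enabled and serialize it back with ToString(true). The sample
// query string is made up for the example.
//
namespace SoftLogik.Web.Examples
{
    using System.Text;

    internal static class HttpValueCollectionUsageSketch
    {
        internal static string RoundTrip()
        {
            // Decode "a=1&b=hello%20world&b=2" into name/value pairs
            // (readOnly: false, urlencoded: true).
            var values = new HttpValueCollection("a=1&b=hello%20world&b=2", false, true, Encoding.UTF8);

            // values["a"] == "1"; values["b"] == "hello world,2" because
            // NameValueCollection joins duplicate keys with a comma.

            // Re-encode; ToString(true) URL-encodes names and values again.
            return values.ToString(true);
        }
    }
}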
// ReSharper disable once CheckNamespace namespace Fluent { using System; using System.Collections; using System.Collections.ObjectModel; using System.Collections.Specialized; using System.Linq; using System.Windows; using System.Windows.Automation.Peers; using System.Windows.Controls; using System.Windows.Controls.Primitives; using System.Windows.Data; using System.Windows.Input; using System.Windows.Markup; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Threading; using Fluent.Automation.Peers; using Fluent.Extensibility; using Fluent.Extensions; using Fluent.Helpers; using Fluent.Internal; using Fluent.Internal.KnownBoxes; /// <summary> /// Represents the In-Ribbon Gallery, a gallery-based control that exposes /// a default subset of items directly in the Ribbon. Any remaining items /// are displayed when a drop-down menu button is clicked /// </summary> [ContentProperty(nameof(Items))] [TemplatePart(Name = "PART_ExpandButton", Type = typeof(ToggleButton))] [TemplatePart(Name = "PART_DropDownButton", Type = typeof(ToggleButton))] [TemplatePart(Name = "PART_Popup", Type = typeof(Popup))] [TemplatePart(Name = "PART_PopupContentControl", Type = typeof(ResizeableContentControl))] [TemplatePart(Name = "PART_FilterDropDownButton", Type = typeof(DropDownButton))] [TemplatePart(Name = "PART_GalleryPanel", Type = typeof(GalleryPanel))] [TemplatePart(Name = "PART_FakeImage", Type = typeof(Image))] [TemplatePart(Name = "PART_ContentPresenter", Type = typeof(ContentControl))] [TemplatePart(Name = "PART_PopupContentPresenter", Type = typeof(ContentControl))] [TemplatePart(Name = "PART_PopupResizeBorder", Type = typeof(FrameworkElement))] [TemplatePart(Name = "PART_DropDownBorder", Type = typeof(Border))] public class InRibbonGallery : Selector, IScalableRibbonControl, IDropDownControl, IRibbonControl, IQuickAccessItemProvider, IRibbonSizeChangedSink, ILargeIconProvider, IMediumIconProvider, ISimplifiedRibbonControl { #region Fields private ObservableCollection<GalleryGroupFilter>? filters; private ToggleButton? expandButton; private ToggleButton? dropDownButton; // Freezed image (created during snapping) private Image snappedImage = new Image(); // Is visual currently snapped private bool isSnapped; private DropDownButton? groupsMenuButton; private GalleryPanel? galleryPanel; private ContentControl? controlPresenter; private ContentControl? popupControlPresenter; private bool isButtonClicked; private ResizeableContentControl? popupContentControl; internal GalleryPanelState? 
CurrentGalleryPanelState { get; private set; } #endregion #region Properties #region Size /// <inheritdoc /> public RibbonControlSize Size { get { return (RibbonControlSize)this.GetValue(SizeProperty); } set { this.SetValue(SizeProperty, value); } } /// <summary>Identifies the <see cref="Size"/> dependency property.</summary> public static readonly DependencyProperty SizeProperty = RibbonProperties.SizeProperty.AddOwner(typeof(InRibbonGallery)); #endregion #region SizeDefinition /// <inheritdoc /> public RibbonControlSizeDefinition SizeDefinition { get { return (RibbonControlSizeDefinition)this.GetValue(SizeDefinitionProperty); } set { this.SetValue(SizeDefinitionProperty, value); } } /// <summary>Identifies the <see cref="SizeDefinition"/> dependency property.</summary> public static readonly DependencyProperty SizeDefinitionProperty = RibbonProperties.SizeDefinitionProperty.AddOwner(typeof(InRibbonGallery)); #endregion #region SimplifiedSizeDefinition /// <inheritdoc /> public RibbonControlSizeDefinition SimplifiedSizeDefinition { get { return (RibbonControlSizeDefinition)this.GetValue(SimplifiedSizeDefinitionProperty); } set { this.SetValue(SimplifiedSizeDefinitionProperty, value); } } /// <summary>Identifies the <see cref="SimplifiedSizeDefinition"/> dependency property.</summary> public static readonly DependencyProperty SimplifiedSizeDefinitionProperty = RibbonProperties.SimplifiedSizeDefinitionProperty.AddOwner(typeof(InRibbonGallery)); #endregion #region KeyTip /// <inheritdoc /> public string? KeyTip { get { return (string?)this.GetValue(KeyTipProperty); } set { this.SetValue(KeyTipProperty, value); } } /// <summary> /// Using a DependencyProperty as the backing store for Keys. /// This enables animation, styling, binding, etc... /// </summary> public static readonly DependencyProperty KeyTipProperty = Fluent.KeyTip.KeysProperty.AddOwner(typeof(InRibbonGallery)); #endregion #region Header /// <inheritdoc /> public object? Header { get { return this.GetValue(HeaderProperty); } set { this.SetValue(HeaderProperty, value); } } /// <summary>Identifies the <see cref="Header"/> dependency property.</summary> public static readonly DependencyProperty HeaderProperty = RibbonControl.HeaderProperty.AddOwner(typeof(InRibbonGallery), new PropertyMetadata(LogicalChildSupportHelper.OnLogicalChildPropertyChanged)); #endregion #region Icon /// <inheritdoc /> public object? 
Icon { get { return this.GetValue(IconProperty); } set { this.SetValue(IconProperty, value); } } /// <summary>Identifies the <see cref="Icon"/> dependency property.</summary> public static readonly DependencyProperty IconProperty = RibbonControl.IconProperty.AddOwner(typeof(InRibbonGallery), new PropertyMetadata(LogicalChildSupportHelper.OnLogicalChildPropertyChanged)); #endregion #region MinItemsInDropDownRow /// <summary> /// Gets or sets the minimum number of items per row in the drop down /// </summary> public int MinItemsInDropDownRow { get { return (int)this.GetValue(MinItemsInDropDownRowProperty); } set { this.SetValue(MinItemsInDropDownRowProperty, value); } } /// <summary>Identifies the <see cref="MinItemsInDropDownRow"/> dependency property.</summary> public static readonly DependencyProperty MinItemsInDropDownRowProperty = DependencyProperty.Register(nameof(MinItemsInDropDownRow), typeof(int), typeof(InRibbonGallery), new PropertyMetadata(IntBoxes.One)); #endregion #region MaxItemsInDropDownRow /// <summary> /// Gets or sets the maximum number of items per row in the drop down /// </summary> public int MaxItemsInDropDownRow { get { return (int)this.GetValue(MaxItemsInDropDownRowProperty); } set { this.SetValue(MaxItemsInDropDownRowProperty, value); } } /// <summary>Identifies the <see cref="MaxItemsInDropDownRow"/> dependency property.</summary> public static readonly DependencyProperty MaxItemsInDropDownRowProperty = DependencyProperty.Register(nameof(MaxItemsInDropDownRow), typeof(int), typeof(InRibbonGallery), new PropertyMetadata(IntBoxes.Zero)); #endregion #region ItemWidth /// <summary> /// Gets or sets item width /// </summary> public double ItemWidth { get { return (double)this.GetValue(ItemWidthProperty); } set { this.SetValue(ItemWidthProperty, value); } } /// <summary>Identifies the <see cref="ItemWidth"/> dependency property.</summary> public static readonly DependencyProperty ItemWidthProperty = DependencyProperty.Register(nameof(ItemWidth), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(DoubleBoxes.NaN)); /// <summary> /// Gets or sets item height /// </summary> public double ItemHeight { get { return (double)this.GetValue(ItemHeightProperty); } set { this.SetValue(ItemHeightProperty, value); } } /// <summary>Identifies the <see cref="ItemHeight"/> dependency property.</summary> public static readonly DependencyProperty ItemHeightProperty = DependencyProperty.Register(nameof(ItemHeight), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(DoubleBoxes.NaN)); #endregion #region GroupBy /// <summary> /// Gets or sets the name of the property which /// will be used to group items in the Gallery. /// </summary> public string? GroupBy { get { return (string?)this.GetValue(GroupByProperty); } set { this.SetValue(GroupByProperty, value); } } /// <summary>Identifies the <see cref="GroupBy"/> dependency property.</summary> public static readonly DependencyProperty GroupByProperty = DependencyProperty.Register(nameof(GroupBy), typeof(string), typeof(InRibbonGallery), new PropertyMetadata()); #endregion #region GroupByAdvanced /// <summary> /// Gets or sets the function which /// will be used to group items in the Gallery. /// </summary> public Func<object, string>?
GroupByAdvanced { get { return (Func<object, string>?)this.GetValue(GroupByAdvancedProperty); } set { this.SetValue(GroupByAdvancedProperty, value); } } /// <summary>Identifies the <see cref="GroupByAdvanced"/> dependency property.</summary> public static readonly DependencyProperty GroupByAdvancedProperty = DependencyProperty.Register(nameof(GroupByAdvanced), typeof(Func<object, string>), typeof(InRibbonGallery), new PropertyMetadata()); #endregion #region Orientation /// <summary> /// Gets or sets orientation of gallery /// </summary> public Orientation Orientation { get { return (Orientation)this.GetValue(OrientationProperty); } set { this.SetValue(OrientationProperty, value); } } /// <summary>Identifies the <see cref="Orientation"/> dependency property.</summary> public static readonly DependencyProperty OrientationProperty = DependencyProperty.Register(nameof(Orientation), typeof(Orientation), typeof(InRibbonGallery), new PropertyMetadata(Orientation.Horizontal)); #endregion #region Filters /// <summary> /// Gets collection of filters /// </summary> public ObservableCollection<GalleryGroupFilter> Filters { get { if (this.filters is null) { this.filters = new ObservableCollection<GalleryGroupFilter>(); this.filters.CollectionChanged += this.OnFilterCollectionChanged; } return this.filters; } } // Handle toolbar items changes private void OnFilterCollectionChanged(object? sender, NotifyCollectionChangedEventArgs e) { this.HasFilter = this.Filters.Count > 0; this.InvalidateProperty(SelectedFilterProperty); switch (e.Action) { case NotifyCollectionChangedAction.Add: foreach (var item in e.NewItems.NullSafe().OfType<GalleryGroupFilter>()) { if (this.groupsMenuButton is not null) { var filter = item; var menuItem = new MenuItem { Header = filter.Title, Tag = filter, IsDefinitive = false }; if (ReferenceEquals(filter, this.SelectedFilter)) { menuItem.IsChecked = true; } menuItem.Click += this.OnFilterMenuItemClick; this.groupsMenuButton.Items.Add(menuItem); } } break; case NotifyCollectionChangedAction.Remove: foreach (var item in e.OldItems.NullSafe().OfType<GalleryGroupFilter>()) { this.groupsMenuButton?.Items.Remove(this.GetFilterMenuItem(item)); } break; case NotifyCollectionChangedAction.Replace: foreach (var item in e.OldItems.NullSafe().OfType<GalleryGroupFilter>()) { this.groupsMenuButton?.Items.Remove(this.GetFilterMenuItem(item)); } foreach (var item in e.NewItems.NullSafe().OfType<GalleryGroupFilter>()) { if (this.groupsMenuButton is not null) { var filter = item; var menuItem = new MenuItem { Header = filter.Title, Tag = filter, IsDefinitive = false }; if (ReferenceEquals(filter, this.SelectedFilter)) { menuItem.IsChecked = true; } menuItem.Click += this.OnFilterMenuItemClick; this.groupsMenuButton.Items.Add(menuItem); } } break; case NotifyCollectionChangedAction.Reset: this.groupsMenuButton?.Items.Clear(); break; } } /// <summary> /// Gets or sets selected filter /// </summary> public GalleryGroupFilter? SelectedFilter { get { return (GalleryGroupFilter?)this.GetValue(SelectedFilterProperty); } set { this.SetValue(SelectedFilterProperty, value); } } /// <summary>Identifies the <see cref="SelectedFilter"/> dependency property.</summary> public static readonly DependencyProperty SelectedFilterProperty = DependencyProperty.Register(nameof(SelectedFilter), typeof(GalleryGroupFilter), typeof(InRibbonGallery), new PropertyMetadata(null, OnSelectedFilterChanged, CoerceSelectedFilter)); // Coerce selected filter private static object? CoerceSelectedFilter(DependencyObject d, object? 
basevalue) { var gallery = (InRibbonGallery)d; if (basevalue is null && gallery.Filters.Count > 0) { return gallery.Filters[0]; } return basevalue; } // Handles filter property changed private static void OnSelectedFilterChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var gallery = (InRibbonGallery)d; if (e.OldValue is GalleryGroupFilter oldFilter) { var menuItem = gallery.GetFilterMenuItem(oldFilter); if (menuItem is not null) { menuItem.IsChecked = false; } } if (e.NewValue is GalleryGroupFilter newFilter) { gallery.SelectedFilterTitle = newFilter.Title; gallery.SelectedFilterGroups = newFilter.Groups; var menuItem = gallery.GetFilterMenuItem(newFilter); if (menuItem is not null) { menuItem.IsChecked = true; } } else { gallery.SelectedFilterTitle = string.Empty; gallery.SelectedFilterGroups = null; } gallery.UpdateLayout(); } /// <summary> /// Gets selected filter title /// </summary> public string? SelectedFilterTitle { get { return (string?)this.GetValue(SelectedFilterTitleProperty); } private set { this.SetValue(SelectedFilterTitlePropertyKey, value); } } private static readonly DependencyPropertyKey SelectedFilterTitlePropertyKey = DependencyProperty.RegisterReadOnly(nameof(SelectedFilterTitle), typeof(string), typeof(InRibbonGallery), new PropertyMetadata()); /// <summary>Identifies the <see cref="SelectedFilterTitle"/> dependency property.</summary> public static readonly DependencyProperty SelectedFilterTitleProperty = SelectedFilterTitlePropertyKey.DependencyProperty; /// <summary> /// Gets selected filter groups /// </summary> public string? SelectedFilterGroups { get { return (string?)this.GetValue(SelectedFilterGroupsProperty); } private set { this.SetValue(SelectedFilterGroupsPropertyKey, value); } } private static readonly DependencyPropertyKey SelectedFilterGroupsPropertyKey = DependencyProperty.RegisterReadOnly(nameof(SelectedFilterGroups), typeof(string), typeof(InRibbonGallery), new PropertyMetadata()); /// <summary>Identifies the <see cref="SelectedFilterGroups"/> dependency property.</summary> public static readonly DependencyProperty SelectedFilterGroupsProperty = SelectedFilterGroupsPropertyKey.DependencyProperty; /// <summary> /// Gets whether gallery has selected filter /// </summary> public bool HasFilter { get { return (bool)this.GetValue(HasFilterProperty); } private set { this.SetValue(HasFilterPropertyKey, BooleanBoxes.Box(value)); } } private static readonly DependencyPropertyKey HasFilterPropertyKey = DependencyProperty.RegisterReadOnly(nameof(HasFilter), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.FalseBox)); /// <summary>Identifies the <see cref="HasFilter"/> dependency property.</summary> public static readonly DependencyProperty HasFilterProperty = HasFilterPropertyKey.DependencyProperty; private void OnFilterMenuItemClick(object sender, RoutedEventArgs e) { var senderItem = (MenuItem)sender; var item = this.GetFilterMenuItem(this.SelectedFilter); if (item is not null) { item.IsChecked = false; } senderItem.IsChecked = true; this.SelectedFilter = senderItem.Tag as GalleryGroupFilter; if (this.groupsMenuButton is not null) { this.groupsMenuButton.IsDropDownOpen = false; } e.Handled = true; } private MenuItem? GetFilterMenuItem(GalleryGroupFilter? 
filter) { if (filter is null) { return null; } return this.groupsMenuButton?.Items.Cast<MenuItem>() .FirstOrDefault(item => item is not null && item.Header.ToString() == filter.Title); } #endregion #region Selectable /// <summary> /// Gets or sets whether gallery items can be selected /// </summary> public bool Selectable { get { return (bool)this.GetValue(SelectableProperty); } set { this.SetValue(SelectableProperty, BooleanBoxes.Box(value)); } } /// <summary>Identifies the <see cref="Selectable"/> dependency property.</summary> public static readonly DependencyProperty SelectableProperty = DependencyProperty.Register(nameof(Selectable), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.TrueBox, OnSelectableChanged)); private static void OnSelectableChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { d.CoerceValue(SelectedItemProperty); } #endregion #region IsDropDownOpen /// <inheritdoc /> public Popup? DropDownPopup { get; private set; } /// <inheritdoc /> public bool IsContextMenuOpened { get; set; } /// <inheritdoc /> public bool IsDropDownOpen { get { return (bool)this.GetValue(IsDropDownOpenProperty); } set { this.SetValue(IsDropDownOpenProperty, BooleanBoxes.Box(value)); } } /// <summary>Identifies the <see cref="IsDropDownOpen"/> dependency property.</summary> public static readonly DependencyProperty IsDropDownOpenProperty = DependencyProperty.Register(nameof(IsDropDownOpen), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.FalseBox, OnIsDropDownOpenChanged)); private static void OnIsDropDownOpenChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var inRibbonGallery = (InRibbonGallery)d; var newValue = (bool)e.NewValue; var oldValue = !newValue; // Fire accessibility event if (UIElementAutomationPeer.FromElement(inRibbonGallery) is RibbonInRibbonGalleryAutomationPeer peer) { peer.RaiseExpandCollapseAutomationEvent(oldValue, newValue); } if (newValue) { inRibbonGallery.IsSnapped = true; if (inRibbonGallery.controlPresenter is not null) { inRibbonGallery.controlPresenter.Content = inRibbonGallery.snappedImage; } if (inRibbonGallery.galleryPanel is not null) { using (new ScopeGuard(inRibbonGallery.galleryPanel.SuspendUpdates, inRibbonGallery.galleryPanel.ResumeUpdatesRefresh).Start()) { inRibbonGallery.CurrentGalleryPanelState?.Save(); inRibbonGallery.galleryPanel.MinItemsInRow = inRibbonGallery.MinItemsInDropDownRow; inRibbonGallery.galleryPanel.MaxItemsInRow = inRibbonGallery.MaxItemsInDropDownRow; inRibbonGallery.galleryPanel.IsGrouped = true; } } if (inRibbonGallery.popupControlPresenter is not null) { inRibbonGallery.popupControlPresenter.Content = inRibbonGallery.galleryPanel; } inRibbonGallery.DropDownOpened?.Invoke(inRibbonGallery, EventArgs.Empty); Mouse.Capture(inRibbonGallery, CaptureMode.SubTree); if (inRibbonGallery.DropDownPopup?.Child is not null) { inRibbonGallery.RunInDispatcherAsync(() => { Keyboard.Focus(inRibbonGallery.DropDownPopup.Child); inRibbonGallery.DropDownPopup.Child.MoveFocus(new TraversalRequest(FocusNavigationDirection.First)); }); } } else { if (inRibbonGallery.popupControlPresenter is not null) { inRibbonGallery.popupControlPresenter.Content = null; } if (inRibbonGallery.galleryPanel is not null) { using (new ScopeGuard(inRibbonGallery.galleryPanel.SuspendUpdates, inRibbonGallery.galleryPanel.ResumeUpdatesRefresh).Start()) { inRibbonGallery.CurrentGalleryPanelState?.Restore(); inRibbonGallery.galleryPanel.IsGrouped = false; 
inRibbonGallery.galleryPanel.ClearValue(WidthProperty); } } if (inRibbonGallery.IsSnapped && inRibbonGallery.IsFrozen == false) { inRibbonGallery.IsSnapped = false; } if (inRibbonGallery.controlPresenter is not null) { inRibbonGallery.controlPresenter.Content = inRibbonGallery.galleryPanel; } inRibbonGallery.DropDownClosed?.Invoke(inRibbonGallery, EventArgs.Empty); inRibbonGallery.RunInDispatcherAsync(() => { var selectedContainer = inRibbonGallery.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(inRibbonGallery.SelectedItem); selectedContainer?.BringIntoView(); }, DispatcherPriority.SystemIdle); // If focus is within the subtree, make sure we have the focus so that focus isn't in the disposed hwnd if (inRibbonGallery.IsKeyboardFocusWithin) { // make sure the inRibbonGallery has focus inRibbonGallery.Focus(); inRibbonGallery.RunInDispatcherAsync(() => { var selectedContainer = inRibbonGallery.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(inRibbonGallery.SelectedItem); if (selectedContainer is not null) { selectedContainer.Focus(); } else { inRibbonGallery.MoveFocus(new TraversalRequest(FocusNavigationDirection.First)); } }, DispatcherPriority.SystemIdle); } if (Mouse.Captured == inRibbonGallery) { Mouse.Capture(null); } } } #endregion #region ResizeMode /// <summary> /// Gets or sets context menu resize mode /// </summary> public ContextMenuResizeMode ResizeMode { get { return (ContextMenuResizeMode)this.GetValue(ResizeModeProperty); } set { this.SetValue(ResizeModeProperty, value); } } /// <summary>Identifies the <see cref="ResizeMode"/> dependency property.</summary> public static readonly DependencyProperty ResizeModeProperty = DependencyProperty.Register(nameof(ResizeMode), typeof(ContextMenuResizeMode), typeof(InRibbonGallery), new PropertyMetadata(ContextMenuResizeMode.None)); #endregion #region CanCollapseToButton /// <summary> /// Gets or sets whether the InRibbonGallery can be collapsed to a button /// </summary> public bool CanCollapseToButton { get { return (bool)this.GetValue(CanCollapseToButtonProperty); } set { this.SetValue(CanCollapseToButtonProperty, BooleanBoxes.Box(value)); } } /// <summary>Identifies the <see cref="CanCollapseToButton"/> dependency property.</summary> public static readonly DependencyProperty CanCollapseToButtonProperty = DependencyProperty.Register(nameof(CanCollapseToButton), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.TrueBox)); #endregion #region IsCollapsed /// <summary> /// Gets whether InRibbonGallery is collapsed to button /// </summary> public bool IsCollapsed { get { return (bool)this.GetValue(IsCollapsedProperty); } set { this.SetValue(IsCollapsedProperty, BooleanBoxes.Box(value)); } } /// <summary>Identifies the <see cref="IsCollapsed"/> dependency property.</summary> public static readonly DependencyProperty IsCollapsedProperty = DependencyProperty.Register(nameof(IsCollapsed), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.FalseBox)); #endregion #region LargeIcon /// <inheritdoc /> public object? LargeIcon { get { return this.GetValue(LargeIconProperty); } set { this.SetValue(LargeIconProperty, value); } } /// <summary>Identifies the <see cref="LargeIcon"/> dependency property.</summary> public static readonly DependencyProperty LargeIconProperty = LargeIconProviderProperties.LargeIconProperty.AddOwner(typeof(InRibbonGallery), new PropertyMetadata(LogicalChildSupportHelper.OnLogicalChildPropertyChanged)); #endregion #region MediumIcon /// <inheritdoc /> public object?
MediumIcon { get { return this.GetValue(MediumIconProperty); } set { this.SetValue(MediumIconProperty, value); } } /// <summary>Identifies the <see cref="MediumIcon"/> dependency property.</summary> public static readonly DependencyProperty MediumIconProperty = MediumIconProviderProperties.MediumIconProperty.AddOwner(typeof(InRibbonGallery), new PropertyMetadata(LogicalChildSupportHelper.OnLogicalChildPropertyChanged)); #endregion #region Snapping /// <summary> /// Snaps / Unsnaps the Visual /// (removes the visuals and substitutes them with a frozen image) /// </summary> public bool IsSnapped { get => this.isSnapped; private set { if (value == this.isSnapped) { return; } if (this.IsCollapsed) { return; } if (this.IsVisible == false) { return; } if (value && (int)this.ActualWidth > 0 && (int)this.ActualHeight > 0 && this.galleryPanel is not null && (int)this.galleryPanel.ActualWidth > 0 && (int)this.galleryPanel.ActualHeight > 0) { // Render the frozen image RenderOptions.SetBitmapScalingMode(this.snappedImage, BitmapScalingMode.NearestNeighbor); var renderTargetBitmap = new RenderTargetBitmap( (int)this.galleryPanel.ActualWidth, (int)this.galleryPanel.ActualHeight, 96, 96, PixelFormats.Pbgra32); renderTargetBitmap.Render(this.galleryPanel); this.snappedImage.Source = renderTargetBitmap; this.snappedImage.FlowDirection = this.FlowDirection; this.snappedImage.Width = this.galleryPanel.ActualWidth; this.snappedImage.Height = this.galleryPanel.ActualHeight; } else { this.snappedImage.Source = null; this.snappedImage.Width = 0; this.snappedImage.Height = 0; } this.isSnapped = value; } } /// <summary> /// Defines whether this item is frozen or not because the copy of this item shown in the <see cref="QuickAccessToolBar"/> has its dropdown open. /// </summary> public bool IsFrozen { get; private set; } #endregion #region Menu /// <summary> /// Gets or sets the menu to show at the bottom of the drop down /// </summary> public RibbonMenu?
Menu { get { return (RibbonMenu?)this.GetValue(MenuProperty); } set { this.SetValue(MenuProperty, value); } } /// <summary>Identifies the <see cref="Menu"/> dependency property.</summary> public static readonly DependencyProperty MenuProperty = DependencyProperty.Register(nameof(Menu), typeof(RibbonMenu), typeof(InRibbonGallery), new PropertyMetadata()); #endregion #region Min/Max Sizes /// <summary> /// Gets or sets max count of items in row /// </summary> public int MaxItemsInRow { get { return (int)this.GetValue(MaxItemsInRowProperty); } set { this.SetValue(MaxItemsInRowProperty, value); } } /// <summary>Identifies the <see cref="MaxItemsInRow"/> dependency property.</summary> public static readonly DependencyProperty MaxItemsInRowProperty = DependencyProperty.Register(nameof(MaxItemsInRow), typeof(int), typeof(InRibbonGallery), new PropertyMetadata(8, OnMaxItemsInRowChanged)); private static void OnMaxItemsInRowChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var gal = (InRibbonGallery)d; var maxItemsInRow = (int)e.NewValue; if (gal.IsDropDownOpen == false && gal.galleryPanel is not null) { gal.galleryPanel.MaxItemsInRow = maxItemsInRow; } } /// <summary> /// Gets or sets min count of items in row /// </summary> public int MinItemsInRow { get { return (int)this.GetValue(MinItemsInRowProperty); } set { this.SetValue(MinItemsInRowProperty, value); } } /// <summary>Identifies the <see cref="MinItemsInRow"/> dependency property.</summary> public static readonly DependencyProperty MinItemsInRowProperty = DependencyProperty.Register(nameof(MinItemsInRow), typeof(int), typeof(InRibbonGallery), new PropertyMetadata(IntBoxes.One, OnMinItemsInRowChanged)); private static void OnMinItemsInRowChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var gal = (InRibbonGallery)d; var minItemsInRow = (int)e.NewValue; if (gal.IsDropDownOpen == false && gal.galleryPanel is not null) { gal.galleryPanel.MinItemsInRow = minItemsInRow; } } #endregion #region MaxDropDownHeight /// <summary> /// Gets or sets the max height of the drop down popup /// </summary> public double MaxDropDownHeight { get { return (double)this.GetValue(MaxDropDownHeightProperty); } set { this.SetValue(MaxDropDownHeightProperty, value); } } /// <summary>Identifies the <see cref="MaxDropDownHeight"/> dependency property.</summary> public static readonly DependencyProperty MaxDropDownHeightProperty = DependencyProperty.Register(nameof(MaxDropDownHeight), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(SystemParameters.PrimaryScreenHeight / 3.0)); #endregion #region MaxDropDownWidth /// <summary> /// Gets or sets the max width of the drop down popup /// </summary> public double MaxDropDownWidth { get { return (double)this.GetValue(MaxDropDownWidthProperty); } set { this.SetValue(MaxDropDownWidthProperty, value); } } /// <summary>Identifies the <see cref="MaxDropDownWidth"/> dependency property.</summary> public static readonly DependencyProperty MaxDropDownWidthProperty = DependencyProperty.Register(nameof(MaxDropDownWidth), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(SystemParameters.PrimaryScreenWidth / 3.0)); #endregion #region DropDownHeight /// <summary> /// Gets or sets initial dropdown height /// </summary> public double DropDownHeight { get { return (double)this.GetValue(DropDownHeightProperty); } set { this.SetValue(DropDownHeightProperty, value); } } /// <summary>Identifies the <see cref="DropDownHeight"/> dependency property.</summary> public static readonly DependencyProperty
DropDownHeightProperty = DependencyProperty.Register(nameof(DropDownHeight), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(DoubleBoxes.NaN)); #endregion #region DropDownWidth /// <summary> /// Gets or sets initial dropdown width /// </summary> public double DropDownWidth { get { return (double)this.GetValue(DropDownWidthProperty); } set { this.SetValue(DropDownWidthProperty, value); } } /// <summary>Identifies the <see cref="DropDownWidth"/> dependency property.</summary> public static readonly DependencyProperty DropDownWidthProperty = DependencyProperty.Register(nameof(DropDownWidth), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(DoubleBoxes.NaN)); #endregion #region GalleryPanelContainerHeight /// <summary>Identifies the <see cref="GalleryPanelContainerHeight"/> dependency property.</summary> public static readonly DependencyProperty GalleryPanelContainerHeightProperty = DependencyProperty.Register(nameof(GalleryPanelContainerHeight), typeof(double), typeof(InRibbonGallery), new PropertyMetadata(60D)); /// <summary> /// Gets or sets the height of the container which hosts the <see cref="GalleryPanel"/>. /// </summary> public double GalleryPanelContainerHeight { get { return (double)this.GetValue(GalleryPanelContainerHeightProperty); } set { this.SetValue(GalleryPanelContainerHeightProperty, value); } } #endregion #region IsSimplified /// <summary> /// Gets or sets whether or not the ribbon is in Simplified mode /// </summary> public bool IsSimplified { get { return (bool)this.GetValue(IsSimplifiedProperty); } private set { this.SetValue(IsSimplifiedPropertyKey, BooleanBoxes.Box(value)); } } private static readonly DependencyPropertyKey IsSimplifiedPropertyKey = DependencyProperty.RegisterReadOnly(nameof(IsSimplified), typeof(bool), typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.FalseBox)); /// <summary>Identifies the <see cref="IsSimplified"/> dependency property.</summary> public static readonly DependencyProperty IsSimplifiedProperty = IsSimplifiedPropertyKey.DependencyProperty; #endregion #endregion Properties #region Events /// <inheritdoc /> public event EventHandler? Scaled; /// <inheritdoc /> public event EventHandler? DropDownOpened; /// <inheritdoc /> public event EventHandler? DropDownClosed; #endregion #region Constructors /// <summary> /// Initializes static members of the <see cref="InRibbonGallery"/> class. /// </summary> static InRibbonGallery() { var type = typeof(InRibbonGallery); DefaultStyleKeyProperty.OverrideMetadata(type, new FrameworkPropertyMetadata(type)); SelectedItemProperty.OverrideMetadata(type, new FrameworkPropertyMetadata(null, CoerceSelectedItem)); ToolTipService.Attach(type); PopupService.Attach(type); ContextMenuService.Attach(type); } // Coerce selected item private static object? CoerceSelectedItem(DependencyObject d, object? 
basevalue) { var gallery = (InRibbonGallery)d; if (gallery.Selectable == false) { var galleryItem = gallery.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(basevalue); if (basevalue is not null && galleryItem is not null) { galleryItem.IsSelected = false; } return null; } return basevalue; } /// <summary> /// Default constructor /// </summary> public InRibbonGallery() { ContextMenuService.Coerce(this); this.IsVisibleChanged += this.OnIsVisibleChanged; this.Unloaded += this.OnUnloaded; } private void OnUnloaded(object sender, RoutedEventArgs e) { this.SetCurrentValue(IsDropDownOpenProperty, false); } #endregion #region Overrides /// <inheritdoc /> protected override void OnKeyUp(KeyEventArgs e) { base.OnKeyUp(e); if (e.Handled) { return; } if (e.Key == Key.F4 && (e.KeyboardDevice.Modifiers & ModifierKeys.Alt) == 0) { this.IsDropDownOpen = !this.IsDropDownOpen; e.Handled = true; } else if (e.Key == Key.Escape && this.IsDropDownOpen) { this.IsDropDownOpen = false; e.Handled = true; } } /// <inheritdoc /> public KeyTipPressedResult OnKeyTipPressed() { this.IsDropDownOpen = true; return new KeyTipPressedResult(false, true); } /// <inheritdoc /> public void OnKeyTipBack() { this.IsDropDownOpen = false; } /// <inheritdoc /> protected override void OnSelectionChanged(SelectionChangedEventArgs e) { foreach (var item in e.RemovedItems) { if (this.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(item) is GalleryItem itemContainer) { itemContainer.IsSelected = false; } } foreach (var item in e.AddedItems) { if (this.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(item) is GalleryItem itemContainer) { itemContainer.IsSelected = this.Selectable; } } base.OnSelectionChanged(e); if ((!AutomationPeer.ListenerExists(AutomationEvents.SelectionPatternOnInvalidated) && !AutomationPeer.ListenerExists(AutomationEvents.SelectionItemPatternOnElementSelected) && (!AutomationPeer.ListenerExists(AutomationEvents.SelectionItemPatternOnElementAddedToSelection) && !AutomationPeer.ListenerExists(AutomationEvents.SelectionItemPatternOnElementRemovedFromSelection))) || UIElementAutomationPeer.CreatePeerForElement(this) is not RibbonInRibbonGalleryAutomationPeer peerForElement) { return; } peerForElement.RaiseSelectionEvents(e); } /// <inheritdoc /> public override void OnApplyTemplate() { if (this.expandButton is not null) { this.expandButton.Click -= this.OnExpandClick; } this.expandButton = this.GetTemplateChild("PART_ExpandButton") as ToggleButton; if (this.expandButton is not null) { this.expandButton.Click += this.OnExpandClick; } this.dropDownButton = this.GetTemplateChild("PART_DropDownButton") as ToggleButton; if (this.dropDownButton is ISimplifiedStateControl control) { control.UpdateSimplifiedState(this.IsSimplified); } if (this.DropDownPopup is not null) { this.DropDownPopup.PreviewMouseLeftButtonUp -= this.OnPopupPreviewMouseUp; this.DropDownPopup.PreviewMouseLeftButtonDown -= this.OnPopupPreviewMouseDown; } this.DropDownPopup = this.GetTemplateChild("PART_Popup") as Popup; if (this.DropDownPopup is not null) { this.DropDownPopup.PreviewMouseLeftButtonUp += this.OnPopupPreviewMouseUp; this.DropDownPopup.PreviewMouseLeftButtonDown += this.OnPopupPreviewMouseDown; KeyboardNavigation.SetControlTabNavigation(this.DropDownPopup, KeyboardNavigationMode.Cycle); KeyboardNavigation.SetDirectionalNavigation(this.DropDownPopup, KeyboardNavigationMode.Cycle); KeyboardNavigation.SetTabNavigation(this.DropDownPopup, KeyboardNavigationMode.Cycle); } 
this.popupContentControl = this.GetTemplateChild("PART_PopupContentControl") as ResizeableContentControl; this.groupsMenuButton?.Items.Clear(); this.groupsMenuButton = this.GetTemplateChild("PART_FilterDropDownButton") as DropDownButton; if (this.groupsMenuButton is not null) { foreach (var currentFilter in this.Filters) { var item = new MenuItem { Header = currentFilter.Title, Tag = currentFilter, IsDefinitive = false }; if (ReferenceEquals(currentFilter, this.SelectedFilter)) { item.IsChecked = true; } item.Click += this.OnFilterMenuItemClick; this.groupsMenuButton.Items.Add(item); } } this.galleryPanel = this.GetTemplateChild("PART_GalleryPanel") as GalleryPanel; if (this.galleryPanel is not null) { using (new ScopeGuard(this.galleryPanel.SuspendUpdates, this.galleryPanel.ResumeUpdates).Start()) { this.galleryPanel.MinItemsInRow = this.MinItemsInRow; this.galleryPanel.MaxItemsInRow = this.MaxItemsInRow; } this.CurrentGalleryPanelState = new GalleryPanelState(this.galleryPanel); } else { this.CurrentGalleryPanelState = null; } this.controlPresenter = this.GetTemplateChild("PART_ContentPresenter") as ContentControl; this.popupControlPresenter = this.GetTemplateChild("PART_PopupContentPresenter") as ContentControl; } private void OnIsVisibleChanged(object sender, DependencyPropertyChangedEventArgs e) { var groupBox = UIHelper.GetParent<RibbonGroupBox>(this); // Only notify the parent groupbox if we are not currently being shown in the collapsed popup. // Otherwise we will cause application freezes as we would be constantly flipped between being visible and not visible. // See https://github.com/fluentribbon/Fluent.Ribbon/issues/900 for reference if (groupBox?.IsDropDownOpen == false) { groupBox.TryClearCacheAndResetStateAndScaleAndNotifyParentRibbonGroupsContainer(); } } private void OnPopupPreviewMouseUp(object sender, MouseButtonEventArgs e) { // Ignore mouse up when the mouse down happened on the expand button if (this.isButtonClicked) { this.isButtonClicked = false; e.Handled = true; } } private void OnPopupPreviewMouseDown(object sender, MouseButtonEventArgs e) { this.isButtonClicked = false; } private void OnExpandClick(object sender, RoutedEventArgs e) { this.isButtonClicked = true; } /// <inheritdoc /> public void OnSizePropertyChanged(RibbonControlSize previous, RibbonControlSize current) { if (this.ReadLocalValue(IsCollapsedProperty) != DependencyProperty.UnsetValue) { return; } if (this.CanCollapseToButton) { if (current == RibbonControlSize.Large && this.galleryPanel?.MinItemsInRow > this.MinItemsInRow) { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.FalseBox); } else { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.TrueBox); } } else { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.FalseBox); } } /// <inheritdoc /> protected override DependencyObject GetContainerForItemOverride() { return new GalleryItem(); } /// <inheritdoc /> protected override bool IsItemItsOwnContainerOverride(object item) { return item is GalleryItem; } /// <inheritdoc /> protected override void OnItemsChanged(NotifyCollectionChangedEventArgs e) { base.OnItemsChanged(e); // We don't want to notify scaling when items are moved to a different control. // This prevents excessive cache invalidation.
if (ItemsControlHelper.GetIsMovingItemsToDifferentControl(this) == false) { this.Scaled?.Invoke(this, EventArgs.Empty); } } /// <inheritdoc /> protected override void OnKeyDown(KeyEventArgs e) { if (e.Key == Key.Escape) { this.IsDropDownOpen = false; } base.OnKeyDown(e); } #endregion #region QuickAccess /// <inheritdoc /> public virtual FrameworkElement CreateQuickAccessItem() { var gallery = new InRibbonGallery(); RibbonControl.BindQuickAccessItem(this, gallery); RibbonControl.Bind(this, gallery, nameof(this.GroupBy), GroupByProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.GroupByAdvanced), GroupByAdvancedProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemHeight), ItemHeightProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemWidth), ItemWidthProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ResizeMode), ResizeModeProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.MinItemsInDropDownRow), MinItemsInDropDownRowProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.MaxItemsInDropDownRow), MaxItemsInDropDownRowProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.DisplayMemberPath), DisplayMemberPathProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.GroupStyleSelector), GroupStyleSelectorProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemContainerStyle), ItemContainerStyleProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemsPanel), ItemsPanelProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemStringFormat), ItemStringFormatProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.ItemTemplate), ItemTemplateProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.SelectedValuePath), SelectedValuePathProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.MaxDropDownWidth), MaxDropDownWidthProperty, BindingMode.OneWay); RibbonControl.Bind(this, gallery, nameof(this.MaxDropDownHeight), MaxDropDownHeightProperty, BindingMode.OneWay); gallery.DropDownOpened += this.OnQuickAccessOpened; if (this.DropDownClosed is not null) { gallery.DropDownClosed += this.DropDownClosed; } if (this.DropDownOpened is not null) { gallery.DropDownOpened += this.DropDownOpened; } RibbonProperties.SetSize(gallery, RibbonControlSize.Small); this.quickAccessGallery = gallery; return gallery; } private InRibbonGallery? quickAccessGallery; private void OnQuickAccessOpened(object? sender, EventArgs e) { if (this.quickAccessGallery is not null) { for (var i = 0; i < this.Filters.Count; i++) { this.quickAccessGallery.Filters.Add(this.Filters[i]); } this.quickAccessGallery.SelectedFilter = this.SelectedFilter; this.quickAccessGallery.DropDownClosed += this.OnQuickAccessMenuClosedOrUnloaded; this.quickAccessGallery.Unloaded += this.OnQuickAccessMenuClosedOrUnloaded; } this.Freeze(); } private void OnQuickAccessMenuClosedOrUnloaded(object? 
sender, EventArgs e) { if (this.quickAccessGallery is not null) { this.quickAccessGallery.DropDownClosed -= this.OnQuickAccessMenuClosedOrUnloaded; this.quickAccessGallery.Unloaded -= this.OnQuickAccessMenuClosedOrUnloaded; this.SelectedFilter = this.quickAccessGallery.SelectedFilter; this.quickAccessGallery.Filters.Clear(); } this.Unfreeze(); } private void Freeze() { if (this.quickAccessGallery is null) { return; } this.IsSnapped = true; this.IsFrozen = true; if (this.controlPresenter is not null) { this.controlPresenter.Content = this.snappedImage; } // Move items and selected item var selectedItem = this.SelectedItem; this.SelectedItem = null; ItemsControlHelper.MoveItemsToDifferentControl(this, this.quickAccessGallery); this.quickAccessGallery.SelectedItem = selectedItem; // Move menu var menu = this.Menu; this.Menu = null; this.quickAccessGallery.Menu = menu; } private void Unfreeze() { if (this.quickAccessGallery is null) { return; } // Move items and selected item var selectedItem = this.quickAccessGallery.SelectedItem; this.quickAccessGallery.SelectedItem = null; ItemsControlHelper.MoveItemsToDifferentControl(this.quickAccessGallery, this); this.SelectedItem = selectedItem; // Move menu var menu = this.quickAccessGallery.Menu; this.quickAccessGallery.Menu = null; this.Menu = menu; if (this.IsDropDownOpen == false) { if (this.popupControlPresenter is not null) { this.popupControlPresenter.Content = null; } if (this.controlPresenter is not null) { this.controlPresenter.Content = this.galleryPanel; } } this.RunInDispatcherAsync(() => { this.IsFrozen = false; if (this.IsDropDownOpen == false) { this.IsSnapped = false; } var selectedContainer = this.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(this.SelectedItem); selectedContainer?.BringIntoView(); }, DispatcherPriority.SystemIdle); } /// <inheritdoc /> public bool CanAddToQuickAccessToolBar { get { return (bool)this.GetValue(CanAddToQuickAccessToolBarProperty); } set { this.SetValue(CanAddToQuickAccessToolBarProperty, BooleanBoxes.Box(value)); } } /// <summary>Identifies the <see cref="CanAddToQuickAccessToolBar"/> dependency property.</summary> public static readonly DependencyProperty CanAddToQuickAccessToolBarProperty = RibbonControl.CanAddToQuickAccessToolBarProperty.AddOwner(typeof(InRibbonGallery), new PropertyMetadata(BooleanBoxes.TrueBox, RibbonControl.OnCanAddToQuickAccessToolBarChanged)); #endregion #region Implementation of IScalableRibbonControl /// <inheritdoc /> public void ResetScale() { if (this.IsCollapsed && this.ReadLocalValue(IsCollapsedProperty) == DependencyProperty.UnsetValue && RibbonProperties.GetSize(this) == RibbonControlSize.Large) { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.FalseBox); } if (this.galleryPanel is not null && this.galleryPanel.MaxItemsInRow < this.MaxItemsInRow) { this.galleryPanel.MaxItemsInRow = this.MaxItemsInRow; } this.InvalidateMeasure(); } /// <inheritdoc /> public void Enlarge() { if (this.IsCollapsed && this.ReadLocalValue(IsCollapsedProperty) == DependencyProperty.UnsetValue && RibbonProperties.GetSize(this) == RibbonControlSize.Large) { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.FalseBox); } else if (this.galleryPanel is not null && this.galleryPanel.MaxItemsInRow < this.MaxItemsInRow) { this.galleryPanel.MaxItemsInRow = Math.Min(this.galleryPanel.MaxItemsInRow + 1, this.MaxItemsInRow); } else { return; } this.InvalidateMeasure(); this.Scaled?.Invoke(this, EventArgs.Empty); } /// <inheritdoc /> public void Reduce() { if 
(this.galleryPanel is not null && this.galleryPanel.MaxItemsInRow > this.MinItemsInRow) { this.galleryPanel.MaxItemsInRow = Math.Max(this.galleryPanel.MaxItemsInRow - 1, 0); } else if (this.CanCollapseToButton && this.ReadLocalValue(IsCollapsedProperty) == DependencyProperty.UnsetValue && this.IsCollapsed == false) { this.SetCurrentValue(IsCollapsedProperty, BooleanBoxes.TrueBox); } else { return; } this.InvalidateMeasure(); this.Scaled?.Invoke(this, EventArgs.Empty); } #endregion /// <inheritdoc /> void ISimplifiedStateControl.UpdateSimplifiedState(bool isSimplified) { this.IsSimplified = isSimplified; if (this.dropDownButton is ISimplifiedStateControl control) { control.UpdateSimplifiedState(isSimplified); } } /// <inheritdoc /> void ILogicalChildSupport.AddLogicalChild(object child) { this.AddLogicalChild(child); } /// <inheritdoc /> void ILogicalChildSupport.RemoveLogicalChild(object child) { this.RemoveLogicalChild(child); } /// <inheritdoc /> protected override IEnumerator LogicalChildren { get { var baseEnumerator = base.LogicalChildren; while (baseEnumerator?.MoveNext() == true) { yield return baseEnumerator.Current; } if (this.Icon is not null) { yield return this.Icon; } if (this.MediumIcon is not null) { yield return this.MediumIcon; } if (this.LargeIcon is not null) { yield return this.LargeIcon; } if (this.Header is not null) { yield return this.Header; } } } /// <inheritdoc /> protected override AutomationPeer OnCreateAutomationPeer() => new RibbonInRibbonGalleryAutomationPeer(this); internal class GalleryPanelState { public GalleryPanelState(GalleryPanel galleryPanel) { this.GalleryPanel = galleryPanel; this.Save(); } public GalleryPanel GalleryPanel { get; } public int MinItemsInRow { get; private set; } public int MaxItemsInRow { get; private set; } public void Save() { this.MinItemsInRow = this.GalleryPanel.MinItemsInRow; this.MaxItemsInRow = this.GalleryPanel.MaxItemsInRow; } public void Restore() { this.GalleryPanel.MinItemsInRow = this.MinItemsInRow; this.GalleryPanel.MaxItemsInRow = this.MaxItemsInRow; } } /// <summary> /// Causes the object to scroll into view. If it is not visible, it is aligned either at the top or bottom of the viewport. /// </summary> public void ScrollIntoView(object item) { if (this.ItemContainerGenerator.Status == GeneratorStatus.ContainersGenerated) { this.OnBringItemIntoView(item); } else { // The items aren't generated, try at a later time this.Dispatcher.BeginInvoke(DispatcherPriority.Loaded, new DispatcherOperationCallback(this.OnBringItemIntoView), item); } } private object? OnBringItemIntoView(object item) { var selectedContainer = this.ItemContainerGenerator.ContainerOrContainerContentFromItem<GalleryItem>(item); selectedContainer?.BringIntoView(); return null; } } }
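// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the Fluent.Ribbon sources above). It shows how the
// InRibbonGallery API defined in this file could be wired up from code-behind. The
// GalleryGroupFilter object initializer (settable Title/Groups with comma-separated group
// names), the GalleryItem Content/Tag members and the "Shapes"/"Lines" names are assumptions
// made for this example only.
// ---------------------------------------------------------------------------------------------
namespace FluentUsageSketches
{
    using System.Diagnostics;
    using Fluent;

    public static class InRibbonGalleryUsageSketch
    {
        public static InRibbonGallery CreateShapeGallery()
        {
            var gallery = new InRibbonGallery
            {
                Header = "Shapes",
                MinItemsInRow = 3,           // row limits while hosted directly in the ribbon
                MaxItemsInRow = 8,
                MinItemsInDropDownRow = 5,   // row limits used while the popup is open
                MaxItemsInDropDownRow = 10,
                ItemWidth = 40,
                ItemHeight = 40
            };

            // Filters narrow the visible groups; Groups is assumed to be a comma separated list.
            gallery.Filters.Add(new GalleryGroupFilter { Title = "All", Groups = "Lines,Shapes" });
            gallery.Filters.Add(new GalleryGroupFilter { Title = "Lines only", Groups = "Lines" });

            // Populate the gallery with containers; GetContainerForItemOverride returns GalleryItem,
            // so adding GalleryItem instances directly keeps them as their own containers.
            for (var i = 0; i < 20; i++)
            {
                gallery.Items.Add(new GalleryItem { Content = "Item " + i, Tag = i < 10 ? "Lines" : "Shapes" });
            }

            gallery.SelectionChanged += (sender, args) => Debug.WriteLine("Selected: " + gallery.SelectedItem);

            return gallery;
        }
    }
}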
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Globalization; using Xunit; namespace System.Tests { public partial class SByteTests { [Fact] public static void Ctor_Empty() { var i = new sbyte(); Assert.Equal(0, i); } [Fact] public static void Ctor_Value() { sbyte i = 41; Assert.Equal(41, i); } [Fact] public static void MaxValue() { Assert.Equal(0x7F, sbyte.MaxValue); } [Fact] public static void MinValue() { Assert.Equal(-0x80, sbyte.MinValue); } [Theory] [InlineData((sbyte)114, (sbyte)114, 0)] [InlineData((sbyte)114, sbyte.MinValue, 1)] [InlineData((sbyte)-114, sbyte.MinValue, 1)] [InlineData(sbyte.MinValue, sbyte.MinValue, 0)] [InlineData((sbyte)114, (sbyte)-123, 1)] [InlineData((sbyte)114, (sbyte)0, 1)] [InlineData((sbyte)114, (sbyte)123, -1)] [InlineData((sbyte)114, sbyte.MaxValue, -1)] [InlineData((sbyte)-114, sbyte.MaxValue, -1)] [InlineData(sbyte.MaxValue, sbyte.MaxValue, 0)] [InlineData((sbyte)114, null, 1)] public void CompareTo_Other_ReturnsExpected(sbyte i, object value, int expected) { if (value is sbyte sbyteValue) { Assert.Equal(expected, Math.Sign(i.CompareTo(sbyteValue))); Assert.Equal(-expected, Math.Sign(sbyteValue.CompareTo(i))); } Assert.Equal(expected, Math.Sign(i.CompareTo(value))); } [Theory] [InlineData("a")] [InlineData(234)] public void CompareTo_ObjectNotSByte_ThrowsArgumentException(object value) { AssertExtensions.Throws<ArgumentException>(null, () => ((sbyte)123).CompareTo(value)); } [Theory] [InlineData((sbyte)78, (sbyte)78, true)] [InlineData((sbyte)78, (sbyte)-78, false)] [InlineData((sbyte)78, (sbyte)0, false)] [InlineData((sbyte)0, (sbyte)0, true)] [InlineData((sbyte)-78, (sbyte)-78, true)] [InlineData((sbyte)-78, (sbyte)78, false)] [InlineData((sbyte)78, null, false)] [InlineData((sbyte)78, "78", false)] [InlineData((sbyte)78, 78, false)] public static void Equals(sbyte i1, object obj, bool expected) { if (obj is sbyte) { sbyte i2 = (sbyte)obj; Assert.Equal(expected, i1.Equals(i2)); Assert.Equal(expected, i1.GetHashCode().Equals(i2.GetHashCode())); } Assert.Equal(expected, i1.Equals(obj)); } [Fact] public void GetTypeCode_Invoke_ReturnsSByte() { Assert.Equal(TypeCode.SByte, ((sbyte)1).GetTypeCode()); } public static IEnumerable<object[]> ToString_TestData() { foreach (NumberFormatInfo defaultFormat in new[] { null, NumberFormatInfo.CurrentInfo }) { foreach (string defaultSpecifier in new[] { "G", "G\0", "\0N222", "\0", "" }) { yield return new object[] { sbyte.MinValue, defaultSpecifier, defaultFormat, "-128" }; yield return new object[] { (sbyte)-123, defaultSpecifier, defaultFormat, "-123" }; yield return new object[] { (sbyte)0, defaultSpecifier, defaultFormat, "0" }; yield return new object[] { (sbyte)123, defaultSpecifier, defaultFormat, "123" }; yield return new object[] { sbyte.MaxValue, defaultSpecifier, defaultFormat, "127" }; } yield return new object[] { (sbyte)123, "D", defaultFormat, "123" }; yield return new object[] { (sbyte)123, "D99", defaultFormat, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000123" }; yield return new object[] { (sbyte)123, "D99\09", defaultFormat, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000123" }; yield return new object[] { (sbyte)-123, "D99", defaultFormat, 
"-000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000123" }; yield return new object[] { (sbyte)0x24, "x", defaultFormat, "24" }; yield return new object[] { (sbyte)-0x24, "x", defaultFormat, "dc" }; yield return new object[] { (sbyte)24, "N", defaultFormat, string.Format("{0:N}", 24.00) }; } var customFormat = new NumberFormatInfo() { NegativeSign = "#", NumberDecimalSeparator = "~", NumberGroupSeparator = "*", PositiveSign = "&", NumberDecimalDigits = 2, PercentSymbol = "@", PercentGroupSeparator = ",", PercentDecimalSeparator = ".", PercentDecimalDigits = 5 }; yield return new object[] { (sbyte)-24, "N", customFormat, "#24~00" }; yield return new object[] { (sbyte)24, "N", customFormat, "24~00" }; yield return new object[] { (sbyte)123, "E", customFormat, "1~230000E&002" }; yield return new object[] { (sbyte)123, "F", customFormat, "123~00" }; yield return new object[] { (sbyte)123, "P", customFormat, "12,300.00000 @" }; } [Theory] [MemberData(nameof(ToString_TestData))] public static void ToString(sbyte i, string format, IFormatProvider provider, string expected) { // Format is case insensitive string upperFormat = format.ToUpperInvariant(); string lowerFormat = format.ToLowerInvariant(); string upperExpected = expected.ToUpperInvariant(); string lowerExpected = expected.ToLowerInvariant(); bool isDefaultProvider = (provider == null || provider == NumberFormatInfo.CurrentInfo); if (string.IsNullOrEmpty(format) || format.ToUpperInvariant() == "G") { if (isDefaultProvider) { Assert.Equal(upperExpected, i.ToString()); Assert.Equal(upperExpected, i.ToString((IFormatProvider)null)); } Assert.Equal(upperExpected, i.ToString(provider)); } if (isDefaultProvider) { Assert.Equal(upperExpected, i.ToString(upperFormat)); Assert.Equal(lowerExpected, i.ToString(lowerFormat)); Assert.Equal(upperExpected, i.ToString(upperFormat, null)); Assert.Equal(lowerExpected, i.ToString(lowerFormat, null)); } Assert.Equal(upperExpected, i.ToString(upperFormat, provider)); Assert.Equal(lowerExpected, i.ToString(lowerFormat, provider)); } [Fact] public static void ToString_InvalidFormat_ThrowsFormatException() { sbyte b = 123; Assert.Throws<FormatException>(() => b.ToString("r")); // Invalid format Assert.Throws<FormatException>(() => b.ToString("r", null)); // Invalid format Assert.Throws<FormatException>(() => b.ToString("R")); // Invalid format Assert.Throws<FormatException>(() => b.ToString("R", null)); // Invalid format Assert.Throws<FormatException>(() => b.ToString("Y")); // Invalid format Assert.Throws<FormatException>(() => b.ToString("Y", null)); // Invalid format } public static IEnumerable<object[]> Parse_Valid_TestData() { NumberStyles defaultStyle = NumberStyles.Integer; NumberFormatInfo emptyFormat = new NumberFormatInfo(); NumberFormatInfo customFormat = new NumberFormatInfo(); customFormat.CurrencySymbol = "$"; yield return new object[] { "-123", defaultStyle, null, (sbyte)-123 }; yield return new object[] { "0", defaultStyle, null, (sbyte)0 }; yield return new object[] { "123", defaultStyle, null, (sbyte)123 }; yield return new object[] { "+123", defaultStyle, null, (sbyte)123 }; yield return new object[] { " 123 ", defaultStyle, null, (sbyte)123 }; yield return new object[] { "127", defaultStyle, null, (sbyte)127 }; yield return new object[] { "12", NumberStyles.HexNumber, null, (sbyte)0x12 }; yield return new object[] { "10", NumberStyles.AllowThousands, null, (sbyte)10 }; yield return new object[] { "(123)", NumberStyles.AllowParentheses, null, 
(sbyte)-123 }; // Parentheses = negative yield return new object[] { "123", defaultStyle, emptyFormat, (sbyte)123 }; yield return new object[] { "123", NumberStyles.Any, emptyFormat, (sbyte)123 }; yield return new object[] { "12", NumberStyles.HexNumber, emptyFormat, (sbyte)0x12 }; yield return new object[] { "a", NumberStyles.HexNumber, null, (sbyte)0xa }; yield return new object[] { "A", NumberStyles.HexNumber, null, (sbyte)0xa }; yield return new object[] { "$100", NumberStyles.Currency, customFormat, (sbyte)100 }; } [Theory] [MemberData(nameof(Parse_Valid_TestData))] public static void Parse_Valid(string value, NumberStyles style, IFormatProvider provider, sbyte expected) { sbyte result; // Default style and provider if (style == NumberStyles.Integer && provider == null) { Assert.True(sbyte.TryParse(value, out result)); Assert.Equal(expected, result); Assert.Equal(expected, sbyte.Parse(value)); } // Default provider if (provider == null) { Assert.Equal(expected, sbyte.Parse(value, style)); // Substitute default NumberFormatInfo Assert.True(sbyte.TryParse(value, style, new NumberFormatInfo(), out result)); Assert.Equal(expected, result); Assert.Equal(expected, sbyte.Parse(value, style, new NumberFormatInfo())); } // Default style if (style == NumberStyles.Integer) { Assert.Equal(expected, sbyte.Parse(value, provider)); } // Full overloads Assert.True(sbyte.TryParse(value, style, provider, out result)); Assert.Equal(expected, result); Assert.Equal(expected, sbyte.Parse(value, style, provider)); } public static IEnumerable<object[]> Parse_Invalid_TestData() { // Include the test data for wider primitives. foreach (object[] widerTests in Int32Tests.Parse_Invalid_TestData()) { yield return widerTests; } yield return new object[] { "-129", NumberStyles.Integer, null, typeof(OverflowException) }; // < min value yield return new object[] { "128", NumberStyles.Integer, null, typeof(OverflowException) }; // > max value yield return new object[] { "FFFFFFFF", NumberStyles.HexNumber, null, typeof(OverflowException) }; // Hex number < 0 yield return new object[] { "100", NumberStyles.HexNumber, null, typeof(OverflowException) }; // Hex number > max value } [Theory] [MemberData(nameof(Parse_Invalid_TestData))] public static void Parse_Invalid(string value, NumberStyles style, IFormatProvider provider, Type exceptionType) { sbyte result; // Default style and provider if (style == NumberStyles.Integer && provider == null) { Assert.False(sbyte.TryParse(value, out result)); Assert.Equal(default, result); Assert.Throws(exceptionType, () => sbyte.Parse(value)); } // Default provider if (provider == null) { Assert.Throws(exceptionType, () => sbyte.Parse(value, style)); // Substitute default NumberFormatInfo Assert.False(sbyte.TryParse(value, style, new NumberFormatInfo(), out result)); Assert.Equal(default, result); Assert.Throws(exceptionType, () => sbyte.Parse(value, style, new NumberFormatInfo())); } // Default style if (style == NumberStyles.Integer) { Assert.Throws(exceptionType, () => sbyte.Parse(value, provider)); } // Full overloads Assert.False(sbyte.TryParse(value, style, provider, out result)); Assert.Equal(default, result); Assert.Throws(exceptionType, () => sbyte.Parse(value, style, provider)); } [Theory] [InlineData(NumberStyles.HexNumber | NumberStyles.AllowParentheses, null)] [InlineData(unchecked((NumberStyles)0xFFFFFC00), "style")] public static void TryParse_InvalidNumberStyle_ThrowsArgumentException(NumberStyles style, string paramName) { sbyte result = 0; 
AssertExtensions.Throws<ArgumentException>(paramName, () => sbyte.TryParse("1", style, null, out result)); Assert.Equal(default(sbyte), result); AssertExtensions.Throws<ArgumentException>(paramName, () => sbyte.Parse("1", style)); AssertExtensions.Throws<ArgumentException>(paramName, () => sbyte.Parse("1", style, null)); } } }
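// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the test suite above): a small console example of the
// signed-hex behaviour that the ToString/Parse test data exercises. The class and method names
// are invented for this example.
// ---------------------------------------------------------------------------------------------
namespace System.Tests.Sketches
{
    using System;
    using System.Globalization;

    internal static class SByteHexSketch
    {
        internal static void Run()
        {
            sbyte negative = -0x24;                              // -36
            string hex = negative.ToString("x");                 // "dc": the two's-complement bit pattern
            Console.WriteLine(hex);

            // Hex parsing interprets the digits as a bit pattern, so "dc" round-trips to -36 ...
            sbyte roundTripped = sbyte.Parse(hex, NumberStyles.HexNumber);
            Console.WriteLine(roundTripped);                     // -36

            // ... while anything wider than 8 bits overflows, matching Parse_Invalid_TestData.
            Console.WriteLine(sbyte.TryParse("100", NumberStyles.HexNumber, null, out _)); // False
        }
    }
}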
using System; using UnityEngine; namespace ExtendedLibrary.Events { public partial class ExtendedEvent { [Serializable] public class Value { private Type typeOf; private object value = null; private bool hasValue = false; /// <summary> /// AssemblyQualifiedName /// </summary> [HideInInspector] [SerializeField] private string fullTypeName = string.Empty; [HideInInspector] [SerializeField] private ObjectType type = ObjectType.Void; [HideInInspector] [SerializeField] private bool boolValue; [HideInInspector] [SerializeField] private byte byteValue; [HideInInspector] [SerializeField] private sbyte sbyteValue; [HideInInspector] [SerializeField] private char charValue; [HideInInspector] [SerializeField] private short shortValue; [HideInInspector] [SerializeField] private ushort ushortValue; [HideInInspector] [SerializeField] private int intValue; [HideInInspector] [SerializeField] private uint uintValue; [HideInInspector] [SerializeField] private long longValue; [HideInInspector] [SerializeField] private float floatValue; [HideInInspector] [SerializeField] private double doubleValue; [HideInInspector] [SerializeField] private long enumValue; [HideInInspector] [SerializeField] private string stringValue = string.Empty; [HideInInspector] [SerializeField] private LayerMask layerMaskValue; [HideInInspector] [SerializeField] private Vector2 vector2Value; [HideInInspector] [SerializeField] private Vector3 vector3Value; [HideInInspector] [SerializeField] private Vector4 vector4Value; [HideInInspector] [SerializeField] private Color colorValue; [HideInInspector] [SerializeField] private Bounds boundsValue; [HideInInspector] [SerializeField] private Rect rectValue; [HideInInspector] [SerializeField] private Quaternion quaternionValue; [HideInInspector] [SerializeField] private AnimationCurve animationCurveValue; [HideInInspector] [SerializeField] private UnityEngine.Object unityObjectReference; [HideInInspector] [SerializeField] private string serializedValue = string.Empty; public ObjectType Type { get { return this.type; } } /// <summary> /// AssemblyQualifiedName /// </summary> public string FullTypeName { get { return this.fullTypeName; } } public Type TypeOf { get { if (this.typeOf == null) { if (this.type == ObjectType.Void) return null; if (string.IsNullOrEmpty(this.fullTypeName)) return null; try { this.typeOf = System.Type.GetType(this.fullTypeName); } catch (Exception ex) { Debug.LogErrorFormat("{0}\n{1}", ex.Message, ex.StackTrace); return null; } } return this.typeOf; } } public Value() { } public Value(object value) { Set(value); } public object Get() { if (this.hasValue) return this.value; var typeOf = this.TypeOf; if (typeOf == null) { this.hasValue = true; return this.value; } switch (this.type) { case ObjectType.Boolean: this.value = this.boolValue; break; case ObjectType.Byte: this.value = this.byteValue; break; case ObjectType.SByte: this.value = this.sbyteValue; break; case ObjectType.Char: this.value = this.charValue; break; case ObjectType.Int16: this.value = this.shortValue; break; case ObjectType.UInt16: this.value = this.ushortValue; break; case ObjectType.Int32: this.value = this.intValue; break; case ObjectType.UInt32: this.value = this.uintValue; break; case ObjectType.Int64: this.value = this.longValue; break; case ObjectType.UInt64: ulong ulongValue; ulong.TryParse(this.serializedValue, out ulongValue); this.value = ulongValue; break; case ObjectType.Single: this.value = this.floatValue; break; case ObjectType.Double: this.value = this.doubleValue; break; case 
ObjectType.Enum: try { this.value = Enum.ToObject(typeOf, this.enumValue); break; } catch (Exception ex) { Debug.LogErrorFormat("{0}\n{1}", ex.Message, ex.StackTrace); return null; } case ObjectType.String: this.value = this.stringValue; break; case ObjectType.LayerMask: this.value = this.layerMaskValue; break; case ObjectType.Vector2: this.value = this.vector2Value; break; case ObjectType.Vector3: this.value = this.vector3Value; break; case ObjectType.Vector4: this.value = this.vector4Value; break; case ObjectType.Color: this.value = this.colorValue; break; case ObjectType.Bounds: this.value = this.boundsValue; break; case ObjectType.Rect: this.value = this.rectValue; break; case ObjectType.Quaternion: this.value = this.quaternionValue; break; case ObjectType.Matrix4x4: this.value = this.serializedValue.ToObject<Matrix4x4>(); break; case ObjectType.AnimationCurve: this.value = this.animationCurveValue; break; case ObjectType.UnityObject: this.value = this.unityObjectReference; break; case ObjectType.SerializableType: case ObjectType.Array: case ObjectType.List: try { var value = this.serializedValue.ToObject(typeOf); if (value == null) { value = Activator.CreateInstance(typeOf); } this.value = value; break; } catch (Exception ex) { Debug.LogErrorFormat("{0}\n{1}", ex.Message, ex.StackTrace); return null; } } this.hasValue = true; return this.value; } public void Set(object value) { this.typeOf = value.GetType(); this.type = this.typeOf.GetSerializableObjectType(); if (this.type == ObjectType.Void) return; this.fullTypeName = this.typeOf.GetSerializableAssemblyQualifiedName(); try { switch (this.type) { case ObjectType.Void: break; case ObjectType.Boolean: this.boolValue = (bool) value; break; case ObjectType.Byte: this.byteValue = (byte) value; break; case ObjectType.SByte: this.sbyteValue = (sbyte) value; break; case ObjectType.Char: this.charValue = (char) value; break; case ObjectType.Int16: this.shortValue = (short) value; break; case ObjectType.UInt16: this.ushortValue = (ushort) value; break; case ObjectType.Int32: this.intValue = (int) value; break; case ObjectType.UInt32: this.uintValue = (uint) value; break; case ObjectType.Int64: this.longValue = (long) value; break; case ObjectType.UInt64: this.serializedValue = value.ToString(); break; case ObjectType.Single: this.floatValue = (float) value; break; case ObjectType.Double: this.doubleValue = (double) value; break; case ObjectType.Enum: this.enumValue = (long) value; break; case ObjectType.String: this.stringValue = (string) value; break; case ObjectType.LayerMask: this.layerMaskValue = (LayerMask) value; break; case ObjectType.Vector2: this.vector2Value = (Vector2) value; break; case ObjectType.Vector3: this.vector3Value = (Vector3) value; break; case ObjectType.Vector4: this.vector4Value = (Vector4) value; break; case ObjectType.Color: this.colorValue = (Color) value; break; case ObjectType.Bounds: this.boundsValue = (Bounds) value; break; case ObjectType.Rect: this.rectValue = (Rect) value; break; case ObjectType.Quaternion: this.quaternionValue = (Quaternion) value; break; case ObjectType.AnimationCurve: this.animationCurveValue = (AnimationCurve) value; break; case ObjectType.UnityObject: this.unityObjectReference = (UnityEngine.Object) value; break; case ObjectType.Matrix4x4: case ObjectType.SerializableType: case ObjectType.Array: case ObjectType.List: this.serializedValue = value.ToJson(this.typeOf); break; default: break; } } catch (Exception ex) { Debug.LogErrorFormat("{0}\n{1}", ex.Message, ex.StackTrace); } } } } }
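// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the library code above): storing a value in an
// ExtendedEvent.Value and reading it back through the serialization-aware accessors. It assumes
// the ObjectType mapping for Vector3 and Color behaves as the switch statements above suggest;
// the MonoBehaviour name is invented for this example.
// ---------------------------------------------------------------------------------------------
namespace ExtendedLibrary.Events.Sketches
{
    using UnityEngine;

    public class ValueRoundTripSketch : MonoBehaviour
    {
        private void Start()
        {
            // Constructor overload that immediately captures the value and its ObjectType.
            var boxedVector = new ExtendedEvent.Value(new Vector3(1f, 2f, 3f));
            Debug.Log(boxedVector.Type);            // expected: ObjectType.Vector3
            Debug.Log((Vector3)boxedVector.Get());  // expected: (1.0, 2.0, 3.0)

            // Parameterless constructor plus an explicit Set call.
            var boxedColor = new ExtendedEvent.Value();
            boxedColor.Set(Color.red);
            Debug.Log((Color)boxedColor.Get());     // expected: RGBA(1.000, 0.000, 0.000, 1.000)
        }
    }
}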
using System; using System.Collections.Generic; using System.IO; using System.IO.Compression; using System.Runtime.InteropServices; using Microsoft.DotNet.Cli.Build.Framework; using Microsoft.Extensions.PlatformAbstractions; using static Microsoft.DotNet.Cli.Build.Framework.BuildHelpers; namespace Microsoft.DotNet.Cli.Build { public static class PackageTargets { public static readonly string[] ProjectsToPack = new string[] { "Microsoft.DotNet.Cli.Utils", "Microsoft.DotNet.ProjectModel", "Microsoft.DotNet.ProjectModel.Loader", "Microsoft.DotNet.ProjectModel.Workspaces", "Microsoft.DotNet.InternalAbstractions", "Microsoft.Extensions.DependencyModel", "Microsoft.Extensions.Testing.Abstractions", "Microsoft.DotNet.Compiler.Common", "Microsoft.DotNet.Files", "dotnet-compile-fsc" }; [Target(nameof(PackageTargets.CopyCLISDKLayout), nameof(PackageTargets.CopySharedHostLayout), nameof(PackageTargets.CopySharedFxLayout), nameof(PackageTargets.CopyCombinedFrameworkSDKHostLayout), nameof(PackageTargets.CopyCombinedFrameworkHostLayout))] public static BuildTargetResult InitPackage(BuildTargetContext c) { Directory.CreateDirectory(Dirs.Packages); return c.Success(); } [Target(nameof(PrepareTargets.Init), nameof(PackageTargets.InitPackage), nameof(PackageTargets.GenerateVersionBadge), nameof(PackageTargets.GenerateCompressedFile), nameof(InstallerTargets.GenerateInstaller), nameof(PackageTargets.GenerateNugetPackages), nameof(InstallerTargets.TestInstaller))] [Environment("DOTNET_BUILD_SKIP_PACKAGING", null, "0", "false")] public static BuildTargetResult Package(BuildTargetContext c) { return c.Success(); } [Target] public static BuildTargetResult GenerateVersionBadge(BuildTargetContext c) { var buildVersion = c.BuildContext.Get<BuildVersion>("BuildVersion"); var versionSvg = Path.Combine(Dirs.RepoRoot, "resources", "images", "version_badge.svg"); var outputVersionSvg = c.BuildContext.Get<string>("VersionBadge"); var versionSvgContent = File.ReadAllText(versionSvg); versionSvgContent = versionSvgContent.Replace("ver_number", buildVersion.NuGetVersion); File.WriteAllText(outputVersionSvg, versionSvgContent); return c.Success(); } [Target] public static BuildTargetResult CopyCLISDKLayout(BuildTargetContext c) { var cliSdkRoot = Path.Combine(Dirs.Output, "obj", "clisdk"); if (Directory.Exists(cliSdkRoot)) { Utils.DeleteDirectory(cliSdkRoot); } Directory.CreateDirectory(cliSdkRoot); Utils.CopyDirectoryRecursively(Path.Combine(Dirs.Stage2, "sdk"), cliSdkRoot, true); FixPermissions(cliSdkRoot); c.BuildContext["CLISDKRoot"] = cliSdkRoot; return c.Success(); } [Target] public static BuildTargetResult CopySharedHostLayout(BuildTargetContext c) { var sharedHostRoot = Path.Combine(Dirs.Output, "obj", "sharedHost"); if (Directory.Exists(sharedHostRoot)) { Utils.DeleteDirectory(sharedHostRoot); } Directory.CreateDirectory(sharedHostRoot); foreach (var file in Directory.GetFiles(Dirs.Stage2, "*", SearchOption.TopDirectoryOnly)) { var destFile = file.Replace(Dirs.Stage2, sharedHostRoot); File.Copy(file, destFile, true); } FixPermissions(sharedHostRoot); c.BuildContext["SharedHostPublishRoot"] = sharedHostRoot; return c.Success(); } [Target] public static BuildTargetResult CopySharedFxLayout(BuildTargetContext c) { var sharedFxRoot = Path.Combine(Dirs.Output, "obj", "sharedFx"); if (Directory.Exists(sharedFxRoot)) { Utils.DeleteDirectory(sharedFxRoot); } Directory.CreateDirectory(sharedFxRoot); Utils.CopyDirectoryRecursively(Path.Combine(Dirs.Stage2, "shared"), sharedFxRoot, true); FixPermissions(sharedFxRoot); 
c.BuildContext["SharedFrameworkPublishRoot"] = sharedFxRoot; return c.Success(); } [Target] public static BuildTargetResult CopyCombinedFrameworkSDKHostLayout(BuildTargetContext c) { var combinedRoot = Path.Combine(Dirs.Output, "obj", "combined-framework-sdk-host"); if (Directory.Exists(combinedRoot)) { Utils.DeleteDirectory(combinedRoot); } string sdkPublishRoot = c.BuildContext.Get<string>("CLISDKRoot"); Utils.CopyDirectoryRecursively(sdkPublishRoot, combinedRoot); string sharedFrameworkPublishRoot = c.BuildContext.Get<string>("SharedFrameworkPublishRoot"); Utils.CopyDirectoryRecursively(sharedFrameworkPublishRoot, combinedRoot); string sharedHostPublishRoot = c.BuildContext.Get<string>("SharedHostPublishRoot"); Utils.CopyDirectoryRecursively(sharedHostPublishRoot, combinedRoot); c.BuildContext["CombinedFrameworkSDKHostRoot"] = combinedRoot; return c.Success(); } [Target] public static BuildTargetResult CopyCombinedFrameworkHostLayout(BuildTargetContext c) { var combinedRoot = Path.Combine(Dirs.Output, "obj", "combined-framework-host"); if (Directory.Exists(combinedRoot)) { Utils.DeleteDirectory(combinedRoot); } string sharedFrameworkPublishRoot = c.BuildContext.Get<string>("SharedFrameworkPublishRoot"); Utils.CopyDirectoryRecursively(sharedFrameworkPublishRoot, combinedRoot); string sharedHostPublishRoot = c.BuildContext.Get<string>("SharedHostPublishRoot"); Utils.CopyDirectoryRecursively(sharedHostPublishRoot, combinedRoot); c.BuildContext["CombinedFrameworkHostRoot"] = combinedRoot; return c.Success(); } [Target(nameof(PackageTargets.GenerateZip), nameof(PackageTargets.GenerateTarBall))] public static BuildTargetResult GenerateCompressedFile(BuildTargetContext c) { return c.Success(); } [Target(nameof(PackageTargets.InitPackage))] [BuildPlatforms(BuildPlatform.Windows)] public static BuildTargetResult GenerateZip(BuildTargetContext c) { CreateZipFromDirectory(c.BuildContext.Get<string>("CombinedFrameworkSDKHostRoot"), c.BuildContext.Get<string>("CombinedFrameworkSDKHostCompressedFile")); CreateZipFromDirectory(c.BuildContext.Get<string>("CombinedFrameworkHostRoot"), c.BuildContext.Get<string>("CombinedFrameworkHostCompressedFile")); return c.Success(); } [Target(nameof(PackageTargets.InitPackage))] [BuildPlatforms(BuildPlatform.Unix)] public static BuildTargetResult GenerateTarBall(BuildTargetContext c) { CreateTarBallFromDirectory(c.BuildContext.Get<string>("CombinedFrameworkSDKHostRoot"), c.BuildContext.Get<string>("CombinedFrameworkSDKHostCompressedFile")); CreateTarBallFromDirectory(c.BuildContext.Get<string>("CombinedFrameworkHostRoot"), c.BuildContext.Get<string>("CombinedFrameworkHostCompressedFile")); return c.Success(); } [Target] public static BuildTargetResult GenerateNugetPackages(BuildTargetContext c) { var versionSuffix = c.BuildContext.Get<BuildVersion>("BuildVersion").VersionSuffix; var configuration = c.BuildContext.Get<string>("Configuration"); var env = GetCommonEnvVars(c); var dotnet = DotNetCli.Stage2; var packagingBuildBasePath = Path.Combine(Dirs.Stage2Compilation, "forPackaging"); FS.Mkdirp(Dirs.PackagesIntermediate); FS.Mkdirp(Dirs.Packages); foreach (var projectName in ProjectsToPack) { var projectFile = Path.Combine(Dirs.RepoRoot, "src", projectName, "project.json"); dotnet.Pack( projectFile, "--no-build", "--build-base-path", packagingBuildBasePath, "--output", Dirs.PackagesIntermediate, "--configuration", configuration, "--version-suffix", versionSuffix) .Execute() .EnsureSuccessful(); } var packageFiles = 
Directory.EnumerateFiles(Dirs.PackagesIntermediate, "*.nupkg"); foreach (var packageFile in packageFiles) { if (!packageFile.EndsWith(".symbols.nupkg")) { var destinationPath = Path.Combine(Dirs.Packages, Path.GetFileName(packageFile)); File.Copy(packageFile, destinationPath, overwrite: true); } } return c.Success(); } internal static Dictionary<string, string> GetCommonEnvVars(BuildTargetContext c) { // Set up the environment variables previously defined by common.sh/ps1 // This is overkill, but I want to cover all the variables used in all OSes (including where some have the same names) var buildVersion = c.BuildContext.Get<BuildVersion>("BuildVersion"); var configuration = c.BuildContext.Get<string>("Configuration"); var architecture = PlatformServices.Default.Runtime.RuntimeArchitecture; var env = new Dictionary<string, string>() { { "RID", PlatformServices.Default.Runtime.GetRuntimeIdentifier() }, { "OSNAME", PlatformServices.Default.Runtime.OperatingSystem }, { "TFM", "dnxcore50" }, { "REPOROOT", Dirs.RepoRoot }, { "OutputDir", Dirs.Output }, { "Stage1Dir", Dirs.Stage1 }, { "Stage1CompilationDir", Dirs.Stage1Compilation }, { "Stage2Dir", Dirs.Stage2 }, { "STAGE2_DIR", Dirs.Stage2 }, { "Stage2CompilationDir", Dirs.Stage2Compilation }, { "HostDir", Dirs.Corehost }, { "PackageDir", Path.Combine(Dirs.Packages) }, // Legacy name { "TestBinRoot", Dirs.TestOutput }, { "TestPackageDir", Dirs.TestPackages }, { "MajorVersion", buildVersion.Major.ToString() }, { "MinorVersion", buildVersion.Minor.ToString() }, { "PatchVersion", buildVersion.Patch.ToString() }, { "CommitCountVersion", buildVersion.CommitCountString }, { "COMMIT_COUNT_VERSION", buildVersion.CommitCountString }, { "DOTNET_CLI_VERSION", buildVersion.SimpleVersion }, { "DOTNET_MSI_VERSION", buildVersion.GenerateMsiVersion() }, { "VersionSuffix", buildVersion.VersionSuffix }, { "CONFIGURATION", configuration }, { "ARCHITECTURE", architecture } }; return env; } private static void CreateZipFromDirectory(string directory, string artifactPath) { if (File.Exists(artifactPath)) { File.Delete(artifactPath); } ZipFile.CreateFromDirectory(directory, artifactPath, CompressionLevel.Optimal, false); } private static void CreateTarBallFromDirectory(string directory, string artifactPath) { if (File.Exists(artifactPath)) { File.Delete(artifactPath); } Cmd("tar", "-czf", artifactPath, "-C", directory, ".") .Execute() .EnsureSuccessful(); } private static void FixPermissions(string directory) { if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { // Reset everything to user readable/writeable and group and world readable. FS.ChmodAll(directory, "*", "644"); // Now make things that should be executable, executable. FS.FixModeFlags(directory); } } } }
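// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the build scripts): a standalone version of
// the archive step performed by GenerateZip / GenerateTarBall above, without
// the BuildTargetContext plumbing. The paths are hypothetical; the helper
// mirrors CreateZipFromDirectory / CreateTarBallFromDirectory using only the
// BCL and the external "tar" tool.
// ---------------------------------------------------------------------------
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Runtime.InteropServices;

internal static class ArchiveExample
{
    public static void Main()
    {
        // Hypothetical layout directory and artifact paths.
        string layout = Path.Combine("artifacts", "obj", "combined-framework-sdk-host");
        string artifact = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
            ? Path.Combine("artifacts", "packages", "dotnet-sdk.zip")
            : Path.Combine("artifacts", "packages", "dotnet-sdk.tar.gz");

        if (File.Exists(artifact))
        {
            File.Delete(artifact);
        }

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            // Same call shape as CreateZipFromDirectory: the layout directory
            // itself is not included as a root entry in the archive.
            ZipFile.CreateFromDirectory(layout, artifact, CompressionLevel.Optimal, includeBaseDirectory: false);
        }
        else
        {
            // Same shape as CreateTarBallFromDirectory, but using Process
            // directly instead of the build framework's Cmd() helper.
            var tar = Process.Start(new ProcessStartInfo
            {
                FileName = "tar",
                Arguments = $"-czf \"{artifact}\" -C \"{layout}\" .",
                UseShellExecute = false
            });
            tar.WaitForExit();
            if (tar.ExitCode != 0)
            {
                throw new IOException($"tar exited with code {tar.ExitCode}");
            }
        }
    }
}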
/* * * (c) Copyright Ascensio System Limited 2010-2021 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ namespace ASC.Mail.Net.SIP.Stack { #region usings using System; using System.Collections.Generic; using System.Net; using System.Threading; using AUTH; using Message; #endregion /// <summary> /// This class implements SIP request sender. /// </summary> /// <remarks> /// Request is sent using following methods:<br/> /// *) If there is active data flow, it is used. /// *) Request is sent as described in RFC 3261 [4](RFC 3263). /// </remarks> public class SIP_RequestSender : IDisposable { #region Events /// <summary> /// Is raised when sender has finished processing(got final-response or error). /// </summary> public event EventHandler Completed = null; /// <summary> /// Is raised when this object has disposed. /// </summary> public event EventHandler Disposed = null; /// <summary> /// Is raised when this transaction has got response from target end point. /// </summary> public event EventHandler<SIP_ResponseReceivedEventArgs> ResponseReceived = null; #endregion #region Members private bool m_IsStarted; private List<NetworkCredential> m_pCredentials; private SIP_Flow m_pFlow; private Queue<SIP_Hop> m_pHops; private object m_pLock = new object(); private SIP_Request m_pRequest; private SIP_Stack m_pStack; private SIP_ClientTransaction m_pTransaction; private SIP_RequestSenderState m_State = SIP_RequestSenderState.Initial; #endregion #region Constructor /// <summary> /// Default constructor. /// </summary> /// <param name="stack">Owner stack.</param> /// <param name="request">SIP request.</param> /// <param name="flow">Active data flow what to try before RFC 3261 [4](RFC 3263) methods to use to send request. /// This value can be null.</param> /// <exception cref="ArgumentNullException">Is raised when <b>stack</b> or <b>request</b> is null.</exception> internal SIP_RequestSender(SIP_Stack stack, SIP_Request request, SIP_Flow flow) { if (stack == null) { throw new ArgumentNullException("stack"); } if (request == null) { throw new ArgumentNullException("request"); } m_pStack = stack; m_pRequest = request; m_pFlow = flow; m_pCredentials = new List<NetworkCredential>(); m_pHops = new Queue<SIP_Hop>(); } #endregion #region Properties /// <summary> /// Gets credentials collection. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this property is accessed.</exception> public List<NetworkCredential> Credentials { get { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } return m_pCredentials; } } /// <summary> /// Gets SIP flow what was used to send request or null if request is not sent yet. 
/// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this property is accessed.</exception> public SIP_Flow Flow { get { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } return m_pFlow; } } /// <summary> /// Gets if this object is disposed. /// </summary> public bool IsDisposed { get { return m_State == SIP_RequestSenderState.Disposed; } } /// <summary> /// Gets if request sending has been started. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this property is accessed.</exception> public bool IsStarted { get { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } return m_IsStarted; } } /// <summary> /// Gets SIP request. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this property is accessed.</exception> public SIP_Request Request { get { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } return m_pRequest; } } /// <summary> /// Gets owner stack. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this property is accessed.</exception> public SIP_Stack Stack { get { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } return m_pStack; } } #endregion #region Methods /// <summary> /// Cleans up any resources being used. /// </summary> public void Dispose() { lock (m_pLock) { if (m_State == SIP_RequestSenderState.Disposed) { return; } m_State = SIP_RequestSenderState.Disposed; OnDisposed(); ResponseReceived = null; Completed = null; Disposed = null; m_pStack = null; m_pRequest = null; m_pCredentials = null; m_pHops = null; m_pTransaction = null; m_pLock = null; } } /// <summary> /// Starts sending request. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this method is accessed.</exception> /// <exception cref="InvalidOperationException">Is raised when <b>Start</b> method has alredy called.</exception> /// <exception cref="SIP_TransportException">Is raised when no transport hop(s) for request.</exception> public void Start() { lock (m_pLock) { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } if (m_IsStarted) { throw new InvalidOperationException("Start method has been already called."); } m_IsStarted = true; m_State = SIP_RequestSenderState.Starting; // Start may take so, process it on thread pool. ThreadPool.QueueUserWorkItem(delegate { lock (m_pLock) { if (m_State == SIP_RequestSenderState.Disposed) { return; } /* RFC 3261 8.1.2 Sending the Request The destination for the request is then computed. Unless there is local policy specifying otherwise, the destination MUST be determined by applying the DNS procedures described in [4] as follows. If the first element in the route set indicated a strict router (resulting in forming the request as described in Section 12.2.1.1), the procedures MUST be applied to the Request-URI of the request. Otherwise, the procedures are applied to the first Route header field value in the request (if one exists), or to the request's Request-URI if there is no Route header field present. These procedures yield an ordered set of address, port, and transports to attempt. 
Independent of which URI is used as input to the procedures of [4], if the Request-URI specifies a SIPS resource, the UAC MUST follow the procedures of [4] as if the input URI were a SIPS URI. The UAC SHOULD follow the procedures defined in [4] for stateful elements, trying each address until a server is contacted. Each try constitutes a new transaction, and therefore each carries a different topmost Via header field value with a new branch parameter. Furthermore, the transport value in the Via header field is set to whatever transport was determined for the target server. */ // We never use strict, only loose route. bool isStrictRoute = false; SIP_Uri uri = null; if (isStrictRoute) { uri = (SIP_Uri) m_pRequest.RequestLine.Uri; } else if (m_pRequest.Route.GetTopMostValue() != null) { uri = (SIP_Uri) m_pRequest.Route.GetTopMostValue().Address. Uri; } else { uri = (SIP_Uri) m_pRequest.RequestLine.Uri; } //uri.Param_Transport = "TCP"; // Queue hops. foreach (SIP_Hop hop in m_pStack.GetHops(uri, m_pRequest.ToByteData().Length, ((SIP_Uri) m_pRequest.RequestLine.Uri). IsSecure)) { m_pHops.Enqueue(hop); } if (m_pHops.Count == 0) { OnTransportError( new SIP_TransportException("No hops for '" + uri + "'.")); OnCompleted(); } else { m_State = SIP_RequestSenderState.Started; try { if (m_pFlow != null) { SendToFlow(m_pFlow, m_pRequest.Copy()); return; } } catch { // Sending to specified flow failed, probably disposed, just try send to first hop. } SendToNextHop(); } } }); } } /// <summary> /// Cancels current request sending. /// </summary> /// <exception cref="ObjectDisposedException">Is raised when this object is disposed and and this method is accessed.</exception> /// <exception cref="InvalidOperationException">Is raised when request sending has not been started by <b>Start</b> method.</exception> public void Cancel() { // If sender is in starting state, we must wait that state to complete. while (m_State == SIP_RequestSenderState.Starting) { Thread.Sleep(5); } lock (m_pLock) { if (m_State == SIP_RequestSenderState.Disposed) { throw new ObjectDisposedException(GetType().Name); } if (!m_IsStarted) { throw new InvalidOperationException("Request sending has not started, nothing to cancel."); } if (m_State != SIP_RequestSenderState.Started) { return; } m_pHops.Clear(); } // We may not call m_pTransaction.Cancel() in lock block, because deadlock can happen when transaction get response at same time. // Transaction waits lock for us and we wait lock to transaction. m_pTransaction.Cancel(); } #endregion #region Utility methods /// <summary> /// Is called when client transactions receives response. 
/// </summary> /// <param name="sender">Sender.</param> /// <param name="e">Event data.</param> private void ClientTransaction_ResponseReceived(object sender, SIP_ResponseReceivedEventArgs e) { lock (m_pLock) { m_pFlow = e.ClientTransaction.Request.Flow; if (e.Response.StatusCode == 401 || e.Response.StatusCode == 407) { // Check if authentication failed(We sent authorization data and it's challenged again, // probably user name or password inccorect) bool hasFailedAuthorization = false; foreach (SIP_t_Challenge challange in e.Response.WWWAuthenticate.GetAllValues()) { foreach (SIP_t_Credentials credentials in m_pTransaction.Request.Authorization.GetAllValues()) { if (new Auth_HttpDigest(challange.AuthData, "").Realm == new Auth_HttpDigest(credentials.AuthData, "").Realm) { hasFailedAuthorization = true; break; } } } foreach (SIP_t_Challenge challange in e.Response.ProxyAuthenticate.GetAllValues()) { foreach (SIP_t_Credentials credentials in m_pTransaction.Request.ProxyAuthorization.GetAllValues()) { if (new Auth_HttpDigest(challange.AuthData, "").Realm == new Auth_HttpDigest(credentials.AuthData, "").Realm) { hasFailedAuthorization = true; break; } } } // Authorization failed, pass response to UA. if (hasFailedAuthorization) { OnResponseReceived(e.Response); } // Try to authorize challanges. else { SIP_Request request = m_pRequest.Copy(); /* RFC 3261 22.2. When a UAC resubmits a request with its credentials after receiving a 401 (Unauthorized) or 407 (Proxy Authentication Required) response, it MUST increment the CSeq header field value as it would normally when sending an updated request. */ request.CSeq = new SIP_t_CSeq(m_pStack.ConsumeCSeq(), request.CSeq.RequestMethod); // All challanges authorized, resend request. if (Authorize(request, e.Response, Credentials.ToArray())) { SIP_Flow flow = m_pTransaction.Flow; CleanUpActiveTransaction(); SendToFlow(flow, request); } // We don't have credentials for one or more challenges. else { OnResponseReceived(e.Response); } } } else { OnResponseReceived(e.Response); if (e.Response.StatusCodeType != SIP_StatusCodeType.Provisional) { OnCompleted(); } } } } /// <summary> /// Is called when client transaction has timed out. /// </summary> /// <param name="sender">Sender.</param> /// <param name="e">Event data.</param> private void ClientTransaction_TimedOut(object sender, EventArgs e) { lock (m_pLock) { /* RFC 3261 8.1.2. The UAC SHOULD follow the procedures defined in [4] for stateful elements, trying each address until a server is contacted. Each try constitutes a new transaction, and therefore each carries a different topmost Via header field value with a new branch parameter. Furthermore, the transport value in the Via header field is set to whatever transport was determined for the target server. */ if (m_pHops.Count > 0) { CleanUpActiveTransaction(); SendToNextHop(); } /* 8.1.3.1 Transaction Layer Errors In some cases, the response returned by the transaction layer will not be a SIP message, but rather a transaction layer error. When a timeout error is received from the transaction layer, it MUST be treated as if a 408 (Request Timeout) status code has been received. If a fatal transport error is reported by the transport layer (generally, due to fatal ICMP errors in UDP or connection failures in TCP), the condition MUST be treated as a 503 (Service Unavailable) status code. 
*/ else { OnResponseReceived(m_pStack.CreateResponse(SIP_ResponseCodes.x408_Request_Timeout, m_pRequest)); OnCompleted(); } } } /// <summary> /// Is called when client transaction encountered transport error. /// </summary> /// <param name="sender">Sender.</param> /// <param name="e">Event data.</param> private void ClientTransaction_TransportError(object sender, EventArgs e) { lock (m_pLock) { /* RFC 3261 8.1.2. The UAC SHOULD follow the procedures defined in [4] for stateful elements, trying each address until a server is contacted. Each try constitutes a new transaction, and therefore each carries a different topmost Via header field value with a new branch parameter. Furthermore, the transport value in the Via header field is set to whatever transport was determined for the target server. */ if (m_pHops.Count > 0) { CleanUpActiveTransaction(); SendToNextHop(); } /* RFC 3261 8.1.3.1 Transaction Layer Errors In some cases, the response returned by the transaction layer will not be a SIP message, but rather a transaction layer error. When a timeout error is received from the transaction layer, it MUST be treated as if a 408 (Request Timeout) status code has been received. If a fatal transport error is reported by the transport layer (generally, due to fatal ICMP errors in UDP or connection failures in TCP), the condition MUST be treated as a 503 (Service Unavailable) status code. */ else { OnResponseReceived( m_pStack.CreateResponse( SIP_ResponseCodes.x503_Service_Unavailable + ": Transport error.", m_pRequest)); OnCompleted(); } } } /// <summary> /// Creates authorization for each challange in <b>response</b>. /// </summary> /// <param name="request">SIP request where to add authorization values.</param> /// <param name="response">SIP response which challanges to authorize.</param> /// <param name="credentials">Credentials for authorization.</param> /// <returns>Returns true if all challanges were authorized. If any of the challanges was not authorized, returns false.</returns> private bool Authorize(SIP_Request request, SIP_Response response, NetworkCredential[] credentials) { if (request == null) { throw new ArgumentNullException("request"); } if (response == null) { throw new ArgumentNullException("response"); } if (credentials == null) { throw new ArgumentNullException("credentials"); } bool allAuthorized = true; #region WWWAuthenticate foreach (SIP_t_Challenge challange in response.WWWAuthenticate.GetAllValues()) { Auth_HttpDigest authDigest = new Auth_HttpDigest(challange.AuthData, request.RequestLine.Method); // Serach credential for the specified challange. NetworkCredential credential = null; foreach (NetworkCredential c in credentials) { if (c.Domain.ToLower() == authDigest.Realm.ToLower()) { credential = c; break; } } // We don't have credential for this challange. if (credential == null) { allAuthorized = false; } // Authorize challange. else { authDigest.UserName = credential.UserName; authDigest.Password = credential.Password; authDigest.CNonce = Auth_HttpDigest.CreateNonce(); request.Authorization.Add(authDigest.ToAuthorization()); } } #endregion #region ProxyAuthenticate foreach (SIP_t_Challenge challange in response.ProxyAuthenticate.GetAllValues()) { Auth_HttpDigest authDigest = new Auth_HttpDigest(challange.AuthData, request.RequestLine.Method); // Serach credential for the specified challange. 
NetworkCredential credential = null; foreach (NetworkCredential c in credentials) { if (c.Domain.ToLower() == authDigest.Realm.ToLower()) { credential = c; break; } } // We don't have credential for this challange. if (credential == null) { allAuthorized = false; } // Authorize challange. else { authDigest.UserName = credential.UserName; authDigest.Password = credential.Password; authDigest.CNonce = Auth_HttpDigest.CreateNonce(); request.ProxyAuthorization.Add(authDigest.ToAuthorization()); } } #endregion return allAuthorized; } /// <summary> /// Starts sending request to next hop in queue. /// </summary> /// <exception cref="InvalidOperationException">Is raised when no next hop available(m_pHops.Count == 0) and this method is accessed.</exception> private void SendToNextHop() { if (m_pHops.Count == 0) { throw new InvalidOperationException("No more hop(s)."); } SIP_Hop hop = m_pHops.Dequeue(); SendToFlow(m_pStack.TransportLayer.GetOrCreateFlow(hop.Transport, null, hop.EndPoint), m_pRequest.Copy()); } /// <summary> /// Sends specified request to the specified data flow. /// </summary> /// <param name="flow">SIP data flow.</param> /// <param name="request">SIP request to send.</param> /// <exception cref="ArgumentNullException">Is raised when <b>flow</b> or <b>request</b> is null reference.</exception> private void SendToFlow(SIP_Flow flow, SIP_Request request) { if (flow == null) { throw new ArgumentNullException("flow"); } if (request == null) { throw new ArgumentNullException("request"); } #region Contact (RFC 3261 8.1.1.8) /* The Contact header field provides a SIP or SIPS URI that can be used to contact that specific instance of the UA for subsequent requests. The Contact header field MUST be present and contain exactly one SIP or SIPS URI in any request that can result in the establishment of a dialog. For the methods defined in this specification, that includes only the INVITE request. For these requests, the scope of the Contact is global. That is, the Contact header field value contains the URI at which the UA would like to receive requests, and this URI MUST be valid even if used in subsequent requests outside of any dialogs. If the Request-URI or top Route header field value contains a SIPS URI, the Contact header field MUST contain a SIPS URI as well. */ SIP_t_ContactParam contact = request.Contact.GetTopMostValue(); // Add contact header If request-Method can establish dialog and contact header not present. if (SIP_Utils.MethodCanEstablishDialog(request.RequestLine.Method) && contact == null) { SIP_Uri from = (SIP_Uri) request.From.Address.Uri; request.Contact.Add((flow.IsSecure ? "sips:" : "sip:") + from.User + "@" + m_pStack.TransportLayer.GetContactHost(flow)); } // If contact SIP URI and host = auto-allocate, allocate it as needed. else if (contact != null && contact.Address.Uri is SIP_Uri && ((SIP_Uri) contact.Address.Uri).Host == "auto-allocate") { ((SIP_Uri) contact.Address.Uri).Host = m_pStack.TransportLayer.GetContactHost(flow).ToString(); } #endregion m_pTransaction = m_pStack.TransactionLayer.CreateClientTransaction(flow, request, true); m_pTransaction.ResponseReceived += ClientTransaction_ResponseReceived; m_pTransaction.TimedOut += ClientTransaction_TimedOut; m_pTransaction.TransportError += ClientTransaction_TransportError; // Start transaction processing. m_pTransaction.Start(); } /// <summary> /// Cleans up active transaction. 
/// </summary> private void CleanUpActiveTransaction() { if (m_pTransaction != null) { // Don't dispose the transaction; it will dispose itself when done. // Otherwise, for example, a failed INVITE wouldn't linger in the "Completed" state as it must. // We just detach our event handlers, because we don't care about its events any more. m_pTransaction.ResponseReceived -= ClientTransaction_ResponseReceived; m_pTransaction.TimedOut -= ClientTransaction_TimedOut; m_pTransaction.TransportError -= ClientTransaction_TransportError; m_pTransaction = null; } } /// <summary> /// Raises ResponseReceived event. /// </summary> /// <param name="response">SIP response received.</param> private void OnResponseReceived(SIP_Response response) { if (ResponseReceived != null) { ResponseReceived(this, new SIP_ResponseReceivedEventArgs(m_pStack, m_pTransaction, response)); } } /// <summary> /// Raises event <b>TransportError</b>. /// </summary> /// <param name="exception">Exception that happened.</param> private void OnTransportError(Exception exception) { // TODO: } /// <summary> /// Raises event <b>Completed</b>. /// </summary> private void OnCompleted() { m_State = SIP_RequestSenderState.Completed; if (Completed != null) { Completed(this, new EventArgs()); } } /// <summary> /// Raises <b>Disposed</b> event. /// </summary> private void OnDisposed() { if (Disposed != null) { Disposed(this, new EventArgs()); } } #endregion #region Nested type: SIP_RequestSenderState private enum SIP_RequestSenderState { Initial, Starting, Started, Completed, Disposed } #endregion } }
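// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the stack): how a UA layer would be
// expected to drive SIP_RequestSender. The sender's constructor is internal,
// so it must come from the owning SIP_Stack; the stack.CreateRequest and
// stack.CreateRequestSender calls below are assumptions about that factory
// API, not definitions taken from this file. Only members shown above
// (Credentials, ResponseReceived, Completed, Start, Dispose) are used.
// ---------------------------------------------------------------------------
using System;
using System.Net;
using ASC.Mail.Net.SIP.Message;
using ASC.Mail.Net.SIP.Stack;

internal static class RequestSenderExample
{
    public static void SendOptions(SIP_Stack stack, SIP_t_NameAddress to, SIP_t_NameAddress from)
    {
        // Assumed factory methods on SIP_Stack; adjust to the real stack API.
        SIP_Request request = stack.CreateRequest("OPTIONS", to, from);
        SIP_RequestSender sender = stack.CreateRequestSender(request);

        // Credentials are matched against challenge realms in Authorize().
        sender.Credentials.Add(new NetworkCredential("user", "password", "example.com"));

        sender.ResponseReceived += (s, e) =>
        {
            Console.WriteLine("Got response: " + e.Response.StatusCode);
        };
        sender.Completed += (s, e) =>
        {
            // Final response (or a transport error mapped to 408/503) received.
            sender.Dispose();
        };

        // Hop resolution and the first client transaction start on the thread pool.
        sender.Start();
    }
}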
// dnlib: See LICENSE.txt for more info using System; using System.Collections.Generic; using dnlib.Utils; using dnlib.W32Resources; using dnlib.IO; using dnlib.Threading; namespace dnlib.PE { /// <summary> /// Image layout /// </summary> public enum ImageLayout { /// <summary> /// Use this if the PE file has a normal structure (eg. it's been read from a file on disk) /// </summary> File, /// <summary> /// Use this if the PE file has been loaded into memory by the OS PE file loader /// </summary> Memory, } /// <summary> /// Accesses a PE file /// </summary> public sealed class PEImage : IPEImage { // Default to false because an OS loaded PE image may contain memory holes. If there // are memory holes, other code (eg. .NET resource creator) must verify that all memory // is available, which will be slower. const bool USE_MEMORY_LAYOUT_WITH_MAPPED_FILES = false; static readonly IPEType MemoryLayout = new MemoryPEType(); static readonly IPEType FileLayout = new FilePEType(); IImageStream imageStream; IImageStreamCreator imageStreamCreator; IPEType peType; PEInfo peInfo; UserValue<Win32Resources> win32Resources; #if THREAD_SAFE readonly Lock theLock = Lock.Create(); #endif sealed class FilePEType : IPEType { /// <inheritdoc/> public RVA ToRVA(PEInfo peInfo, FileOffset offset) { return peInfo.ToRVA(offset); } /// <inheritdoc/> public FileOffset ToFileOffset(PEInfo peInfo, RVA rva) { return peInfo.ToFileOffset(rva); } } sealed class MemoryPEType : IPEType { /// <inheritdoc/> public RVA ToRVA(PEInfo peInfo, FileOffset offset) { return (RVA)offset; } /// <inheritdoc/> public FileOffset ToFileOffset(PEInfo peInfo, RVA rva) { return (FileOffset)rva; } } /// <inheritdoc/> public bool IsFileImageLayout { get { return peType is FilePEType; } } /// <inheritdoc/> public bool MayHaveInvalidAddresses { get { return !IsFileImageLayout; } } /// <inheritdoc/> public string FileName { get { return imageStreamCreator.FileName; } } /// <inheritdoc/> public ImageDosHeader ImageDosHeader { get { return peInfo.ImageDosHeader; } } /// <inheritdoc/> public ImageNTHeaders ImageNTHeaders { get { return peInfo.ImageNTHeaders; } } /// <inheritdoc/> public IList<ImageSectionHeader> ImageSectionHeaders { get { return peInfo.ImageSectionHeaders; } } /// <inheritdoc/> public Win32Resources Win32Resources { get { return win32Resources.Value; } set { IDisposable origValue = null; if (win32Resources.IsValueInitialized) { origValue = win32Resources.Value; if (origValue == value) return; } win32Resources.Value = value; if (origValue != null) origValue.Dispose(); } } /// <summary> /// Constructor /// </summary> /// <param name="imageStreamCreator">The PE stream creator</param> /// <param name="imageLayout">Image layout</param> /// <param name="verify">Verify PE file data</param> public PEImage(IImageStreamCreator imageStreamCreator, ImageLayout imageLayout, bool verify) { try { this.imageStreamCreator = imageStreamCreator; this.peType = ConvertImageLayout(imageLayout); ResetReader(); this.peInfo = new PEInfo(imageStream, verify); Initialize(); } catch { Dispose(); throw; } } void Initialize() { win32Resources.ReadOriginalValue = () => { var dataDir = peInfo.ImageNTHeaders.OptionalHeader.DataDirectories[2]; if (dataDir.VirtualAddress == 0 || dataDir.Size == 0) return null; return new Win32ResourcesPE(this); }; #if THREAD_SAFE win32Resources.Lock = theLock; #endif } static IPEType ConvertImageLayout(ImageLayout imageLayout) { switch (imageLayout) { case ImageLayout.File: return FileLayout; case ImageLayout.Memory: return 
MemoryLayout; default: throw new ArgumentException("imageLayout"); } } /// <summary> /// Constructor /// </summary> /// <param name="fileName">Name of the file</param> /// <param name="mapAsImage"><c>true</c> if we should map it as an executable</param> /// <param name="verify">Verify PE file data</param> public PEImage(string fileName, bool mapAsImage, bool verify) : this(ImageStreamCreator.Create(fileName, mapAsImage), mapAsImage ? ImageLayout.Memory : ImageLayout.File, verify) { try { if (mapAsImage && imageStreamCreator is MemoryMappedFileStreamCreator) { ((MemoryMappedFileStreamCreator)imageStreamCreator).Length = peInfo.GetImageSize(); ResetReader(); } } catch { Dispose(); throw; } } /// <summary> /// Constructor /// </summary> /// <param name="fileName">Name of the file</param> /// <param name="verify">Verify PE file data</param> public PEImage(string fileName, bool verify) : this(fileName, USE_MEMORY_LAYOUT_WITH_MAPPED_FILES, verify) { } /// <summary> /// Constructor /// </summary> /// <param name="fileName">Name of the file</param> public PEImage(string fileName) : this(fileName, true) { } /// <summary> /// Constructor /// </summary> /// <param name="data">The PE file data</param> /// <param name="imageLayout">Image layout</param> /// <param name="verify">Verify PE file data</param> public PEImage(byte[] data, ImageLayout imageLayout, bool verify) : this(new MemoryStreamCreator(data), imageLayout, verify) { } /// <summary> /// Constructor /// </summary> /// <param name="data">The PE file data</param> /// <param name="verify">Verify PE file data</param> public PEImage(byte[] data, bool verify) : this(data, ImageLayout.File, verify) { } /// <summary> /// Constructor /// </summary> /// <param name="data">The PE file data</param> public PEImage(byte[] data) : this(data, true) { } /// <summary> /// Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> /// <param name="length">Length of PE image</param> /// <param name="imageLayout">Image layout</param> /// <param name="verify">Verify PE file data</param> public PEImage(IntPtr baseAddr, long length, ImageLayout imageLayout, bool verify) : this(new UnmanagedMemoryStreamCreator(baseAddr, length), imageLayout, verify) { } /// <summary> /// Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> /// <param name="length">Length of PE image</param> /// <param name="verify">Verify PE file data</param> public PEImage(IntPtr baseAddr, long length, bool verify) : this(baseAddr, length, ImageLayout.Memory, verify) { } /// <summary> /// Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> /// <param name="length">Length of PE image</param> public PEImage(IntPtr baseAddr, long length) : this(baseAddr, length, true) { } /// <summary> /// Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> /// <param name="imageLayout">Image layout</param> /// <param name="verify">Verify PE file data</param> public PEImage(IntPtr baseAddr, ImageLayout imageLayout, bool verify) : this(new UnmanagedMemoryStreamCreator(baseAddr, 0x10000), imageLayout, verify) { try { ((UnmanagedMemoryStreamCreator)imageStreamCreator).Length = peInfo.GetImageSize(); ResetReader(); } catch { Dispose(); throw; } } /// <summary> /// Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> /// <param name="verify">Verify PE file data</param> public PEImage(IntPtr baseAddr, bool verify) : this(baseAddr, ImageLayout.Memory, verify) { } /// <summary> /// 
Constructor /// </summary> /// <param name="baseAddr">Address of PE image</param> public PEImage(IntPtr baseAddr) : this(baseAddr, true) { } void ResetReader() { if (imageStream != null) { imageStream.Dispose(); imageStream = null; } imageStream = imageStreamCreator.CreateFull(); } /// <inheritdoc/> public RVA ToRVA(FileOffset offset) { return peType.ToRVA(peInfo, offset); } /// <inheritdoc/> public FileOffset ToFileOffset(RVA rva) { return peType.ToFileOffset(peInfo, rva); } /// <inheritdoc/> public void Dispose() { IDisposable id; if (win32Resources.IsValueInitialized && (id = win32Resources.Value) != null) id.Dispose(); if ((id = imageStream) != null) id.Dispose(); if ((id = imageStreamCreator) != null) id.Dispose(); win32Resources.Value = null; imageStream = null; imageStreamCreator = null; peType = null; peInfo = null; } /// <inheritdoc/> public IImageStream CreateStream(FileOffset offset) { if ((long)offset > imageStreamCreator.Length) throw new ArgumentOutOfRangeException("offset"); long length = imageStreamCreator.Length - (long)offset; return CreateStream(offset, length); } /// <inheritdoc/> public IImageStream CreateStream(FileOffset offset, long length) { return imageStreamCreator.Create(offset, length); } /// <inheritdoc/> public IImageStream CreateFullStream() { return imageStreamCreator.CreateFull(); } /// <inheritdoc/> public void UnsafeDisableMemoryMappedIO() { var creator = imageStreamCreator as MemoryMappedFileStreamCreator; if (creator != null) creator.UnsafeDisableMemoryMappedIO(); } /// <inheritdoc/> public bool IsMemoryMappedIO { get { var creator = imageStreamCreator as MemoryMappedFileStreamCreator; return creator == null ? false : creator.IsMemoryMappedIO; } } } }
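// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of dnlib): opening a PE file with the
// file layout, walking its section headers and mapping an RVA back to a file
// offset via ToFileOffset(). The header/section property names used below
// (FileHeader.Machine, DisplayName, VirtualAddress) are assumptions about
// dnlib's PE types defined outside this file.
// ---------------------------------------------------------------------------
using System;
using dnlib.PE;

internal static class PEImageExample
{
    private static void Main(string[] args)
    {
        // The path is hypothetical; any managed or native PE file works.
        using (var peImage = new PEImage(args.Length > 0 ? args[0] : "sample.exe"))
        {
            Console.WriteLine("Layout is file based: {0}", peImage.IsFileImageLayout);
            Console.WriteLine("Machine: {0}", peImage.ImageNTHeaders.FileHeader.Machine);

            foreach (var section in peImage.ImageSectionHeaders)
            {
                // RVA -> file offset goes through the section table (PEInfo.ToFileOffset).
                Console.WriteLine("{0}: RVA {1:X8} -> file offset {2:X8}",
                    section.DisplayName,
                    (uint)section.VirtualAddress,
                    (long)peImage.ToFileOffset(section.VirtualAddress));
            }
        }
    }
}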
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; using System.Net; using System.Reflection; using OpenSim.Framework; using OpenSim.Services.Interfaces; using GridRegion = OpenSim.Services.Interfaces.GridRegion; using FriendInfo = OpenSim.Services.Interfaces.FriendInfo; using log4net; using OpenMetaverse; using OpenMetaverse.StructuredData; using OSDArray = OpenMetaverse.StructuredData.OSDArray; using OSDMap = OpenMetaverse.StructuredData.OSDMap; namespace OpenSim.Services.LLLoginService { public class LLFailedLoginResponse : OpenSim.Services.Interfaces.FailedLoginResponse { protected string m_key; protected string m_value; protected string m_login; public static LLFailedLoginResponse UserProblem; public static LLFailedLoginResponse GridProblem; public static LLFailedLoginResponse InventoryProblem; public static LLFailedLoginResponse DeadRegionProblem; public static LLFailedLoginResponse LoginBlockedProblem; public static LLFailedLoginResponse UnverifiedAccountProblem; public static LLFailedLoginResponse AlreadyLoggedInProblem; public static LLFailedLoginResponse InternalError; static LLFailedLoginResponse() { UserProblem = new LLFailedLoginResponse("key", "Could not authenticate your avatar. Please check your username and password, and check the grid if problems persist.", "false"); GridProblem = new LLFailedLoginResponse("key", "Error connecting to the desired location. Try connecting to another region.", "false"); InventoryProblem = new LLFailedLoginResponse("key", "The inventory service is not responding. Please notify your login region operator.", "false"); DeadRegionProblem = new LLFailedLoginResponse("key", "The region you are attempting to log into is not responding. Please select another region and try again.", "false"); LoginBlockedProblem = new LLFailedLoginResponse("presence", "Logins are currently restricted. 
Please try again later.", "false"); UnverifiedAccountProblem = new LLFailedLoginResponse("presence", "Your account has not yet been verified. Please check " + "your email and click the provided link.", "false"); AlreadyLoggedInProblem = new LLFailedLoginResponse("presence", "You appear to be already logged in. " + "If this is not the case please wait for your session to timeout. " + "If this takes longer than a few minutes please contact the grid owner. " + "Please wait 5 minutes if you are going to connect to a region nearby to the region you were at previously.", "false"); InternalError = new LLFailedLoginResponse("Internal Error", "Error generating Login Response", "false"); } public LLFailedLoginResponse(string key, string value, string login) { m_key = key; m_value = value; m_login = login; } public override Hashtable ToHashtable() { Hashtable loginError = new Hashtable(); loginError["reason"] = m_key; loginError["message"] = m_value; loginError["login"] = m_login; return loginError; } public override OSD ToOSDMap() { OSDMap map = new OSDMap(); map["reason"] = OSD.FromString(m_key); map["message"] = OSD.FromString(m_value); map["login"] = OSD.FromString(m_login); return map; } } /// <summary> /// A class to handle LL login response. /// </summary> public class LLLoginResponse : OpenSim.Services.Interfaces.LoginResponse { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private static Hashtable globalTexturesHash; // Global Textures private static string sunTexture = "cce0f112-878f-4586-a2e2-a8f104bba271"; private static string cloudTexture = "dc4b9f0b-d008-45c6-96a4-01dd947ac621"; private static string moonTexture = "ec4b9f0b-d008-45c6-96a4-01dd947ac621"; private Hashtable loginFlagsHash; private Hashtable uiConfigHash; private ArrayList loginFlags; private ArrayList globalTextures; private ArrayList eventCategories; private ArrayList uiConfig; private ArrayList classifiedCategories; private ArrayList inventoryRoot; private ArrayList initialOutfit; private ArrayList agentInventory; private ArrayList inventoryLibraryOwner; private ArrayList inventoryLibRoot; private ArrayList inventoryLibrary; private ArrayList activeGestures; private UserInfo userProfile; private UUID agentID; private UUID sessionID; private UUID secureSessionID; private UUID realID; // Login Flags private string dst; private string stipendSinceLogin; private string gendered; private string everLoggedIn; private string login; private uint simPort; private uint simHttpPort; private string simAddress; private string agentAccess; private string agentAccessMax; private Int32 circuitCode; private uint regionX; private uint regionY; // Login private string firstname; private string lastname; // Web map private string mapTileURL; // Web Profiles private string profileURL; // OpenID private string openIDURL; private string searchURL; // Error Flags private string errorReason; private string errorMessage; private string welcomeMessage; private string startLocation; private string allowFirstLife; private string home; private string seedCapability; private string lookAt; private BuddyList m_buddyList = null; private string currency; private string classifiedFee; private int maxAgentGroups; static LLLoginResponse() { // This is being set, but it's not used // not sure why. 
globalTexturesHash = new Hashtable(); globalTexturesHash["sun_texture_id"] = sunTexture; globalTexturesHash["cloud_texture_id"] = cloudTexture; globalTexturesHash["moon_texture_id"] = moonTexture; } public LLLoginResponse() { loginFlags = new ArrayList(); globalTextures = new ArrayList(); eventCategories = new ArrayList(); uiConfig = new ArrayList(); classifiedCategories = new ArrayList(); uiConfigHash = new Hashtable(); // defaultXmlRpcResponse = new XmlRpcResponse(); userProfile = new UserInfo(); inventoryRoot = new ArrayList(); initialOutfit = new ArrayList(); agentInventory = new ArrayList(); inventoryLibrary = new ArrayList(); inventoryLibraryOwner = new ArrayList(); activeGestures = new ArrayList(); SetDefaultValues(); } public LLLoginResponse(UserAccount account, AgentCircuitData aCircuit, GridUserInfo pinfo, GridRegion destination, List<InventoryFolderBase> invSkel, FriendInfo[] friendsList, ILibraryService libService, string where, string startlocation, Vector3 position, Vector3 lookAt, List<InventoryItemBase> gestures, string message, GridRegion home, IPEndPoint clientIP, string mapTileURL, string profileURL, string openIDURL, string searchURL, string currency, string DSTZone, string destinationsURL, string avatarsURL, UUID realID, string classifiedFee,int maxAgentGroups) : this() { FillOutInventoryData(invSkel, libService); FillOutActiveGestures(gestures); CircuitCode = (int)aCircuit.circuitcode; Lastname = account.LastName; Firstname = account.FirstName; AgentID = account.PrincipalID; SessionID = aCircuit.SessionID; SecureSessionID = aCircuit.SecureSessionID; RealID = realID; Message = message; BuddList = ConvertFriendListItem(friendsList); StartLocation = where; MapTileURL = mapTileURL; ProfileURL = profileURL; OpenIDURL = openIDURL; DestinationsURL = destinationsURL; AvatarsURL = avatarsURL; SearchURL = searchURL; Currency = currency; ClassifiedFee = classifiedFee; MaxAgentGroups = maxAgentGroups; FillOutHomeData(pinfo, home); LookAt = String.Format("[r{0},r{1},r{2}]", lookAt.X, lookAt.Y, lookAt.Z); FillOutRegionData(destination); m_log.DebugFormat("[LOGIN RESPONSE] LLLoginResponse create. sizeX={0}, sizeY={1}", RegionSizeX, RegionSizeY); FillOutSeedCap(aCircuit, destination, clientIP); switch (DSTZone) { case "none": DST = "N"; break; case "local": DST = TimeZone.CurrentTimeZone.IsDaylightSavingTime(DateTime.Now) ? "Y" : "N"; break; default: TimeZoneInfo dstTimeZone = null; string[] tzList = DSTZone.Split(';'); foreach (string tzName in tzList) { try { dstTimeZone = TimeZoneInfo.FindSystemTimeZoneById(tzName); } catch { continue; } break; } if (dstTimeZone == null) { m_log.WarnFormat( "[LLOGIN RESPONSE]: No valid timezone found for DST in {0}, falling back to system time.", tzList); DST = TimeZone.CurrentTimeZone.IsDaylightSavingTime(DateTime.Now) ? "Y" : "N"; } else { DST = dstTimeZone.IsDaylightSavingTime(DateTime.Now) ? 
"Y" : "N"; } break; } } private void FillOutInventoryData(List<InventoryFolderBase> invSkel, ILibraryService libService) { InventoryData inventData = null; try { inventData = GetInventorySkeleton(invSkel); } catch (Exception e) { m_log.WarnFormat( "[LLLOGIN SERVICE]: Error processing inventory skeleton of agent {0} - {1}", agentID, e); // ignore and continue } if (inventData != null) { ArrayList AgentInventoryArray = inventData.InventoryArray; Hashtable InventoryRootHash = new Hashtable(); InventoryRootHash["folder_id"] = inventData.RootFolderID.ToString(); InventoryRoot = new ArrayList(); InventoryRoot.Add(InventoryRootHash); InventorySkeleton = AgentInventoryArray; } // Inventory Library Section if (libService != null && libService.LibraryRootFolder != null) { Hashtable InventoryLibRootHash = new Hashtable(); InventoryLibRootHash["folder_id"] = "00000112-000f-0000-0000-000100bba000"; InventoryLibRoot = new ArrayList(); InventoryLibRoot.Add(InventoryLibRootHash); InventoryLibraryOwner = GetLibraryOwner(libService.LibraryRootFolder); InventoryLibrary = GetInventoryLibrary(libService); } } private void FillOutActiveGestures(List<InventoryItemBase> gestures) { ArrayList list = new ArrayList(); if (gestures != null) { foreach (InventoryItemBase gesture in gestures) { Hashtable item = new Hashtable(); item["item_id"] = gesture.ID.ToString(); item["asset_id"] = gesture.AssetID.ToString(); list.Add(item); } } ActiveGestures = list; } private void FillOutHomeData(GridUserInfo pinfo, GridRegion home) { int x = (int)Util.RegionToWorldLoc(1000); int y = (int)Util.RegionToWorldLoc(1000); if (home != null) { x = home.RegionLocX; y = home.RegionLocY; } Home = string.Format( "{{'region_handle':[r{0},r{1}], 'position':[r{2},r{3},r{4}], 'look_at':[r{5},r{6},r{7}]}}", x, y, pinfo.HomePosition.X, pinfo.HomePosition.Y, pinfo.HomePosition.Z, pinfo.HomeLookAt.X, pinfo.HomeLookAt.Y, pinfo.HomeLookAt.Z); } private void FillOutRegionData(GridRegion destination) { IPEndPoint endPoint = destination.ExternalEndPoint; if (endPoint == null) return; SimAddress = endPoint.Address.ToString(); SimPort = (uint)endPoint.Port; RegionX = (uint)destination.RegionLocX; RegionY = (uint)destination.RegionLocY; RegionSizeX = destination.RegionSizeX; RegionSizeY = destination.RegionSizeY; } private void FillOutSeedCap(AgentCircuitData aCircuit, GridRegion destination, IPEndPoint ipepClient) { SeedCapability = destination.ServerURI + CapsUtil.GetCapsSeedPath(aCircuit.CapsPath); } private void SetDefaultValues() { TimeZoneInfo gridTimeZone; // Disabled for now pending making timezone a config value, which can at some point have a default of // a ; separated list of possible timezones. // The problem here is that US/Pacific (or even the Olsen America/Los_Angeles) is not universal across // windows, mac and various distributions of linux, introducing another element of consistency. // The server operator needs to be able to control this setting // try // { // // First try to fetch DST from Pacific Standard Time, because this is // // the one expected by the viewer. "US/Pacific" is the string to search // // on linux and mac, and should work also on Windows (to confirm) // gridTimeZone = TimeZoneInfo.FindSystemTimeZoneById("US/Pacific"); // } // catch (Exception e) // { // m_log.WarnFormat( // "[TIMEZONE]: {0} Falling back to system time. System time should be set to Pacific Standard Time to provide the expected time", // e.Message); gridTimeZone = TimeZoneInfo.Local; // } DST = gridTimeZone.IsDaylightSavingTime(DateTime.Now) ? 
"Y" : "N"; StipendSinceLogin = "N"; Gendered = "Y"; EverLoggedIn = "Y"; login = "false"; firstname = "Test"; lastname = "User"; agentAccess = "M"; agentAccessMax = "A"; startLocation = "last"; allowFirstLife = "Y"; ErrorMessage = "You have entered an invalid name/password combination. Check Caps/lock."; ErrorReason = "key"; welcomeMessage = "Welcome to OpenSim!"; seedCapability = String.Empty; home = "{'region_handle':[" + "r" + Util.RegionToWorldLoc(1000).ToString() + "," + "r" + Util.RegionToWorldLoc(1000).ToString() + "], 'position':[" + "r" + userProfile.homepos.X.ToString() + "," + "r" + userProfile.homepos.Y.ToString() + "," + "r" + userProfile.homepos.Z.ToString() + "], 'look_at':[" + "r" + userProfile.homelookat.X.ToString() + "," + "r" + userProfile.homelookat.Y.ToString() + "," + "r" + userProfile.homelookat.Z.ToString() + "]}"; lookAt = "[r0.99949799999999999756,r0.03166859999999999814,r0]"; RegionX = (uint) 255232; RegionY = (uint) 254976; // Classifieds; AddClassifiedCategory((Int32) 1, "Shopping"); AddClassifiedCategory((Int32) 2, "Land Rental"); AddClassifiedCategory((Int32) 3, "Property Rental"); AddClassifiedCategory((Int32) 4, "Special Attraction"); AddClassifiedCategory((Int32) 5, "New Products"); AddClassifiedCategory((Int32) 6, "Employment"); AddClassifiedCategory((Int32) 7, "Wanted"); AddClassifiedCategory((Int32) 8, "Service"); AddClassifiedCategory((Int32) 9, "Personal"); SessionID = UUID.Random(); SecureSessionID = UUID.Random(); AgentID = UUID.Random(); RealID = UUID.Zero; Hashtable InitialOutfitHash = new Hashtable(); InitialOutfitHash["folder_name"] = "Nightclub Female"; InitialOutfitHash["gender"] = "female"; initialOutfit.Add(InitialOutfitHash); mapTileURL = String.Empty; profileURL = String.Empty; openIDURL = String.Empty; searchURL = String.Empty; currency = String.Empty; ClassifiedFee = "0"; MaxAgentGroups = 42; } public override Hashtable ToHashtable() { try { Hashtable responseData = new Hashtable(); loginFlagsHash = new Hashtable(); loginFlagsHash["daylight_savings"] = DST; loginFlagsHash["stipend_since_login"] = StipendSinceLogin; loginFlagsHash["gendered"] = Gendered; loginFlagsHash["ever_logged_in"] = EverLoggedIn; loginFlags.Add(loginFlagsHash); responseData["first_name"] = Firstname; responseData["last_name"] = Lastname; responseData["agent_access"] = agentAccess; responseData["agent_access_max"] = agentAccessMax; globalTextures.Add(globalTexturesHash); // this.eventCategories.Add(this.eventCategoriesHash); AddToUIConfig("allow_first_life", allowFirstLife); uiConfig.Add(uiConfigHash); responseData["sim_port"] = (Int32) SimPort; responseData["sim_ip"] = SimAddress; responseData["http_port"] = (Int32)SimHttpPort; responseData["agent_id"] = AgentID.ToString(); responseData["real_id"] = RealID.ToString(); responseData["session_id"] = SessionID.ToString(); responseData["secure_session_id"] = SecureSessionID.ToString(); responseData["circuit_code"] = CircuitCode; responseData["seconds_since_epoch"] = (Int32) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds; responseData["login-flags"] = loginFlags; responseData["global-textures"] = globalTextures; responseData["seed_capability"] = seedCapability; responseData["event_categories"] = eventCategories; responseData["event_notifications"] = new ArrayList(); // todo responseData["classified_categories"] = classifiedCategories; responseData["ui-config"] = uiConfig; if (agentInventory != null) { responseData["inventory-skeleton"] = agentInventory; responseData["inventory-root"] = inventoryRoot; } 
responseData["inventory-skel-lib"] = inventoryLibrary; responseData["inventory-lib-root"] = inventoryLibRoot; responseData["gestures"] = activeGestures; responseData["inventory-lib-owner"] = inventoryLibraryOwner; responseData["initial-outfit"] = initialOutfit; responseData["start_location"] = startLocation; responseData["seed_capability"] = seedCapability; responseData["home"] = home; responseData["look_at"] = lookAt; responseData["max-agent-groups"] = MaxAgentGroups; responseData["message"] = welcomeMessage; responseData["region_x"] = (Int32)(RegionX); responseData["region_y"] = (Int32)(RegionY); responseData["region_size_x"] = (Int32)RegionSizeX; responseData["region_size_y"] = (Int32)RegionSizeY; m_log.DebugFormat("[LOGIN RESPONSE] returning sizeX={0}, sizeY={1}", RegionSizeX, RegionSizeY); if (searchURL != String.Empty) responseData["search"] = searchURL; if (mapTileURL != String.Empty) responseData["map-server-url"] = mapTileURL; if (profileURL != String.Empty) responseData["profile-server-url"] = profileURL; if (DestinationsURL != String.Empty) responseData["destination_guide_url"] = DestinationsURL; if (AvatarsURL != String.Empty) responseData["avatar_picker_url"] = AvatarsURL; // We need to send an openid_token back in the response too if (openIDURL != String.Empty) responseData["openid_url"] = openIDURL; if (m_buddyList != null) { responseData["buddy-list"] = m_buddyList.ToArray(); } if (currency != String.Empty) { // responseData["real_currency"] = currency; responseData["currency"] = currency; } if (ClassifiedFee != String.Empty) responseData["classified_fee"] = ClassifiedFee; responseData["login"] = "true"; return responseData; } catch (Exception e) { m_log.Warn("[CLIENT]: LoginResponse: Error creating Hashtable Response: " + e.Message); return LLFailedLoginResponse.InternalError.ToHashtable(); } } public override OSD ToOSDMap() { try { OSDMap map = new OSDMap(); map["first_name"] = OSD.FromString(Firstname); map["last_name"] = OSD.FromString(Lastname); map["agent_access"] = OSD.FromString(agentAccess); map["agent_access_max"] = OSD.FromString(agentAccessMax); map["sim_port"] = OSD.FromInteger(SimPort); map["sim_ip"] = OSD.FromString(SimAddress); map["agent_id"] = OSD.FromUUID(AgentID); map["real_id"] = OSD.FromUUID(RealID); map["session_id"] = OSD.FromUUID(SessionID); map["secure_session_id"] = OSD.FromUUID(SecureSessionID); map["circuit_code"] = OSD.FromInteger(CircuitCode); map["seconds_since_epoch"] = OSD.FromInteger((int)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds); #region Login Flags OSDMap loginFlagsLLSD = new OSDMap(); loginFlagsLLSD["daylight_savings"] = OSD.FromString(DST); loginFlagsLLSD["stipend_since_login"] = OSD.FromString(StipendSinceLogin); loginFlagsLLSD["gendered"] = OSD.FromString(Gendered); loginFlagsLLSD["ever_logged_in"] = OSD.FromString(EverLoggedIn); map["login-flags"] = WrapOSDMap(loginFlagsLLSD); #endregion Login Flags #region Global Textures OSDMap globalTexturesLLSD = new OSDMap(); globalTexturesLLSD["sun_texture_id"] = OSD.FromString(SunTexture); globalTexturesLLSD["cloud_texture_id"] = OSD.FromString(CloudTexture); globalTexturesLLSD["moon_texture_id"] = OSD.FromString(MoonTexture); map["global-textures"] = WrapOSDMap(globalTexturesLLSD); #endregion Global Textures map["seed_capability"] = OSD.FromString(seedCapability); map["event_categories"] = ArrayListToOSDArray(eventCategories); //map["event_notifications"] = new OSDArray(); // todo map["classified_categories"] = ArrayListToOSDArray(classifiedCategories); #region UI Config 
OSDMap uiConfigLLSD = new OSDMap(); uiConfigLLSD["allow_first_life"] = OSD.FromString(allowFirstLife); map["ui-config"] = WrapOSDMap(uiConfigLLSD); #endregion UI Config #region Inventory map["inventory-skeleton"] = ArrayListToOSDArray(agentInventory); map["inventory-skel-lib"] = ArrayListToOSDArray(inventoryLibrary); map["inventory-root"] = ArrayListToOSDArray(inventoryRoot); ; map["inventory-lib-root"] = ArrayListToOSDArray(inventoryLibRoot); map["inventory-lib-owner"] = ArrayListToOSDArray(inventoryLibraryOwner); #endregion Inventory map["gestures"] = ArrayListToOSDArray(activeGestures); map["initial-outfit"] = ArrayListToOSDArray(initialOutfit); map["start_location"] = OSD.FromString(startLocation); map["seed_capability"] = OSD.FromString(seedCapability); map["home"] = OSD.FromString(home); map["look_at"] = OSD.FromString(lookAt); map["max-agent-groups"] = OSD.FromInteger(MaxAgentGroups); map["message"] = OSD.FromString(welcomeMessage); map["region_x"] = OSD.FromInteger(RegionX); map["region_y"] = OSD.FromInteger(RegionY); if (mapTileURL != String.Empty) map["map-server-url"] = OSD.FromString(mapTileURL); if (profileURL != String.Empty) map["profile-server-url"] = OSD.FromString(profileURL); if (openIDURL != String.Empty) map["openid_url"] = OSD.FromString(openIDURL); if (searchURL != String.Empty) map["search"] = OSD.FromString(searchURL); if (ClassifiedFee != String.Empty) map["classified_fee"] = OSD.FromString(ClassifiedFee); if (m_buddyList != null) { map["buddy-list"] = ArrayListToOSDArray(m_buddyList.ToArray()); } map["login"] = OSD.FromString("true"); return map; } catch (Exception e) { m_log.Warn("[CLIENT]: LoginResponse: Error creating LLSD Response: " + e.Message); return LLFailedLoginResponse.InternalError.ToOSDMap(); } } public OSDArray ArrayListToOSDArray(ArrayList arrlst) { OSDArray llsdBack = new OSDArray(); foreach (Hashtable ht in arrlst) { OSDMap mp = new OSDMap(); foreach (DictionaryEntry deHt in ht) { mp.Add((string)deHt.Key, OSDString.FromObject(deHt.Value)); } llsdBack.Add(mp); } return llsdBack; } private static OSDArray WrapOSDMap(OSDMap wrapMe) { OSDArray array = new OSDArray(); array.Add(wrapMe); return array; } public void SetEventCategories(string category, string value) { // this.eventCategoriesHash[category] = value; //TODO } public void AddToUIConfig(string itemName, string item) { uiConfigHash[itemName] = item; } public void AddClassifiedCategory(Int32 ID, string categoryName) { Hashtable hash = new Hashtable(); hash["category_name"] = categoryName; hash["category_id"] = ID; classifiedCategories.Add(hash); // this.classifiedCategoriesHash.Clear(); } private static LLLoginResponse.BuddyList ConvertFriendListItem(FriendInfo[] friendsList) { LLLoginResponse.BuddyList buddylistreturn = new LLLoginResponse.BuddyList(); foreach (FriendInfo finfo in friendsList) { if (finfo.TheirFlags == -1) continue; LLLoginResponse.BuddyList.BuddyInfo buddyitem = new LLLoginResponse.BuddyList.BuddyInfo(finfo.Friend); // finfo.Friend may not be a simple uuid UUID friendID = UUID.Zero; if (UUID.TryParse(finfo.Friend, out friendID)) buddyitem.BuddyID = finfo.Friend; else { string tmp; if (Util.ParseUniversalUserIdentifier(finfo.Friend, out friendID, out tmp, out tmp, out tmp, out tmp)) buddyitem.BuddyID = friendID.ToString(); else // junk entry continue; } buddyitem.BuddyRightsHave = (int)finfo.TheirFlags; buddyitem.BuddyRightsGiven = (int)finfo.MyFlags; buddylistreturn.AddNewBuddy(buddyitem); } return buddylistreturn; } private InventoryData 
GetInventorySkeleton(List<InventoryFolderBase> folders) { UUID rootID = UUID.Zero; ArrayList AgentInventoryArray = new ArrayList(); Hashtable TempHash; foreach (InventoryFolderBase InvFolder in folders) { if (InvFolder.ParentID == UUID.Zero && InvFolder.Name == InventoryFolderBase.ROOT_FOLDER_NAME) { rootID = InvFolder.ID; } TempHash = new Hashtable(); TempHash["name"] = InvFolder.Name; TempHash["parent_id"] = InvFolder.ParentID.ToString(); TempHash["version"] = (Int32)InvFolder.Version; TempHash["type_default"] = (Int32)InvFolder.Type; TempHash["folder_id"] = InvFolder.ID.ToString(); AgentInventoryArray.Add(TempHash); } return new InventoryData(AgentInventoryArray, rootID); } /// <summary> /// Converts the inventory library skeleton into the form required by the rpc request. /// </summary> /// <returns></returns> protected virtual ArrayList GetInventoryLibrary(ILibraryService library) { Dictionary<UUID, InventoryFolderImpl> rootFolders = library.GetAllFolders(); // m_log.DebugFormat("[LLOGIN]: Library has {0} folders", rootFolders.Count); //Dictionary<UUID, InventoryFolderImpl> rootFolders = new Dictionary<UUID,InventoryFolderImpl>(); ArrayList folderHashes = new ArrayList(); foreach (InventoryFolderBase folder in rootFolders.Values) { Hashtable TempHash = new Hashtable(); TempHash["name"] = folder.Name; TempHash["parent_id"] = folder.ParentID.ToString(); TempHash["version"] = (Int32)folder.Version; TempHash["type_default"] = (Int32)folder.Type; TempHash["folder_id"] = folder.ID.ToString(); folderHashes.Add(TempHash); } return folderHashes; } /// <summary> /// /// </summary> /// <returns></returns> protected virtual ArrayList GetLibraryOwner(InventoryFolderImpl libFolder) { //for now create random inventory library owner Hashtable TempHash = new Hashtable(); TempHash["agent_id"] = "11111111-1111-0000-0000-000100bba000"; // libFolder.Owner ArrayList inventoryLibOwner = new ArrayList(); inventoryLibOwner.Add(TempHash); return inventoryLibOwner; } public class InventoryData { public ArrayList InventoryArray = null; public UUID RootFolderID = UUID.Zero; public InventoryData(ArrayList invList, UUID rootID) { InventoryArray = invList; RootFolderID = rootID; } } #region Properties public string Login { get { return login; } set { login = value; } } public string DST { get { return dst; } set { dst = value; } } public string StipendSinceLogin { get { return stipendSinceLogin; } set { stipendSinceLogin = value; } } public string Gendered { get { return gendered; } set { gendered = value; } } public string EverLoggedIn { get { return everLoggedIn; } set { everLoggedIn = value; } } public uint SimPort { get { return simPort; } set { simPort = value; } } public uint SimHttpPort { get { return simHttpPort; } set { simHttpPort = value; } } public string SimAddress { get { return simAddress; } set { simAddress = value; } } public UUID AgentID { get { return agentID; } set { agentID = value; } } public UUID SessionID { get { return sessionID; } set { sessionID = value; } } public UUID SecureSessionID { get { return secureSessionID; } set { secureSessionID = value; } } public UUID RealID { get { return realID; } set { realID = value; } } public Int32 CircuitCode { get { return circuitCode; } set { circuitCode = value; } } public uint RegionX { get { return regionX; } set { regionX = value; } } public uint RegionY { get { return regionY; } set { regionY = value; } } public int RegionSizeX { get; private set; } public int RegionSizeY { get; private set; } public string SunTexture { get { return sunTexture; 
} set { sunTexture = value; } } public string CloudTexture { get { return cloudTexture; } set { cloudTexture = value; } } public string MoonTexture { get { return moonTexture; } set { moonTexture = value; } } public string Firstname { get { return firstname; } set { firstname = value; } } public string Lastname { get { return lastname; } set { lastname = value; } } public string AgentAccess { get { return agentAccess; } set { agentAccess = value; } } public string AgentAccessMax { get { return agentAccessMax; } set { agentAccessMax = value; } } public string StartLocation { get { return startLocation; } set { startLocation = value; } } public string LookAt { get { return lookAt; } set { lookAt = value; } } public string SeedCapability { get { return seedCapability; } set { seedCapability = value; } } public string ErrorReason { get { return errorReason; } set { errorReason = value; } } public string ErrorMessage { get { return errorMessage; } set { errorMessage = value; } } public ArrayList InventoryRoot { get { return inventoryRoot; } set { inventoryRoot = value; } } public ArrayList InventorySkeleton { get { return agentInventory; } set { agentInventory = value; } } public ArrayList InventoryLibrary { get { return inventoryLibrary; } set { inventoryLibrary = value; } } public ArrayList InventoryLibraryOwner { get { return inventoryLibraryOwner; } set { inventoryLibraryOwner = value; } } public ArrayList InventoryLibRoot { get { return inventoryLibRoot; } set { inventoryLibRoot = value; } } public ArrayList ActiveGestures { get { return activeGestures; } set { activeGestures = value; } } public string Home { get { return home; } set { home = value; } } public string MapTileURL { get { return mapTileURL; } set { mapTileURL = value; } } public string ProfileURL { get { return profileURL; } set { profileURL = value; } } public string OpenIDURL { get { return openIDURL; } set { openIDURL = value; } } public string SearchURL { get { return searchURL; } set { searchURL = value; } } public string Message { get { return welcomeMessage; } set { welcomeMessage = value; } } public BuddyList BuddList { get { return m_buddyList; } set { m_buddyList = value; } } public string Currency { get { return currency; } set { currency = value; } } public string ClassifiedFee { get { return classifiedFee; } set { classifiedFee = value; } } public int MaxAgentGroups { get { return maxAgentGroups; } set { maxAgentGroups = value; } } public string DestinationsURL { get; set; } public string AvatarsURL { get; set; } #endregion public class UserInfo { public string firstname; public string lastname; public ulong homeregionhandle; public Vector3 homepos; public Vector3 homelookat; } public class BuddyList { public List<BuddyInfo> Buddies = new List<BuddyInfo>(); public void AddNewBuddy(BuddyInfo buddy) { if (!Buddies.Contains(buddy)) { Buddies.Add(buddy); } } public ArrayList ToArray() { ArrayList buddyArray = new ArrayList(); foreach (BuddyInfo buddy in Buddies) { buddyArray.Add(buddy.ToHashTable()); } return buddyArray; } public class BuddyInfo { public int BuddyRightsHave = 1; public int BuddyRightsGiven = 1; public string BuddyID; public BuddyInfo(string buddyID) { BuddyID = buddyID; } public BuddyInfo(UUID buddyID) { BuddyID = buddyID.ToString(); } public Hashtable ToHashTable() { Hashtable hTable = new Hashtable(); hTable["buddy_rights_has"] = BuddyRightsHave; hTable["buddy_rights_given"] = BuddyRightsGiven; hTable["buddy_id"] = BuddyID; return hTable; } } } } }
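// Illustrative sketch (not part of the original sources): shows the hashtable shape that
// BuddyList.ToArray() produces for each friend entry, using only System.Collections.
// The key names ("buddy_rights_has", "buddy_rights_given", "buddy_id") come from
// BuddyInfo.ToHashTable() above; the helper class and method names below are hypothetical.
using System.Collections;

public static class BuddyListShapeExample
{
    // Builds one buddy entry in the same shape BuddyInfo.ToHashTable() returns.
    public static Hashtable MakeBuddyEntry(string buddyId, int rightsHave, int rightsGiven)
    {
        Hashtable entry = new Hashtable();
        entry["buddy_rights_has"] = rightsHave;
        entry["buddy_rights_given"] = rightsGiven;
        entry["buddy_id"] = buddyId;
        return entry;
    }

    // Collects entries into the ArrayList form placed under responseData["buddy-list"].
    public static ArrayList MakeBuddyList(params Hashtable[] entries)
    {
        ArrayList list = new ArrayList();
        foreach (Hashtable entry in entries)
            list.Add(entry);
        return list;
    }
}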
using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using UnityEngine; #if UNITY_EDITOR using UnityEditor; #endif namespace UnityTest { public interface ITestComponent : IComparable<ITestComponent> { void EnableTest(bool enable); bool IsTestGroup(); GameObject gameObject { get; } string Name { get; } ITestComponent GetTestGroup(); bool IsExceptionExpected(string exceptionType); bool ShouldSucceedOnException(); double GetTimeout(); bool IsIgnored(); bool ShouldSucceedOnAssertions(); bool IsExludedOnThisPlatform(); } public class TestComponent : MonoBehaviour, ITestComponent { public static ITestComponent NullTestComponent = new NullTestComponentImpl(); public float timeout = 5; public bool ignored = false; public bool succeedAfterAllAssertionsAreExecuted = false; public bool expectException = false; public string expectedExceptionList = ""; public bool succeedWhenExceptionIsThrown = false; public IncludedPlatforms includedPlatforms = (IncludedPlatforms) ~0L; public string[] platformsToIgnore = null; public bool dynamic; public string dynamicTypeName; public bool IsExludedOnThisPlatform() { return platformsToIgnore != null && platformsToIgnore.Any(platform => platform == Application.platform.ToString()); } static bool IsAssignableFrom(Type a, Type b) { #if !UNITY_METRO return a.IsAssignableFrom(b); #else return false; #endif } public bool IsExceptionExpected(string exception) { exception = exception.Trim(); if (!expectException) return false; if(string.IsNullOrEmpty(expectedExceptionList.Trim())) return true; foreach (var expectedException in expectedExceptionList.Split(',').Select(e => e.Trim())) { if (exception == expectedException) return true; var exceptionType = Type.GetType(exception) ?? GetTypeByName(exception); var expectedExceptionType = Type.GetType(expectedException) ?? 
GetTypeByName(expectedException); if (exceptionType != null && expectedExceptionType != null && IsAssignableFrom(expectedExceptionType, exceptionType)) return true; } return false; } public bool ShouldSucceedOnException() { return succeedWhenExceptionIsThrown; } public double GetTimeout() { return timeout; } public bool IsIgnored() { return ignored; } public bool ShouldSucceedOnAssertions() { return succeedAfterAllAssertionsAreExecuted; } private static Type GetTypeByName(string className) { #if !UNITY_METRO return AppDomain.CurrentDomain.GetAssemblies().SelectMany(a => a.GetTypes()).FirstOrDefault(type => type.Name == className); #else return null; #endif } public void OnValidate() { if (timeout < 0.01f) timeout = 0.01f; } // Legacy [Flags] public enum IncludedPlatforms { WindowsEditor = 1 << 0, OSXEditor = 1 << 1, WindowsPlayer = 1 << 2, OSXPlayer = 1 << 3, LinuxPlayer = 1 << 4, MetroPlayerX86 = 1 << 5, MetroPlayerX64 = 1 << 6, MetroPlayerARM = 1 << 7, WindowsWebPlayer = 1 << 8, OSXWebPlayer = 1 << 9, Android = 1 << 10, // ReSharper disable once InconsistentNaming IPhonePlayer = 1 << 11, TizenPlayer = 1 << 12, WP8Player = 1 << 13, BB10Player = 1 << 14, NaCl = 1 << 15, PS3 = 1 << 16, XBOX360 = 1 << 17, WiiPlayer = 1 << 18, PSP2 = 1 << 19, PS4 = 1 << 20, PSMPlayer = 1 << 21, XboxOne = 1 << 22, } #region ITestComponent implementation public void EnableTest(bool enable) { if (enable && dynamic) { Type t = Type.GetType(dynamicTypeName); var s = gameObject.GetComponent(t) as MonoBehaviour; if (s != null) DestroyImmediate(s); gameObject.AddComponent(t); } if (gameObject.activeSelf != enable) gameObject.SetActive(enable); } public int CompareTo(ITestComponent obj) { if (obj == NullTestComponent) return 1; var result = gameObject.name.CompareTo(obj.gameObject.name); if (result == 0) result = gameObject.GetInstanceID().CompareTo(obj.gameObject.GetInstanceID()); return result; } public bool IsTestGroup() { for (int i = 0; i < gameObject.transform.childCount; i++) { var childTc = gameObject.transform.GetChild(i).GetComponent(typeof(TestComponent)); if (childTc != null) return true; } return false; } public string Name { get { return gameObject == null ? 
"" : gameObject.name; } } public ITestComponent GetTestGroup() { var parent = gameObject.transform.parent; if (parent == null) return NullTestComponent; return parent.GetComponent<TestComponent>(); } public override bool Equals(object o) { if (o is TestComponent) return this == (o as TestComponent); return false; } public override int GetHashCode() { return base.GetHashCode(); } public static bool operator ==(TestComponent a, TestComponent b) { if (ReferenceEquals(a, b)) return true; if (((object)a == null) || ((object)b == null)) return false; if (a.dynamic && b.dynamic) return a.dynamicTypeName == b.dynamicTypeName; if (a.dynamic || b.dynamic) return false; return a.gameObject == b.gameObject; } public static bool operator !=(TestComponent a, TestComponent b) { return !(a == b); } #endregion #region Static helpers public static TestComponent CreateDynamicTest(Type type) { var go = CreateTest(type.Name); go.hideFlags |= HideFlags.DontSave; go.SetActive(false); var tc = go.GetComponent<TestComponent>(); tc.dynamic = true; tc.dynamicTypeName = type.AssemblyQualifiedName; #if !UNITY_METRO foreach (var typeAttribute in type.GetCustomAttributes(false)) { if (typeAttribute is IntegrationTest.TimeoutAttribute) tc.timeout = (typeAttribute as IntegrationTest.TimeoutAttribute).timeout; else if (typeAttribute is IntegrationTest.IgnoreAttribute) tc.ignored = true; else if (typeAttribute is IntegrationTest.SucceedWithAssertions) tc.succeedAfterAllAssertionsAreExecuted = true; else if (typeAttribute is IntegrationTest.ExcludePlatformAttribute) tc.platformsToIgnore = (typeAttribute as IntegrationTest.ExcludePlatformAttribute).platformsToExclude; else if (typeAttribute is IntegrationTest.ExpectExceptions) { var attribute = (typeAttribute as IntegrationTest.ExpectExceptions); tc.expectException = true; tc.expectedExceptionList = string.Join(",", attribute.exceptionTypeNames); tc.succeedWhenExceptionIsThrown = attribute.succeedOnException; } } go.AddComponent(type); #endif // if !UNITY_METRO return tc; } public static GameObject CreateTest() { return CreateTest("New Test"); } private static GameObject CreateTest(string name) { var go = new GameObject(name); go.AddComponent<TestComponent>(); Undo.RegisterCreatedObjectUndo(go, "Created test"); return go; } public static List<TestComponent> FindAllTestsOnScene() { var tests = Resources.FindObjectsOfTypeAll (typeof(TestComponent)).Cast<TestComponent> (); #if UNITY_EDITOR tests = tests.Where( t => {var p = PrefabUtility.GetPrefabType(t); return p != PrefabType.Prefab && p != PrefabType.ModelPrefab;} ); #endif return tests.ToList (); } public static List<TestComponent> FindAllTopTestsOnScene() { return FindAllTestsOnScene().Where(component => component.gameObject.transform.parent == null).ToList(); } public static List<TestComponent> FindAllDynamicTestsOnScene() { return FindAllTestsOnScene().Where(t => t.dynamic).ToList(); } public static void DestroyAllDynamicTests() { foreach (var dynamicTestComponent in FindAllDynamicTestsOnScene()) DestroyImmediate(dynamicTestComponent.gameObject); } public static void DisableAllTests() { foreach (var t in FindAllTestsOnScene()) t.EnableTest(false); } public static bool AnyTestsOnScene() { return FindAllTestsOnScene().Any(); } public static bool AnyDynamicTestForCurrentScene() { #if UNITY_EDITOR return TestComponent.GetTypesWithHelpAttribute(EditorApplication.currentScene).Any(); #else return TestComponent.GetTypesWithHelpAttribute(Application.loadedLevelName).Any(); #endif } #endregion private sealed class 
NullTestComponentImpl : ITestComponent { public int CompareTo(ITestComponent other) { if (other == this) return 0; return -1; } public void EnableTest(bool enable) { } public bool IsTestGroup() { throw new NotImplementedException(); } public GameObject gameObject { get; private set; } public string Name { get { return ""; } } public ITestComponent GetTestGroup() { return null; } public bool IsExceptionExpected(string exceptionType) { throw new NotImplementedException(); } public bool ShouldSucceedOnException() { throw new NotImplementedException(); } public double GetTimeout() { throw new NotImplementedException(); } public bool IsIgnored() { throw new NotImplementedException(); } public bool ShouldSucceedOnAssertions() { throw new NotImplementedException(); } public bool IsExludedOnThisPlatform() { throw new NotImplementedException(); } } public static IEnumerable<Type> GetTypesWithHelpAttribute(string sceneName) { #if !UNITY_METRO foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies()) { Type[] types = null; try { types = assembly.GetTypes(); } catch (ReflectionTypeLoadException ex) { Debug.LogError("Failed to load types from: " + assembly.FullName); foreach (Exception loadEx in ex.LoaderExceptions) Debug.LogException(loadEx); } if (types == null) continue; foreach (Type type in types) { var attributes = type.GetCustomAttributes(typeof(IntegrationTest.DynamicTestAttribute), true); if (attributes.Length == 1) { var a = attributes.Single() as IntegrationTest.DynamicTestAttribute; if (a.IncludeOnScene(sceneName)) yield return type; } } } #else // if !UNITY_METRO yield break; #endif // if !UNITY_METRO } } }
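// Illustrative sketch (not part of the test-framework sources): mirrors the matching rule used by
// TestComponent.IsExceptionExpected above - a comma-separated list of exception type names, where a
// thrown exception matches if its name equals an entry or its type is assignable to an entry's type,
// and an empty list means "any exception is expected". The helper name MatchesExpectedException is
// hypothetical and the assembly-scanning fallback (GetTypeByName) is omitted for brevity.
using System;
using System.Linq;

public static class ExpectedExceptionMatchingExample
{
    public static bool MatchesExpectedException(string thrownTypeName, string expectedExceptionList)
    {
        if (string.IsNullOrEmpty(expectedExceptionList?.Trim()))
            return true; // empty list: any exception satisfies the expectation

        foreach (var expected in expectedExceptionList.Split(',').Select(e => e.Trim()))
        {
            if (thrownTypeName == expected)
                return true;

            // Fall back to a type-based check when both names resolve to loadable types.
            var thrownType = Type.GetType(thrownTypeName);
            var expectedType = Type.GetType(expected);
            if (thrownType != null && expectedType != null && expectedType.IsAssignableFrom(thrownType))
                return true;
        }

        return false;
    }
}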
// Copyright (c) 2007, Clarius Consulting, Manas Technology Solutions, InSTEDD, and Contributors. // All rights reserved. Licensed under the BSD 3-Clause License; see License.txt. using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Linq; using System.Linq.Expressions; using System.Reflection; using Moq.Expressions.Visitors; using Moq.Internals; using Moq.Properties; using TypeNameFormatter; namespace Moq { /// <summary> /// <see cref="ActionObserver"/> is a kind of <see cref="ExpressionReconstructor"/> that works by /// applying a <see cref="Action{T}"/> delegate to a light-weight proxy that records the invocation /// happening to it, and auto-generates the same kind of recording proxy for its return value. /// That way, a chain of invocation records is generated from which a LINQ expression tree can be /// reconstructed. /// </summary> internal sealed class ActionObserver : ExpressionReconstructor { public override Expression<Action<T>> ReconstructExpression<T>(Action<T> action, object[] ctorArgs = null) { using (var matcherObserver = MatcherObserver.Activate()) { // Create the root recording proxy: var root = (T)CreateProxy(typeof(T), ctorArgs, matcherObserver, out var rootRecorder); Exception error = null; try { // Execute the delegate. The root recorder will automatically "mock" return values // and so build a chain of recorders, whereby each one records a single invocation // in a method chain `o.X.Y.Z`: action.Invoke(root); } catch (Exception ex) { // Something went wrong. We don't return this error right away. We want to // rebuild the expression tree as far as possible for diagnostic purposes. error = ex; } // Start the expression tree with a parameter of type `T`: var actionParameters = action.GetMethodInfo().GetParameters(); var actionParameterName = actionParameters[actionParameters.Length - 1].Name; var rootExpression = Expression.Parameter(typeof(T), actionParameterName); Expression body = rootExpression; // Then step through one recorded invocation at a time: for (var recorder = rootRecorder; recorder != null; recorder = recorder.Next) { var invocation = recorder.Invocation; if (invocation != null) { body = Expression.Call(body, invocation.Method, GetArgumentExpressions(invocation, recorder.Matches.ToArray())); } else { // A recorder was set up, but it recorded no invocation. 
This means // that the invocation could not be intercepted: throw new ArgumentException( string.Format( CultureInfo.CurrentCulture, Resources.UnsupportedExpressionWithHint, $"{actionParameterName} => {body.ToStringFixed()}...", Resources.NextMemberNonInterceptable)); } } // Now we've either got no error and a completely reconstructed expression, or // we have an error and a partially reconstructed expression which we can use for // diagnostic purposes: if (error == null) { return Expression.Lambda<Action<T>>(body.Apply(UpgradePropertyAccessorMethods.Rewriter), rootExpression); } else { throw new ArgumentException( string.Format( CultureInfo.CurrentCulture, Resources.UnsupportedExpressionWithHint, $"{actionParameterName} => {body.ToStringFixed()}...", error.Message)); } } Expression[] GetArgumentExpressions(Invocation invocation, Match[] matches) { // First, let's pretend that all arguments are constant values: var parameterTypes = invocation.Method.GetParameterTypes(); var parameterCount = parameterTypes.Count; var expressions = new Expression[parameterCount]; for (int i = 0; i < parameterCount; ++i) { expressions[i] = Expression.Constant(invocation.Arguments[i], parameterTypes[i]); } // Now let's override the above constant expressions with argument matchers, if available: if (matches.Length > 0) { int matchIndex = 0; for (int argumentIndex = 0; matchIndex < matches.Length && argumentIndex < expressions.Length; ++argumentIndex) { // We are assuming that by default matchers return `default(T)`. If a matcher was used, // it will have left behind a `default(T)` argument, possibly coerced to the parameter type. // Therefore, we attempt to reproduce such coercions using `Convert.ChangeType`: Type defaultValueType = matches[matchIndex].RenderExpression.Type; object defaultValue = defaultValueType.GetDefaultValue(); try { defaultValue = Convert.ChangeType(defaultValue, parameterTypes[argumentIndex]); } catch { // Never mind, we tried. } if (!object.Equals(invocation.Arguments[argumentIndex], defaultValue)) { // This parameter has a non-`default` value. We therefore assume that it isn't // a value that was produced by a matcher. (See explanation in comment above.) continue; } if (parameterTypes[argumentIndex].IsAssignableFrom(defaultValue?.GetType() ?? defaultValueType)) { // We found a potential match. (Matcher type is assignment-compatible to parameter type.) if (matchIndex < matches.Length - 1 && !(argumentIndex < expressions.Length - 1 || CanDistribute(matchIndex + 1, argumentIndex + 1))) { // We get here if there are more matchers to distribute, // but we either: // * ran out of parameters to distribute over, or // * the remaining matchers can't be distributed over the remaining parameters. // In this case, we bail out, which will lead to an exception being thrown. break; } // The remaining matchers can be distributed over the remaining parameters, // so we can use up this matcher: expressions[argumentIndex] = new MatchExpression(matches[matchIndex]); ++matchIndex; } } if (matchIndex < matches.Length) { // If we get here, we can be almost certain that matchers weren't distributed properly // across the invocation's parameters. We could hope for the best and just leave it // at that; however, it's probably better to let client code know, so it can be either // adjusted or reported to Moq. 
throw new ArgumentException( string.Format( CultureInfo.CurrentCulture, Resources.MatcherAssignmentFailedDuringExpressionReconstruction, matches.Length, $"{invocation.Method.DeclaringType.GetFormattedName()}.{invocation.Method.Name}")); } bool CanDistribute(int msi, int asi) { var match = matches[msi]; var matchType = match.RenderExpression.Type; for (int ai = asi; ai < expressions.Length; ++ai) { if (parameterTypes[ai].IsAssignableFrom(matchType) && CanDistribute(msi + 1, ai + 1)) { return true; } } return false; } } // Finally, add explicit type casts (aka `Convert` nodes) where necessary: for (int i = 0; i < expressions.Length; ++i) { var argument = expressions[i]; var parameterType = parameterTypes[i]; if (argument.Type == parameterType) continue; // nullable type coercion: if (Nullable.GetUnderlyingType(parameterType) != null && Nullable.GetUnderlyingType(argument.Type) == null) { expressions[i] = Expression.Convert(argument, parameterType); } // boxing of value types (i.e. where a value-typed value is assigned to a reference-typed parameter): else if (argument.Type.IsValueType && !parameterType.IsValueType) { expressions[i] = Expression.Convert(argument, parameterType); } // if types don't match exactly and aren't assignment compatible: else if (argument.Type != parameterType && !parameterType.IsAssignableFrom(argument.Type)) { expressions[i] = Expression.Convert(argument, parameterType); } } return expressions; } } // Creates a proxy (way more light-weight than a `Mock<T>`!) with an invocation `Recorder` attached to it. private static IProxy CreateProxy(Type type, object[] ctorArgs, MatcherObserver matcherObserver, out Recorder recorder) { recorder = new Recorder(matcherObserver); return (IProxy)ProxyFactory.Instance.CreateProxy(type, recorder, Type.EmptyTypes, ctorArgs ?? new object[0]); } // Records an invocation, mocks return values, and builds a chain to the return value's recorder. // This record represents the basis for reconstructing an expression tree. private sealed class Recorder : IInterceptor { private readonly MatcherObserver matcherObserver; private int creationTimestamp; private Invocation invocation; private int invocationTimestamp; private IProxy returnValue; public Recorder(MatcherObserver matcherObserver) { Debug.Assert(matcherObserver != null); this.matcherObserver = matcherObserver; this.creationTimestamp = this.matcherObserver.GetNextTimestamp(); } public Invocation Invocation => this.invocation; public IEnumerable<Match> Matches { get { Debug.Assert(this.invocationTimestamp != default); return this.matcherObserver.GetMatchesBetween(this.creationTimestamp, this.invocationTimestamp); } } public Recorder Next => this.returnValue?.Interceptor as Recorder; public void Intercept(Invocation invocation) { var returnType = invocation.Method.ReturnType; // In theory, each recorder should receive exactly one invocation. // There are some reasons why that may not always be true: // // 1. You may be inspecting a `Recorder` object in your IDE, causing // additional calls e.g. to `ToString`. In this case, any such // subsequent calls should be ignored. // // 2. The proxied type may perform virtual calls in its own ctor. // In this case, *only* the last call is going to be relevant. 
// // Getting (2) right is more important than getting (1) right, so we // disable the following guard and allow subsequent calls to override // earlier ones: //if (this.invocation == null) { this.invocation = invocation; this.invocationTimestamp = this.matcherObserver.GetNextTimestamp(); if (returnType == typeof(void)) { this.returnValue = null; } else if (returnType.IsMockable()) { this.returnValue = CreateProxy(returnType, null, this.matcherObserver, out _); } else { throw new NotSupportedException(Resources.LastMemberHasNonInterceptableReturnType); } } if (returnType != typeof(void)) { invocation.ReturnValue = this.returnValue; } } } } }
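// Illustrative sketch (not part of Moq's sources): demonstrates the kind of explicit Convert nodes
// that GetArgumentExpressions above appends when an argument expression's type does not match the
// parameter type exactly (nullable lifting, boxing, or otherwise assignment-incompatible types).
// The helper name CoerceToParameterType is hypothetical.
using System;
using System.Linq.Expressions;

public static class ArgumentCoercionExample
{
    public static Expression CoerceToParameterType(Expression argument, Type parameterType)
    {
        if (argument.Type == parameterType)
            return argument;

        // Nullable lifting, e.g. int -> int?
        if (Nullable.GetUnderlyingType(parameterType) != null && Nullable.GetUnderlyingType(argument.Type) == null)
            return Expression.Convert(argument, parameterType);

        // Boxing, e.g. a value-typed argument assigned to a reference-typed parameter
        if (argument.Type.IsValueType && !parameterType.IsValueType)
            return Expression.Convert(argument, parameterType);

        // Any remaining assignment-incompatible pairing also gets an explicit cast
        if (!parameterType.IsAssignableFrom(argument.Type))
            return Expression.Convert(argument, parameterType);

        return argument;
    }
}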
#region LGPL License /* Axiom Game Engine Library Copyright (C) 2003 Axiom Project Team The overall design, and a majority of the core engine and rendering code contained within this library is a derivative of the open source Object Oriented Graphics Engine OGRE, which can be found at http://ogre.sourceforge.net. Many thanks to the OGRE team for maintaining such a high quality project. This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #endregion using System; using System.IO; using System.Collections; using System.Runtime.InteropServices; using Axiom.Core; using Axiom.MathLib; using Axiom.Graphics; using Axiom.Controllers; using Axiom.SceneManagers.Bsp.Collections; namespace Axiom.SceneManagers.Bsp { /// <summary> /// Class for recording Quake3 shaders. /// </summary> /// <remarks> /// This is a temporary holding area since shaders are actually converted into /// Material objects for use in the engine proper. However, because we have to read /// in shader definitions en masse (because they are stored in shared .shader files) /// without knowing which will actually be used, we store their definitions here /// temporarily since their instantiations as Materials would use precious resources /// because of the automatic loading of textures etc. 
/// </remarks> public class Quake3Shader : Resource { #region Protected members protected uint flags; protected ShaderPassCollection pass; protected bool farBox; // Skybox protected string farBoxName; protected bool skyDome; protected float cloudHeight; // Skydome protected ShaderDeformFunc deformFunc; protected float[] deformParams; protected ManualCullingMode cullMode; protected bool fog; protected ColorEx fogColour; protected float fogDistance; #endregion #region Properties public uint Flags { get { return flags; } set { flags = value; } } public int NumPasses { get { return pass.Count; } } public ShaderPassCollection Pass { get { return pass; } set { pass = value; } } public bool Farbox { get { return farBox; } set { farBox = value; } } public string FarboxName { get { return farBoxName; } set { farBoxName = value; } } public bool SkyDome { get { return skyDome; } set { skyDome = value; } } public float CloudHeight { get { return cloudHeight; } set { cloudHeight = value; } } public ShaderDeformFunc DeformFunc { get { return deformFunc; } set { deformFunc = value; } } public float[] DeformParams { get { return deformParams; } set { deformParams = value; } } public ManualCullingMode CullingMode { get { return cullMode; } set { cullMode = value; } } public bool Fog { get { return fog; } set { fog = value; } } public ColorEx FogColour { get { return fogColour; } set { fogColour = value; } } public float FogDistance { get { return fogDistance; } set { fogDistance = value; } } #endregion #region Constructor /// <summary> /// Default constructor - used by <see cref="Quake3ShaderManager"/> (do not call directly) /// </summary> /// <param name="name">Shader name.</param> public Quake3Shader(string name) { this.name = name; deformFunc = ShaderDeformFunc.None; deformParams = new float[5]; cullMode = ManualCullingMode.Back; pass = new ShaderPassCollection(); } #endregion #region Methods protected string GetAlternateName(string textureName) { // Get alternative JPG to TGA and vice versa int pos; string ext, baseName; pos = textureName.LastIndexOf("."); ext = textureName.Substring(pos,4).ToLower(); baseName = textureName.Substring(0,pos); if (ext == ".jpg") { return baseName + ".tga"; } else { return baseName + ".jpg"; } } /// <summary> /// Creates this shader as an OGRE material. /// </summary> /// <remarks> /// Creates a new material based on this shaders settings and registers it with the /// SceneManager passed in. /// Material name is in the format of: shader#lightmap. 
/// </remarks> /// <param name="sm">SceneManager to register the material with.</param> /// <param name="lightmapNumber">Lightmap number</param> public Material CreateAsMaterial(SceneManager sm, int lightmapNumber) { string materialName = String.Format("{0}#{1}", name, lightmapNumber); Material material = sm.CreateMaterial(materialName); LogManager.Instance.Write("Using Q3 shader {0}", name); for(int p = 0; p < pass.Count; ++p) { TextureUnitState t; // Create basic texture t = LoadMaterialTextures(p, lightmapNumber, material); // Blending if(p == 0) { // scene blend material.SetSceneBlending(pass[p].blendSrc, pass[p].blendDest); if(material.IsTransparent && (pass[p].blendSrc != SceneBlendFactor.SourceAlpha)) material.DepthWrite = false; t.SetColorOperation(LayerBlendOperation.Replace); } else { if(pass[p].customBlend) { // Fallback for now t.SetColorOperation(LayerBlendOperation.Modulate); } else { t.SetColorOperation(pass[p].blend); } } // Tex coords if(pass[p].texGen == ShaderTextureGen.Base) t.TextureCoordSet = 0; else if(pass[p].texGen == ShaderTextureGen.Lightmap) t.TextureCoordSet = 1; else if(pass[p].texGen == ShaderTextureGen.Environment) t.SetEnvironmentMap(true, EnvironmentMap.Planar); // Tex mod // Scale t.SetTextureScaleU(pass[p].tcModScale[0]); t.SetTextureScaleV(pass[p].tcModScale[1]); CreateProceduralTextureMods(p, t); // Address mode t.TextureAddressing = pass[p].addressMode; // Alpha mode t.SetAlphaRejectSettings(pass[p].alphaFunc, pass[p].alphaVal); } // Do farbox (create new material) // Do skydome (use this material) if(skyDome) { float halfAngle = 0.5f * (0.5f * (4.0f * (float) Math.Atan(1.0f))); float sin = (float) Math.Sin(halfAngle); // Quake3 is always aligned with Z upwards Quaternion q = new Quaternion( (float) Math.Cos(halfAngle), sin * Vector3.UnitX.x, sin * Vector3.UnitY.y, sin * Vector3.UnitX.z ); // Also draw last, and make close to camera (far clip plane is shorter) sm.SetSkyDome(true, materialName, 20 - (cloudHeight / 256 * 18), 12, 2000, false, q); } material.CullingMode = Axiom.Graphics.CullingMode.None; material.ManualCullMode = cullMode; material.Lighting = false; material.Load(); return material; } private void CreateProceduralTextureMods(int p, TextureUnitState t) { // Procedural mods // Custom - don't use mod if generating environment // Because I do env a different way it look horrible if(pass[p].texGen != ShaderTextureGen.Environment) { if(pass[p].tcModRotate != 0.0f) t.SetRotateAnimation(pass[p].tcModRotate); if((pass[p].tcModScroll[0] != 0.0f) || (pass[p].tcModScroll[1] != 0.0f)) { if(pass[p].tcModTurbOn) { // Turbulent scroll if(pass[p].tcModScroll[0] != 0.0f) { t.SetTransformAnimation(TextureTransform.TranslateU, WaveformType.Sine, pass[p].tcModTurb[0], pass[p].tcModTurb[3], pass[p].tcModTurb[2], pass[p].tcModTurb[1]); } if(pass[p].tcModScroll[1] != 0.0f) { t.SetTransformAnimation(TextureTransform.TranslateV, WaveformType.Sine, pass[p].tcModTurb[0], pass[p].tcModTurb[3], pass[p].tcModTurb[2], pass[p].tcModTurb[1]); } } else { // Constant scroll t.SetScrollAnimation(pass[p].tcModScroll[0], pass[p].tcModScroll[1]); } } if(pass[p].tcModStretchWave != ShaderWaveType.None) { WaveformType wft = WaveformType.Sine; switch(pass[p].tcModStretchWave) { case ShaderWaveType.Sin: wft = WaveformType.Sine; break; case ShaderWaveType.Triangle: wft = WaveformType.Triangle; break; case ShaderWaveType.Square: wft = WaveformType.Square; break; case ShaderWaveType.SawTooth: wft = WaveformType.Sawtooth; break; case ShaderWaveType.InverseSawtooth: wft = 
WaveformType.InverseSawtooth; break; } // Create wave-based stretcher t.SetTransformAnimation(TextureTransform.ScaleU, wft, pass[p].tcModStretchParams[3], pass[p].tcModStretchParams[0], pass[p].tcModStretchParams[2], pass[p].tcModStretchParams[1]); t.SetTransformAnimation(TextureTransform.ScaleV, wft, pass[p].tcModStretchParams[3], pass[p].tcModStretchParams[0], pass[p].tcModStretchParams[2], pass[p].tcModStretchParams[1]); } } } private TextureUnitState LoadMaterialTextures(int p, int lightmapNumber, Material material) { TextureUnitState t; if(pass[p].textureName == "$lightmap") { string lightmapName = String.Format("@lightmap{0}", lightmapNumber); t = material.GetTechnique(0).GetPass(0).CreateTextureUnitState(lightmapName); } // Animated texture support else if(pass[p].animNumFrames > 0) { float sequenceTime = pass[p].animNumFrames / pass[p].animFps; /* Pre-load textures We need to know if each one was loaded OK since extensions may change for each Quake3 can still include alternate extension filenames e.g. jpg instead of tga Pain in the arse - have to check for each frame as letters<n>.tga for example is different per frame! */ for(uint alt = 0; alt < pass[p].animNumFrames; ++alt) { try { TextureManager.Instance.Load(pass[p].frames[alt]); } catch { // Try alternate extension pass[p].frames[alt] = GetAlternateName(pass[p].frames[alt]); try { TextureManager.Instance.Load(pass[p].frames[alt]); } catch { // stuffed - no texture } } } t = material.GetTechnique(0).GetPass(0).CreateTextureUnitState(""); t.SetAnimatedTextureName(pass[p].frames, pass[p].animNumFrames, sequenceTime); if(t.IsBlank) { for(int alt = 0; alt < pass[p].animNumFrames; alt++) pass[p].frames[alt] = GetAlternateName(pass[p].frames[alt]); t.SetAnimatedTextureName(pass[p].frames, pass[p].animNumFrames, sequenceTime); } } else { // Quake3 can still include alternate extension filenames e.g. jpg instead of tga // Pain in the arse - have to check for failure try { TextureManager.Instance.Load(pass[p].textureName); } catch { // Try alternate extension pass[p].textureName = GetAlternateName(pass[p].textureName); try { TextureManager.Instance.Load(pass[p].textureName); } catch { // stuffed - no texture } } t = material.GetTechnique(0).GetPass(0).CreateTextureUnitState(pass[p].textureName); } return t; } #endregion #region Implementation of Resource public override void Load() { // Do nothing. } public override void Unload() { // Do nothing. } #endregion } public class ShaderPass { public uint flags; public string textureName; public ShaderTextureGen texGen; // Multitexture blend public LayerBlendOperation blend; // Multipass blends (Quake3 only supports multipass?? Surely not?) 
public SceneBlendFactor blendSrc; public SceneBlendFactor blendDest; public bool customBlend; public CompareFunction depthFunc; public TextureAddressing addressMode; // TODO - alphaFunc public ShaderGen rgbGenFunc; public ShaderWaveType rgbGenWave; public float[] rgbGenParams = new float[4]; // base, amplitude, phase, frequency public float[] tcModScale = new float[2]; public float tcModRotate; public float[] tcModScroll = new float[2]; public float[] tcModTransform = new float[6]; public bool tcModTurbOn; public float[] tcModTurb = new float[4]; public ShaderWaveType tcModStretchWave; public float[] tcModStretchParams = new float[4]; // base, amplitude, phase, frequency public CompareFunction alphaFunc; public byte alphaVal; public float animFps; public int animNumFrames; public string[] frames = new string[32]; }; [Flags] public enum ShaderFlags { NoCull = 1 << 0, Transparent = 1 << 1, DepthWrite = 1 << 2, Sky = 1 << 3, NoMipMaps = 1 << 4, NeedColours = 1 << 5, DeformVerts = 1 << 6 } [Flags] public enum ShaderPassFlags { Lightmap = 1 << 0, Blend = 1 << 1, AlphaFunc = 1 << 2, TCMod = 1 << 3, AnimMap = 1 << 5, TCGenEnv = 1 << 6 } public enum ShaderGen { Identity = 0, Wave, Vertex } public enum ShaderTextureGen { Base = 0, Lightmap, Environment } public enum ShaderWaveType { None = 0, Sin, Triangle, Square, SawTooth, InverseSawtooth } public enum ShaderDeformFunc { None = 0, Bulge, Wave, Normal, Move, AutoSprite, AutoSprite2 } }
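// Illustrative sketch (not part of the Axiom sources): standalone version of the jpg <-> tga
// fallback used by Quake3Shader.GetAlternateName above, since Quake3 shader scripts may reference
// either extension for the same texture. The helper name GetAlternateTextureName is hypothetical,
// and the sketch assumes the name carries an extension, as the Quake3 scripts do.
using System;

public static class AlternateTextureNameExample
{
    public static string GetAlternateTextureName(string textureName)
    {
        int pos = textureName.LastIndexOf('.');
        string ext = textureName.Substring(pos).ToLowerInvariant();
        string baseName = textureName.Substring(0, pos);

        // Swap between the two extensions Quake3 content commonly ships with.
        return ext == ".jpg" ? baseName + ".tga" : baseName + ".jpg";
    }
}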
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.ObjectModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Windows; using System.Windows.Controls; using System.Windows.Media.Animation; using MahApps.Metro.ValueBoxes; namespace MahApps.Metro.Controls { /// <summary> /// enumeration for the different transition types /// </summary> public enum TransitionType { /// <summary> /// Use the VisualState DefaultTransition /// </summary> Default, /// <summary> /// Use the VisualState Normal /// </summary> Normal, /// <summary> /// Use the VisualState UpTransition /// </summary> Up, /// <summary> /// Use the VisualState DownTransition /// </summary> Down, /// <summary> /// Use the VisualState RightTransition /// </summary> Right, /// <summary> /// Use the VisualState RightReplaceTransition /// </summary> RightReplace, /// <summary> /// Use the VisualState LeftTransition /// </summary> Left, /// <summary> /// Use the VisualState LeftReplaceTransition /// </summary> LeftReplace, /// <summary> /// Use a custom VisualState, the name must be set using CustomVisualStatesName property /// </summary> Custom } /// <summary> /// A ContentControl that animates content as it loads and unloads. /// </summary> public class TransitioningContentControl : ContentControl { internal const string PresentationGroup = "PresentationStates"; internal const string HiddenState = "Hidden"; internal const string PreviousContentPresentationSitePartName = "PreviousContentPresentationSite"; internal const string CurrentContentPresentationSitePartName = "CurrentContentPresentationSite"; private ContentPresenter currentContentPresentationSite; private ContentPresenter previousContentPresentationSite; private bool allowIsTransitioningPropertyWrite; private Storyboard currentTransition; public event RoutedEventHandler TransitionCompleted; public const TransitionType DefaultTransitionState = TransitionType.Default; public static readonly DependencyProperty IsTransitioningProperty = DependencyProperty.Register(nameof(IsTransitioning), typeof(bool), typeof(TransitioningContentControl), new PropertyMetadata(BooleanBoxes.FalseBox, OnIsTransitioningPropertyChanged)); public static readonly DependencyProperty TransitionProperty = DependencyProperty.Register(nameof(Transition), typeof(TransitionType), typeof(TransitioningContentControl), new FrameworkPropertyMetadata(TransitionType.Default, FrameworkPropertyMetadataOptions.AffectsArrange | FrameworkPropertyMetadataOptions.Inherits, OnTransitionPropertyChanged)); public static readonly DependencyProperty RestartTransitionOnContentChangeProperty = DependencyProperty.Register(nameof(RestartTransitionOnContentChange), typeof(bool), typeof(TransitioningContentControl), new PropertyMetadata(BooleanBoxes.FalseBox, OnRestartTransitionOnContentChangePropertyChanged)); public static readonly DependencyProperty CustomVisualStatesProperty = DependencyProperty.Register(nameof(CustomVisualStates), typeof(ObservableCollection<VisualState>), typeof(TransitioningContentControl), new PropertyMetadata(null)); public static readonly DependencyProperty CustomVisualStatesNameProperty = DependencyProperty.Register(nameof(CustomVisualStatesName), typeof(string), typeof(TransitioningContentControl), new PropertyMetadata("CustomTransition")); public 
ObservableCollection<VisualState> CustomVisualStates { get { return (ObservableCollection<VisualState>)this.GetValue(CustomVisualStatesProperty); } set { this.SetValue(CustomVisualStatesProperty, value); } } /// <summary> /// Gets or sets the name of the custom transition visual state. /// </summary> public string CustomVisualStatesName { get { return (string)this.GetValue(CustomVisualStatesNameProperty); } set { this.SetValue(CustomVisualStatesNameProperty, value); } } /// <summary> /// Gets/sets if the content is transitioning. /// </summary> public bool IsTransitioning { get { return (bool)this.GetValue(IsTransitioningProperty); } private set { this.allowIsTransitioningPropertyWrite = true; this.SetValue(IsTransitioningProperty, BooleanBoxes.Box(value)); this.allowIsTransitioningPropertyWrite = false; } } public TransitionType Transition { get { return (TransitionType)this.GetValue(TransitionProperty); } set { this.SetValue(TransitionProperty, value); } } public bool RestartTransitionOnContentChange { get { return (bool)this.GetValue(RestartTransitionOnContentChangeProperty); } set { this.SetValue(RestartTransitionOnContentChangeProperty, BooleanBoxes.Box(value)); } } private static void OnIsTransitioningPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var source = (TransitioningContentControl)d; if (!source.allowIsTransitioningPropertyWrite) { source.IsTransitioning = (bool)e.OldValue; throw new InvalidOperationException(); } } internal Storyboard CurrentTransition { get { return this.currentTransition; } set { // decouple event if (this.currentTransition != null) { this.currentTransition.Completed -= this.OnTransitionCompleted; } this.currentTransition = value; if (this.currentTransition != null) { this.currentTransition.Completed += this.OnTransitionCompleted; } } } private static void OnTransitionPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var source = (TransitioningContentControl)d; var oldTransition = (TransitionType)e.OldValue; var newTransition = (TransitionType)e.NewValue; if (source.IsTransitioning) { source.AbortTransition(); } // find new transition Storyboard newStoryboard = source.GetStoryboard(newTransition); // unable to find the transition. 
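// Either the template's presentation group is not available yet (defer and re-check later),
// or the requested transition name is invalid (revert to the old value and throw).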
if (newStoryboard == null) { // could be during initialization of xaml that presentationgroups was not yet defined if (VisualStates.TryGetVisualStateGroup(source, PresentationGroup) == null) { // will delay check source.CurrentTransition = null; } else { // revert to old value source.SetValue(TransitionProperty, oldTransition); throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Temporary removed exception message", newTransition)); } } else { source.CurrentTransition = newStoryboard; } } private static void OnRestartTransitionOnContentChangePropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { ((TransitioningContentControl)d).OnRestartTransitionOnContentChangeChanged((bool)e.OldValue, (bool)e.NewValue); } protected virtual void OnRestartTransitionOnContentChangeChanged(bool oldValue, bool newValue) { } public TransitioningContentControl() { this.CustomVisualStates = new ObservableCollection<VisualState>(); this.DefaultStyleKey = typeof(TransitioningContentControl); } public override void OnApplyTemplate() { if (this.IsTransitioning) { this.AbortTransition(); } if (this.CustomVisualStates != null && this.CustomVisualStates.Any()) { var presentationGroup = VisualStates.TryGetVisualStateGroup(this, PresentationGroup); if (presentationGroup != null) { foreach (var state in this.CustomVisualStates) { presentationGroup.States.Add(state); } } } base.OnApplyTemplate(); this.previousContentPresentationSite = this.GetTemplateChild(PreviousContentPresentationSitePartName) as ContentPresenter; this.currentContentPresentationSite = this.GetTemplateChild(CurrentContentPresentationSitePartName) as ContentPresenter; // hookup currenttransition Storyboard transition = this.GetStoryboard(this.Transition); this.CurrentTransition = transition; if (transition == null) { var invalidTransition = this.Transition; // revert to default this.Transition = DefaultTransitionState; throw new MahAppsException($"'{invalidTransition}' transition could not be found!"); } VisualStateManager.GoToState(this, HiddenState, false); } protected override void OnContentChanged(object oldContent, object newContent) { base.OnContentChanged(oldContent, newContent); if (oldContent != newContent) { this.StartTransition(oldContent, newContent); } } [SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "newContent", Justification = "Should be used in the future.")] private void StartTransition(object oldContent, object newContent) { // both presenters must be available, otherwise a transition is useless. if (this.currentContentPresentationSite != null && this.previousContentPresentationSite != null) { if (this.RestartTransitionOnContentChange) { this.CurrentTransition.Completed -= this.OnTransitionCompleted; } this.currentContentPresentationSite.SetCurrentValue(ContentPresenter.ContentProperty, newContent); this.previousContentPresentationSite.SetCurrentValue(ContentPresenter.ContentProperty, oldContent); // and start a new transition if (!this.IsTransitioning || this.RestartTransitionOnContentChange) { if (this.RestartTransitionOnContentChange) { this.CurrentTransition.Completed += this.OnTransitionCompleted; } this.IsTransitioning = true; VisualStateManager.GoToState(this, HiddenState, false); VisualStateManager.GoToState(this, this.GetTransitionName(this.Transition), true); } } } /// <summary> /// Reload the current transition if the content is the same. 
/// </summary> public void ReloadTransition() { // both presenters must be available, otherwise a transition is useless. if (this.currentContentPresentationSite != null && this.previousContentPresentationSite != null) { if (this.RestartTransitionOnContentChange) { this.CurrentTransition.Completed -= this.OnTransitionCompleted; } if (!this.IsTransitioning || this.RestartTransitionOnContentChange) { if (this.RestartTransitionOnContentChange) { this.CurrentTransition.Completed += this.OnTransitionCompleted; } this.IsTransitioning = true; VisualStateManager.GoToState(this, HiddenState, false); VisualStateManager.GoToState(this, this.GetTransitionName(this.Transition), true); } } } private void OnTransitionCompleted(object sender, EventArgs e) { var clockGroup = sender as ClockGroup; this.AbortTransition(); if (clockGroup == null || clockGroup.CurrentState == ClockState.Stopped) { this.TransitionCompleted?.Invoke(this, new RoutedEventArgs()); } } public void AbortTransition() { // go to normal state and release our hold on the old content. VisualStateManager.GoToState(this, HiddenState, false); this.IsTransitioning = false; this.previousContentPresentationSite?.SetCurrentValue(ContentPresenter.ContentProperty, null); } private Storyboard GetStoryboard(TransitionType newTransition) { VisualStateGroup presentationGroup = VisualStates.TryGetVisualStateGroup(this, PresentationGroup); Storyboard newStoryboard = null; if (presentationGroup != null) { var transitionName = this.GetTransitionName(newTransition); newStoryboard = presentationGroup.States .OfType<VisualState>() .Where(state => state.Name == transitionName) .Select(state => state.Storyboard) .FirstOrDefault(); } return newStoryboard; } private string GetTransitionName(TransitionType transition) { switch (transition) { default: case TransitionType.Default: return "DefaultTransition"; case TransitionType.Normal: return "Normal"; case TransitionType.Up: return "UpTransition"; case TransitionType.Down: return "DownTransition"; case TransitionType.Right: return "RightTransition"; case TransitionType.RightReplace: return "RightReplaceTransition"; case TransitionType.Left: return "LeftTransition"; case TransitionType.LeftReplace: return "LeftReplaceTransition"; case TransitionType.Custom: return this.CustomVisualStatesName; } } } }
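// Illustrative usage sketch (not part of the MahApps.Metro sources): assumes a WPF application
// that references MahApps.Metro so that TransitioningContentControl and TransitionType resolve.
// Assigning Content later triggers the transition selected via the Transition property.
// The helper class and method names below are hypothetical.
using MahApps.Metro.Controls;

public static class TransitioningContentControlUsageExample
{
    public static TransitioningContentControl CreateLeftTransitioningHost(object initialContent)
    {
        var host = new TransitioningContentControl
        {
            Transition = TransitionType.Left,
            RestartTransitionOnContentChange = true,
            Content = initialContent
        };
        return host;
    }
}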
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Net.Test.Common; using System.Runtime.InteropServices; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.DotNet.XUnitExtensions; using Xunit; namespace System.Net.Sockets.Tests { public partial class SocketOptionNameTest { private static bool SocketsReuseUnicastPortSupport => Capability.SocketsReuseUnicastPortSupport().HasValue; [ConditionalFact(nameof(SocketsReuseUnicastPortSupport))] public void ReuseUnicastPort_CreateSocketGetOption() { using (var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { if (Capability.SocketsReuseUnicastPortSupport().Value) { Assert.Equal(0, (int)socket.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseUnicastPort)); } else { Assert.Throws<SocketException>(() => socket.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseUnicastPort)); } } } [ConditionalFact(nameof(SocketsReuseUnicastPortSupport))] public void ReuseUnicastPort_CreateSocketSetOption() { using (var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { if (Capability.SocketsReuseUnicastPortSupport().Value) { socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseUnicastPort, 0); int optionValue = (int)socket.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseUnicastPort); Assert.Equal(0, optionValue); } else { Assert.Throws<SocketException>(() => socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseUnicastPort, 1)); } } } [Fact] public void MulticastOption_CreateSocketSetGetOption_GroupAndInterfaceIndex_SetSucceeds_GetThrows() { int interfaceIndex = 0; IPAddress groupIp = IPAddress.Parse("239.1.2.3"); using (Socket socket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.AddMembership, new MulticastOption(groupIp, interfaceIndex)); Assert.Throws<SocketException>(() => socket.GetSocketOption(SocketOptionLevel.IP, SocketOptionName.AddMembership)); } } [ActiveIssue(31609, TargetFrameworkMonikers.Uap)] [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsNanoServer))] // Skip on Nano: dotnet/corefx #29929 public async Task MulticastInterface_Set_AnyInterface_Succeeds() { // On all platforms, index 0 means "any interface" await MulticastInterface_Set_Helper(0); } [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsNanoServer))] // Skip on Nano: dotnet/corefx #29929 [PlatformSpecific(TestPlatforms.Windows)] // see comment below [SkipOnTargetFramework(TargetFrameworkMonikers.Uap)] // UWP Apps are normally blocked to send network traffic on loopback. public async Task MulticastInterface_Set_Loopback_Succeeds() { // On Windows, we can apparently assume interface 1 is "loopback." On other platforms, this is not a // valid assumption. We could maybe use NetworkInterface.LoopbackInterfaceIndex to get the index, but // this would introduce a dependency on System.Net.NetworkInformation, which depends on System.Net.Sockets, // which is what we're testing here.... So for now, we'll just assume "loopback == 1" and run this on // Windows only. 
await MulticastInterface_Set_Helper(1); } private async Task MulticastInterface_Set_Helper(int interfaceIndex) { IPAddress multicastAddress = IPAddress.Parse("239.1.2.3"); string message = "hello"; int port; using (Socket receiveSocket = CreateBoundUdpSocket(out port), sendSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { receiveSocket.ReceiveTimeout = 1000; receiveSocket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.AddMembership, new MulticastOption(multicastAddress, interfaceIndex)); sendSocket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.MulticastInterface, IPAddress.HostToNetworkOrder(interfaceIndex)); var receiveBuffer = new byte[1024]; var receiveTask = receiveSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), SocketFlags.None); for (int i = 0; i < TestSettings.UDPRedundancy; i++) { sendSocket.SendTo(Encoding.UTF8.GetBytes(message), new IPEndPoint(multicastAddress, port)); } var cts = new CancellationTokenSource(); Assert.True(await Task.WhenAny(receiveTask, Task.Delay(30_000, cts.Token)) == receiveTask, "Waiting for received data timed out"); cts.Cancel(); int bytesReceived = await receiveTask; string receivedMessage = Encoding.UTF8.GetString(receiveBuffer, 0, bytesReceived); Assert.Equal(receivedMessage, message); } } [Fact] public void MulticastInterface_Set_InvalidIndex_Throws() { int interfaceIndex = 31415; using (Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { Assert.Throws<SocketException>(() => s.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.MulticastInterface, IPAddress.HostToNetworkOrder(interfaceIndex))); } } [ActiveIssue(31609, TargetFrameworkMonikers.Uap)] [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsNanoServer))] // Skip on Nano: dotnet/corefx #29929 [PlatformSpecific(~TestPlatforms.OSX)] public async Task MulticastInterface_Set_IPv6_AnyInterface_Succeeds() { if (PlatformDetection.IsRedHatFamily7) { // RH7 seems to have issues with multicast in Azure. Same code and setup can pass when executed outside of Azure. throw new SkipTestException("IPv6 multicast environment not available"); } // On all platforms, index 0 means "any interface" await MulticastInterface_Set_IPv6_Helper(0); } [Fact] public void MulticastTTL_Set_IPv4_Succeeds() { using (Socket socket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { // This should not throw. We currently do not have good mechanism how to verify that the TTL/Hops is actually set. int ttl = (int)socket.GetSocketOption(SocketOptionLevel.IP, SocketOptionName.MulticastTimeToLive); ttl += 1; socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.MulticastTimeToLive, ttl); Assert.Equal(ttl, (int)socket.GetSocketOption(SocketOptionLevel.IP, SocketOptionName.MulticastTimeToLive)); } } [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsNanoServer))] // Skip on Nano: dotnet/corefx #29929 public void MulticastTTL_Set_IPv6_Succeeds() { using (Socket socket = new Socket(AddressFamily.InterNetworkV6, SocketType.Dgram, ProtocolType.Udp)) { // This should not throw. We currently do not have good mechanism how to verify that the TTL/Hops is actually set. 
int ttl = (int)socket.GetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.MulticastTimeToLive); ttl += 1; socket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.MulticastTimeToLive, ttl); Assert.Equal(ttl, (int)socket.GetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.MulticastTimeToLive)); } } [Theory] [InlineData(AddressFamily.InterNetwork)] [InlineData(AddressFamily.InterNetworkV6)] public void Ttl_Set_Succeeds(AddressFamily af) { using (Socket socket = new Socket(af, SocketType.Dgram, ProtocolType.Udp)) { short newTtl = socket.Ttl; // Change default ttl. newTtl += (short)((newTtl < 255) ? 1 : -1); socket.Ttl = newTtl; Assert.Equal(newTtl, socket.Ttl); } } [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsNanoServer))] // Skip on Nano: dotnet/corefx #29929 [PlatformSpecific(TestPlatforms.Windows)] [SkipOnTargetFramework(TargetFrameworkMonikers.Uap)] // UWP Apps are normally blocked to send network traffic on loopback. public async Task MulticastInterface_Set_IPv6_Loopback_Succeeds() { // On Windows, we can apparently assume interface 1 is "loopback." On other platforms, this is not a // valid assumption. We could maybe use NetworkInterface.LoopbackInterfaceIndex to get the index, but // this would introduce a dependency on System.Net.NetworkInformation, which depends on System.Net.Sockets, // which is what we're testing here.... So for now, we'll just assume "loopback == 1" and run this on // Windows only. await MulticastInterface_Set_IPv6_Helper(1); } private async Task MulticastInterface_Set_IPv6_Helper(int interfaceIndex) { IPAddress multicastAddress = IPAddress.Parse("ff11::1:1"); string message = "hello"; int port; using (Socket receiveSocket = CreateBoundUdpIPv6Socket(out port), sendSocket = new Socket(AddressFamily.InterNetworkV6, SocketType.Dgram, ProtocolType.Udp)) { receiveSocket.ReceiveTimeout = 1000; receiveSocket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.AddMembership, new IPv6MulticastOption(multicastAddress, interfaceIndex)); sendSocket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.MulticastInterface, interfaceIndex); var receiveBuffer = new byte[1024]; var receiveTask = receiveSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), SocketFlags.None); for (int i = 0; i < TestSettings.UDPRedundancy; i++) { sendSocket.SendTo(Encoding.UTF8.GetBytes(message), new IPEndPoint(multicastAddress, port)); } var cts = new CancellationTokenSource(); Assert.True(await Task.WhenAny(receiveTask, Task.Delay(30_000, cts.Token)) == receiveTask, "Waiting for received data timed out"); cts.Cancel(); int bytesReceived = await receiveTask; string receivedMessage = Encoding.UTF8.GetString(receiveBuffer, 0, bytesReceived); Assert.Equal(receivedMessage, message); } } [Fact] public void MulticastInterface_Set_IPv6_InvalidIndex_Throws() { int interfaceIndex = 31415; using (Socket s = new Socket(AddressFamily.InterNetworkV6, SocketType.Dgram, ProtocolType.Udp)) { Assert.Throws<SocketException>(() => s.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.MulticastInterface, interfaceIndex)); } } [ConditionalTheory(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // In WSL, the connect() call fails immediately. 
[InlineData(false)] [InlineData(true)] public void FailedConnect_GetSocketOption_SocketOptionNameError(bool simpleGet) { using (var client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp) { Blocking = false }) { // Fail a Connect using (var server = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { server.Bind(new IPEndPoint(IPAddress.Loopback, 0)); // bind but don't listen Assert.ThrowsAny<Exception>(() => client.Connect(server.LocalEndPoint)); } // Verify via Select that there's an error const int FailedTimeout = 10 * 1000 * 1000; // 10 seconds var errorList = new List<Socket> { client }; Socket.Select(null, null, errorList, FailedTimeout); Assert.Equal(1, errorList.Count); // Get the last error and validate it's what's expected int errorCode; if (simpleGet) { errorCode = (int)client.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Error); } else { byte[] optionValue = new byte[sizeof(int)]; client.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Error, optionValue); errorCode = BitConverter.ToInt32(optionValue, 0); } Assert.Equal((int)SocketError.ConnectionRefused, errorCode); // Then get it again if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { // The Windows implementation doesn't clear the error code after retrieved. // https://github.com/dotnet/corefx/issues/8464 Assert.Equal(errorCode, (int)client.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Error)); } else { // The Unix implementation matches the getsockopt and MSDN docs and clears the error code as part of retrieval. Assert.Equal((int)SocketError.Success, (int)client.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Error)); } } } // Create an Udp Socket and binds it to an available port private static Socket CreateBoundUdpSocket(out int localPort) { Socket receiveSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp); // sending a message will bind the socket to an available port string sendMessage = "dummy message"; int port = 54320; IPAddress multicastAddress = IPAddress.Parse("239.1.1.1"); receiveSocket.SendTo(Encoding.UTF8.GetBytes(sendMessage), new IPEndPoint(multicastAddress, port)); localPort = (receiveSocket.LocalEndPoint as IPEndPoint).Port; return receiveSocket; } // Create an Udp Socket and binds it to an available port private static Socket CreateBoundUdpIPv6Socket(out int localPort) { Socket receiveSocket = new Socket(AddressFamily.InterNetworkV6, SocketType.Dgram, ProtocolType.Udp); // sending a message will bind the socket to an available port string sendMessage = "dummy message"; int port = 54320; IPAddress multicastAddress = IPAddress.Parse("ff11::1:1"); receiveSocket.SendTo(Encoding.UTF8.GetBytes(sendMessage), new IPEndPoint(multicastAddress, port)); localPort = (receiveSocket.LocalEndPoint as IPEndPoint).Port; return receiveSocket; } [Theory] [InlineData(null, null, null, true)] [InlineData(null, null, false, true)] [InlineData(null, false, false, true)] [InlineData(null, true, false, true)] [InlineData(null, true, true, false)] [InlineData(true, null, null, true)] [InlineData(true, null, false, true)] [InlineData(true, null, true, true)] [InlineData(true, false, null, true)] [InlineData(true, false, false, true)] [InlineData(true, false, true, true)] public void ReuseAddress(bool? exclusiveAddressUse, bool? firstSocketReuseAddress, bool? 
secondSocketReuseAddress, bool expectFailure) { using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { if (exclusiveAddressUse.HasValue) { a.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse, exclusiveAddressUse.Value); } if (firstSocketReuseAddress.HasValue) { a.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, firstSocketReuseAddress.Value); } a.Bind(new IPEndPoint(IPAddress.Loopback, 0)); using (Socket b = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { if (secondSocketReuseAddress.HasValue) { b.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, secondSocketReuseAddress.Value); } if (expectFailure) { Assert.ThrowsAny<SocketException>(() => b.Bind(a.LocalEndPoint)); } else { b.Bind(a.LocalEndPoint); } } } } [Theory] [PlatformSpecific(TestPlatforms.Windows)] // ExclusiveAddressUse option is a Windows-specific option (when set to "true," tells Windows not to allow reuse of same local address) [InlineData(false, null, null, true)] [InlineData(false, null, false, true)] [InlineData(false, false, null, true)] [InlineData(false, false, false, true)] [InlineData(false, true, null, true)] [InlineData(false, true, false, true)] [InlineData(false, true, true, false)] public void ReuseAddress_Windows(bool? exclusiveAddressUse, bool? firstSocketReuseAddress, bool? secondSocketReuseAddress, bool expectFailure) { ReuseAddress(exclusiveAddressUse, firstSocketReuseAddress, secondSocketReuseAddress, expectFailure); } [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // [ActiveIssue(11057)] [PlatformSpecific(TestPlatforms.AnyUnix)] // Windows defaults are different public void ExclusiveAddress_Default_Unix() { using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { Assert.Equal(1, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse)); Assert.Equal(true, a.ExclusiveAddressUse); Assert.Equal(0, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress)); } } [ConditionalTheory(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // [ActiveIssue(11057)] [InlineData(1)] [InlineData(0)] [PlatformSpecific(TestPlatforms.AnyUnix)] // Unix does not have separate options for ExclusiveAddressUse and ReuseAddress. public void SettingExclusiveAddress_SetsReuseAddress(int value) { using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { a.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse, value); Assert.Equal(value, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse)); Assert.Equal(value == 1 ? 0 : 1, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress)); } // SettingReuseAddress_SetsExclusiveAddress using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { a.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, value); Assert.Equal(value, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress)); Assert.Equal(value == 1 ? 
0 : 1, (int)a.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse)); } } [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // [ActiveIssue(11057)] public void ExclusiveAddressUseTcp() { using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { // ExclusiveAddressUse defaults to true on Unix, on Windows it defaults to false. a.ExclusiveAddressUse = true; a.Bind(new IPEndPoint(IPAddress.Loopback, 0)); a.Listen(10); int port = (a.LocalEndPoint as IPEndPoint).Port; using (Socket b = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { SocketException ex = Assert.ThrowsAny<SocketException>(() => b.Bind(new IPEndPoint(IPAddress.Loopback, port))); Assert.Equal(SocketError.AddressAlreadyInUse, ex.SocketErrorCode); } } } [Fact] [PlatformSpecific(TestPlatforms.Linux | TestPlatforms.OSX)] public unsafe void ReuseAddressUdp() { // Verify that .NET Core Sockets can bind to the UDP address from applications // that allow binding the same address. int SOL_SOCKET = -1; int option = -1; if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) { // Linux: use SO_REUSEADDR to allow binding the same address. SOL_SOCKET = 1; const int SO_REUSEADDR = 2; option = SO_REUSEADDR; } else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { // BSD: use SO_REUSEPORT to allow binding the same address. SOL_SOCKET = 0xffff; const int SO_REUSEPORT = 0x200; option = SO_REUSEPORT; } using (Socket s1 = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { int value = 1; int rv = setsockopt(s1.Handle.ToInt32(), SOL_SOCKET, option, &value, sizeof(int)); Assert.Equal(0, rv); s1.Bind(new IPEndPoint(IPAddress.Any, 0)); using (Socket s2 = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp)) { s2.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true); s2.Bind(s1.LocalEndPoint); } } } [Theory] [PlatformSpecific(TestPlatforms.Windows)] // SetIPProtectionLevel not supported on Unix [InlineData(IPProtectionLevel.EdgeRestricted, AddressFamily.InterNetwork, SocketOptionLevel.IP)] [InlineData(IPProtectionLevel.Restricted, AddressFamily.InterNetwork, SocketOptionLevel.IP)] [InlineData(IPProtectionLevel.Unrestricted, AddressFamily.InterNetwork, SocketOptionLevel.IP)] [InlineData(IPProtectionLevel.EdgeRestricted, AddressFamily.InterNetworkV6, SocketOptionLevel.IPv6)] [InlineData(IPProtectionLevel.Restricted, AddressFamily.InterNetworkV6, SocketOptionLevel.IPv6)] [InlineData(IPProtectionLevel.Unrestricted, AddressFamily.InterNetworkV6, SocketOptionLevel.IPv6)] public void SetIPProtectionLevel_Windows(IPProtectionLevel level, AddressFamily family, SocketOptionLevel optionLevel) { using (var socket = new Socket(family, SocketType.Stream, ProtocolType.Tcp)) { socket.SetIPProtectionLevel(level); int result = (int)socket.GetSocketOption(optionLevel, SocketOptionName.IPProtectionLevel); Assert.Equal(result, (int)level); } } [Theory] [PlatformSpecific(TestPlatforms.AnyUnix)] // SetIPProtectionLevel not supported on Unix [InlineData(IPProtectionLevel.EdgeRestricted, AddressFamily.InterNetwork)] [InlineData(IPProtectionLevel.Restricted, AddressFamily.InterNetwork)] [InlineData(IPProtectionLevel.Unrestricted, AddressFamily.InterNetwork)] [InlineData(IPProtectionLevel.EdgeRestricted, AddressFamily.InterNetworkV6)] [InlineData(IPProtectionLevel.Restricted, AddressFamily.InterNetworkV6)] [InlineData(IPProtectionLevel.Unrestricted, 
AddressFamily.InterNetworkV6)] public void SetIPProtectionLevel_Unix(IPProtectionLevel level, AddressFamily family) { using (var socket = new Socket(family, SocketType.Stream, ProtocolType.Tcp)) { Assert.Throws<PlatformNotSupportedException>(() => socket.SetIPProtectionLevel(level)); } } [Theory] [InlineData(AddressFamily.InterNetwork)] [InlineData(AddressFamily.InterNetworkV6)] public void SetIPProtectionLevel_ArgumentException(AddressFamily family) { using (var socket = new Socket(family, SocketType.Stream, ProtocolType.Tcp)) { AssertExtensions.Throws<ArgumentException>("level", () => socket.SetIPProtectionLevel(IPProtectionLevel.Unspecified)); } } } [Collection("NoParallelTests")] // Set of tests to not run together with any other tests. public partial class NoParallelTests { [Fact] public void BindDuringTcpWait_Succeeds() { int port = 0; using (Socket a = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { a.Bind(new IPEndPoint(IPAddress.Loopback, 0)); port = (a.LocalEndPoint as IPEndPoint).Port; a.Listen(10); // Connect a client using (Socket client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { client.Connect(new IPEndPoint(IPAddress.Loopback, port)); // accept socket and close it with zero linger time. a.Accept().Close(0); } } // Bind a socket to the same address we just used. // To avoid conflict with other tests, this is part of the NoParallelTests test collection. using (Socket b = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { b.Bind(new IPEndPoint(IPAddress.Loopback, port)); } } } }
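// ---------------------------------------------------------------------------
// Not part of the original test file above. A minimal standalone sketch of the
// address-reuse behavior the ReuseAddress(...) theory exercises: two UDP
// sockets may bind the same local endpoint only when both opt in via
// SocketOptionName.ReuseAddress (and, on Windows, ExclusiveAddressUse is not
// enabled on the first socket). The class and method names are illustrative
// only and are not part of the test suite.
// ---------------------------------------------------------------------------
using System.Net;
using System.Net.Sockets;

internal static class ReuseAddressSketch
{
    internal static void Demo()
    {
        using (var first = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp))
        {
            // Opt the first socket in to address reuse before binding.
            first.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ExclusiveAddressUse, false);
            first.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
            first.Bind(new IPEndPoint(IPAddress.Loopback, 0));

            using (var second = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp))
            {
                // The second socket must opt in as well; otherwise Bind throws a
                // SocketException with SocketError.AddressAlreadyInUse.
                second.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
                second.Bind(first.LocalEndPoint); // succeeds: both sockets allow reuse
            }
        }
    }
}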
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // namespace System.Reflection { using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.Contracts; using System.Globalization; using System.Runtime; using System.Runtime.CompilerServices; using System.Runtime.ConstrainedExecution; using System.Runtime.InteropServices; #if FEATURE_REMOTING using System.Runtime.Remoting.Metadata; #endif //FEATURE_REMOTING using System.Runtime.Serialization; using System.Security.Permissions; using System.Threading; using RuntimeTypeCache = System.RuntimeType.RuntimeTypeCache; [Serializable] [ClassInterface(ClassInterfaceType.None)] [ComDefaultInterface(typeof(_FieldInfo))] #pragma warning disable 618 [PermissionSetAttribute(SecurityAction.InheritanceDemand, Name = "FullTrust")] #pragma warning restore 618 [System.Runtime.InteropServices.ComVisible(true)] public abstract class FieldInfo : MemberInfo, _FieldInfo { #region Static Members public static FieldInfo GetFieldFromHandle(RuntimeFieldHandle handle) { if (handle.IsNullHandle()) throw new ArgumentException(Environment.GetResourceString("Argument_InvalidHandle")); FieldInfo f = RuntimeType.GetFieldInfo(handle.GetRuntimeFieldInfo()); Type declaringType = f.DeclaringType; if (declaringType != null && declaringType.IsGenericType) throw new ArgumentException(String.Format( CultureInfo.CurrentCulture, Environment.GetResourceString("Argument_FieldDeclaringTypeGeneric"), f.Name, declaringType.GetGenericTypeDefinition())); return f; } [System.Runtime.InteropServices.ComVisible(false)] public static FieldInfo GetFieldFromHandle(RuntimeFieldHandle handle, RuntimeTypeHandle declaringType) { if (handle.IsNullHandle()) throw new ArgumentException(Environment.GetResourceString("Argument_InvalidHandle")); return RuntimeType.GetFieldInfo(declaringType.GetRuntimeType(), handle.GetRuntimeFieldInfo()); } #endregion #region Constructor protected FieldInfo() { } #endregion #if !FEATURE_CORECLR public static bool operator ==(FieldInfo left, FieldInfo right) { if (ReferenceEquals(left, right)) return true; if ((object)left == null || (object)right == null || left is RuntimeFieldInfo || right is RuntimeFieldInfo) { return false; } return left.Equals(right); } public static bool operator !=(FieldInfo left, FieldInfo right) { return !(left == right); } #endif // !FEATURE_CORECLR public override bool Equals(object obj) { return base.Equals(obj); } public override int GetHashCode() { return base.GetHashCode(); } #region MemberInfo Overrides public override MemberTypes MemberType { get { return System.Reflection.MemberTypes.Field; } } #endregion #region Public Abstract\Virtual Members public virtual Type[] GetRequiredCustomModifiers() { throw new NotImplementedException(); } public virtual Type[] GetOptionalCustomModifiers() { throw new NotImplementedException(); } [CLSCompliant(false)] public virtual void SetValueDirect(TypedReference obj, Object value) { throw new NotSupportedException(Environment.GetResourceString("NotSupported_AbstractNonCLS")); } [CLSCompliant(false)] public virtual Object GetValueDirect(TypedReference obj) { throw new NotSupportedException(Environment.GetResourceString("NotSupported_AbstractNonCLS")); } public abstract RuntimeFieldHandle FieldHandle { get; } public abstract Type FieldType { get; } public abstract Object GetValue(Object obj); public virtual Object GetRawConstantValue() { throw new 
NotSupportedException(Environment.GetResourceString("NotSupported_AbstractNonCLS")); } public abstract void SetValue(Object obj, Object value, BindingFlags invokeAttr, Binder binder, CultureInfo culture); public abstract FieldAttributes Attributes { get; } #endregion #region Public Members [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public void SetValue(Object obj, Object value) { // Theoretically we should set up a LookForMyCaller stack mark here and pass that along. // But to maintain backward compatibility we can't switch to calling an // internal overload that takes a stack mark. // Fortunately the stack walker skips all the reflection invocation frames including this one. // So this method will never be returned by the stack walker as the caller. // See SystemDomain::CallersMethodCallbackWithStackMark in AppDomain.cpp. SetValue(obj, value, BindingFlags.Default, Type.DefaultBinder, null); } public bool IsPublic { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.Public; } } public bool IsPrivate { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.Private; } } public bool IsFamily { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.Family; } } public bool IsAssembly { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.Assembly; } } public bool IsFamilyAndAssembly { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.FamANDAssem; } } public bool IsFamilyOrAssembly { get { return(Attributes & FieldAttributes.FieldAccessMask) == FieldAttributes.FamORAssem; } } public bool IsStatic { get { return(Attributes & FieldAttributes.Static) != 0; } } public bool IsInitOnly { get { return(Attributes & FieldAttributes.InitOnly) != 0; } } public bool IsLiteral { get { return(Attributes & FieldAttributes.Literal) != 0; } } public bool IsNotSerialized { get { return(Attributes & FieldAttributes.NotSerialized) != 0; } } public bool IsSpecialName { get { return(Attributes & FieldAttributes.SpecialName) != 0; } } public bool IsPinvokeImpl { get { return(Attributes & FieldAttributes.PinvokeImpl) != 0; } } public virtual bool IsSecurityCritical { get { return FieldHandle.IsSecurityCritical(); } } public virtual bool IsSecuritySafeCritical { get { return FieldHandle.IsSecuritySafeCritical(); } } public virtual bool IsSecurityTransparent { get { return FieldHandle.IsSecurityTransparent(); } } #endregion #if !FEATURE_CORECLR Type _FieldInfo.GetType() { return base.GetType(); } void _FieldInfo.GetTypeInfoCount(out uint pcTInfo) { throw new NotImplementedException(); } void _FieldInfo.GetTypeInfo(uint iTInfo, uint lcid, IntPtr ppTInfo) { throw new NotImplementedException(); } void _FieldInfo.GetIDsOfNames([In] ref Guid riid, IntPtr rgszNames, uint cNames, uint lcid, IntPtr rgDispId) { throw new NotImplementedException(); } // If you implement this method, make sure to include _FieldInfo.Invoke in VM\DangerousAPIs.h and // include _FieldInfo in SystemDomain::IsReflectionInvocationMethod in AppDomain.cpp. 
void _FieldInfo.Invoke(uint dispIdMember, [In] ref Guid riid, uint lcid, short wFlags, IntPtr pDispParams, IntPtr pVarResult, IntPtr pExcepInfo, IntPtr puArgErr) { throw new NotImplementedException(); } #endif } [Serializable] internal abstract class RuntimeFieldInfo : FieldInfo, ISerializable { #region Private Data Members private BindingFlags m_bindingFlags; protected RuntimeTypeCache m_reflectedTypeCache; protected RuntimeType m_declaringType; #endregion #region Constructor protected RuntimeFieldInfo() { // Used for dummy head node during population } protected RuntimeFieldInfo(RuntimeTypeCache reflectedTypeCache, RuntimeType declaringType, BindingFlags bindingFlags) { m_bindingFlags = bindingFlags; m_declaringType = declaringType; m_reflectedTypeCache = reflectedTypeCache; } #endregion #if FEATURE_REMOTING #region Legacy Remoting Cache // The size of CachedData is accounted for by BaseObjectWithCachedData in object.h. // This member is currently being used by Remoting for caching remoting data. If you // need to cache data here, talk to the Remoting team to work out a mechanism, so that // both caching systems can happily work together. private RemotingFieldCachedData m_cachedData; internal RemotingFieldCachedData RemotingCache { get { // This grabs an internal copy of m_cachedData and uses // that instead of looking at m_cachedData directly because // the cache may get cleared asynchronously. This prevents // us from having to take a lock. RemotingFieldCachedData cache = m_cachedData; if (cache == null) { cache = new RemotingFieldCachedData(this); RemotingFieldCachedData ret = Interlocked.CompareExchange(ref m_cachedData, cache, null); if (ret != null) cache = ret; } return cache; } } #endregion #endif //FEATURE_REMOTING #region NonPublic Members internal BindingFlags BindingFlags { get { return m_bindingFlags; } } private RuntimeType ReflectedTypeInternal { get { return m_reflectedTypeCache.GetRuntimeType(); } } internal RuntimeType GetDeclaringTypeInternal() { return m_declaringType; } internal RuntimeType GetRuntimeType() { return m_declaringType; } internal abstract RuntimeModule GetRuntimeModule(); #endregion #region MemberInfo Overrides public override MemberTypes MemberType { get { return MemberTypes.Field; } } public override Type ReflectedType { get { return m_reflectedTypeCache.IsGlobal ? null : ReflectedTypeInternal; } } public override Type DeclaringType { get { return m_reflectedTypeCache.IsGlobal ? 
null : m_declaringType; } } public override Module Module { get { return GetRuntimeModule(); } } #endregion #region Object Overrides public unsafe override String ToString() { if (CompatibilitySwitches.IsAppEarlierThanWindowsPhone8) return FieldType.ToString() + " " + Name; else return FieldType.FormatTypeName() + " " + Name; } #endregion #region ICustomAttributeProvider public override Object[] GetCustomAttributes(bool inherit) { return CustomAttribute.GetCustomAttributes(this, typeof(object) as RuntimeType); } public override Object[] GetCustomAttributes(Type attributeType, bool inherit) { if (attributeType == null) throw new ArgumentNullException("attributeType"); Contract.EndContractBlock(); RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(Environment.GetResourceString("Arg_MustBeType"),"attributeType"); return CustomAttribute.GetCustomAttributes(this, attributeRuntimeType); } [System.Security.SecuritySafeCritical] // auto-generated public override bool IsDefined(Type attributeType, bool inherit) { if (attributeType == null) throw new ArgumentNullException("attributeType"); Contract.EndContractBlock(); RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(Environment.GetResourceString("Arg_MustBeType"),"attributeType"); return CustomAttribute.IsDefined(this, attributeRuntimeType); } public override IList<CustomAttributeData> GetCustomAttributesData() { return CustomAttributeData.GetCustomAttributesInternal(this); } #endregion #region FieldInfo Overrides // All implemented on derived classes #endregion #region ISerializable Implementation [System.Security.SecurityCritical] // auto-generated public void GetObjectData(SerializationInfo info, StreamingContext context) { if (info == null) throw new ArgumentNullException("info"); Contract.EndContractBlock(); MemberInfoSerializationHolder.GetSerializationInfo( info, Name, ReflectedTypeInternal, ToString(), MemberTypes.Field); } #endregion } [Serializable] internal unsafe sealed class RtFieldInfo : RuntimeFieldInfo, IRuntimeFieldInfo { #region FCalls [System.Security.SecurityCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] static private extern void PerformVisibilityCheckOnField(IntPtr field, Object target, RuntimeType declaringType, FieldAttributes attr, uint invocationFlags); #endregion #region Private Data Members // agressive caching private IntPtr m_fieldHandle; private FieldAttributes m_fieldAttributes; // lazy caching private string m_name; private RuntimeType m_fieldType; private INVOCATION_FLAGS m_invocationFlags; #if FEATURE_APPX private bool IsNonW8PFrameworkAPI() { if (GetRuntimeType().IsNonW8PFrameworkAPI()) return true; // Allow "value__" if (m_declaringType.IsEnum) return false; RuntimeAssembly rtAssembly = GetRuntimeAssembly(); if (rtAssembly.IsFrameworkAssembly()) { int ctorToken = rtAssembly.InvocableAttributeCtorToken; if (System.Reflection.MetadataToken.IsNullToken(ctorToken) || !CustomAttribute.IsAttributeDefined(GetRuntimeModule(), MetadataToken, ctorToken)) return true; } return false; } #endif internal INVOCATION_FLAGS InvocationFlags { get { if ((m_invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED) == 0) { Type declaringType = DeclaringType; bool fIsReflectionOnlyType = (declaringType is ReflectionOnlyType); INVOCATION_FLAGS invocationFlags = 0; // first take care of all the 
NO_INVOKE cases if ( (declaringType != null && declaringType.ContainsGenericParameters) || (declaringType == null && Module.Assembly.ReflectionOnly) || (fIsReflectionOnlyType) ) { invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE; } // If the invocationFlags are still 0, then // this should be an usable field, determine the other flags if (invocationFlags == 0) { if ((m_fieldAttributes & FieldAttributes.InitOnly) != (FieldAttributes)0) invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_SPECIAL_FIELD; if ((m_fieldAttributes & FieldAttributes.HasFieldRVA) != (FieldAttributes)0) invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_SPECIAL_FIELD; // A public field is inaccesible to Transparent code if the field is Critical. bool needsTransparencySecurityCheck = IsSecurityCritical && !IsSecuritySafeCritical; bool needsVisibilitySecurityCheck = ((m_fieldAttributes & FieldAttributes.FieldAccessMask) != FieldAttributes.Public) || (declaringType != null && declaringType.NeedsReflectionSecurityCheck); if (needsTransparencySecurityCheck || needsVisibilitySecurityCheck) invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY; // find out if the field type is one of the following: Primitive, Enum or Pointer Type fieldType = FieldType; if (fieldType.IsPointer || fieldType.IsEnum || fieldType.IsPrimitive) invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_FIELD_SPECIAL_CAST; } #if FEATURE_APPX if (AppDomain.ProfileAPICheck && IsNonW8PFrameworkAPI()) invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API; #endif // FEATURE_APPX // must be last to avoid threading problems m_invocationFlags = invocationFlags | INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED; } return m_invocationFlags; } } #endregion private RuntimeAssembly GetRuntimeAssembly() { return m_declaringType.GetRuntimeAssembly(); } #region Constructor [System.Security.SecurityCritical] // auto-generated internal RtFieldInfo( RuntimeFieldHandleInternal handle, RuntimeType declaringType, RuntimeTypeCache reflectedTypeCache, BindingFlags bindingFlags) : base(reflectedTypeCache, declaringType, bindingFlags) { m_fieldHandle = handle.Value; m_fieldAttributes = RuntimeFieldHandle.GetAttributes(handle); } #endregion #region Private Members RuntimeFieldHandleInternal IRuntimeFieldInfo.Value { [System.Security.SecuritySafeCritical] get { return new RuntimeFieldHandleInternal(m_fieldHandle); } } #endregion #region Internal Members internal void CheckConsistency(Object target) { // only test instance fields if ((m_fieldAttributes & FieldAttributes.Static) != FieldAttributes.Static) { if (!m_declaringType.IsInstanceOfType(target)) { if (target == null) { #if FEATURE_LEGACYNETCF if (CompatibilitySwitches.IsAppEarlierThanWindowsPhone8) throw new ArgumentNullException(Environment.GetResourceString("RFLCT.Targ_StatFldReqTarg")); else #endif throw new TargetException(Environment.GetResourceString("RFLCT.Targ_StatFldReqTarg")); } else { throw new ArgumentException( String.Format(CultureInfo.CurrentUICulture, Environment.GetResourceString("Arg_FieldDeclTarget"), Name, m_declaringType, target.GetType())); } } } } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] internal override bool CacheEquals(object o) { RtFieldInfo m = o as RtFieldInfo; if ((object)m == null) return false; return m.m_fieldHandle == m_fieldHandle; } [System.Security.SecurityCritical] [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] internal void InternalSetValue(Object obj, Object value, BindingFlags invokeAttr, Binder binder, 
CultureInfo culture, ref StackCrawlMark stackMark) { INVOCATION_FLAGS invocationFlags = InvocationFlags; RuntimeType declaringType = DeclaringType as RuntimeType; if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE) != 0) { if (declaringType != null && declaringType.ContainsGenericParameters) throw new InvalidOperationException(Environment.GetResourceString("Arg_UnboundGenField")); if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType) throw new InvalidOperationException(Environment.GetResourceString("Arg_ReflectionOnlyField")); throw new FieldAccessException(); } CheckConsistency(obj); RuntimeType fieldType = (RuntimeType)FieldType; value = fieldType.CheckValue(value, binder, culture, invokeAttr); #region Security Check #if FEATURE_APPX if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0) { RuntimeAssembly caller = RuntimeAssembly.GetExecutingAssembly(ref stackMark); if (caller != null && !caller.IsSafeForReflection()) throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_APIInvalidForCurrentContext", FullName)); } #endif if ((invocationFlags & (INVOCATION_FLAGS.INVOCATION_FLAGS_SPECIAL_FIELD | INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY)) != 0) PerformVisibilityCheckOnField(m_fieldHandle, obj, m_declaringType, m_fieldAttributes, (uint)m_invocationFlags); #endregion bool domainInitialized = false; if (declaringType == null) { RuntimeFieldHandle.SetValue(this, obj, value, fieldType, m_fieldAttributes, null, ref domainInitialized); } else { domainInitialized = declaringType.DomainInitialized; RuntimeFieldHandle.SetValue(this, obj, value, fieldType, m_fieldAttributes, declaringType, ref domainInitialized); declaringType.DomainInitialized = domainInitialized; } } // UnsafeSetValue doesn't perform any consistency or visibility check. // It is the caller's responsibility to ensure the operation is safe. // When the caller needs to perform visibility checks they should call // InternalSetValue() instead. When the caller needs to perform // consistency checks they should call CheckConsistency() before // calling this method. 
[System.Security.SecurityCritical] // auto-generated [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] internal void UnsafeSetValue(Object obj, Object value, BindingFlags invokeAttr, Binder binder, CultureInfo culture) { RuntimeType declaringType = DeclaringType as RuntimeType; RuntimeType fieldType = (RuntimeType)FieldType; value = fieldType.CheckValue(value, binder, culture, invokeAttr); bool domainInitialized = false; if (declaringType == null) { RuntimeFieldHandle.SetValue(this, obj, value, fieldType, m_fieldAttributes, null, ref domainInitialized); } else { domainInitialized = declaringType.DomainInitialized; RuntimeFieldHandle.SetValue(this, obj, value, fieldType, m_fieldAttributes, declaringType, ref domainInitialized); declaringType.DomainInitialized = domainInitialized; } } [System.Security.SecuritySafeCritical] [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] internal Object InternalGetValue(Object obj, ref StackCrawlMark stackMark) { INVOCATION_FLAGS invocationFlags = InvocationFlags; RuntimeType declaringType = DeclaringType as RuntimeType; if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE) != 0) { if (declaringType != null && DeclaringType.ContainsGenericParameters) throw new InvalidOperationException(Environment.GetResourceString("Arg_UnboundGenField")); if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType) throw new InvalidOperationException(Environment.GetResourceString("Arg_ReflectionOnlyField")); throw new FieldAccessException(); } CheckConsistency(obj); #if FEATURE_APPX if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NON_W8P_FX_API) != 0) { RuntimeAssembly caller = RuntimeAssembly.GetExecutingAssembly(ref stackMark); if (caller != null && !caller.IsSafeForReflection()) throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_APIInvalidForCurrentContext", FullName)); } #endif RuntimeType fieldType = (RuntimeType)FieldType; if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY) != 0) PerformVisibilityCheckOnField(m_fieldHandle, obj, m_declaringType, m_fieldAttributes, (uint)(m_invocationFlags & ~INVOCATION_FLAGS.INVOCATION_FLAGS_SPECIAL_FIELD)); return UnsafeGetValue(obj); } // UnsafeGetValue doesn't perform any consistency or visibility check. // It is the caller's responsibility to ensure the operation is safe. // When the caller needs to perform visibility checks they should call // InternalGetValue() instead. When the caller needs to perform // consistency checks they should call CheckConsistency() before // calling this method. 
[System.Security.SecurityCritical] [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] internal Object UnsafeGetValue(Object obj) { RuntimeType declaringType = DeclaringType as RuntimeType; RuntimeType fieldType = (RuntimeType)FieldType; bool domainInitialized = false; if (declaringType == null) { return RuntimeFieldHandle.GetValue(this, obj, fieldType, null, ref domainInitialized); } else { domainInitialized = declaringType.DomainInitialized; object retVal = RuntimeFieldHandle.GetValue(this, obj, fieldType, declaringType, ref domainInitialized); declaringType.DomainInitialized = domainInitialized; return retVal; } } #endregion #region MemberInfo Overrides public override String Name { [System.Security.SecuritySafeCritical] // auto-generated get { if (m_name == null) m_name = RuntimeFieldHandle.GetName(this); return m_name; } } internal String FullName { get { return String.Format("{0}.{1}", DeclaringType.FullName, Name); } } public override int MetadataToken { [System.Security.SecuritySafeCritical] // auto-generated get { return RuntimeFieldHandle.GetToken(this); } } [System.Security.SecuritySafeCritical] // auto-generated internal override RuntimeModule GetRuntimeModule() { return RuntimeTypeHandle.GetModule(RuntimeFieldHandle.GetApproxDeclaringType(this)); } #endregion #region FieldInfo Overrides public override Object GetValue(Object obj) { StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller; return InternalGetValue(obj, ref stackMark); } public override object GetRawConstantValue() { throw new InvalidOperationException(); } [System.Security.SecuritySafeCritical] // auto-generated [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override Object GetValueDirect(TypedReference obj) { if (obj.IsNull) throw new ArgumentException(Environment.GetResourceString("Arg_TypedReference_Null")); Contract.EndContractBlock(); unsafe { // Passing TypedReference by reference is easier to make correct in native code return RuntimeFieldHandle.GetValueDirect(this, (RuntimeType)FieldType, &obj, (RuntimeType)DeclaringType); } } [System.Security.SecuritySafeCritical] // auto-generated [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override void SetValue(Object obj, Object value, BindingFlags invokeAttr, Binder binder, CultureInfo culture) { StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller; InternalSetValue(obj, value, invokeAttr, binder, culture, ref stackMark); } [System.Security.SecuritySafeCritical] // auto-generated [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override void SetValueDirect(TypedReference obj, Object value) { if (obj.IsNull) throw new ArgumentException(Environment.GetResourceString("Arg_TypedReference_Null")); Contract.EndContractBlock(); unsafe { // Passing TypedReference by reference is easier to make correct in native code RuntimeFieldHandle.SetValueDirect(this, (RuntimeType)FieldType, &obj, value, (RuntimeType)DeclaringType); } } public override RuntimeFieldHandle FieldHandle { get { Type declaringType = DeclaringType; if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType) throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_NotAllowedInReflectionOnly")); return new RuntimeFieldHandle(this); } } internal IntPtr GetFieldHandle() { return m_fieldHandle; } public override FieldAttributes Attributes { get { return m_fieldAttributes; } } public override Type FieldType { [System.Security.SecuritySafeCritical] // auto-generated get { 
if (m_fieldType == null) m_fieldType = new Signature(this, m_declaringType).FieldType; return m_fieldType; } } [System.Security.SecuritySafeCritical] // auto-generated public override Type[] GetRequiredCustomModifiers() { return new Signature(this, m_declaringType).GetCustomModifiers(1, true); } [System.Security.SecuritySafeCritical] // auto-generated public override Type[] GetOptionalCustomModifiers() { return new Signature(this, m_declaringType).GetCustomModifiers(1, false); } #endregion } [Serializable] internal sealed unsafe class MdFieldInfo : RuntimeFieldInfo, ISerializable { #region Private Data Members private int m_tkField; private string m_name; private RuntimeType m_fieldType; private FieldAttributes m_fieldAttributes; #endregion #region Constructor internal MdFieldInfo( int tkField, FieldAttributes fieldAttributes, RuntimeTypeHandle declaringTypeHandle, RuntimeTypeCache reflectedTypeCache, BindingFlags bindingFlags) : base(reflectedTypeCache, declaringTypeHandle.GetRuntimeType(), bindingFlags) { m_tkField = tkField; m_name = null; m_fieldAttributes = fieldAttributes; } #endregion #region Internal Members [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] internal override bool CacheEquals(object o) { MdFieldInfo m = o as MdFieldInfo; if ((object)m == null) return false; return m.m_tkField == m_tkField && m_declaringType.GetTypeHandleInternal().GetModuleHandle().Equals( m.m_declaringType.GetTypeHandleInternal().GetModuleHandle()); } #endregion #region MemberInfo Overrides public override String Name { [System.Security.SecuritySafeCritical] // auto-generated get { if (m_name == null) m_name = GetRuntimeModule().MetadataImport.GetName(m_tkField).ToString(); return m_name; } } public override int MetadataToken { get { return m_tkField; } } internal override RuntimeModule GetRuntimeModule() { return m_declaringType.GetRuntimeModule(); } #endregion #region FieldInfo Overrides public override RuntimeFieldHandle FieldHandle { get { throw new NotSupportedException(); } } public override FieldAttributes Attributes { get { return m_fieldAttributes; } } public override bool IsSecurityCritical { get { return DeclaringType.IsSecurityCritical; } } public override bool IsSecuritySafeCritical { get { return DeclaringType.IsSecuritySafeCritical; } } public override bool IsSecurityTransparent { get { return DeclaringType.IsSecurityTransparent; } } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override Object GetValueDirect(TypedReference obj) { return GetValue(null); } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override void SetValueDirect(TypedReference obj,Object value) { throw new FieldAccessException(Environment.GetResourceString("Acc_ReadOnly")); } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public unsafe override Object GetValue(Object obj) { return GetValue(false); } public unsafe override Object GetRawConstantValue() { return GetValue(true); } [System.Security.SecuritySafeCritical] // auto-generated private unsafe Object GetValue(bool raw) { // Cannot cache these because they could be user defined non-agile enumerations Object value = MdConstant.GetValue(GetRuntimeModule().MetadataImport, m_tkField, FieldType.GetTypeHandleInternal(), raw); if (value == DBNull.Value) throw new NotSupportedException(Environment.GetResourceString("Arg_EnumLitValueNotFound")); return value; } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] public override void SetValue(Object obj, Object value, BindingFlags invokeAttr, 
Binder binder, CultureInfo culture) { throw new FieldAccessException(Environment.GetResourceString("Acc_ReadOnly")); } public override Type FieldType { [System.Security.SecuritySafeCritical] // auto-generated get { if (m_fieldType == null) { ConstArray fieldMarshal = GetRuntimeModule().MetadataImport.GetSigOfFieldDef(m_tkField); m_fieldType = new Signature(fieldMarshal.Signature.ToPointer(), (int)fieldMarshal.Length, m_declaringType).FieldType; } return m_fieldType; } } public override Type[] GetRequiredCustomModifiers() { return EmptyArray<Type>.Value; } public override Type[] GetOptionalCustomModifiers() { return EmptyArray<Type>.Value; } #endregion } }
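// ---------------------------------------------------------------------------
// Not part of the reference source above. A minimal consumer-side sketch of
// the public FieldInfo surface defined there: the attribute-derived flags
// (IsStatic, IsLiteral) and the GetValue/SetValue(Object, Object) convenience
// overload, which forwards to SetValue(obj, value, BindingFlags.Default,
// Type.DefaultBinder, null). The Counter type and its fields are illustrative
// only.
// ---------------------------------------------------------------------------
using System;
using System.Reflection;

internal static class FieldInfoSketch
{
    private sealed class Counter
    {
        public const int Max = 10;   // literal field: value lives only in metadata
        public int Value = 0;        // ordinary instance field
    }

    internal static void Demo()
    {
        var counter = new Counter();

        FieldInfo valueField = typeof(Counter).GetField("Value");
        valueField.SetValue(counter, 5);                    // convenience overload shown above
        Console.WriteLine(valueField.GetValue(counter));    // 5

        FieldInfo maxField = typeof(Counter).GetField("Max");
        Console.WriteLine(maxField.IsLiteral);              // True: compile-time constant
        Console.WriteLine(maxField.IsStatic);               // True: literals are implicitly static
        Console.WriteLine(maxField.GetRawConstantValue());  // 10, read from metadata
    }
}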
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using Microsoft.Win32.SafeHandles; using System.Collections; using System.IO; using System.Runtime.InteropServices; using System.Threading; namespace System.Net.Sockets { public partial class Socket { private DynamicWinsockMethods _dynamicWinsockMethods; internal void ReplaceHandleIfNecessaryAfterFailedConnect() { /* nop on Windows */ } private void EnsureDynamicWinsockMethods() { if (_dynamicWinsockMethods == null) { _dynamicWinsockMethods = DynamicWinsockMethods.GetMethods(_addressFamily, _socketType, _protocolType); } } internal bool AcceptEx(SafeCloseSocket listenSocketHandle, SafeCloseSocket acceptSocketHandle, IntPtr buffer, int len, int localAddressLength, int remoteAddressLength, out int bytesReceived, SafeHandle overlapped) { EnsureDynamicWinsockMethods(); AcceptExDelegate acceptEx = _dynamicWinsockMethods.GetDelegate<AcceptExDelegate>(listenSocketHandle); return acceptEx(listenSocketHandle, acceptSocketHandle, buffer, len, localAddressLength, remoteAddressLength, out bytesReceived, overlapped); } internal void GetAcceptExSockaddrs(IntPtr buffer, int receiveDataLength, int localAddressLength, int remoteAddressLength, out IntPtr localSocketAddress, out int localSocketAddressLength, out IntPtr remoteSocketAddress, out int remoteSocketAddressLength) { EnsureDynamicWinsockMethods(); GetAcceptExSockaddrsDelegate getAcceptExSockaddrs = _dynamicWinsockMethods.GetDelegate<GetAcceptExSockaddrsDelegate>(_handle); getAcceptExSockaddrs(buffer, receiveDataLength, localAddressLength, remoteAddressLength, out localSocketAddress, out localSocketAddressLength, out remoteSocketAddress, out remoteSocketAddressLength); } internal bool DisconnectEx(SafeCloseSocket socketHandle, SafeHandle overlapped, int flags, int reserved) { EnsureDynamicWinsockMethods(); DisconnectExDelegate disconnectEx = _dynamicWinsockMethods.GetDelegate<DisconnectExDelegate>(socketHandle); return disconnectEx(socketHandle, overlapped, flags, reserved); } internal bool DisconnectExBlocking(SafeCloseSocket socketHandle, IntPtr overlapped, int flags, int reserved) { EnsureDynamicWinsockMethods(); DisconnectExDelegateBlocking disconnectEx_Blocking = _dynamicWinsockMethods.GetDelegate<DisconnectExDelegateBlocking>(socketHandle); return disconnectEx_Blocking(socketHandle, overlapped, flags, reserved); } internal bool ConnectEx(SafeCloseSocket socketHandle, IntPtr socketAddress, int socketAddressSize, IntPtr buffer, int dataLength, out int bytesSent, SafeHandle overlapped) { EnsureDynamicWinsockMethods(); ConnectExDelegate connectEx = _dynamicWinsockMethods.GetDelegate<ConnectExDelegate>(socketHandle); return connectEx(socketHandle, socketAddress, socketAddressSize, buffer, dataLength, out bytesSent, overlapped); } internal SocketError WSARecvMsg(SafeCloseSocket socketHandle, IntPtr msg, out int bytesTransferred, SafeHandle overlapped, IntPtr completionRoutine) { EnsureDynamicWinsockMethods(); WSARecvMsgDelegate recvMsg = _dynamicWinsockMethods.GetDelegate<WSARecvMsgDelegate>(socketHandle); return recvMsg(socketHandle, msg, out bytesTransferred, overlapped, completionRoutine); } internal SocketError WSARecvMsgBlocking(IntPtr socketHandle, IntPtr msg, out int bytesTransferred, IntPtr overlapped, IntPtr completionRoutine) { EnsureDynamicWinsockMethods(); WSARecvMsgDelegateBlocking recvMsg_Blocking = 
_dynamicWinsockMethods.GetDelegate<WSARecvMsgDelegateBlocking>(_handle); return recvMsg_Blocking(socketHandle, msg, out bytesTransferred, overlapped, completionRoutine); } internal bool TransmitPackets(SafeCloseSocket socketHandle, IntPtr packetArray, int elementCount, int sendSize, SafeNativeOverlapped overlapped, TransmitFileOptions flags) { EnsureDynamicWinsockMethods(); TransmitPacketsDelegate transmitPackets = _dynamicWinsockMethods.GetDelegate<TransmitPacketsDelegate>(socketHandle); return transmitPackets(socketHandle, packetArray, elementCount, sendSize, overlapped, flags); } internal static IntPtr[] SocketListToFileDescriptorSet(IList socketList) { if (socketList == null || socketList.Count == 0) { return null; } IntPtr[] fileDescriptorSet = new IntPtr[socketList.Count + 1]; fileDescriptorSet[0] = (IntPtr)socketList.Count; for (int current = 0; current < socketList.Count; current++) { if (!(socketList[current] is Socket)) { throw new ArgumentException(SR.Format(SR.net_sockets_select, socketList[current].GetType().FullName, typeof(System.Net.Sockets.Socket).FullName), nameof(socketList)); } fileDescriptorSet[current + 1] = ((Socket)socketList[current])._handle.DangerousGetHandle(); } return fileDescriptorSet; } // Transform the list socketList such that the only sockets left are those // with a file descriptor contained in the array "fileDescriptorArray". internal static void SelectFileDescriptor(IList socketList, IntPtr[] fileDescriptorSet) { // Walk the list in order. // // Note that the counter is not necessarily incremented at each step; // when the socket is removed, advancing occurs automatically as the // other elements are shifted down. if (socketList == null || socketList.Count == 0) { return; } if ((int)fileDescriptorSet[0] == 0) { // No socket present, will never find any socket, remove them all. socketList.Clear(); return; } lock (socketList) { for (int currentSocket = 0; currentSocket < socketList.Count; currentSocket++) { Socket socket = socketList[currentSocket] as Socket; // Look for the file descriptor in the array. int currentFileDescriptor; for (currentFileDescriptor = 0; currentFileDescriptor < (int)fileDescriptorSet[0]; currentFileDescriptor++) { if (fileDescriptorSet[currentFileDescriptor + 1] == socket._handle.DangerousGetHandle()) { break; } } if (currentFileDescriptor == (int)fileDescriptorSet[0]) { // Descriptor not found: remove the current socket and start again. socketList.RemoveAt(currentSocket--); } } } } private Socket GetOrCreateAcceptSocket(Socket acceptSocket, bool checkDisconnected, string propertyName, out SafeCloseSocket handle) { // If an acceptSocket isn't specified, then we need to create one. if (acceptSocket == null) { acceptSocket = new Socket(_addressFamily, _socketType, _protocolType); } else { if (acceptSocket._rightEndPoint != null && (!checkDisconnected || !acceptSocket._isDisconnected)) { throw new InvalidOperationException(SR.Format(SR.net_sockets_namedmustnotbebound, propertyName)); } } handle = acceptSocket._handle; return acceptSocket; } private void SendFileInternal(string fileName, byte[] preBuffer, byte[] postBuffer, TransmitFileOptions flags) { // Open the file, if any FileStream fileStream = OpenFile(fileName); SocketError errorCode; using (fileStream) { SafeFileHandle fileHandle = fileStream?.SafeFileHandle; // This can throw ObjectDisposedException. 
errorCode = SocketPal.SendFile(_handle, fileHandle, preBuffer, postBuffer, flags); } if (errorCode != SocketError.Success) { SocketException socketException = new SocketException((int)errorCode); UpdateStatusAfterSocketError(socketException); if (NetEventSource.IsEnabled) NetEventSource.Error(this, socketException); throw socketException; } // If the user passed the Disconnect and/or ReuseSocket flags, then TransmitFile disconnected the socket. // Update our state to reflect this. if ((flags & (TransmitFileOptions.Disconnect | TransmitFileOptions.ReuseSocket)) != 0) { SetToDisconnected(); _remoteEndPoint = null; } } private IAsyncResult BeginSendFileInternal(string fileName, byte[] preBuffer, byte[] postBuffer, TransmitFileOptions flags, AsyncCallback callback, object state) { FileStream fileStream = OpenFile(fileName); TransmitFileAsyncResult asyncResult = new TransmitFileAsyncResult(this, state, callback); asyncResult.StartPostingAsyncOp(false); SocketError errorCode = SocketPal.SendFileAsync(_handle, fileStream, preBuffer, postBuffer, flags, asyncResult); // Check for synchronous exception if (!CheckErrorAndUpdateStatus(errorCode)) { throw new SocketException((int)errorCode); } asyncResult.FinishPostingAsyncOp(ref Caches.SendClosureCache); return asyncResult; } private void EndSendFileInternal(IAsyncResult asyncResult) { TransmitFileAsyncResult castedAsyncResult = asyncResult as TransmitFileAsyncResult; if (castedAsyncResult == null || castedAsyncResult.AsyncObject != this) { throw new ArgumentException(SR.net_io_invalidasyncresult, nameof(asyncResult)); } if (castedAsyncResult.EndCalled) { throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, "EndSendFile")); } castedAsyncResult.InternalWaitForCompletion(); castedAsyncResult.EndCalled = true; // If the user passed the Disconnect and/or ReuseSocket flags, then TransmitFile disconnected the socket. // Update our state to reflect this. if (castedAsyncResult.DoDisconnect) { SetToDisconnected(); _remoteEndPoint = null; } if ((SocketError)castedAsyncResult.ErrorCode != SocketError.Success) { SocketException socketException = new SocketException(castedAsyncResult.ErrorCode); UpdateStatusAfterSocketError(socketException); if (NetEventSource.IsEnabled) NetEventSource.Error(this, socketException); throw socketException; } } internal ThreadPoolBoundHandle GetOrAllocateThreadPoolBoundHandle() { // There is a known bug that exists through Windows 7 with UDP and // SetFileCompletionNotificationModes. // So, don't try to enable skipping the completion port on success in this case. bool trySkipCompletionPortOnSuccess = !(CompletionPortHelper.PlatformHasUdpIssue && _protocolType == ProtocolType.Udp); return _handle.GetOrAllocateThreadPoolBoundHandle(trySkipCompletionPortOnSuccess); } } }
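// ---------------------------------------------------------------------------
// Not part of the original partial class above. A minimal caller-side sketch
// of the Socket.Select path that SocketListToFileDescriptorSet and
// SelectFileDescriptor implement internally: element 0 of the fd_set-style
// IntPtr array carries the socket count, and after the native call the
// managed lists are pruned down to the sockets that are actually ready.
// The names below are illustrative only.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;

internal static class SelectSketch
{
    internal static void Demo()
    {
        using (var listener = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
        using (var client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
        {
            listener.Bind(new IPEndPoint(IPAddress.Loopback, 0));
            listener.Listen(1);
            client.Connect(listener.LocalEndPoint);

            // Ask Select which of these sockets are readable within one second (1,000,000 microseconds).
            var readList = new List<Socket> { listener, client };
            Socket.Select(readList, null, null, 1_000_000);

            // readList now contains only the sockets with pending work
            // (here: the listener, which has a connection waiting to be accepted).
            Console.WriteLine(readList.Contains(listener)); // True
        }
    }
}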
using System;
using System.IO;

namespace LumiSoft.Data.lsDB
{
    /// <summary>
    /// Data page.
    /// </summary>
    internal class DataPage
    {
        private DbFile m_pOwnerDB             = null;
        private long   m_StartPointer         = 0;
        private bool   m_Used                 = false;
        private long   m_OwnerID              = 0;
        private long   m_OwnerDataPagePointer = 0;
        private long   m_NextDataPagePointer  = 0;
        private int    m_DataAreaSize         = 1000;
        private int    m_StoredDataLength     = 0;
        private byte[] m_Data                 = null;

        /// <summary>
        /// Default constructor.
        /// </summary>
        /// <param name="dataPageDataAreaSize">Specifies how much data the data page can store.</param>
        /// <param name="ownerDB">Owner DB file.</param>
        /// <param name="startOffset">Data page start offset pointer.</param>
        public DataPage(int dataPageDataAreaSize,DbFile ownerDB,long startOffset)
        {
            /* DataPage structure
                  2 bytes - CRLF
                  1 byte  - used ('f' - unused, 'u' - used)
                  8 bytes - owner object id
                  8 bytes - owner data page pointer
                  8 bytes - continuing data page pointer
                  4 bytes - stored data length in data area
                  2 bytes - CRLF
               1000 bytes - data area
            */

            m_DataAreaSize = dataPageDataAreaSize;
            m_pOwnerDB     = ownerDB;
            m_StartPointer = startOffset;

            byte[] dataPageInfo = new byte[33];
            ownerDB.SetFilePosition(startOffset);
            ownerDB.ReadFromFile(dataPageInfo,0,dataPageInfo.Length);

            m_Data = new byte[dataPageDataAreaSize];
            ownerDB.ReadFromFile(m_Data,0,dataPageDataAreaSize);

            // CRLF
            if(dataPageInfo[0] != (byte)'\r'){
                throw new Exception("Invalid data page startOffset or corrupt data page: <CR> expected but got '" + (int)dataPageInfo[0] + "' !");
            }
            if(dataPageInfo[1] != (byte)'\n'){
                throw new Exception("Invalid data page startOffset or corrupt data page: <LF> expected but got '" + (int)dataPageInfo[1] + "' !");
            }
            // used
            if(dataPageInfo[2] == (byte)'u'){
                m_Used = true;
            }
            else{
                m_Used = false;
            }
            // owner object id
            m_OwnerID = ldb_Utils.ByteToLong(dataPageInfo,3);
            // owner data page pointer
            m_OwnerDataPagePointer = ldb_Utils.ByteToLong(dataPageInfo,11);
            // continuing data page pointer
            m_NextDataPagePointer = ldb_Utils.ByteToLong(dataPageInfo,19);
            // stored data length in data area
            m_StoredDataLength = ldb_Utils.ByteToInt(dataPageInfo,27);
            // CRLF
            if(dataPageInfo[31] != (byte)'\r'){
                throw new Exception("Invalid data page startOffset or corrupt data page: <CR> expected but got '" + (int)dataPageInfo[31] + "' !");
            }
            if(dataPageInfo[32] != (byte)'\n'){
                throw new Exception("Invalid data page startOffset or corrupt data page: <LF> expected but got '" + (int)dataPageInfo[32] + "' !");
            }
        }


        #region static method CreateDataPage

        /// <summary>
        /// Creates a new data page structure.
        /// </summary>
        /// <param name="dataPageDataAreaSize">Specifies how much data the data page can store.</param>
        /// <param name="used">Specifies if the data page is used or free space. If this value is false, all other parameters aren't stored.</param>
        /// <param name="ownerID">Owner data object ID.</param>
        /// <param name="ownerDataPagePointer">This data page's owner data page pointer. This value can be 0 if there is no owner.</param>
        /// <param name="nextDataPagePointer">Pointer to the data page that continues this data page. This value can be 0 if the data page won't spread to multiple data pages.</param>
        /// <param name="data">Data that the data page stores.
        /// Maximum length is dataPageDataAreaSize.</param>
        /// <returns></returns>
        public static byte[] CreateDataPage(int dataPageDataAreaSize,bool used,long ownerID,long ownerDataPagePointer,long nextDataPagePointer,byte[] data)
        {
            /* DataPage structure
                  2 bytes - CRLF
                  1 byte  - used ('f' - unused, 'u' - used)
                  8 bytes - owner object id
                  8 bytes - owner data page pointer
                  8 bytes - continuing data page pointer
                  4 bytes - stored data length in data area
                  2 bytes - CRLF
                  dataPageDataAreaSize bytes - data area
            */

            if(data.Length > dataPageDataAreaSize){
                throw new Exception("Data page can store only " + dataPageDataAreaSize + " bytes, data contains '" + data.Length + "' bytes !");
            }

            byte[] dataPage = new byte[dataPageDataAreaSize + 33];
            // CRLF
            dataPage[0] = (byte)'\r';
            dataPage[1] = (byte)'\n';
            if(used){
                // used
                dataPage[2] = (byte)'u';
                // owner object id
                Array.Copy(ldb_Utils.LongToByte(ownerID),0,dataPage,3,8);
                // owner data page pointer
                Array.Copy(ldb_Utils.LongToByte(ownerDataPagePointer),0,dataPage,11,8);
                // continuing data page pointer
                Array.Copy(ldb_Utils.LongToByte(nextDataPagePointer),0,dataPage,19,8);
                // stored data length in data area
                Array.Copy(ldb_Utils.IntToByte(data.Length),0,dataPage,27,4);
                // CRLF
                dataPage[31] = (byte)'\r';
                dataPage[32] = (byte)'\n';
                // data area
                Array.Copy(data,0,dataPage,33,data.Length);
            }
            else{
                // used
                dataPage[2] = (byte)'f';
                // CRLF
                dataPage[31] = (byte)'\r';
                dataPage[32] = (byte)'\n';
            }

            return dataPage;
        }

        #endregion


        #region method ReadData

        /// <summary>
        /// Reads the specified amount of data into the buffer.
        /// </summary>
        /// <param name="buffer">Buffer where to store the data.</param>
        /// <param name="startIndexInBuffer">Start index in the buffer where data storing begins. Start index is included.</param>
        /// <param name="length">Number of bytes to read.</param>
        /// <param name="startOffset">Zero based offset of data area.</param>
        /// <returns></returns>
        public void ReadData(byte[] buffer,int startIndexInBuffer,int length,int startOffset)
        {
            if(startOffset < 0){
                throw new Exception("startOffset can't be a negative value !");
            }
            if((length + startOffset) > this.DataAreaSize){
                throw new Exception("startOffset and length are out of range of the data page data area !");
            }
            if((length + startOffset) > m_StoredDataLength){
                throw new Exception("There isn't as much data stored in the data page as requested ! Stored data length = " + m_StoredDataLength + "; start offset = " + startOffset + "; length wanted = " + length);
            }

            Array.Copy(m_Data,startOffset,buffer,startIndexInBuffer,length);
        }

        /// <summary>
        /// Reads data page data. Offset byte is included.
        /// </summary>
        /// <param name="startOffset">Zero based offset of data area.</param>
        /// <param name="length">Specifies how much data to read.</param>
        /// <returns></returns>
        public byte[] ReadData(int startOffset,int length)
        {
            if(startOffset < 0){
                throw new Exception("startOffset can't be a negative value !");
            }
            if((length + startOffset) > this.DataAreaSize){
                throw new Exception("startOffset and length are out of range of the data page data area !");
            }
            if((length + startOffset) > m_StoredDataLength){
                throw new Exception("There isn't as much data stored in the data page as requested ! Stored data length = " + m_StoredDataLength + "; start offset = " + startOffset + "; length wanted = " + length);
            }

            byte[] data = new byte[length];
            Array.Copy(m_Data,startOffset,data,0,length);

            return data;
        }

        #endregion


        #region method WriteData

        /// <summary>
        /// Writes data to the data page.
/// </summary> /// <param name="data">Data to write.</param> public void WriteData(byte[] data) { if(data.Length > this.DataAreaSize){ throw new Exception("Data page can't store more than " + this.DataAreaSize + " bytes, use mutliple data pages !"); } // Set stored data length m_pOwnerDB.SetFilePosition(m_StartPointer + 27); m_pOwnerDB.WriteToFile(ldb_Utils.IntToByte(data.Length),0,4); // Store data m_pOwnerDB.SetFilePosition(m_StartPointer + 33); m_pOwnerDB.WriteToFile(data,0,data.Length); m_StoredDataLength = data.Length; } #endregion #region Properties Implementation /// <summary> /// Gets data page size on disk in bytes. /// </summary> public int DataPageSize { get{ return 33 + this.DataAreaSize; } } /// <summary> /// Gets this data page address (offset in database file). /// </summary> public long Pointer { get{ return m_StartPointer; } } /// <summary> /// Gets or sets if data page used or free space. /// </summary> public bool Used { get{ return m_Used; } set{ m_pOwnerDB.SetFilePosition(m_StartPointer + 2); m_pOwnerDB.WriteToFile(new byte[]{Convert.ToByte(value)},0,1); } } /// <summary> /// Gets owner object id what owns this data page. /// </summary> public long OwnerID { get{ return m_OwnerID; } } /// <summary> /// Gets or sets owner data page pointer. /// Returns 0 if this is first data page of multiple data pages or only data page. /// </summary> public long OwnerDataPagePointer { get{ return m_OwnerDataPagePointer; } set{ // owner data page pointer m_pOwnerDB.SetFilePosition(m_StartPointer + 11); m_pOwnerDB.WriteToFile(ldb_Utils.LongToByte(value),0,8); m_OwnerDataPagePointer = value; } } /// <summary> /// Gets or sets pointer to data page what continues this data page. /// Returns 0 if data page has enough room for data and there isn't continuing data page. /// </summary> public long NextDataPagePointer { get{ return m_NextDataPagePointer; } set{ // continuing data page pointer m_pOwnerDB.SetFilePosition(m_StartPointer + 19); m_pOwnerDB.WriteToFile(ldb_Utils.LongToByte(value),0,8); m_NextDataPagePointer = value; } } /* /// <summary> /// Gets or sets data that data page holds. Maximum size is this.DataAreaSize. Returns null if no data stored. /// </summary> public byte[] Data { get{ byte[] data = new byte[m_StoredDataLength]; m_pDbFileStream.Position = m_StartPointer + 33; m_pDbFileStream.Read(data,0,data.Length); return data; } set{ if(value.Length > this.DataAreaSize){ throw new Exception("Data page can't store more than " + this.DataAreaSize + " bytes, use mutliple data pages !"); } // Set stored data length m_pDbFileStream.Position = m_StartPointer + 27; byte[] dataLength = ldb_Utils.IntToByte(value.Length); m_pDbFileStream.Write(dataLength,0,dataLength.Length); // Store data m_pDbFileStream.Position = m_StartPointer + 33; m_pDbFileStream.Write(value,0,value.Length); m_StoredDataLength = value.Length; } } */ /// <summary> /// Gets how many data data page can store. /// </summary> public int DataAreaSize { get{ return m_DataAreaSize; } } /// <summary> /// Gets stored data length. /// </summary> public int StoredDataLength { get{ return m_StoredDataLength; } } /// <summary> /// Gets how much free data space is availabe in data page. /// </summary> public long SpaceAvailable { get{ return this.DataAreaSize - m_StoredDataLength; } } #endregion } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.IO; using System.Xml; using Microsoft.Test.ModuleCore; using XmlCoreTest.Common; namespace CoreXml.Test.XLinq { public partial class XNodeReaderFunctionalTests : TestModule { public partial class XNodeReaderTests : XLinqTestCase { //[TestCase(Name = "ReadValue", Desc = "ReadValue")] public partial class TCReadValue : BridgeHelpers { private bool VerifyInvalidReadValue(int iBufferSize, int iIndex, int iCount, Type exceptionType) { bool bPassed = false; Char[] buffer = new Char[iBufferSize]; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, ST_TEST_NAME); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); return bPassed; } catch (NotSupportedException) { return true; } } try { DataReader.ReadValueChunk(buffer, iIndex, iCount); } catch (Exception e) { bPassed = (e.GetType().ToString() == exceptionType.ToString()); if (!bPassed) { TestLog.WriteLine("Actual exception:{0}", e.GetType().ToString()); TestLog.WriteLine("Expected exception:{0}", exceptionType.ToString()); } } return bPassed; } //[Variation("ReadValue", Priority = 0)] public void TestReadValuePri0() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root>value</root>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read 5 chars"); TestLog.Compare("value", new string(buffer), "Strings don't match"); } //[Variation("ReadValue on Element", Priority = 0)] public void TestReadValuePri0onElement() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root>value</root>")); PositionOnElement(DataReader, "root"); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } try { DataReader.ReadValueChunk(buffer, 0, 5); } catch (InvalidOperationException) { return; } throw new TestFailedException("ReadValue didn't throw expected exception"); } //[Variation("ReadValue on Attribute", Priority = 0)] public void TestReadValueOnAttribute0() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root name=\"value\">value</root>")); PositionOnElement(DataReader, "root"); DataReader.MoveToNextAttribute(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read 5 chars"); TestLog.Compare("value", new string(buffer), "Strings don't match"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 0, "Did read 5 chars"); } //[Variation("ReadValue on Attribute after ReadAttributeValue", Priority = 2)] public void TestReadValueOnAttribute1() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root name=\"value\">value</root>")); PositionOnElement(DataReader, "root"); // This takes to text node of attribute. 
DataReader.MoveToNextAttribute(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadAttributeValue(), true, "Didn't read attribute value"); TestLog.Compare(DataReader.Value, "value", "Didn't read correct attribute value"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read 5 chars"); TestLog.Compare("value", new string(buffer), "Strings don't match"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 0, "Did read 5 chars"); DataReader.MoveToElement(); DataReader.Read(); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read 5 chars on text node"); TestLog.Compare("value", new string(buffer), "Strings don't match"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 0, "Did read 5 chars on text node"); } //[Variation("ReadValue on empty buffer", Priority = 0)] public void TestReadValue2Pri0() { char[] buffer = new char[0]; XmlReader DataReader = GetReader(new StringReader("<root>value</root>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } try { DataReader.ReadValueChunk(buffer, 0, 5); } catch (ArgumentException) { return; } throw new TestFailedException("ReadValue didn't throw expected exception"); } //[Variation("ReadValue on negative count", Priority = 0)] public void TestReadValue3Pri0() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root>value</root>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, -1); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } try { DataReader.ReadValueChunk(buffer, 0, -1); } catch (ArgumentOutOfRangeException) { return; } throw new TestFailedException("ReadValue didn't throw expected exception"); } //[Variation("ReadValue on negative offset", Priority = 0)] public void TestReadValue4Pri0() { char[] buffer = new char[5]; XmlReader DataReader = GetReader(new StringReader("<root>value</root>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, -1, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } try { DataReader.ReadValueChunk(buffer, -1, 5); } catch (ArgumentOutOfRangeException) { return; } throw new TestFailedException("ReadValue didn't throw expected exception"); } //[Variation("ReadValue with buffer = element content / 2", Priority = 0)] public void TestReadValue1() { Char[] buffer = new Char[5]; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, ST_TEST_NAME); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read first 5"); TestLog.Compare("01234", new string(buffer), "First strings don't match"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read second 5 chars"); TestLog.Compare("56789", new string(buffer), "Second strings don't match"); } //[Variation("ReadValue entire value in one call", Priority = 0)] 
public void TestReadValue2() { Char[] buffer = new Char[10]; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, ST_TEST_NAME); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 10), 10, "Didn't read 10"); TestLog.Compare("0123456789", new string(buffer), "Strings don't match"); } //[Variation("ReadValue bit by bit", Priority = 0)] public void TestReadValue3() { Char[] buffer = new Char[10]; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, ST_TEST_NAME); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int index = 0; for (index = 0; index < buffer.Length; index++) { TestLog.Compare(DataReader.ReadValueChunk(buffer, index, 1), 1, "Read " + index); } TestLog.Compare("0123456789", new string(buffer), "Strings don't match"); } //[Variation("ReadValue for value more than 4K", Priority = 0)] public void TestReadValue4() { int size = 8192; Char[] buffer = new Char[size]; string val = new string('x', size); XmlReader DataReader = GetReader(new StringReader("<root>" + val + "</root>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int index = 0; for (index = 0; index < buffer.Length; index++) { TestLog.Compare(DataReader.ReadValueChunk(buffer, index, 1), 1, "Read " + index); } TestLog.Compare(val, new string(buffer), "Strings don't match"); } //[Variation("ReadValue for value more than 4K and invalid element", Priority = 1)] public void TestReadValue5() { int size = 8192; Char[] buffer = new Char[size]; string val = new string('x', size); try { XmlReader DataReader = GetReader(new StringReader("<root>" + val + "</notroot>")); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int index = 0; for (index = 0; index < buffer.Length; index++) { TestLog.Compare(DataReader.ReadValueChunk(buffer, index, 1), 1, "Read " + index); } TestLog.Compare(val, new string(buffer), "Strings don't match"); DataReader.Read(); throw new TestException(TestResult.Failed, ""); } catch (XmlException) { return; } } //[Variation("ReadValue with Entity Reference, EntityHandling = ExpandEntities")] public void TestReadValue6() { string strExpected = ST_IGNORE_ENTITIES; Char[] buffer = new Char[strExpected.Length]; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, ST_ENTTEST_NAME); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, strExpected.Length), strExpected.Length, "ReadValue1"); TestLog.Compare(new string(buffer), strExpected, "Str1"); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 1), 0, "ReadValue2"); TestLog.Compare(VerifyNode(DataReader, XmlNodeType.Element, "ENTITY2", String.Empty), "Verify"); } //[Variation("ReadValue with count > buffer size")] public void 
TestReadValue7() { BoolToLTMResult(VerifyInvalidReadValue(5, 0, 6, typeof(ArgumentOutOfRangeException))); } //[Variation("ReadValue with index > buffer size")] public void TestReadValue8() { BoolToLTMResult(VerifyInvalidReadValue(5, 5, 1, typeof(ArgumentOutOfRangeException))); } //[Variation("ReadValue with index + count exceeds buffer")] public void TestReadValue10() { BoolToLTMResult(VerifyInvalidReadValue(5, 2, 5, typeof(ArgumentOutOfRangeException))); } //[Variation("ReadValue with combination Text, CDATA and Whitespace")] public void TestReadChar11() { string strExpected = "AB"; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, "CAT"); DataReader.Read(); char[] buffer = new char[strExpected.Length]; if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue"); TestLog.Compare(new string(buffer), strExpected, "str"); } //[Variation("ReadValue with combination Text, CDATA and SignificantWhitespace")] public void TestReadChar12() { string strExpected = "AB"; XmlReader DataReader = GetReader(); PositionOnElement(DataReader, "CATMIXED"); DataReader.Read(); char[] buffer = new char[strExpected.Length]; if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue"); TestLog.Compare(new string(buffer), strExpected, "str"); } //[Variation("ReadValue with buffer == null")] public void TestReadChar13() { XmlReader DataReader = GetReader(); PositionOnElement(DataReader, "CHARS1"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(null, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } try { DataReader.ReadValueChunk(null, 0, 0); } catch (ArgumentNullException) { return; } throw new TestException(TestResult.Failed, ""); } //[Variation("ReadValue with multiple different inner nodes")] public void TestReadChar14() { string strExpected = "somevalue"; char[] buffer = new char[strExpected.Length]; string strxml = "<ROOT>somevalue<![CDATA[somevalue]]>somevalue</ROOT>"; XmlReader DataReader = GetReaderStr(strxml); PositionOnElement(DataReader, "ROOT"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue1"); TestLog.Compare(new string(buffer), strExpected, "str1"); // Now on CDATA. 
DataReader.Read(); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue2"); TestLog.Compare(new string(buffer), strExpected, "str2"); // Now back on Text DataReader.Read(); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue3"); TestLog.Compare(new string(buffer), strExpected, "str3"); } //[Variation("ReadValue after failed ReadValue")] public void TestReadChar15() { string strExpected = "somevalue"; char[] buffer = new char[strExpected.Length]; string strxml = "<ROOT>somevalue</ROOT>"; XmlReader DataReader = GetReaderStr(strxml); PositionOnElement(DataReader, "ROOT"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int nChars; try { nChars = DataReader.ReadValueChunk(buffer, strExpected.Length, 3); } catch (ArgumentException) { TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue Count"); TestLog.Compare(new string(buffer), strExpected, "str"); return; } TestLog.WriteLine("Couldn't read after ArgumentException"); throw new TestException(TestResult.Failed, ""); } //[Variation("Read after partial ReadValue")] public void TestReadChar16() { string strExpected = "somevalue"; char[] buffer = new char[strExpected.Length]; string strxml = "<ROOT>somevalue</ROOT>"; XmlReader DataReader = GetReaderStr(strxml); PositionOnElement(DataReader, "ROOT"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int nChars = DataReader.ReadValueChunk(buffer, 0, 2); TestLog.Compare(nChars, 2, "Read 2"); DataReader.Read(); TestLog.Compare(VerifyNode(DataReader, XmlNodeType.EndElement, "ROOT", String.Empty), "1vn"); } //[Variation("Test error after successful ReadValue")] public void TestReadChar19() { Char[] buffer = new Char[9]; try { XmlReader DataReader = GetReaderStr("<root>somevalue</root></root>"); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } int index = 0; for (index = 0; index < buffer.Length; index++) { TestLog.Compare(DataReader.ReadValueChunk(buffer, index, 1), 1, "Read " + index); } TestLog.Compare("somevalue", new string(buffer), "Strings don't match"); while (DataReader.Read()) ; } catch (XmlException) { return; } throw new TestException(TestResult.Failed, ""); } //[Variation("Call on invalid element content after 4k boundary", Priority = 1)] public void TestReadChar21() { string somechar = new string('x', 5000); string strxml = String.Format("<ROOT>a" + somechar + "{0}c</ROOT>", Convert.ToChar(0)); try { XmlReader DataReader = GetReaderStr(strxml); PositionOnElement(DataReader, "ROOT"); char[] buffer = new char[1]; if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } DataReader.Read(); while (DataReader.ReadValueChunk(buffer, 0, 1) > 0) ; } catch (XmlException) { return; } throw new TestException(TestResult.Failed, ""); } //[Variation("ReadValue with whitespace")] public void TestTextReadValue25() { string strExpected = "somevalue"; char[] buffer = new 
char[strExpected.Length]; string strxml = "<ROOT>somevalue<![CDATA[somevalue]]><test1/> <test2/></ROOT>"; XmlReader DataReader = GetReaderStr(strxml); PositionOnElement(DataReader, "ROOT"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue1"); TestLog.Compare(new string(buffer), strExpected, "str1"); // Now on CDATA. DataReader.Read(); TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, buffer.Length), strExpected.Length, "ReadValue2"); TestLog.Compare(new string(buffer), strExpected, "str2"); // Now on test DataReader.Read(); char[] spaces = new char[4]; // Now on whitespace. DataReader.Read(); TestLog.Compare(DataReader.ReadValueChunk(spaces, 0, spaces.Length), spaces.Length, "ReadValue3"); TestLog.Compare(new string(spaces), " ", "str3"); } //[Variation("ReadValue when end tag doesn't exist")] public void TestTextReadValue26() { char[] buffer = new char[5]; try { XmlReader DataReader = GetReaderStr("<root>value</notroot>"); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 5), 5, "Didn't read 5 chars"); TestLog.Compare("value", new string(buffer), "Strings don't match"); DataReader.Read(); throw new TestException(TestResult.Failed, ""); } catch (XmlException) { return; } } //[Variation("Testing with character entities")] public void TestCharEntities0() { char[] buffer = new char[1]; XmlReader DataReader = GetReaderStr("<root>va&lt;/root&gt;lue</root>"); PositionOnElement(DataReader, "root"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } while (DataReader.ReadValueChunk(buffer, 0, 1) > 0) ; DataReader.Read(); DataReader.Read(); TestLog.Compare(DataReader.NodeType, XmlNodeType.None, "Not on End"); } //[Variation("Testing with character entities when value more than 4k")] public void TestCharEntities1() { char[] buffer = new char[1]; XmlReader DataReader = GetReaderStr("<root>va" + new string('x', 5000) + "l&lt;/root&gt;ue</root>"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } while (DataReader.ReadValueChunk(buffer, 0, 1) > 0) ; DataReader.Read(); DataReader.Read(); TestLog.Compare(DataReader.NodeType, XmlNodeType.None, "Not on End"); } //[Variation("Testing with character entities with another pattern")] public void TestCharEntities2() { char[] buffer = new char[1]; XmlReader DataReader = GetReaderStr("<!DOCTYPE root[<!ENTITY x \"somevalue\"><!ELEMENT root ANY>]><root>value&amp;x;</root>"); DataReader.Read(); if (!DataReader.CanReadValueChunk) { try { DataReader.ReadValueChunk(buffer, 0, 5); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { return; } } while (DataReader.ReadValueChunk(buffer, 0, 1) > 0) ; DataReader.Read(); DataReader.Read(); TestLog.Compare(DataReader.NodeType, XmlNodeType.None, "Not on End"); } //[Variation("Testing a use case pattern with large file")] public 
void TestReadValueOnBig() { XmlReader DataReader = GetReader(); char[] buffer = new char[1]; while (DataReader.Read()) { if (DataReader.HasValue && DataReader.CanReadValueChunk) { Random rand = new Random(); int count; do { count = rand.Next(4) + 1; buffer = new char[count]; if (rand.Next(1) == 1) { break; } } while (DataReader.ReadValueChunk(buffer, 0, count) > 0); } else { if (!DataReader.CanReadValueChunk) { try { buffer = new char[1]; DataReader.ReadValueChunk(buffer, 0, 1); } catch (NotSupportedException) { } } else { try { buffer = new char[1]; DataReader.ReadValueChunk(buffer, 0, 1); } catch (InvalidOperationException) { } } } } } //[Variation("ReadValue on Comments with IgnoreComments")] public void TestReadValueOnComments0() { char[] buffer = null; buffer = new char[3]; XmlReaderSettings settings = new XmlReaderSettings(); settings.IgnoreComments = true; XmlReader DataReader = GetReaderStr("<root>val<!--Comment-->ue</root>"); DataReader.Read(); try { TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 3), 3, "Didn't read 3 chars"); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { } buffer = new char[2]; DataReader.Read(); try { TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 2), 2, "Didn't read 2 chars"); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { } while (DataReader.Read()) ; DataReader.Dispose(); } //[Variation("ReadValue on PI with IgnorePI")] public void TestReadValueOnPIs0() { char[] buffer = null; buffer = new char[3]; XmlReader DataReader = GetReaderStr("<root>val<?pi target?>ue</root>"); DataReader.Read(); try { TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 3), 3, "Didn't read 3 chars"); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { } buffer = new char[2]; DataReader.Read(); try { TestLog.Compare(DataReader.ReadValueChunk(buffer, 0, 2), 2, "Didn't read 2 chars"); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { } while (DataReader.Read()) ; DataReader.Dispose(); } //[Variation("Skip after ReadAttributeValue/ReadValueChunk")] public void bug340158() { XmlReaderSettings settings = new XmlReaderSettings(); settings.DtdProcessing = DtdProcessing.Ignore; XmlReader r = XmlReader.Create(FilePathUtil.getStream(Path.Combine("StandardTests", "XML10", "ms_xml", "vs084.xml")), settings); XmlReader reader = GetReader(r); reader.ReadToFollowing("a"); reader.MoveToNextAttribute(); reader.ReadAttributeValue(); try { reader.ReadValueChunk(new char[3], 0, 3); throw new TestException(TestResult.Failed, ""); } catch (NotSupportedException) { } reader.Skip(); TestLog.Compare(reader.NodeType, XmlNodeType.Text, "NT"); reader.Read(); TestLog.Compare(reader.NodeType, XmlNodeType.Element, "NT1"); TestLog.Compare(reader.Name, "a", "Name"); } } } } }
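// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the test suite above): the basic ReadValueChunk pattern these
// variations exercise - streaming a large text node through a small char buffer instead of
// materializing it via XmlReader.Value. Plain XmlReader.Create is used here; the tests above go
// through their own GetReader/GetReaderStr helpers.
// ---------------------------------------------------------------------------------------------
using System;
using System.IO;
using System.Text;
using System.Xml;

internal static class ReadValueChunkDemo
{
    public static void Main()
    {
        string xml = "<root>" + new string('x', 8192) + "</root>";

        using (XmlReader reader = XmlReader.Create(new StringReader(xml)))
        {
            reader.ReadToFollowing("root");
            reader.Read();                      // move from the element to its text node

            if (!reader.CanReadValueChunk)
            {
                Console.WriteLine("This reader does not support ReadValueChunk.");
                return;
            }

            char[] buffer = new char[1024];
            StringBuilder value = new StringBuilder();
            int read;
            while ((read = reader.ReadValueChunk(buffer, 0, buffer.Length)) > 0)
            {
                value.Append(buffer, 0, read);  // consume each chunk as it arrives
            }

            Console.WriteLine("Total characters read: " + value.Length);
        }
    }
}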
#if FEATURE_CONCURRENTMERGESCHEDULER using J2N.Threading.Atomic; using Lucene.Net.Attributes; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Store; using Lucene.Net.Util; using NUnit.Framework; using System; using System.IO; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using Directory = Lucene.Net.Store.Directory; using Document = Documents.Document; using Field = Field; using Lucene41PostingsFormat = Lucene.Net.Codecs.Lucene41.Lucene41PostingsFormat; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper; using StringField = StringField; using TestUtil = Lucene.Net.Util.TestUtil; using TextField = TextField; [TestFixture] public class TestConcurrentMergeScheduler : LuceneTestCase { private class FailOnlyOnFlush : Failure { private readonly TestConcurrentMergeScheduler outerInstance; public FailOnlyOnFlush(TestConcurrentMergeScheduler outerInstance) { this.outerInstance = outerInstance; } internal bool doFail; internal bool hitExc; public override void SetDoFail() { this.doFail = true; hitExc = false; } public override void ClearDoFail() { this.doFail = false; } public override void Eval(MockDirectoryWrapper dir) { if (doFail && IsTestThread) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. bool isDoFlush = Util.StackTraceHelper.DoesStackTraceContainMethod("Flush"); bool isClose = Util.StackTraceHelper.DoesStackTraceContainMethod("Close") || Util.StackTraceHelper.DoesStackTraceContainMethod("Dispose"); if (isDoFlush && !isClose && Random.NextBoolean()) { hitExc = true; throw new IOException(Thread.CurrentThread.Name + ": now failing during flush"); } } } } // Make sure running BG merges still work fine even when // we are hitting exceptions during flushing. 
[Test] public virtual void TestFlushExceptions() { MockDirectoryWrapper directory = NewMockDirectory(); FailOnlyOnFlush failure = new FailOnlyOnFlush(this); directory.FailOn(failure); IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); Document doc = new Document(); Field idField = NewStringField("id", "", Field.Store.YES); doc.Add(idField); int extraCount = 0; for (int i = 0; i < 10; i++) { if (Verbose) { Console.WriteLine("TEST: iter=" + i); } for (int j = 0; j < 20; j++) { idField.SetStringValue(Convert.ToString(i * 20 + j)); writer.AddDocument(doc); } // must cycle here because sometimes the merge flushes // the doc we just added and so there's nothing to // flush, and we don't hit the exception while (true) { writer.AddDocument(doc); failure.SetDoFail(); try { writer.Flush(true, true); if (failure.hitExc) { Assert.Fail("failed to hit IOException"); } extraCount++; } catch (IOException ioe) { if (Verbose) { Console.WriteLine(ioe.StackTrace); } failure.ClearDoFail(); break; } } Assert.AreEqual(20 * (i + 1) + extraCount, writer.NumDocs); } writer.Dispose(); IndexReader reader = DirectoryReader.Open(directory); Assert.AreEqual(200 + extraCount, reader.NumDocs); reader.Dispose(); directory.Dispose(); } // Test that deletes committed after a merge started and // before it finishes, are correctly merged back: [Test] public virtual void TestDeleteMerging() { Directory directory = NewDirectory(); LogDocMergePolicy mp = new LogDocMergePolicy(); // Force degenerate merging so we can get a mix of // merging of segments with and without deletes at the // start: mp.MinMergeDocs = 1000; IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(mp)); Document doc = new Document(); Field idField = NewStringField("id", "", Field.Store.YES); doc.Add(idField); for (int i = 0; i < 10; i++) { if (Verbose) { Console.WriteLine("\nTEST: cycle"); } for (int j = 0; j < 100; j++) { idField.SetStringValue(Convert.ToString(i * 100 + j)); writer.AddDocument(doc); } int delID = i; while (delID < 100 * (1 + i)) { if (Verbose) { Console.WriteLine("TEST: del " + delID); } writer.DeleteDocuments(new Term("id", "" + delID)); delID += 10; } writer.Commit(); } writer.Dispose(); IndexReader reader = DirectoryReader.Open(directory); // Verify that we did not lose any deletes... 
Assert.AreEqual(450, reader.NumDocs); reader.Dispose(); directory.Dispose(); } [Test] public virtual void TestNoExtraFiles() { Directory directory = NewDirectory(); IndexWriter writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); for (int iter = 0; iter < 7; iter++) { if (Verbose) { Console.WriteLine("TEST: iter=" + iter); } for (int j = 0; j < 21; j++) { Document doc = new Document(); doc.Add(NewTextField("content", "a b c", Field.Store.NO)); writer.AddDocument(doc); } writer.Dispose(); TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles"); // Reopen writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2)); } writer.Dispose(); directory.Dispose(); } [Test] public virtual void TestNoWaitClose() { Directory directory = NewDirectory(); Document doc = new Document(); Field idField = NewStringField("id", "", Field.Store.YES); doc.Add(idField); IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(100))); for (int iter = 0; iter < 10; iter++) { for (int j = 0; j < 201; j++) { idField.SetStringValue(Convert.ToString(iter * 201 + j)); writer.AddDocument(doc); } int delID = iter * 201; for (int j = 0; j < 20; j++) { writer.DeleteDocuments(new Term("id", Convert.ToString(delID))); delID += 5; } // Force a bunch of merge threads to kick off so we // stress out aborting them on close: ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 3; writer.AddDocument(doc); writer.Commit(); writer.Dispose(false); IndexReader reader = DirectoryReader.Open(directory); Assert.AreEqual((1 + iter) * 182, reader.NumDocs); reader.Dispose(); // Reopen writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy(100))); } writer.Dispose(); directory.Dispose(); } // LUCENE-4544 [Test] public virtual void TestMaxMergeCount() { Directory dir = NewDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); int maxMergeCount = TestUtil.NextInt32(Random, 1, 5); int maxMergeThreads = TestUtil.NextInt32(Random, 1, maxMergeCount); CountdownEvent enoughMergesWaiting = new CountdownEvent(maxMergeCount); AtomicInt32 runningMergeCount = new AtomicInt32(0); AtomicBoolean failed = new AtomicBoolean(); if (Verbose) { Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads); } ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed); cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads); iwc.SetMergeScheduler(cms); iwc.SetMaxBufferedDocs(2); TieredMergePolicy tmp = new TieredMergePolicy(); iwc.SetMergePolicy(tmp); tmp.MaxMergeAtOnce = 2; tmp.SegmentsPerTier = 2; IndexWriter w = new IndexWriter(dir, iwc); Document doc = new Document(); doc.Add(NewField("field", "field", TextField.TYPE_NOT_STORED)); while (enoughMergesWaiting.CurrentCount != 0 && !failed) { for (int i = 0; i < 10; i++) { w.AddDocument(doc); } } w.Dispose(false); dir.Dispose(); } private class ConcurrentMergeSchedulerAnonymousInnerClassHelper : ConcurrentMergeScheduler { private readonly TestConcurrentMergeScheduler outerInstance; 
private readonly int maxMergeCount; private readonly CountdownEvent enoughMergesWaiting; private readonly AtomicInt32 runningMergeCount; private readonly AtomicBoolean failed; public ConcurrentMergeSchedulerAnonymousInnerClassHelper(TestConcurrentMergeScheduler outerInstance, int maxMergeCount, CountdownEvent enoughMergesWaiting, AtomicInt32 runningMergeCount, AtomicBoolean failed) { this.outerInstance = outerInstance; this.maxMergeCount = maxMergeCount; this.enoughMergesWaiting = enoughMergesWaiting; this.runningMergeCount = runningMergeCount; this.failed = failed; } protected override void DoMerge(MergePolicy.OneMerge merge) { try { // Stall all incoming merges until we see // maxMergeCount: int count = runningMergeCount.IncrementAndGet(); try { Assert.IsTrue(count <= maxMergeCount, "count=" + count + " vs maxMergeCount=" + maxMergeCount); enoughMergesWaiting.Signal(); // Stall this merge until we see exactly // maxMergeCount merges waiting while (true) { // wait for 10 milliseconds if (enoughMergesWaiting.Wait(new TimeSpan(0, 0, 0, 0, 10)) || failed) { break; } } // Then sleep a bit to give a chance for the bug // (too many pending merges) to appear: Thread.Sleep(20); base.DoMerge(merge); } finally { runningMergeCount.DecrementAndGet(); } } catch (Exception /*t*/) { failed.Value = (true); m_writer.MergeFinish(merge); // LUCENENET specific - throwing an exception on a background thread causes the test // runner to crash on .NET Core 2.0. //throw new Exception(t.ToString(), t); } } } private class TrackingCMS : ConcurrentMergeScheduler { internal long totMergedBytes; public TrackingCMS() { SetMaxMergesAndThreads(5, 5); } protected override void DoMerge(MergePolicy.OneMerge merge) { totMergedBytes += merge.TotalBytesSize; base.DoMerge(merge); } } [Test] public virtual void TestTotalBytesSize() { Directory d = NewDirectory(); if (d is MockDirectoryWrapper) { ((MockDirectoryWrapper)d).Throttling = Throttling.NEVER; } IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); iwc.SetMaxBufferedDocs(5); iwc.SetMergeScheduler(new TrackingCMS()); if (TestUtil.GetPostingsFormat("id").Equals("SimpleText", StringComparison.Ordinal)) { // no iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())); } RandomIndexWriter w = new RandomIndexWriter(Random, d, iwc); for (int i = 0; i < 1000; i++) { Document doc = new Document(); doc.Add(new StringField("id", "" + i, Field.Store.NO)); w.AddDocument(doc); if (Random.NextBoolean()) { w.DeleteDocuments(new Term("id", "" + Random.Next(i + 1))); } } Assert.IsTrue(((TrackingCMS)w.IndexWriter.Config.MergeScheduler).totMergedBytes != 0); w.Dispose(); d.Dispose(); } // LUCENENET specific private class FailOnlyOnMerge : Failure { public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. 
if (StackTraceHelper.DoesStackTraceContainMethod("DoMerge")) { throw new IOException("now failing during merge"); } } } // LUCENENET-603 [Test, LuceneNetSpecific] public void TestExceptionOnBackgroundThreadIsPropagatedToCallingThread() { using (MockDirectoryWrapper dir = NewMockDirectory()) { dir.FailOn(new FailOnlyOnMerge()); Document doc = new Document(); Field idField = NewStringField("id", "", Field.Store.YES); doc.Add(idField); var mergeScheduler = new ConcurrentMergeScheduler(); using (IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(mergeScheduler).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy()))) { LogMergePolicy logMP = (LogMergePolicy)writer.Config.MergePolicy; logMP.MergeFactor = 10; for (int i = 0; i < 20; i++) { writer.AddDocument(doc); } bool exceptionHit = false; try { mergeScheduler.Sync(); } catch (MergePolicy.MergeException) { exceptionHit = true; } assertTrue(exceptionHit); } } } } } #endif
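// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not one of the tests above): wiring a custom ConcurrentMergeScheduler,
// in the spirit of TrackingCMS, into a plain IndexWriter outside the test framework. The use of
// StandardAnalyzer and RAMDirectory is an assumption made for self-containment (they require the
// usual Lucene.NET 4.8 packages); the tests above use MockAnalyzer and randomized directories.
// ---------------------------------------------------------------------------------------------
using System;
using System.Threading;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

internal sealed class ByteCountingMergeScheduler : ConcurrentMergeScheduler
{
    private long totalMergedBytes;

    // DoMerge can run on several background merge threads, so accumulate with Interlocked.
    public long TotalMergedBytes { get { return Interlocked.Read(ref totalMergedBytes); } }

    protected override void DoMerge(MergePolicy.OneMerge merge)
    {
        Interlocked.Add(ref totalMergedBytes, merge.TotalBytesSize);
        base.DoMerge(merge);
    }
}

internal static class MergeSchedulerDemo
{
    public static void Main()
    {
        var scheduler = new ByteCountingMergeScheduler();
        scheduler.SetMaxMergesAndThreads(4, 2);   // (maxMergeCount, maxThreadCount)

        var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, new StandardAnalyzer(LuceneVersion.LUCENE_48));
        config.SetMergeScheduler(scheduler);
        config.SetMaxBufferedDocs(10);            // flush often so background merges actually happen

        using (var dir = new RAMDirectory())
        using (var writer = new IndexWriter(dir, config))
        {
            var doc = new Document();
            var idField = new StringField("id", "", Field.Store.NO);
            doc.Add(idField);
            for (int i = 0; i < 1000; i++)
            {
                idField.SetStringValue(i.ToString());
                writer.AddDocument(doc);
            }
            writer.Commit();
        }

        Console.WriteLine("Bytes merged in the background: " + scheduler.TotalMergedBytes);
    }
}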
//--------------------------------------------------------------------- // <copyright file="MetadataCache.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // // @owner [....] // @backupOwner [....] //--------------------------------------------------------------------- namespace System.Data.Metadata.Edm { using System; using System.Collections.Generic; using System.Data.Common.Utils; using System.Data.Entity; using System.Data.Mapping; using System.Diagnostics; using System.Runtime.Versioning; using System.Security.Permissions; using System.Threading; using System.Xml; /// <summary> /// Runtime Metadata Cache - this class contains the metadata cache entry for edm and store collections. /// </summary> internal static class MetadataCache { #region Fields private const string s_dataDirectory = "|datadirectory|"; private const string s_metadataPathSeparator = "|"; // This is the period in the periodic cleanup measured in milliseconds private const int cleanupPeriod = 5 * 60 * 1000; // This dictionary contains the cache entry for the edm item collection. The reason why we need to keep a seperate dictionary // for CSpace item collection is that the same model can be used for different providers. We don't want to load the model // again and again private static readonly Dictionary<string, EdmMetadataEntry> _edmLevelCache = new Dictionary<string, EdmMetadataEntry>(StringComparer.OrdinalIgnoreCase); /// <summary> /// This dictionary contains the store cache entry - this entry will only keep track of StorageMappingItemCollection, since internally /// storage mapping item collection keeps strong references to both edm item collection and store item collection. /// </summary> private static readonly Dictionary<string, StoreMetadataEntry> _storeLevelCache = new Dictionary<string, StoreMetadataEntry>(StringComparer.OrdinalIgnoreCase); /// <summary> /// The list maintains the store metadata entries that are still in use, maybe because someone is still holding a strong reference /// to it. We need to scan this list everytime the clean up thread wakes up and make sure if the item collection is no longer in use, /// call clear on query cache /// </summary> private static readonly List<StoreMetadataEntry> _metadataEntriesRemovedFromCache = new List<StoreMetadataEntry>(); private static Memoizer<string, List<MetadataArtifactLoader>> _artifactLoaderCache = new Memoizer<string, List<MetadataArtifactLoader>>(MetadataCache.SplitPaths, null); /// <summary> /// Read/Write lock for edm cache /// </summary> private static readonly object _edmLevelLock = new object(); /// <summary> /// Read/Write lock for the store cache /// </summary> private static readonly object _storeLevelLock = new object(); // Periodic thread which runs every n mins (look up the cleanupPeriod variable to see the exact time), walks through // every item in other store and edm cache and tries to do some cleanup private static Timer timer = new Timer(PeriodicCleanupCallback, null, cleanupPeriod, cleanupPeriod); #endregion #region Methods /// <summary> /// The purpose of the thread is to do cleanup. It marks the object in various stages before it actually cleans up the object /// Here's what this does for each entry in the cache: /// 1> First checks if the entry is marked for cleanup. 
/// 2> If the entry is marked for cleanup, that means its in one of the following 3 states /// a) If the strong reference to item collection is not null, it means that this item was marked for cleanup in /// the last cleanup cycle and we must make the strong reference set to null so that it can be garbage collected. /// b) Otherwise, we are waiting for GC to collect the item collection so that we can remove this entry from the cache /// If the weak reference to item collection is still alive, we don't do anything /// c) If the weak reference to item collection is not alive, we need to remove this entry from the cache /// 3> If the entry is not marked for cleanup, then check whether the weak reference to entry token is alive /// a) if it is alive, then this entry is in use and we must do nothing /// b) Otherwise, we can mark this entry for cleanup /// </summary> /// <param name="state"></param> private static void PeriodicCleanupCallback(object state) { // Perform clean up on edm cache DoCacheClean<EdmMetadataEntry>(_edmLevelCache, _edmLevelLock); // Perform clean up on store cache DoCacheClean<StoreMetadataEntry>(_storeLevelCache, _storeLevelLock); } /// <summary> /// A helper function for splitting up a string that is a concatenation of strings delimited by the metadata /// path separator into a string list. The resulting list is NOT sorted. /// </summary> /// <param name="paths">The paths to split</param> /// <returns>An array of strings</returns> [ResourceExposure(ResourceScope.Machine)] //Exposes the file name which is a Machine resource [ResourceConsumption(ResourceScope.Machine)] //For MetadataArtifactLoader.Create method call. But the path is not created in this method. internal static List<MetadataArtifactLoader> SplitPaths(string paths) { Debug.Assert(!string.IsNullOrEmpty(paths), "paths cannot be empty or null"); string[] results; // This is the registry of all URIs in the global collection. HashSet<string> uriRegistry = new HashSet<string>(StringComparer.OrdinalIgnoreCase); List<MetadataArtifactLoader> loaders = new List<MetadataArtifactLoader>(); // If the argument contains one or more occurrences of the macro '|DataDirectory|', we // pull those paths out so that we don't lose them in the string-splitting logic below. // Note that the macro '|DataDirectory|' cannot have any whitespace between the pipe // symbols and the macro name. Also note that the macro must appear at the beginning of // a path (else we will eventually fail with an invalid path exception, because in that // case the macro is not expanded). If a real/physical folder named 'DataDirectory' needs // to be included in the metadata path, whitespace should be used on either or both sides // of the name. // List<string> dataDirPaths = new List<string>(); int indexStart = paths.IndexOf(MetadataCache.s_dataDirectory, StringComparison.OrdinalIgnoreCase); while (indexStart != -1) { int prevSeparatorIndex = indexStart == 0 ? -1 : paths.LastIndexOf( MetadataCache.s_metadataPathSeparator, indexStart - 1, // start looking here StringComparison.Ordinal ); int macroPathBeginIndex = prevSeparatorIndex + 1; // The '|DataDirectory|' macro is composable, so identify the complete path, like // '|DataDirectory|\item1\item2'. If the macro appears anywhere other than at the // beginning, splice out the entire path, e.g. 'C:\item1\|DataDirectory|\item2'. In this // latter case the macro will not be expanded, and downstream code will throw an exception. 
// int indexEnd = paths.IndexOf(MetadataCache.s_metadataPathSeparator, indexStart + MetadataCache.s_dataDirectory.Length, StringComparison.Ordinal); if (indexEnd == -1) { dataDirPaths.Add(paths.Substring(macroPathBeginIndex)); paths = paths.Remove(macroPathBeginIndex); // update the concatenated list of paths break; } dataDirPaths.Add(paths.Substring(macroPathBeginIndex, indexEnd - macroPathBeginIndex)); // Update the concatenated list of paths by removing the one containing the macro. // paths = paths.Remove(macroPathBeginIndex, indexEnd - macroPathBeginIndex); indexStart = paths.IndexOf(MetadataCache.s_dataDirectory, StringComparison.OrdinalIgnoreCase); } // Split the string on the separator and remove all spaces around each parameter value results = paths.Split(new string[] { MetadataCache.s_metadataPathSeparator }, StringSplitOptions.RemoveEmptyEntries); // Now that the non-macro paths have been identified, merge the paths containing the macro // into the complete list. // if (dataDirPaths.Count > 0) { dataDirPaths.AddRange(results); results = dataDirPaths.ToArray(); } for (int i = 0; i < results.Length; i++) { // Trim out all the spaces for this parameter and add it only if it's not blank results[i] = results[i].Trim(); if (results[i].Length > 0) { loaders.Add(MetadataArtifactLoader.Create( results[i], MetadataArtifactLoader.ExtensionCheck.All, // validate the extension against all acceptable values null, uriRegistry )); } } return loaders; } /// <summary> /// Walks through the given cache and calls cleanup on each entry in the cache /// </summary> /// <typeparam name="T"></typeparam> /// <param name="cache"></param> /// <param name="objectToLock"></param> private static void DoCacheClean<T>(Dictionary<string, T> cache, object objectToLock) where T: MetadataEntry { // Sometime, for some reason, timer can be initialized and the cache is still not initialized. if (cache != null) { List<KeyValuePair<string, T>> keysForRemoval = null; lock (objectToLock) { // we should check for type of the lock object first, since otherwise we might be reading the count of the list // while some other thread might be modifying it. For e.g. when this function is called for edmcache, // we will be acquiring edmlock and trying to get the count for the list, while some other thread // might be calling ClearCache and we might be adding entries to the list if (objectToLock == _storeLevelLock && _metadataEntriesRemovedFromCache.Count != 0) { // First check the list of entries and remove things which are no longer in use for (int i = _metadataEntriesRemovedFromCache.Count - 1; 0 <= i; i--) { if (!_metadataEntriesRemovedFromCache[i].IsEntryStillValid()) { // Clear the query cache _metadataEntriesRemovedFromCache[i].CleanupQueryCache(); // Remove the entry at the current index. This is the reason why we // go backwards. 
_metadataEntriesRemovedFromCache.RemoveAt(i); } } } // We have to use a list to keep track of the keys to remove because we can't remove while enumerating foreach (KeyValuePair<string, T> pair in cache) { if (pair.Value.PeriodicCleanUpThread()) { if (keysForRemoval == null) { keysForRemoval = new List<KeyValuePair<string, T>>(); } keysForRemoval.Add(pair); } } // Remove all the entries from the cache if (keysForRemoval != null) { for (int i = 0; i < keysForRemoval.Count; i++) { keysForRemoval[i].Value.Clear(); cache.Remove(keysForRemoval[i].Key); } } } } } /// <summary> /// Retrieves an cache entry holding to edm metadata for a given cache key /// </summary> /// <param name="cacheKey">string containing all the files from which edm metadata is to be retrieved</param> /// <param name="composite">An instance of the composite MetadataArtifactLoader</param> /// <param name="entryToken">The metadata entry token for the returned entry</param> /// <returns>Returns the entry containing the edm metadata</returns> internal static EdmItemCollection GetOrCreateEdmItemCollection(string cacheKey, MetadataArtifactLoader loader, out object entryToken) { EdmMetadataEntry entry = GetCacheEntry<EdmMetadataEntry>(_edmLevelCache, cacheKey, _edmLevelLock, new EdmMetadataEntryConstructor(), out entryToken); // Load the edm item collection or if the collection is already loaded, check for security permission LoadItemCollection(new EdmItemCollectionLoader(loader), entry); return entry.EdmItemCollection; } /// <summary> /// Retrieves an entry holding store metadata for a given cache key /// </summary> /// <param name="cacheKey">The connection string whose store metadata is to be retrieved</param> /// <param name="composite">An instance of the composite MetadataArtifactLoader</param> /// <param name="entryToken">The metadata entry token for the returned entry</param> /// <returns>the entry containing the information on how to load store metadata</returns> internal static StorageMappingItemCollection GetOrCreateStoreAndMappingItemCollections( string cacheKey, MetadataArtifactLoader loader, EdmItemCollection edmItemCollection, out object entryToken) { StoreMetadataEntry entry = GetCacheEntry<StoreMetadataEntry>(_storeLevelCache, cacheKey, _storeLevelLock, new StoreMetadataEntryConstructor(), out entryToken); // Load the store item collection or if the collection is already loaded, check for security permission LoadItemCollection(new StoreItemCollectionLoader(edmItemCollection, loader), entry); return entry.StorageMappingItemCollection; } internal static List<MetadataArtifactLoader> GetOrCreateMetdataArtifactLoader(string paths) { return _artifactLoaderCache.Evaluate(paths); } /// <summary> /// Get the entry from the cache given the cache key. If the entry is not present, it creates a new entry and /// adds it to the cache /// </summary> /// <typeparam name="T"></typeparam> /// <param name="cache"></param> /// <param name="cacheKey"></param> /// <param name="entryToken"></param> /// <param name="metadataEntry"></param> /// <param name="objectToLock"></param> /// <returns></returns> private static T GetCacheEntry<T>(Dictionary<string, T> cache, string cacheKey, object objectToLock, IMetadataEntryConstructor<T> metadataEntry, out object entryToken) where T: MetadataEntry { T entry; // In the critical section, we need to do the minimal thing to ensure correctness // Within the lock, we will see if an entry is present. If it is not, we will create a new entry and // add it to the cache. 
In either case, we need to ensure the token to make sure so that any other // thread that comes looking for the same entry does nothing in this critical section // Also the cleanup thread doesn't do anything since the token is alive lock (objectToLock) { if (cache.TryGetValue(cacheKey, out entry)) { entryToken = entry.EnsureToken(); } else { entry = metadataEntry.GetMetadataEntry(); entryToken = entry.EnsureToken(); cache.Add(cacheKey, entry); } } return entry; } /// <summary> /// Loads the item collection for the entry /// </summary> /// <param name="itemCollectionLoader">struct which loads an item collection</param> /// <param name="entry">entry whose item collection needs to be loaded</param> private static void LoadItemCollection<T>(IItemCollectionLoader<T> itemCollectionLoader, T entry) where T : MetadataEntry { // At this point, you have made sure that there is an entry with an alive token in the cache so that // other threads can find it if they come querying for it, and cleanup thread won't clean the entry // If two or more threads come one after the other, we don't won't both of them to load the metadata. // So if one of them is loading the metadata, the other should wait and then use the same metadata. // For that reason, we have this lock on the entry itself to make sure that this happens. Its okay to // update the item collection outside the lock, since assignment are guarantees to be atomic and no two // thread are updating this at the same time bool isItemCollectionAlreadyLoaded = true; if (!entry.IsLoaded) { lock (entry) { if (!entry.IsLoaded) { itemCollectionLoader.LoadItemCollection(entry); isItemCollectionAlreadyLoaded = false; } } } Debug.Assert(entry.IsLoaded, "The entry must be loaded at this point"); // Making sure that the thread which loaded the item collection is not checking for file permisssions // again if (isItemCollectionAlreadyLoaded) { entry.CheckFilePermission(); } } /// <summary> /// Remove all the entries from the cache /// </summary> internal static void Clear() { lock (_edmLevelLock) { _edmLevelCache.Clear(); } lock (_storeLevelLock) { // Call clear on each of the metadata entries. This is to make sure we clear all the performance // counters associated with the query cache foreach (StoreMetadataEntry entry in _storeLevelCache.Values) { // Check if the weak reference to item collection is still alive if (entry.IsEntryStillValid()) { _metadataEntriesRemovedFromCache.Add(entry); } else { entry.Clear(); } } _storeLevelCache.Clear(); } Memoizer<string, List<MetadataArtifactLoader>> artifactLoaderCacheTemp = new Memoizer<string, List<MetadataArtifactLoader>>(MetadataCache.SplitPaths, null); Interlocked.CompareExchange(ref _artifactLoaderCache, artifactLoaderCacheTemp, _artifactLoaderCache); } #endregion #region InlineClasses /// <summary> /// The base class having common implementation for all metadata entry classes /// </summary> private abstract class MetadataEntry { private WeakReference _entryTokenReference; private ItemCollection _itemCollection; private WeakReference _weakReferenceItemCollection; private bool _markEntryForCleanup; private FileIOPermission _filePermissions; /// <summary> /// The constructor for constructing this MetadataEntry /// </summary> internal MetadataEntry() { // Create this once per life time of the object. 
Creating extra weak references causing unnecessary GC pressure _entryTokenReference = new WeakReference(null); _weakReferenceItemCollection = new WeakReference(null); } /// <summary> /// returns the item collection inside this metadata entry /// </summary> protected ItemCollection ItemCollection { get { return _itemCollection; } } /// <summary> /// Update the entry with the given item collection /// </summary> /// <param name="itemCollection"></param> protected void UpdateMetadataEntry(ItemCollection itemCollection, FileIOPermission filePermissions) { Debug.Assert(_entryTokenReference.IsAlive, "You must call Ensure token before you call this method"); Debug.Assert(_markEntryForCleanup == false, "The entry must not be marked for cleanup"); Debug.Assert(_itemCollection == null, "Item collection must be null"); Debug.Assert(_filePermissions == null, "filePermissions must be null"); // Update strong and weak reference for item collection _weakReferenceItemCollection.Target = itemCollection; _filePermissions = filePermissions; // do this last, because it signals that we are loaded _itemCollection = itemCollection; } internal bool IsLoaded { get { return _itemCollection != null; } } /// <summary> /// This method is called periodically by the cleanup thread to make the unused entries /// go through various stages, before it is ready for cleanup. If it is ready, this method /// returns true and then the entry is completely removed from the cache /// </summary> /// <returns></returns> internal bool PeriodicCleanUpThread() { // Here's what this does for each entry in the cache: // 1> First checks if the entry is marked for cleanup. // 2> If the entry is marked for cleanup, that means its in one of the following 3 states // a) If the strong reference to item collection is not null, it means that this item was marked for cleanup in // the last cleanup cycle and we must make the strong reference set to null so that it can be garbage collected. (GEN 2) // b) Otherwise, we are waiting for GC to collect the item collection so that we can remove this entry from the cache // If the weak reference to item collection is still alive, we don't do anything // c) If the weak reference to item collection is not alive, we need to remove this entry from the cache (GEN 3) // 3> If the entry is not marked for cleanup, then check whether the weak reference to entry token is alive // a) if it is alive, then this entry is in use and we must do nothing // b) Otherwise, we can mark this entry for cleanup (GEN 1) if (_markEntryForCleanup) { Debug.Assert(_entryTokenReference.IsAlive == false, "Entry Token must never be alive if the entry is marked for cleanup"); if (_itemCollection != null) { // GEN 2 _itemCollection = null; } else if (!_weakReferenceItemCollection.IsAlive) { // GEN 3 _filePermissions = null; // this entry must be removed from the cache return true; } } else if (!_entryTokenReference.IsAlive) { // GEN 1 // If someone creates a entity connection, and calls GetMetadataWorkspace. This creates an cache entry, // but the item collection is not initialized yet (since store item collection are initialized only // when one calls connection.Open()). Suppose now the connection is no longer used - in other words, // open was never called and it goes out of scope. After some time when the connection gets GC'ed, // entry token won't be alive any longer, but item collection inside it will be null, since it was never initialized. 
// So we can't assert that item collection must be always initialized here _markEntryForCleanup = true; } return false; } /// <summary> /// Make sure that the entry has a alive token and returns that token - it can be new token or an existing /// one, depending on the state of the entry /// </summary> /// <returns></returns> internal object EnsureToken() { object entryToken = _entryTokenReference.Target; ItemCollection itemCollection = (ItemCollection)_weakReferenceItemCollection.Target; // When ensure token is called, the entry can be in different stages // 1> Its a newly created entry - no token, no item collection, etc. Just create a new token and // return back // 2> An entry already in use - the weak reference to token must be alive. We just need to grab the token // and return it // 3> No one is using this entry and hence the token is no longer alive. If we have strong reference to item // collection, then create a new token and return it // 4> No one has used this token for one cleanup cycle and hence strong reference is null. But the weak reference // is still alive. We need to make the initialize the strong reference again, create a new token and return it // 5> This entry has not been used for long enough that even the weak reference is no longer alive. This entry is // now exactly like a new entry, except that it is still marked for cleanup. Create a new token, set mark for // cleanup to false and return the token if (_entryTokenReference.IsAlive) { Debug.Assert(_markEntryForCleanup == false, "An entry with alive token cannot be marked for cleanup"); // ItemCollection strong pointer can be null or not null. If the entry has been created, and loadItemCollection // hasn't been called yet, the token will be alive, but item collection will be null. If someone called // load item collection, then item collection will not be non-null return entryToken; } // If the entry token is not alive, then it can be either a new created entry with everything set // to null or it must be one of the entries which is no longer in use else if (_itemCollection != null) { Debug.Assert(_weakReferenceItemCollection.IsAlive, "Since the strong reference is still there, weak reference must also be alive"); // This means that no one is using the item collection, and its waiting to be cleanuped } else { if (_weakReferenceItemCollection.IsAlive) { Debug.Assert(_markEntryForCleanup, "Since the strong reference is null, this entry must be marked for cleanup"); // Initialize the strong reference to item collection _itemCollection = itemCollection; } else { // no more references to the collection // are available, so get rid of the permissions // object. We will get a new one when we get a new collection _filePermissions = null; } } // Even if the _weakReferenceItemCollection is no longer alive, we will reuse this entry. 
Assign a new entry token and set mark for cleanup to false // so that this entry is not cleared by the cleanup thread entryToken = new object(); _entryTokenReference.Target = entryToken; _markEntryForCleanup = false; return entryToken; } /// <summary> /// Check if the thread has appropriate permissions to use the already loaded metadata /// </summary> internal void CheckFilePermission() { Debug.Assert(_itemCollection != null, "Item collection must be present since we want to reuse the metadata"); Debug.Assert(_entryTokenReference.IsAlive, "This entry must be in use"); Debug.Assert(_markEntryForCleanup == false, "The entry must not marked for cleanup"); Debug.Assert(_weakReferenceItemCollection.IsAlive, "Weak reference to item collection must be alive"); // we will have an empty ItemCollection (no files were used to load it) if (_filePermissions != null) { _filePermissions.Demand(); } } /// <summary> /// Dispose the composite loader that encapsulates all artifacts /// </summary> internal virtual void Clear() { } /// <summary> /// This returns true if the entry is still in use - the entry can be use if the entry token is /// still alive.If the entry token is still not alive, it means that no one is using this entry /// and its okay to remove it. Today there is no /// </summary> /// <returns></returns> internal bool IsEntryStillValid() { return _entryTokenReference.IsAlive; } } /// <summary> /// A metadata entry holding EdmItemCollection object for the cache /// </summary> private class EdmMetadataEntry : MetadataEntry { /// <summary> /// Gets the EdmItemCollection for this entry /// </summary> internal EdmItemCollection EdmItemCollection { get { return (EdmItemCollection)this.ItemCollection; } } /// <summary> /// Just loads the edm item collection /// </summary> /// <returns></returns> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2103:ReviewImperativeSecurity")] internal void LoadEdmItemCollection(MetadataArtifactLoader loader) { Debug.Assert(loader != null, "loader is null"); List<XmlReader> readers = loader.CreateReaders(DataSpace.CSpace); try { EdmItemCollection itemCollection = new EdmItemCollection( readers, loader.GetPaths(DataSpace.CSpace) ); List<string> permissionPaths = new List<string>(); loader.CollectFilePermissionPaths(permissionPaths, DataSpace.CSpace); FileIOPermission filePermissions = null; if (permissionPaths.Count > 0) { filePermissions = new FileIOPermission(FileIOPermissionAccess.Read, permissionPaths.ToArray()); } UpdateMetadataEntry(itemCollection, filePermissions); } finally { Helper.DisposeXmlReaders(readers); } } } /// <summary> /// A metadata entry holding a StoreItemCollection and a StorageMappingItemCollection objects for the cache /// </summary> private class StoreMetadataEntry : MetadataEntry { private System.Data.Common.QueryCache.QueryCacheManager _queryCacheManager; /// <summary> /// The constructor for constructing this entry with an StoreItemCollection and a StorageMappingItemCollection /// </summary> /// <param name="compositeLoader">An instance of the composite MetadataArtifactLoader</param> internal StoreMetadataEntry() { } /// <summary> /// Gets the StorageMappingItemCollection for this entry /// </summary> internal StorageMappingItemCollection StorageMappingItemCollection { get { return (StorageMappingItemCollection)this.ItemCollection; } } /// <summary> /// Load store specific metadata into the StoreItemCollection for this entry /// </summary> /// <param name="factory">The store-specific provider factory</param> /// 
<param name="edmItemCollection">edmItemCollection</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2103:ReviewImperativeSecurity")] internal void LoadStoreCollection(EdmItemCollection edmItemCollection, MetadataArtifactLoader loader) { StoreItemCollection storeItemCollection = null; IEnumerable<XmlReader> sSpaceXmlReaders = loader.CreateReaders(DataSpace.SSpace); try { // Load the store side, however, only do so if we don't already have one storeItemCollection = new StoreItemCollection( sSpaceXmlReaders, loader.GetPaths(DataSpace.SSpace)); } finally { Helper.DisposeXmlReaders(sSpaceXmlReaders); } // If this entry is getting re-used, make sure that the previous query cache manager gets // cleared up if (_queryCacheManager != null) { _queryCacheManager.Clear(); } // Update the query cache manager reference _queryCacheManager = storeItemCollection.QueryCacheManager; // With the store metadata in place, we can then load the mappings, however, only use it // if we don't already have one // StorageMappingItemCollection storageMappingItemCollection = null; IEnumerable<XmlReader> csSpaceXmlReaders = loader.CreateReaders(DataSpace.CSSpace); try { storageMappingItemCollection = new StorageMappingItemCollection( edmItemCollection, storeItemCollection, csSpaceXmlReaders, loader.GetPaths(DataSpace.CSSpace)); } finally { Helper.DisposeXmlReaders(csSpaceXmlReaders); } List<string> permissionPaths = new List<string>(); loader.CollectFilePermissionPaths(permissionPaths, DataSpace.SSpace); loader.CollectFilePermissionPaths(permissionPaths, DataSpace.CSSpace); FileIOPermission filePermissions = null; if (permissionPaths.Count > 0) { filePermissions = new FileIOPermission(FileIOPermissionAccess.Read, permissionPaths.ToArray()); } this.UpdateMetadataEntry(storageMappingItemCollection, filePermissions); } /// <summary> /// Calls clear on query cache manager to make sure all the performance counters associated with the query /// cache are gone /// </summary> internal override void Clear() { // there can be entries in cache for which the store item collection was never created. For e.g. 
// if you create a new entity connection, but never call open on it CleanupQueryCache(); base.Clear(); } /// <summary> /// Cleans and Dispose query cache manager /// </summary> internal void CleanupQueryCache() { if (null != _queryCacheManager) { _queryCacheManager.Dispose(); _queryCacheManager = null; } } } /// <summary> /// Interface to construct the metadata entry so that code can be reused /// </summary> /// <typeparam name="T"></typeparam> interface IMetadataEntryConstructor<T> { T GetMetadataEntry(); } /// <summary> /// Struct for creating EdmMetadataEntry /// </summary> private struct EdmMetadataEntryConstructor : IMetadataEntryConstructor<EdmMetadataEntry> { public EdmMetadataEntry GetMetadataEntry() { return new EdmMetadataEntry(); } } /// <summary> /// Struct for creating StoreMetadataEntry /// </summary> private struct StoreMetadataEntryConstructor : IMetadataEntryConstructor<StoreMetadataEntry> { public StoreMetadataEntry GetMetadataEntry() { return new StoreMetadataEntry(); } } /// <summary> /// Interface which constructs a new Item collection /// </summary> /// <typeparam name="T"></typeparam> interface IItemCollectionLoader<T> where T : MetadataEntry { void LoadItemCollection(T entry); } private struct EdmItemCollectionLoader : IItemCollectionLoader<EdmMetadataEntry> { private MetadataArtifactLoader _loader; public EdmItemCollectionLoader(MetadataArtifactLoader loader) { Debug.Assert(loader != null, "loader must never be null"); _loader = loader; } /// <summary> /// Creates a new item collection and updates the entry with the item collection /// </summary> /// <param name="entry"></param> /// <returns></returns> public void LoadItemCollection(EdmMetadataEntry entry) { entry.LoadEdmItemCollection(_loader); } } private struct StoreItemCollectionLoader : IItemCollectionLoader<StoreMetadataEntry> { private EdmItemCollection _edmItemCollection; private MetadataArtifactLoader _loader; /// <summary> /// Constructs a struct from which you can load edm item collection /// </summary> /// <param name="factory"></param> /// <param name="edmItemCollection"></param> internal StoreItemCollectionLoader(EdmItemCollection edmItemCollection, MetadataArtifactLoader loader) { Debug.Assert(edmItemCollection != null, "EdmItemCollection must never be null"); Debug.Assert(loader != null, "loader must never be null"); //StoreItemCollection requires atleast one SSDL path. if ((loader.GetPaths(DataSpace.SSpace) == null) || (loader.GetPaths(DataSpace.SSpace).Count == 0)) { throw EntityUtil.Metadata(Strings.AtleastOneSSDLNeeded); } _edmItemCollection = edmItemCollection; _loader = loader; } public void LoadItemCollection(StoreMetadataEntry entry) { entry.LoadStoreCollection(_edmItemCollection, _loader); } } #endregion } }
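// Illustrative sketch (not part of the original sources): the comments in LoadItemCollection above
// describe a load-once, double-checked locking pattern -- test a "loaded" flag, take the lock,
// re-check, and run the expensive load exactly once while later callers reuse the result. A
// minimal, self-contained version of that pattern, using hypothetical names, could look like this:
using System;

internal sealed class LoadOnceEntry<T> where T : class
{
    private readonly object _loadLock = new object();
    private T _value; // reference assignment is atomic, so readers never observe a torn write

    internal bool IsLoaded { get { return _value != null; } }

    // Returns the already-loaded value, or runs the loader under the lock exactly once.
    internal T GetOrLoad(Func<T> loader)
    {
        if (_value == null)
        {
            lock (_loadLock)
            {
                if (_value == null)      // re-check inside the lock
                {
                    _value = loader();   // only one thread ever runs the loader
                }
            }
        }
        return _value;
    }
}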
namespace Keiser.MvxPlugins.Bluetooth.Droid { using Android.Bluetooth; using Android.Bluetooth.LE; using Android.Content; using Keiser.MvxPlugins.Bluetooth.Droid.LE; using System; using System.Collections.Generic; using System.Threading; public class Adapter : BroadcastReceiver { private Context _context; public Context Context { get { if (_context == null) _context = Android.App.Application.Context; return _context; } } private BluetoothManager _bluetoothManager; public BluetoothManager BluetoothManager { get { if (_bluetoothManager == null) _bluetoothManager = (BluetoothManager)Context.GetSystemService(Context.BluetoothService); return _bluetoothManager; } } public BluetoothAdapter BluetoothAdapter { get { return BluetoothManager.Adapter; } } private object _leSupportedLocker = new object(); private bool _leSupported, _leSupportedSet; public bool LESupported { get { lock (_leSupportedLocker) { if (!_leSupportedSet) { _leSupportedSet = true; _leSupported = Context.PackageManager.HasSystemFeature(Android.Content.PM.PackageManager.FeatureBluetoothLe); } return _leSupported; } } } private object _isLollipopLocker = new object(); private bool _isLollipop, _isLollipopSet; public bool IsLollipop { get { lock (_isLollipopLocker) { if (!_isLollipopSet) { _isLollipopSet = true; _isLollipop = ((int)Android.OS.Build.VERSION.SdkInt) >= 21; } return _isLollipop; } } } protected volatile bool AutoScan = false; protected volatile bool AutoEnable = false; protected volatile bool AutoClearCache = false; protected const int AdapterEnableTimeout = 6000; protected Bluetooth.Timer AdapterEnableTimer; public Adapter() { Register(); //ClearCache(); } protected override void Dispose(bool disposing) { Unregister(); base.Dispose(disposing); } protected void Register() { IntentFilter filter = new IntentFilter(); filter.AddAction(BluetoothAdapter.ActionStateChanged); Context.RegisterReceiver(this, filter); } protected void Unregister() { Context.UnregisterReceiver(this); } public override void OnReceive(Context context, Intent intent) { int state = intent.GetIntExtra(BluetoothAdapter.ExtraState, BluetoothAdapter.Error); switch (state) { case BluetoothAdapter.Error: Error("Change State Error"); break; case 12: // STATE_ON Enabled(); break; case 10: // STATE_OFF Disabled(); break; } } protected int FailedRecoverCount = 0; protected bool SuccessfullyRecovered = true; protected void Error(string message = "Unknown", bool hard = false) { if (SuccessfullyRecovered) { FailedRecoverCount = 0; } bool recovered = SuccessfullyRecovered; Trace.Error("Bluetooth Adapter: Error[ " + message + " ]"); if (AdapterEnableTimer != null) { AdapterEnableTimer.Cancel(); } SuccessfullyRecovered = false; FailedRecoverCount++; Cycle(hard || !recovered, FailedRecoverCount > 2); } protected void Enable() { #if DEBUG Trace.Info("Bluetooth Adapter: Enable Issued"); #endif if (Wifi.IsEnabled) { Wifi.Disable(); } if (IsEnabled) { Enabled(); } else { if (BluetoothAdapter.Enable()) { AdapterEnableTimer = new Bluetooth.Timer(_ => Error("Failed To Enable", true), null, AdapterEnableTimeout, Timeout.Infinite); } else { Error("Enable Error"); } } } protected Bluetooth.Timer StartAdapterTimer; protected const int StartAdapterDelay = 500; protected bool IsEnabled { get { return (BluetoothAdapter.State == State.On); } } protected void Enabled() { #if DEBUG Trace.Info("Bluetooth Adapter: Enabled"); #endif if (AdapterEnableTimer != null) { AdapterEnableTimer.Cancel(); } if (AutoScan) { StartAdapterTimer = new Bluetooth.Timer(_ => StartAdapterScan(), null, 
StartAdapterDelay, Timeout.Infinite); //StartAdapterScan(); } } protected void Disable() { #if DEBUG Trace.Info("Bluetooth Adapter: Disable Issued"); #endif if (!IsEnabled) { Disabled(); } else if (!BluetoothAdapter.Disable()) { Error("Disable Error"); } } protected void Disabled() { #if DEBUG Trace.Info("Bluetooth Adapter: Disabled"); #endif if (AutoClearCache) { AutoClearCache = false; ClearCache(); } if (AutoEnable) { AutoEnable = false; Enable(); } } protected void Cycle(bool hard = false, bool dire = false) { #if DEBUG Trace.Info("LE Scanner: Cycling [Hard: " + hard + ", Dire: " + dire + "]"); #endif if (dire) { AutoClearCache = true; } else if (hard) { Shell.Command("am force-stop com.android.bluetooth"); } AutoEnable = true; Disable(); } protected CallbackQueuer CallbackQueuer; protected ClassicScanCallback ClassicScanCallback; protected LEScanCallback LEScanCallback; public void StartLEScan(CallbackQueuer callbackQueuer) { #if DEBUG Trace.Info("LE Scanner: Start Issued"); #endif CallbackQueuer = callbackQueuer; AutoScan = true; if (!IsEnabled) { Enable(); } else { StartAdapterScan(); ClassicScanCallback = new ClassicScanCallback(CallbackQueuer); } } protected volatile bool Running = false; protected volatile bool HardResetFresh = true; protected Bluetooth.Timer MonitorRadioTimer; protected void MonitorRadio(bool initial = false) { if (Running) { int activity = CallbackQueuer.ActivitySinceLastCheck; #if DEBUG Trace.Info("Monitoring Activity: " + activity); #endif if (HardResetFresh && activity > 0) { HardResetFresh = false; } if (activity == 0 && !HardResetFresh) { StopAdapterScan(); Disable(); Shell.Command("am force-stop com.android.bluetooth"); Enable(); //StartAdapterScan(); HardResetFresh = true; } else { ContinueMonitorRadio(activity); } } } protected DateTime MonitorStartTime; protected void StartMonitorRadio(int timeout = 15000) { Running = true; if (MonitorRadioTimer != null) { MonitorRadioTimer.Cancel(); } MonitorRadioTimer = new Bluetooth.Timer(_ => MonitorRadio(), null, timeout, Timeout.Infinite); MonitorStartTime = DateTime.Now; } protected int LastActivityRatio = 0; protected void ContinueMonitorRadio(int activity = 0) { double elapsed = (DateTime.Now - MonitorStartTime).TotalSeconds; int activityRatio = (int)(activity / elapsed); int timeout = 10000; if (activityRatio >= 10) { timeout = 2000; } if (activityRatio < (LastActivityRatio * 0.75)) { timeout = 1000; } LastActivityRatio = activityRatio; StartMonitorRadio(timeout); } protected void StopMonitorRadio() { Running = false; MonitorRadioTimer.Cancel(); } protected void StartAdapterScan() { if (IsLollipop) { LEScanCallback = new LEScanCallback(CallbackQueuer); ScanSettings settings = new ScanSettings.Builder().SetScanMode(Android.Bluetooth.LE.ScanMode.LowLatency).Build(); List<ScanFilter> filters = new List<ScanFilter>() { }; BluetoothAdapter.BluetoothLeScanner.StartScan(filters, settings, LEScanCallback); } else { if (!BluetoothAdapter.StartLeScan(ClassicScanCallback)) { Error("Start LE Scan Error"); } else { SuccessfullyRecovered = true; #if DEBUG Trace.Info("LE Scanner: Starting"); #endif StartMonitorRadio(); } } } public void StopLEScan() { #if DEBUG Trace.Info("LE Scanner: Stop Issued"); #endif AutoScan = false; StopAdapterScan(); AutoClearCache = true; Disable(); } protected void StopAdapterScan() { if (IsLollipop) { BluetoothAdapter.BluetoothLeScanner.StopScan(LEScanCallback); } else { BluetoothAdapter.StopLeScan(ClassicScanCallback); StopMonitorRadio(); } #if DEBUG Trace.Info("LE Scanner: Stopping"); #endif } 
        protected void ClearCache()
        {
#if DEBUG
            Trace.Info("Bluetooth Adapter: Starting Cache Clear");
#endif
            if (true /*!IsEnabled*/)
            {
                Shell.Command("pm disable com.android.bluetooth");
                Shell.Command("am force-stop com.android.bluetooth");
                Shell.Command("rm -rf /data/misc/bluedroid/*");
                Shell.Command("pm enable com.android.bluetooth");
#if DEBUG
                Trace.Info("Bluetooth Adapter: Finished Cache Clear");
#endif
            }
#if DEBUG
            else
            {
                Trace.Info("Bluetooth Adapter: Adapter Still Active");
            }
#endif
        }
    }
}
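// Illustrative sketch (not part of the plugin): the LESupported and IsLollipop properties above
// compute a platform capability once under a lock and cache it behind a "set" flag. Lazy<T>
// expresses the same idea more compactly; the names below are hypothetical stand-ins:
using System;

public class CachedCapabilityExample
{
    // Lazy<T> defaults to LazyThreadSafetyMode.ExecutionAndPublication,
    // which matches the lock-plus-flag approach used in the adapter.
    private readonly Lazy<bool> _leSupported = new Lazy<bool>(QueryLeSupport);

    public bool LESupported { get { return _leSupported.Value; } }

    // Stand-in for Context.PackageManager.HasSystemFeature(FeatureBluetoothLe).
    private static bool QueryLeSupport()
    {
        return true; // replace with the real platform query
    }
}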
//------------------------------------------------------------------------------ // <copyright file="InterchangeableLists.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ #if UNUSED_CODE namespace System.Web.UI.Design.MobileControls.Util { using System; using System.Collections; using System.ComponentModel; using System.Data; using System.Drawing; using System.Diagnostics; using System.Web.UI.Design.MobileControls; using System.Windows.Forms; [ ToolboxItem(false), System.Security.Permissions.SecurityPermission(System.Security.Permissions.SecurityAction.Demand, Flags=System.Security.Permissions.SecurityPermissionFlag.UnmanagedCode) ] internal sealed class InterchangeableLists : System.Windows.Forms.Panel { private System.Windows.Forms.Button _removeButton; private System.Windows.Forms.Button _addButton; private System.Windows.Forms.Button _upButton; private System.Windows.Forms.TreeView _availableList; private System.Windows.Forms.Label _availableFieldLabel; private System.Windows.Forms.TreeView _selectedList; private System.Windows.Forms.Button _downButton; private System.Windows.Forms.Label _selectedFieldLabel; private Hashtable _eventTable; /// <summary> /// Required designer variable. /// </summary> private static readonly Object _componentChangedEvent = new Object(); internal InterchangeableLists() { // This call is required by the Windows.Forms Form Designer. InitializeComponent(); // TODO: Add any initialization after the InitForm call _downButton.Image = GenericUI.SortDownIcon; _upButton.Image = GenericUI.SortUpIcon; UpdateButtonEnabling(); this._eventTable = new Hashtable(); } internal void SetTitles( String availableListTitle, String selectedListTitle) { this._selectedFieldLabel.Text = selectedListTitle; this._availableFieldLabel.Text = availableListTitle; } internal void AddToAvailableList(Object obj) { AddItem(_availableList, new TreeNode(obj.ToString())); } internal void AddToSelectedList(Object obj) { AddItem(_selectedList, new TreeNode(obj.ToString())); } internal void Initialize() { if (_availableList.Nodes.Count > 0) { _availableList.SelectedNode = _availableList.Nodes[0]; } if (_selectedList.Nodes.Count > 0) { _selectedList.SelectedNode = _selectedList.Nodes[0]; } } internal EventHandler OnComponentChanged { get { return (EventHandler)_eventTable[_componentChangedEvent]; } set { _eventTable[_componentChangedEvent] = value; } } private void NotifyChangeEvent() { EventHandler handler = (EventHandler)_eventTable[_componentChangedEvent]; if (handler != null) { handler(this, EventArgs.Empty); } } internal void Clear() { _availableList.Nodes.Clear(); _selectedList.Nodes.Clear(); } internal ICollection GetSelectedItems() { ArrayList list = new ArrayList(); foreach (TreeNode node in _selectedList.Nodes) { list.Add(node.Text); } return list; } /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// </summary> private void InitializeComponent() { this._removeButton = new System.Windows.Forms.Button(); this._selectedFieldLabel = new System.Windows.Forms.Label(); this._addButton = new System.Windows.Forms.Button(); this._selectedList = new System.Windows.Forms.TreeView(); this._availableList = new System.Windows.Forms.TreeView(); this._availableFieldLabel = new System.Windows.Forms.Label(); this._upButton = new System.Windows.Forms.Button(); this._downButton = new System.Windows.Forms.Button(); this._removeButton.Location = new System.Drawing.Point(166, 69); this._removeButton.Size = new System.Drawing.Size(32, 25); this._removeButton.TabIndex = 4; this._removeButton.Text = "<"; this._removeButton.Click += new System.EventHandler(this.RemoveNode); this._removeButton.AccessibleName = SR.GetString(SR.EditableTreeList_DeleteName); this._removeButton.AccessibleDescription = SR.GetString(SR.EditableTreeList_DeleteDescription); this._removeButton.Name = SR.GetString(SR.EditableTreeList_DeleteName); this._selectedFieldLabel.Location = new System.Drawing.Point(202, 8); this._selectedFieldLabel.Size = new System.Drawing.Size(164, 16); this._selectedFieldLabel.TabIndex = 5; this._addButton.AccessibleName = SR.GetString(SR.EditableTreeList_AddName); this._addButton.AccessibleDescription = SR.GetString(SR.EditableTreeList_AddDescription); this._addButton.Name = SR.GetString(SR.EditableTreeList_AddName); this._addButton.Location = new System.Drawing.Point(166, 40); this._addButton.Size = new System.Drawing.Size(32, 25); this._addButton.TabIndex = 3; this._addButton.Text = ">"; this._addButton.Click += new System.EventHandler(this.AddNode); this._selectedList.HideSelection = false; this._selectedList.Indent = 15; this._selectedList.Location = new System.Drawing.Point(202, 24); this._selectedList.ShowLines = false; this._selectedList.ShowPlusMinus = false; this._selectedList.ShowRootLines = false; this._selectedList.Size = new System.Drawing.Size(154, 89); this._selectedList.TabIndex = 6; this._selectedList.DoubleClick += new System.EventHandler(this.RemoveNode); this._selectedList.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.SelectedList_AfterSelect); this._availableList.HideSelection = false; this._availableList.Indent = 15; this._availableList.Location = new System.Drawing.Point(8, 24); this._availableList.ShowLines = false; this._availableList.ShowPlusMinus = false; this._availableList.ShowRootLines = false; this._availableList.Size = new System.Drawing.Size(154, 89); this._availableList.TabIndex = 2; this._availableList.DoubleClick += new System.EventHandler(this.AddNode); this._availableList.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.AvailableList_AfterSelect); this._availableFieldLabel.Location = new System.Drawing.Point(8, 8); this._availableFieldLabel.Size = new System.Drawing.Size(164, 16); this._availableFieldLabel.TabIndex = 1; this._upButton.AccessibleName = SR.GetString(SR.EditableTreeList_MoveUpName); this._upButton.AccessibleDescription = SR.GetString(SR.EditableTreeList_MoveUpDescription); this._upButton.Name = SR.GetString(SR.EditableTreeList_MoveUpName); this._upButton.Location = new System.Drawing.Point(360, 24); this._upButton.Size = new System.Drawing.Size(28, 27); this._upButton.TabIndex = 7; this._upButton.Click += new System.EventHandler(this.Up_Click); this._downButton.AccessibleName = SR.GetString(SR.EditableTreeList_MoveDownName); this._downButton.AccessibleDescription = SR.GetString(SR.EditableTreeList_MoveDownDescription); 
this._downButton.Name = SR.GetString(SR.EditableTreeList_MoveDownName); this._downButton.Location = new System.Drawing.Point(360, 55); this._downButton.Size = new System.Drawing.Size(28, 27); this._downButton.TabIndex = 8; this._downButton.Click += new System.EventHandler(this.Down_Click); this.Controls.AddRange(new System.Windows.Forms.Control[] {this._availableFieldLabel, this._selectedFieldLabel, this._upButton, this._downButton, this._removeButton, this._selectedList, this._addButton, this._availableList}); this.Size = new System.Drawing.Size(396, 119); } private void UpdateButtonEnabling() { bool anAvailableItemIsSelected = (_availableList.SelectedNode != null); bool anSelectedItemIsSelected = (_selectedList.SelectedNode != null); _addButton.Enabled = anAvailableItemIsSelected; _removeButton.Enabled = anSelectedItemIsSelected; if (anSelectedItemIsSelected) { int selectedIndex = _selectedList.SelectedNode.Index; _upButton.Enabled = (selectedIndex > 0); _downButton.Enabled = (selectedIndex < _selectedList.Nodes.Count - 1); } else { _downButton.Enabled = false; _upButton.Enabled = false; } } private void AddNode(object sender, System.EventArgs e) { TreeNode selectedNode = _availableList.SelectedNode; if (selectedNode != null) { RemoveItem(_availableList, selectedNode); AddItem(_selectedList, selectedNode); UpdateButtonEnabling(); NotifyChangeEvent(); } } private void RemoveItem(TreeView list, TreeNode node) { Debug.Assert (list.Nodes.Contains(node)); int itemCount = list.Nodes.Count; int selectedIndex = list.SelectedNode.Index; list.Nodes.Remove(node); if (selectedIndex < itemCount - 1) { list.SelectedNode = list.Nodes[selectedIndex]; } else if (selectedIndex >= 1) { list.SelectedNode = list.Nodes[selectedIndex-1]; } else { Debug.Assert(itemCount == 1); list.SelectedNode = null; } } private void AddItem(TreeView list, TreeNode node) { Debug.Assert(node != null); list.Nodes.Add(node); list.SelectedNode = node; //_selectedList.Select(); } private void RemoveNode(object sender, System.EventArgs e) { TreeNode selectedNode = _selectedList.SelectedNode; if (selectedNode != null) { RemoveItem(_selectedList, selectedNode); AddItem(_availableList, selectedNode); UpdateButtonEnabling(); } //_availableList.Select(); NotifyChangeEvent(); } private void MoveItem( int direction /* 1 is up, -1 is down */) { Debug.Assert(direction == -1 || direction == 1); int selectedIndex = _selectedList.SelectedNode.Index; int newIndex = selectedIndex + direction; TreeNode node = _selectedList.SelectedNode; _selectedList.Nodes.RemoveAt(selectedIndex); _selectedList.Nodes.Insert(newIndex, node); _selectedList.SelectedNode = node; } private void Up_Click(object sender, System.EventArgs e) { MoveItem(-1); UpdateButtonEnabling(); //_selectedList.Select(); NotifyChangeEvent(); } private void Down_Click(object sender, System.EventArgs e) { MoveItem(+1); UpdateButtonEnabling(); //_selectedList.Select(); NotifyChangeEvent(); } private void AvailableList_AfterSelect(object sender, System.Windows.Forms.TreeViewEventArgs e) { UpdateButtonEnabling(); } private void SelectedList_AfterSelect(object sender, System.Windows.Forms.TreeViewEventArgs e) { UpdateButtonEnabling(); } } } #endif
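// Illustrative sketch (not part of the designer code): MoveItem above reorders the selected
// TreeNode by removing it at its index and re-inserting it one position away. The same
// remove-then-insert technique on a plain List<T>, with hypothetical names, looks like this:
using System;
using System.Collections.Generic;

internal static class ListReorderExample
{
    // direction: -1 moves the item toward the start of the list, +1 toward the end.
    internal static void Move<T>(List<T> items, int index, int direction)
    {
        if (direction != -1 && direction != 1)
            throw new ArgumentOutOfRangeException("direction");

        int newIndex = index + direction;
        if (newIndex < 0 || newIndex >= items.Count)
            return; // already at the top or bottom; nothing to move

        T item = items[index];
        items.RemoveAt(index);
        items.Insert(newIndex, item);
    }
}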
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Linq; using Xunit; namespace System.IO.Tests { public class Directory_CreateDirectory : FileSystemTest { #region Utilities public virtual DirectoryInfo Create(string path) { return Directory.CreateDirectory(path); } #endregion #region UniversalTests [Fact] public void NullAsPath_ThrowsArgumentNullException() { Assert.Throws<ArgumentNullException>(() => Create(null)); } [Fact] public void EmptyAsPath_ThrowsArgumentException() { Assert.Throws<ArgumentException>(() => Create(string.Empty)); } [Fact] public void PathWithInvalidCharactersAsPath_ThrowsArgumentException() { var paths = IOInputs.GetPathsWithInvalidCharacters(); Assert.All(paths, (path) => { Assert.Throws<ArgumentException>(() => Create(path)); }); } [Fact] public void PathAlreadyExistsAsFile() { string path = GetTestFilePath(); File.Create(path).Dispose(); Assert.Throws<IOException>(() => Create(path)); Assert.Throws<IOException>(() => Create(IOServices.AddTrailingSlashIfNeeded(path))); Assert.Throws<IOException>(() => Create(IOServices.RemoveTrailingSlash(path))); } [Theory] [InlineData(FileAttributes.Hidden)] [InlineData(FileAttributes.ReadOnly)] [InlineData(FileAttributes.Normal)] public void PathAlreadyExistsAsDirectory(FileAttributes attributes) { DirectoryInfo testDir = Create(GetTestFilePath()); FileAttributes original = testDir.Attributes; try { testDir.Attributes = attributes; Assert.Equal(testDir.FullName, Create(testDir.FullName).FullName); } finally { testDir.Attributes = original; } } [Fact] public void RootPath() { string dirName = Path.GetPathRoot(Directory.GetCurrentDirectory()); DirectoryInfo dir = Create(dirName); Assert.Equal(dir.FullName, dirName); } [Fact] public void DotIsCurrentDirectory() { string path = GetTestFilePath(); DirectoryInfo result = Create(Path.Combine(path, ".")); Assert.Equal(IOServices.RemoveTrailingSlash(path), result.FullName); result = Create(Path.Combine(path, ".") + Path.DirectorySeparatorChar); Assert.Equal(IOServices.AddTrailingSlashIfNeeded(path), result.FullName); } [Fact] public void CreateCurrentDirectory() { DirectoryInfo result = Create(Directory.GetCurrentDirectory()); Assert.Equal(Directory.GetCurrentDirectory(), result.FullName); } [Fact] public void DotDotIsParentDirectory() { DirectoryInfo result = Create(Path.Combine(GetTestFilePath(), "..")); Assert.Equal(IOServices.RemoveTrailingSlash(TestDirectory), result.FullName); result = Create(Path.Combine(GetTestFilePath(), "..") + Path.DirectorySeparatorChar); Assert.Equal(IOServices.AddTrailingSlashIfNeeded(TestDirectory), result.FullName); } [Fact] public void ValidPathWithTrailingSlash() { DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); var components = IOInputs.GetValidPathComponentNames(); Assert.All(components, (component) => { string path = IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, component)); DirectoryInfo result = Create(path); Assert.Equal(path, result.FullName); Assert.True(result.Exists); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void ValidExtendedPathWithTrailingSlash() { DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); var components = IOInputs.GetValidPathComponentNames(); Assert.All(components, (component) => { string path = IOInputs.ExtendedPrefix + IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, 
component)); DirectoryInfo result = Create(path); Assert.Equal(path, result.FullName); Assert.True(result.Exists); }); } [Fact] public void ValidPathWithoutTrailingSlash() { DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath()); var components = IOInputs.GetValidPathComponentNames(); Assert.All(components, (component) => { string path = testDir.FullName + Path.DirectorySeparatorChar + component; DirectoryInfo result = Create(path); Assert.Equal(path, result.FullName); Assert.True(Directory.Exists(result.FullName)); }); } [Fact] public void ValidPathWithMultipleSubdirectories() { string dirName = Path.Combine(GetTestFilePath(), "Test", "Test", "Test"); DirectoryInfo dir = Create(dirName); Assert.Equal(dir.FullName, dirName); } [Fact] public void AllowedSymbols() { string dirName = Path.Combine(TestDirectory, Path.GetRandomFileName() + "!@#$%^&"); DirectoryInfo dir = Create(dirName); Assert.Equal(dir.FullName, dirName); } [Fact] public void DirectoryEqualToMaxDirectory_CanBeCreated() { DirectoryInfo testDir = Create(GetTestFilePath()); PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, IOInputs.MaxComponent); Assert.All(path.SubPaths, (subpath) => { DirectoryInfo result = Create(subpath); Assert.Equal(subpath, result.FullName); Assert.True(Directory.Exists(result.FullName)); }); } [Fact] public void DirectoryEqualToMaxDirectory_CanBeCreatedAllAtOnce() { DirectoryInfo testDir = Create(GetTestFilePath()); PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, maxComponent: 10); DirectoryInfo result = Create(path.FullPath); Assert.Equal(path.FullPath, result.FullName); Assert.True(Directory.Exists(result.FullName)); } [Fact] public void DirectoryWithComponentLongerThanMaxComponentAsPath_ThrowsPathTooLongException() { // While paths themselves can be up to 260 characters including trailing null, file systems // limit each components of the path to a total of 255 characters. 
var paths = IOInputs.GetPathsWithComponentLongerThanMaxComponent(); Assert.All(paths, (path) => { Assert.Throws<PathTooLongException>(() => Create(path)); }); } #endregion #region PlatformSpecific [Fact] [PlatformSpecific(PlatformID.Windows)] public void PathWithInvalidColons_ThrowsNotSupportedException() { var paths = IOInputs.GetPathsWithInvalidColons(); Assert.All(paths, (path) => { Assert.Throws<NotSupportedException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void DirectoryLongerThanMaxPath_Succeeds() { var paths = IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath()); Assert.All(paths, (path) => { DirectoryInfo result = Create(path); Assert.True(Directory.Exists(result.FullName)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void DirectoryLongerThanMaxLongPath_ThrowsPathTooLongException() { var paths = IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath()); Assert.All(paths, (path) => { Assert.Throws<PathTooLongException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void DirectoryLongerThanMaxLongPathWithExtendedSyntax_ThrowsPathTooLongException() { var paths = IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath(), useExtendedSyntax: true); Assert.All(paths, (path) => { Assert.Throws<PathTooLongException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void ExtendedDirectoryLongerThanLegacyMaxPath_Succeeds() { var paths = IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath(), useExtendedSyntax: true); Assert.All(paths, (path) => { Assert.True(Create(path).Exists); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void DirectoryLongerThanMaxDirectoryAsPath_Succeeds() { var paths = IOInputs.GetPathsLongerThanMaxDirectory(GetTestFilePath()); Assert.All(paths, (path) => { var result = Create(path); Assert.True(Directory.Exists(result.FullName)); }); } [Fact] [PlatformSpecific(PlatformID.AnyUnix)] public void UnixPathLongerThan256_Allowed() { DirectoryInfo testDir = Create(GetTestFilePath()); PathInfo path = IOServices.GetPath(testDir.FullName, 257, IOInputs.MaxComponent); DirectoryInfo result = Create(path.FullPath); Assert.Equal(path.FullPath, result.FullName); Assert.True(Directory.Exists(result.FullName)); } [Fact] [PlatformSpecific(PlatformID.AnyUnix)] public void UnixPathWithDeeplyNestedDirectories() { DirectoryInfo parent = Create(GetTestFilePath()); for (int i = 1; i <= 100; i++) // 100 == arbitrarily large number of directories { parent = Create(Path.Combine(parent.FullName, "dir" + i)); Assert.True(Directory.Exists(parent.FullName)); } } [Fact] [PlatformSpecific(PlatformID.Windows)] public void WindowsWhiteSpaceAsPath_ThrowsArgumentException() { var paths = IOInputs.GetWhiteSpace(); Assert.All(paths, (path) => { Assert.Throws<ArgumentException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.AnyUnix)] public void UnixWhiteSpaceAsPath_Allowed() { var paths = IOInputs.GetWhiteSpace(); Assert.All(paths, (path) => { Create(Path.Combine(TestDirectory, path)); Assert.True(Directory.Exists(Path.Combine(TestDirectory, path))); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void WindowsTrailingWhiteSpace() { // Windows will remove all non-significant whitespace in a path DirectoryInfo testDir = Create(GetTestFilePath()); var components = IOInputs.GetWhiteSpace(); Assert.All(components, (component) => { string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component; DirectoryInfo result = Create(path); 
Assert.True(Directory.Exists(result.FullName)); Assert.Equal(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void WindowsExtendedSyntaxWhiteSpace() { var paths = IOInputs.GetSimpleWhiteSpace(); using (TemporaryDirectory directory = new TemporaryDirectory()) { foreach (var path in paths) { string extendedPath = Path.Combine(IOInputs.ExtendedPrefix + directory.Path, path); Directory.CreateDirectory(extendedPath); Assert.True(Directory.Exists(extendedPath), extendedPath); } } } [Fact] [PlatformSpecific(PlatformID.AnyUnix)] public void UnixNonSignificantTrailingWhiteSpace() { // Unix treats trailing/prename whitespace as significant and a part of the name. DirectoryInfo testDir = Create(GetTestFilePath()); var components = IOInputs.GetWhiteSpace(); Assert.All(components, (component) => { string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component; DirectoryInfo result = Create(path); Assert.True(Directory.Exists(result.FullName)); Assert.NotEqual(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] // alternate data streams public void PathWithAlternateDataStreams_ThrowsNotSupportedException() { var paths = IOInputs.GetPathsWithAlternativeDataStreams(); Assert.All(paths, (path) => { Assert.Throws<NotSupportedException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] // device name prefixes public void PathWithReservedDeviceNameAsPath_ThrowsDirectoryNotFoundException() { // Throws DirectoryNotFoundException, when the behavior really should be an invalid path var paths = IOInputs.GetPathsWithReservedDeviceNames(); Assert.All(paths, (path) => { Assert.Throws<DirectoryNotFoundException>(() => Create(path)); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] // device name prefixes public void PathWithReservedDeviceNameAsExtendedPath() { var paths = IOInputs.GetReservedDeviceNames(); using (TemporaryDirectory directory = new TemporaryDirectory()) { Assert.All(paths, (path) => { Assert.True(Create(IOInputs.ExtendedPrefix + Path.Combine(directory.Path, path)).Exists, path); }); } } [Fact] [PlatformSpecific(PlatformID.Windows)] // UNC shares public void UncPathWithoutShareNameAsPath_ThrowsArgumentException() { var paths = IOInputs.GetUncPathsWithoutShareName(); foreach (var path in paths) { Assert.Throws<ArgumentException>(() => Create(path)); } } [Fact] [PlatformSpecific(PlatformID.Windows)] // UNC shares public void UNCPathWithOnlySlashes() { Assert.Throws<ArgumentException>(() => Create("//")); } [Fact] [PlatformSpecific(PlatformID.Windows)] // drive labels public void CDriveCase() { DirectoryInfo dir = Create("c:\\"); DirectoryInfo dir2 = Create("C:\\"); Assert.NotEqual(dir.FullName, dir2.FullName); } [Fact] [PlatformSpecific(PlatformID.Windows)] public void DriveLetter_Windows() { // On Windows, DirectoryInfo will replace "<DriveLetter>:" with "." 
var driveLetter = Create(Directory.GetCurrentDirectory()[0] + ":"); var current = Create("."); Assert.Equal(current.Name, driveLetter.Name); Assert.Equal(current.FullName, driveLetter.FullName); } [Fact] [PlatformSpecific(PlatformID.AnyUnix)] public void DriveLetter_Unix() { // On Unix, there's no special casing for drive letters, which are valid file names var driveLetter = Create("C:"); var current = Create("."); Assert.Equal("C:", driveLetter.Name); Assert.Equal(Path.Combine(current.FullName, "C:"), driveLetter.FullName); Directory.Delete("C:"); } [Fact] [PlatformSpecific(PlatformID.Windows)] // testing drive labels public void NonExistentDriveAsPath_ThrowsDirectoryNotFoundException() { Assert.Throws<DirectoryNotFoundException>(() => { Create(IOServices.GetNonExistentDrive()); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] // testing drive labels public void SubdirectoryOnNonExistentDriveAsPath_ThrowsDirectoryNotFoundException() { Assert.Throws<DirectoryNotFoundException>(() => { Create(Path.Combine(IOServices.GetNonExistentDrive(), "Subdirectory")); }); } [Fact] [ActiveIssue(1221)] [PlatformSpecific(PlatformID.Windows)] // testing drive labels public void NotReadyDriveAsPath_ThrowsDirectoryNotFoundException() { // Behavior is suspect, should really have thrown IOException similar to the SubDirectory case var drive = IOServices.GetNotReadyDrive(); if (drive == null) { Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted."); return; } Assert.Throws<DirectoryNotFoundException>(() => { Create(drive); }); } [Fact] [PlatformSpecific(PlatformID.Windows)] // testing drive labels [ActiveIssue(1221)] public void SubdirectoryOnNotReadyDriveAsPath_ThrowsIOException() { var drive = IOServices.GetNotReadyDrive(); if (drive == null) { Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted."); return; } // 'Device is not ready' Assert.Throws<IOException>(() => { Create(Path.Combine(drive, "Subdirectory")); }); } #if !TEST_WINRT // Cannot set current directory to root from appcontainer with it's default ACL /* [Fact] [ActiveIssue(1220)] // SetCurrentDirectory public void DotDotAsPath_WhenCurrentDirectoryIsRoot_DoesNotThrow() { string root = Path.GetPathRoot(Directory.GetCurrentDirectory()); using (CurrentDirectoryContext context = new CurrentDirectoryContext(root)) { DirectoryInfo result = Create(".."); Assert.True(Directory.Exists(result.FullName)); Assert.Equal(root, result.FullName); } } */ #endif #endregion } }
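// Illustrative sketch (not part of the original suite): Create(string) is virtual so the whole
// battery of assertions can be re-run against another creation API by overriding that one factory
// method. The derived class below is hypothetical and only demonstrates the extension point; it
// is not claimed to be an existing test class.
namespace System.IO.Tests
{
    public class Directory_CreateDirectory_DirectoryInfoVariantExample : Directory_CreateDirectory
    {
        public override DirectoryInfo Create(string path)
        {
            // Route creation through the instance API instead of the static Directory helper.
            DirectoryInfo info = new DirectoryInfo(path);
            info.Create();
            return info;
        }
    }
}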
#region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Reflection; using Newtonsoft.Json.Utilities; using System.Collections; #if !HAVE_LINQ using Newtonsoft.Json.Utilities.LinqBridge; #endif namespace Newtonsoft.Json.Serialization { /// <summary> /// Contract details for a <see cref="System.Type"/> used by the <see cref="JsonSerializer"/>. /// </summary> public class JsonDictionaryContract : JsonContainerContract { /// <summary> /// Gets or sets the dictionary key resolver. /// </summary> /// <value>The dictionary key resolver.</value> public Func<string, string> DictionaryKeyResolver { get; set; } /// <summary> /// Gets the <see cref="System.Type"/> of the dictionary keys. /// </summary> /// <value>The <see cref="System.Type"/> of the dictionary keys.</value> public Type DictionaryKeyType { get; } /// <summary> /// Gets the <see cref="System.Type"/> of the dictionary values. /// </summary> /// <value>The <see cref="System.Type"/> of the dictionary values.</value> public Type DictionaryValueType { get; } internal JsonContract KeyContract { get; set; } private readonly Type _genericCollectionDefinitionType; private Type _genericWrapperType; private ObjectConstructor<object> _genericWrapperCreator; private Func<object> _genericTemporaryDictionaryCreator; internal bool ShouldCreateWrapper { get; } private readonly ConstructorInfo _parameterizedConstructor; private ObjectConstructor<object> _overrideCreator; private ObjectConstructor<object> _parameterizedCreator; internal ObjectConstructor<object> ParameterizedCreator { get { if (_parameterizedCreator == null) { _parameterizedCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateParameterizedConstructor(_parameterizedConstructor); } return _parameterizedCreator; } } /// <summary> /// Gets or sets the function used to create the object. When set this function will override <see cref="JsonContract.DefaultCreator"/>. /// </summary> /// <value>The function used to create the object.</value> public ObjectConstructor<object> OverrideCreator { get => _overrideCreator; set => _overrideCreator = value; } /// <summary> /// Gets a value indicating whether the creator has a parameter with the dictionary values. 
/// </summary> /// <value><c>true</c> if the creator has a parameter with the dictionary values; otherwise, <c>false</c>.</value> public bool HasParameterizedCreator { get; set; } internal bool HasParameterizedCreatorInternal => (HasParameterizedCreator || _parameterizedCreator != null || _parameterizedConstructor != null); /// <summary> /// Initializes a new instance of the <see cref="JsonDictionaryContract"/> class. /// </summary> /// <param name="underlyingType">The underlying type for the contract.</param> public JsonDictionaryContract(Type underlyingType) : base(underlyingType) { ContractType = JsonContractType.Dictionary; Type keyType; Type valueType; if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IDictionary<,>), out _genericCollectionDefinitionType)) { keyType = _genericCollectionDefinitionType.GetGenericArguments()[0]; valueType = _genericCollectionDefinitionType.GetGenericArguments()[1]; if (ReflectionUtils.IsGenericDefinition(UnderlyingType, typeof(IDictionary<,>))) { CreatedType = typeof(Dictionary<,>).MakeGenericType(keyType, valueType); } #if HAVE_READ_ONLY_COLLECTIONS IsReadOnlyOrFixedSize = ReflectionUtils.InheritsGenericDefinition(underlyingType, typeof(ReadOnlyDictionary<,>)); #endif } #if HAVE_READ_ONLY_COLLECTIONS else if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IReadOnlyDictionary<,>), out _genericCollectionDefinitionType)) { keyType = _genericCollectionDefinitionType.GetGenericArguments()[0]; valueType = _genericCollectionDefinitionType.GetGenericArguments()[1]; if (ReflectionUtils.IsGenericDefinition(UnderlyingType, typeof(IReadOnlyDictionary<,>))) { CreatedType = typeof(ReadOnlyDictionary<,>).MakeGenericType(keyType, valueType); } IsReadOnlyOrFixedSize = true; } #endif else { ReflectionUtils.GetDictionaryKeyValueTypes(UnderlyingType, out keyType, out valueType); if (UnderlyingType == typeof(IDictionary)) { CreatedType = typeof(Dictionary<object, object>); } } if (keyType != null && valueType != null) { _parameterizedConstructor = CollectionUtils.ResolveEnumerableCollectionConstructor( CreatedType, typeof(KeyValuePair<,>).MakeGenericType(keyType, valueType), typeof(IDictionary<,>).MakeGenericType(keyType, valueType)); #if HAVE_FSHARP_TYPES if (!HasParameterizedCreatorInternal && underlyingType.Name == FSharpUtils.FSharpMapTypeName) { FSharpUtils.EnsureInitialized(underlyingType.Assembly()); _parameterizedCreator = FSharpUtils.CreateMap(keyType, valueType); } #endif } ShouldCreateWrapper = !typeof(IDictionary).IsAssignableFrom(CreatedType); DictionaryKeyType = keyType; DictionaryValueType = valueType; #if (NET20 || NET35) if (DictionaryValueType != null && ReflectionUtils.IsNullableType(DictionaryValueType)) { // bug in .NET 2.0 & 3.5 that Dictionary<TKey, Nullable<TValue>> throws an error when adding null via IDictionary[key] = object // wrapper will handle calling Add(T) instead if (ReflectionUtils.InheritsGenericDefinition(CreatedType, typeof(Dictionary<,>), out _)) { ShouldCreateWrapper = true; } } #endif if (ImmutableCollectionsUtils.TryBuildImmutableForDictionaryContract( underlyingType, DictionaryKeyType, DictionaryValueType, out Type immutableCreatedType, out ObjectConstructor<object> immutableParameterizedCreator)) { CreatedType = immutableCreatedType; _parameterizedCreator = immutableParameterizedCreator; IsReadOnlyOrFixedSize = true; } } internal IWrappedDictionary CreateWrapper(object dictionary) { if (_genericWrapperCreator == null) { _genericWrapperType = 
typeof(DictionaryWrapper<,>).MakeGenericType(DictionaryKeyType, DictionaryValueType); ConstructorInfo genericWrapperConstructor = _genericWrapperType.GetConstructor(new[] { _genericCollectionDefinitionType }); _genericWrapperCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateParameterizedConstructor(genericWrapperConstructor); } return (IWrappedDictionary)_genericWrapperCreator(dictionary); } internal IDictionary CreateTemporaryDictionary() { if (_genericTemporaryDictionaryCreator == null) { Type temporaryDictionaryType = typeof(Dictionary<,>).MakeGenericType(DictionaryKeyType ?? typeof(object), DictionaryValueType ?? typeof(object)); _genericTemporaryDictionaryCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateDefaultConstructor<object>(temporaryDictionaryType); } return (IDictionary)_genericTemporaryDictionaryCreator(); } } }
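// Illustrative sketch (not part of Json.NET itself): DictionaryKeyResolver above is the hook a
// contract resolver uses to rewrite dictionary keys during serialization. A minimal resolver
// that upper-cases every key could look like this; the class name is hypothetical:
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;

public class UpperCaseDictionaryKeyResolver : DefaultContractResolver
{
    protected override JsonDictionaryContract CreateDictionaryContract(Type objectType)
    {
        JsonDictionaryContract contract = base.CreateDictionaryContract(objectType);
        contract.DictionaryKeyResolver = key => key.ToUpperInvariant();
        return contract;
    }
}

// Example usage:
//   var settings = new JsonSerializerSettings { ContractResolver = new UpperCaseDictionaryKeyResolver() };
//   JsonConvert.SerializeObject(new Dictionary<string, int> { { "count", 1 } }, settings);
//   // -> {"COUNT":1}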
// // Authors: // Christian Hergert <[email protected]> // Ben Motmans <[email protected]> // // Copyright (C) 2005 Mosaix Communications, Inc. // Copyright (c) 2007 Ben Motmans // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using MonoDevelop.Database.Sql; namespace MonoDevelop.Database.ConnectionManager { public abstract class BaseNode { public event EventHandler RefreshEvent; protected DatabaseConnectionContext context; public BaseNode (DatabaseConnectionContext context) { if (context == null) throw new ArgumentNullException ("context"); this.context = context; } public DatabaseConnectionContext ConnectionContext { get { return context; } } public void Refresh () { if (RefreshEvent != null) RefreshEvent (this, EventArgs.Empty); } } public class TableNode : BaseNode { protected TableSchema table; public TableNode (DatabaseConnectionContext context, TableSchema table) : base (context) { if (table == null) throw new ArgumentNullException ("table"); this.table = table; } public TableSchema Table { get { return table; } } } public class TablesNode : BaseNode { public TablesNode (DatabaseConnectionContext context) : base (context) { } } public class ViewNode : BaseNode { protected ViewSchema view; public ViewNode (DatabaseConnectionContext context, ViewSchema view) : base (context) { if (view == null) throw new ArgumentNullException ("view"); this.view = view; } public ViewSchema View { get { return view; } } } public class ViewsNode : BaseNode { public ViewsNode (DatabaseConnectionContext context) : base (context) { } } public class ProcedureNode : BaseNode { protected ProcedureSchema procedure; public ProcedureNode (DatabaseConnectionContext context, ProcedureSchema procedure) : base (context) { if (procedure == null) throw new ArgumentNullException ("procedure"); this.procedure = procedure; } public ProcedureSchema Procedure { get { return procedure; } } } public class ProceduresNode : BaseNode { public ProceduresNode (DatabaseConnectionContext context) : base (context) { } } public class AggregatesNode : BaseNode { public AggregatesNode (DatabaseConnectionContext context) : base (context) { } } public class GroupsNode : BaseNode { public GroupsNode (DatabaseConnectionContext context) : base (context) { } } public class LanguagesNode : BaseNode { public LanguagesNode (DatabaseConnectionContext context) : base (context) { } } public class OperatorsNode : BaseNode { public OperatorsNode (DatabaseConnectionContext context) : base (context) { } } public class 
RulesNode : BaseNode { public RulesNode (DatabaseConnectionContext context) : base (context) { } } public class RolesNode : BaseNode { public RolesNode (DatabaseConnectionContext context) : base (context) { } } public class SequencesNode : BaseNode { public SequencesNode (DatabaseConnectionContext context) : base (context) { } } public class UserNode : BaseNode { protected UserSchema user; public UserNode (DatabaseConnectionContext context, UserSchema user) : base (context) { if (user == null) throw new ArgumentNullException ("user"); this.user = user; } public UserSchema User { get { return user; } } } public class UsersNode : BaseNode { public UsersNode (DatabaseConnectionContext context) : base (context) { } } public class TypesNode : BaseNode { public TypesNode (DatabaseConnectionContext context) : base (context) { } } public class ColumnNode : BaseNode { protected ColumnSchema column; public ColumnNode (DatabaseConnectionContext context, ColumnSchema column) : base (context) { if (column == null) throw new ArgumentNullException ("column"); this.column = column; } public ColumnSchema Column { get { return column; } } } public class ColumnsNode : BaseNode { protected ISchema schema; public ColumnsNode (DatabaseConnectionContext context, ISchema schema) : base (context) { if (schema == null) throw new ArgumentNullException ("schema"); this.schema = schema; } public ISchema Schema { get { return schema; } } } public class ConstraintsNode : BaseNode { protected ISchema schema; public ConstraintsNode (DatabaseConnectionContext context, ISchema schema) : base (context) { if (schema == null) throw new ArgumentNullException ("schema"); this.schema = schema; } public ISchema Schema { get { return schema; } } } public class TriggersNode : BaseNode { public TriggersNode (DatabaseConnectionContext context) : base (context) { } } public class ParametersNode : BaseNode { private ProcedureSchema procedure; public ParametersNode (DatabaseConnectionContext context, ProcedureSchema procedure) : base (context) { if (procedure == null) throw new ArgumentNullException ("procedure"); this.procedure = procedure; } public ProcedureSchema Procedure { get { return procedure; } } } }
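// Illustrative sketch (not part of the original sources): every node above inherits Refresh()
// from BaseNode, which simply raises RefreshEvent, so a consumer (for example a tree-view
// builder) can subscribe once and rebuild when asked. The helper below is hypothetical:
using System;
using MonoDevelop.Database.Sql;

namespace MonoDevelop.Database.ConnectionManager
{
	public static class NodeRefreshExample
	{
		public static TableNode CreateAndWatch (DatabaseConnectionContext context, TableSchema table)
		{
			TableNode node = new TableNode (context, table);
			node.RefreshEvent += delegate {
				// Rebuild whatever UI displays this table's children here.
				Console.WriteLine ("Table node requested a refresh.");
			};
			// Calling node.Refresh () later raises RefreshEvent for every subscriber.
			return node;
		}
	}
}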
// Copyright 2017 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using UnityEngine; using PolyToolkitInternal; using System.Collections; namespace PolyToolkit { /// <summary> /// Represents a Poly asset (the information about a 3D object in Poly). /// </summary> /// <remarks> /// This is not the actual object that is added to the scene. This is just a container for /// the object's data, from which a GameObject can eventually be constructed. /// </remarks> [AutoStringifiable] public class PolyAsset { /// <summary> /// Format of the URL to a particular asset, given its ID. /// </summary> private const string URL_FORMAT = "https://poly.google.com/view/{0}"; /// <summary> /// Identifier for the asset. This is an alphanumeric string that identifies the asset, /// but is not meant for display. For example, "assets/L1o2e3m4I5p6s7u8m". /// </summary> public string name; /// <summary> /// Human-readable name of the asset. /// </summary> public string displayName; /// <summary> /// Name of the asset's author. /// </summary> public string authorName; /// <summary> /// Human-readable description of the asset. /// </summary> public string description; /// <summary> /// Date and time when the asset was created. /// </summary> public DateTime createTime; /// <summary> /// Date and time when the asset was last updated. /// </summary> public DateTime updateTime; /// <summary> /// A list of the available formats for this asset. Each format describes a content-type of a /// representation of the asset, and specifies where the underlying data files can be found. /// </summary> public List<PolyFormat> formats = new List<PolyFormat>(); /// <summary> /// Thumbnail image information for this asset. /// </summary> public PolyFile thumbnail; /// <summary> /// The license under which the author has made this asset available for use, if any. /// </summary> public PolyAssetLicense license; /// <summary> /// Visibility of this asset (who can access it). /// </summary> public PolyVisibility visibility; /// <summary> /// If true, the asset was manually curated by the Poly team. /// </summary> public bool isCurated; /// <summary> /// The texture with the asset's thumbnail image. Only available after successfully fetched. /// </summary> public Texture2D thumbnailTexture; /// <summary> /// Returns a PolyFormat of the given type, if it exists. /// If the asset has more than one format of the given type, returns the first one seen. /// If the asset does not have a format of the given type, returns null. /// </summary> public PolyFormat GetFormatIfExists(PolyFormatType type) { foreach (PolyFormat format in formats) { if (format == null) { continue; } if (format.formatType == type) return format; } return null; } /// <summary> /// Returns whether the asset is known to be mutable, due to its visibility. /// Public and unlisted assets are immutable. Private assets are mutable. 
    /// </summary>
    /// <remarks>
    /// Immutable assets can be cached indefinitely, since they can't be modified.
    /// Depending on your use-case, you may wish to frequently re-download mutable assets, if you expect them to be
    /// changed while your app is running.
    /// </remarks>
    public bool IsMutable {
      get {
        return visibility == PolyVisibility.PRIVATE || visibility == PolyVisibility.UNSPECIFIED;
      }
    }

    /// <summary>
    /// Returns the Poly url of the asset.
    /// </summary>
    public string Url {
      get {
        return string.Format(URL_FORMAT, name.Replace("assets/", ""));
      }
    }

    /// <summary>
    /// Returns attribution information about the asset.
    /// </summary>
    public string AttributionInfo {
      get {
        return AttributionGeneration.GenerateAttributionString(displayName, authorName, Url,
            license == PolyAssetLicense.CREATIVE_COMMONS_BY ? AttributionGeneration.CC_BY_LICENSE :
            "All Rights Reserved");
      }
    }

    public override string ToString() {
      return AutoStringify.Stringify(this);
    }
  }

  /// <summary>
  /// A specific representation of an asset, containing all the information needed to retrieve and
  /// describe this representation.
  /// </summary>
  /// <remarks>
  /// Each format is a "package" of files, with one root file and any number of resource files that accompany
  /// it. For example, for the OBJ format, the root file is the OBJ file that contains the asset's geometry
  /// and the corresponding MTL files are resource files.
  /// </remarks>
  [AutoStringifiable]
  public class PolyFormat {
    /// <summary>
    /// Format type (OBJ, GLTF, etc).
    /// </summary>
    public PolyFormatType formatType;

    /// <summary>
    /// The root (main) file for this format.
    /// </summary>
    public PolyFile root;

    /// <summary>
    /// The list of resource (auxiliary) files for this format.
    /// </summary>
    public List<PolyFile> resources = new List<PolyFile>();

    /// <summary>
    /// Complexity of this format.
    /// </summary>
    public PolyFormatComplexity formatComplexity;

    public override string ToString() {
      return AutoStringify.Stringify(this);
    }
  }

  /// <summary>
  /// Represents a Poly file.
  /// </summary>
  [AutoStringifiable]
  public class PolyFile {
    /// <summary>
    /// The relative path of the file in the local filesystem when it was uploaded.
    /// For resource files, the path is relative to the root file. This always includes the name of the
    /// file, and may or may not include a directory path.
    /// </summary>
    public string relativePath;

    /// <summary>
    /// The URL at which the contents of this file can be retrieved.
    /// </summary>
    public string url;

    /// <summary>
    /// The content type of this file. For example, "text/plain".
    /// </summary>
    public string contentType;

    /// <summary>
    /// Binary contents of this file. Only available after fetched.
    /// </summary>
    [AutoStringifyAbridged]
    public byte[] contents;

    /// <summary>
    /// Cached text contents of this file (lazily decoded from binary).
    /// </summary>
    [AutoStringifyAbridged]
    private string text;

    public PolyFile(string relativePath, string url, string contentType) {
      this.relativePath = relativePath;
      this.url = url;
      this.contentType = contentType;
    }

    /// <summary>
    /// Returns the contents of this file as text.
    /// </summary>
    public string Text {
      get {
        if (text == null) text = System.Text.Encoding.UTF8.GetString(contents);
        return text;
      }
    }

    public override string ToString() {
      return AutoStringify.Stringify(this);
    }
  }

  /// <summary>
  /// Information on the complexity of a format.
  /// </summary>
  [AutoStringifiable]
  public class PolyFormatComplexity {
    /// <summary>
    /// Approximate number of triangles in the asset's geometry.
/// </summary> public long triangleCount; /// <summary> /// Hint for the level of detail (LOD) of this format relative to the other formats in this /// same asset. 0 is the most detailed version. /// </summary> public int lodHint; public override string ToString() { return AutoStringify.Stringify(this); } } /// <summary> /// Possible format types that can be returned from the Poly REST API. /// </summary> public enum PolyFormatType { UNKNOWN = 0, OBJ = 1, GLTF = 2, GLTF_2 = 3, TILT = 4, } /// <summary> /// Possible asset licenses. /// </summary> public enum PolyAssetLicense { /// <summary> /// License unknown/unspecified. /// </summary> UNKNOWN = 0, /// <summary> /// Creative Commons license. /// </summary> CREATIVE_COMMONS_BY = 1, /// <summary> /// All Rights Reserved by author (not licensed). /// </summary> ALL_RIGHTS_RESERVED = 2, } /// <summary> /// Visibility filters for a PolyListUserAssets request. /// </summary> public enum PolyVisibilityFilter { /// <summary> /// No visibility specified. Returns all assets. /// </summary> UNSPECIFIED = 0, /// <summary> /// Return only private assets. /// </summary> PRIVATE = 1, /// <summary> /// Return only published assets, including unlisted assets. /// </summary> PUBLISHED = 2, } /// <summary> /// Visibility of a Poly asset. /// </summary> public enum PolyVisibility { /// <summary> /// Unknown (and invalid) visibility. /// </summary> UNSPECIFIED = 0, /// <summary> /// Only the owner of the asset can access it. /// </summary> PRIVATE = 1, /// <summary> /// Read access to anyone who knows the asset ID (link to the asset), but the /// logged-in user's unlisted assets are returned in PolyListUserAssets. /// </summary> UNLISTED = 2, /// <summary> /// Read access for everyone. /// </summary> PUBLISHED = 3, } /// <summary> /// Category of a Poly asset. /// </summary> public enum PolyCategory { UNSPECIFIED = 0, ANIMALS = 1, ARCHITECTURE = 2, ART = 3, FOOD = 4, NATURE = 5, OBJECTS = 6, PEOPLE = 7, PLACES = 8, TECH = 9, TRANSPORT = 10, } /// <summary> /// How the requested assets should be ordered in the response. /// </summary> public enum PolyOrderBy { BEST, NEWEST, OLDEST, // Liked time is only a valid in a PolyListLikedAssetsRequest. LIKED_TIME } /// <summary> /// Options for filtering to return only assets that contain the given format. /// </summary> public enum PolyFormatFilter { BLOCKS = 1, FBX = 2, GLTF = 3, GLTF_2 = 4, OBJ = 5, TILT = 6, } /// <summary> /// Options for filtering on the maximum complexity of the asset. /// </summary> public enum PolyMaxComplexityFilter { UNSPECIFIED = 0, SIMPLE = 1, MEDIUM = 2, COMPLEX = 3, } /// <summary> /// Base class that all request types derive from. /// </summary> public abstract class PolyRequest { /// <summary> /// How to sort the results. /// </summary> public PolyOrderBy orderBy = PolyOrderBy.NEWEST; /// <summary> /// Size of each returned page. /// </summary> public int pageSize = 45; /// <summary> /// Page continuation token for pagination. /// </summary> public string pageToken = null; } /// <summary> /// Represents a set of Poly request parameters determining which assets should be returned. /// null values mean "don't filter by this parameter". /// </summary> [AutoStringifiable] public class PolyListAssetsRequest : PolyRequest { public string keywords = ""; public bool curated = false; /// <summary> /// Category can be any of the PolyCategory object categories (e.g. "PolyCategory.ANIMALS"). 
/// </summary> public PolyCategory category = PolyCategory.UNSPECIFIED; public PolyMaxComplexityFilter maxComplexity = PolyMaxComplexityFilter.UNSPECIFIED; public PolyFormatFilter? formatFilter = null; public PolyListAssetsRequest() {} /// <summary> /// Returns a ListAssetsRequest that requests the featured assets. This approximates what the /// user would see in the Poly main page, but the ordering might be different. /// </summary> public static PolyListAssetsRequest Featured() { PolyListAssetsRequest featured = new PolyListAssetsRequest(); featured.curated = true; featured.orderBy = PolyOrderBy.BEST; return featured; } /// <summary> /// Returns a ListAssetsRequest that requests the latest assets. This query is not curated, /// so it will return the latest assets regardless of whether they have been reviewed. /// If you wish to enable curation, set curated=true on the returned object. /// </summary> public static PolyListAssetsRequest Latest() { PolyListAssetsRequest latest = new PolyListAssetsRequest(); latest.orderBy = PolyOrderBy.NEWEST; return latest; } public override string ToString() { return AutoStringify.Stringify(this); } } /// <summary> /// Represents a set of Poly request parameters determining which of the user's assets should be returned. /// null values mean "don't filter by this parameter". /// </summary> [AutoStringifiable] public class PolyListUserAssetsRequest : PolyRequest { public PolyFormatType format = PolyFormatType.UNKNOWN; public PolyVisibilityFilter visibility = PolyVisibilityFilter.UNSPECIFIED; public PolyFormatFilter? formatFilter = null; public PolyListUserAssetsRequest() { } /// <summary> /// Returns a ListUserAssetsRequest that requests the user's latest assets. /// </summary> public static PolyListUserAssetsRequest MyNewest() { PolyListUserAssetsRequest myNewest = new PolyListUserAssetsRequest(); myNewest.orderBy = PolyOrderBy.NEWEST; return myNewest; } public override string ToString() { return AutoStringify.Stringify(this); } } /// <summary> /// Represents a set of Poly request parameters determining which liked assets should be returned. /// Currently, only requests for the liked assets of the logged in user are supported. /// null values mean "don't filter by this parameter". /// </summary> [AutoStringifiable] public class PolyListLikedAssetsRequest : PolyRequest { /// <summary> // A valid user id. Currently, only the special value 'me', representing the // currently-authenticated user is supported. To use 'me', you must pass // an OAuth token with the request. /// </summary> public string name = "me"; public PolyListLikedAssetsRequest() { } /// <summary> /// Returns a ListUserAssetsRequest that requests the user's most recently liked assets. /// </summary> public static PolyListLikedAssetsRequest MyLiked() { PolyListLikedAssetsRequest myLiked = new PolyListLikedAssetsRequest(); myLiked.orderBy = PolyOrderBy.LIKED_TIME; return myLiked; } public override string ToString() { return AutoStringify.Stringify(this); } } /// <summary> /// Represents the status of an operation: success or failure + error message. /// /// A typical pattern is to return a PolyStatus to indicate the success of an operation, instead of just a bool. /// So your code would do something like: /// /// @{ /// PolyStatus MyMethod() { /// if (somethingWentWrong) { /// return PolyStatus.Error("Failed to reticulate spline."); /// } /// ... 
/// return PolyStatus.Success(); /// } /// @} /// /// You can also chain PolyStatus failures, using one PolyStatus as the cause of another: /// /// @{ /// PolyStatus MyMethod() { /// PolyStatus status = TesselateParabolicNonUniformDarkMatterQuantumSuperManifoldWithCheese(); /// if (!status.ok) { /// return PolyStatus.Error(status, "Tesselation failure."); /// } /// ... /// return PolyStatus.Success(); /// } /// @} /// /// Using PolyStatus vs. throwing exceptions: PolyStatus typically represents an "expected" failure, that is, /// an operation where failure is common and acceptable. For example, validating user input, consuming some /// external file which might or might not be well formatted, sending a web request, etc. For unexpected /// failures (logic errors, assumption violations, etc), it's best to use exceptions. /// </summary> public struct PolyStatus { /// <summary> /// Indicates whether the operation succeeded. /// </summary> public bool ok; /// <summary> /// If the operation failed, this is the error message. This is an error message suitable for /// logging, not necessarily a user-friendly message. /// </summary> public string errorMessage; /// <summary> /// Creates a new PolyStatus with the given success status and error message. /// </summary> /// <param name="ok">Whether the operation succeeded.</param> /// <param name="errorMessage">The error message (only relevant if ok == false).</param> public PolyStatus(bool ok, string errorMessage = "") { this.ok = ok; this.errorMessage = errorMessage; } /// <summary> /// Creates a new success status. /// </summary> public static PolyStatus Success() { return new PolyStatus(true); } /// <summary> /// Creates a new error status with the given error message. /// </summary> public static PolyStatus Error(string errorMessage) { return new PolyStatus(false, errorMessage); } /// <summary> /// Creates a new error status with the given error message. /// </summary> public static PolyStatus Error(string format, params object[] args) { return new PolyStatus(false, string.Format(format, args)); } /// <summary> /// Creates a new error status with the given error message and cause. /// The error message will automatically include all error messages in the causal chain. /// </summary> public static PolyStatus Error(PolyStatus cause, string errorMessage) { return new PolyStatus(false, errorMessage + "\nCaused by: " + cause.errorMessage); } /// <summary> /// Creates a new error status with the given error message and cause. /// The error message will automatically include all error messages in the causal chain. /// </summary> public static PolyStatus Error(PolyStatus cause, string format, params object[] args) { return new PolyStatus(false, string.Format(format, args) + "\nCaused by: " + cause.errorMessage); } public override string ToString() { return ok ? "OK" : string.Format("ERROR: {0}", errorMessage); } } /// <summary> /// A union of a PolyStatus and a type. Used to represent the result of an operation, which can either /// be an error (represented as a PolyStatus), or a result object (the parameter type T). /// </summary> /// <typeparam name="T">The result object.</typeparam> public class PolyStatusOr<T> { private PolyStatus status; private T value; /// <summary> /// Creates a PolyStatusOr with the given error status. 
/// </summary> /// <param name="status">The error status with which to create it.</param> public PolyStatusOr(PolyStatus status) { if (status.ok) { throw new Exception("PolyStatusOr(PolyStatus) can only be used with an error status."); } this.status = status; this.value = default(T); } /// <summary> /// Creates a PolyStatusOr with the given value. /// The status will be set to success. /// </summary> /// <param name="value">The value with which to create it.</param> public PolyStatusOr(T value) { this.status = PolyStatus.Success(); this.value = value; } /// <summary> /// Returns the status. /// </summary> public PolyStatus Status { get { return status; } } /// <summary> /// Shortcut to Status.ok. /// </summary> public bool Ok { get { return status.ok; } } /// <summary> /// Returns the value. The value can only be obtained if the status is successful. If the status /// is an error, reading this property will throw an exception. /// </summary> public T Value { get { if (!status.ok) { throw new Exception("Can't get value from an unsuccessful PolyStatusOr: " + this); } return value; } } public override string ToString() { return string.Format("PolyStatusOr<{0}>: {1}{2}", typeof(T).Name, status, status.ok ? (value == null ? "(null)" : value.ToString()) : ""); } } /// <summary> /// Base class for all result types. /// </summary> public abstract class PolyBaseResult { /// <summary> /// The status of the operation (success or failure). /// </summary> public PolyStatus status; } /// <summary> /// Represents the result of a PolyListAssetsRequest or PolyListUserAssetsRequest. /// </summary> [AutoStringifiable] public class PolyListAssetsResult : PolyBaseResult { /// <summary> /// A list of assets that match the criteria specified in the request. /// </summary> public List<PolyAsset> assets; /// <summary> /// The total number of assets in the list, without pagination. /// </summary> public int totalSize; /// <summary> /// The token to retrieve the next page of results, if any. /// If there is no next page, this will be null. /// </summary> public string nextPageToken; public PolyListAssetsResult(PolyStatus status, int totalSize = 0, List<PolyAsset> assets = null, string nextPageToken = null) { this.status = status; this.assets = assets; this.totalSize = totalSize; this.nextPageToken = nextPageToken; } public override string ToString() { return AutoStringify.Stringify(this); } } /// <summary> /// Represents the result of importing an asset. /// </summary> public class PolyImportResult { /// <summary> /// The GameObject representing the imported asset. /// </summary> public GameObject gameObject; /// <summary> /// The main thread throttler object, if importing in "throttled" mode. This will be null if not /// in throttled mode. Enumerate this on the main thread to gradually perform necessary main /// thread operations like creating meshes, textures, etc (see documentation for PolyImportOptions for /// more details). /// /// IMPORTANT: this enumerator is not designed to be used across scene (level) loads. Always finish /// enumerating it before loading a new scene. /// </summary> public IEnumerable mainThreadThrottler; public PolyImportResult(GameObject gameObject) { this.gameObject = gameObject; } } /// <summary> /// Represents the result of fetching files for an asset. 
/// </summary> [AutoStringifiable] public class PolyFormatTypeFetchResult : PolyBaseResult { public PolyAsset asset; public PolyFormatTypeFetchResult(PolyStatus status, PolyAsset asset) { this.status = status; this.asset = asset; } public override string ToString() { return AutoStringify.Stringify(this); } } }
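// ---------------------------------------------------------------------------
// Usage sketch (not part of the Poly Toolkit source): the types above are plain
// data/result containers, so the example below only exercises what this file
// declares and makes no Poly REST calls. The PolyAssetExamples class and its
// DescribeAssets/BuildFeaturedRequest helpers are hypothetical, added purely to
// illustrate PolyStatus/PolyStatusOr, the request factory methods, and the
// PolyAsset convenience properties.
// ---------------------------------------------------------------------------
using System.Text;
using PolyToolkit;

public static class PolyAssetExamples {
  // Summarizes a PolyListAssetsResult, returning an error status instead of throwing.
  public static PolyStatusOr<string> DescribeAssets(PolyListAssetsResult result) {
    if (!result.status.ok) {
      // Chain the underlying error into a new PolyStatus, as the PolyStatus doc comment suggests.
      return new PolyStatusOr<string>(PolyStatus.Error(result.status, "Listing assets failed."));
    }
    StringBuilder sb = new StringBuilder();
    if (result.assets != null) {
      foreach (PolyAsset asset in result.assets) {
        // Prefer a glTF 2.0 representation if the asset has one.
        PolyFormat gltf2 = asset.GetFormatIfExists(PolyFormatType.GLTF_2);
        sb.AppendLine(string.Format("{0} ({1}) mutable={2} hasGltf2={3}",
            asset.displayName, asset.Url, asset.IsMutable, gltf2 != null));
        // AttributionInfo combines display name, author, URL and license.
        sb.AppendLine(asset.AttributionInfo);
      }
    }
    return new PolyStatusOr<string>(sb.ToString());
  }

  // Request objects are parameter bags; the factory methods preset common filters.
  public static PolyListAssetsRequest BuildFeaturedRequest() {
    PolyListAssetsRequest request = PolyListAssetsRequest.Featured();
    request.formatFilter = PolyFormatFilter.GLTF_2;  // only assets that offer a glTF 2.0 format
    request.pageSize = 20;
    return request;
  }
}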
using System;
using Microsoft.SPOT;
using System.Collections;
using System.Text;

namespace MicroTweet
{
    internal delegate object ObjectCreationFunction(object o);

    // JSON reference: http://json.org/
    internal static class Json
    {
        /// <summary>
        /// Parses a JSON-encoded string into a collection of Hashtables, ArrayLists, and values.
        /// </summary>
        /// <param name="input">The JSON-encoded string to parse.</param>
        /// <returns>The decoded root object.</returns>
        public static object Parse(string input)
        {
            return Parse(input.ToCharArray());
        }

        /// <summary>
        /// Parses a JSON-encoded character array into a collection of Hashtables, ArrayLists, and values.
        /// </summary>
        /// <param name="input">The JSON-encoded character array to parse.</param>
        /// <returns>The decoded root object.</returns>
        public static object Parse(char[] input)
        {
            object result;
            TryParse(input, out result, true);
            return result;
        }

        /// <summary>
        /// Attempts to parse a JSON-encoded string into a collection of Hashtables, ArrayLists, and values.
        /// </summary>
        /// <param name="input">The JSON-encoded string to parse.</param>
        /// <param name="result">Returns the decoded root object.</param>
        /// <returns>true if parsing was successful; otherwise, false.</returns>
        public static bool TryParse(string input, out object result)
        {
            return TryParse(input.ToCharArray(), out result);
        }

        /// <summary>
        /// Attempts to parse a JSON-encoded character array into a collection of Hashtables, ArrayLists, and values.
        /// </summary>
        /// <param name="input">The JSON-encoded character array to parse.</param>
        /// <param name="result">Returns the decoded root object.</param>
        /// <returns>true if parsing was successful; otherwise, false.</returns>
        public static bool TryParse(char[] input, out object result)
        {
            return TryParse(input, out result, false);
        }

        private static bool TryParse(char[] input, out object result, bool throwIfError)
        {
            int index = 0;

            // Attempt to parse the root value
            bool success = TryParseValue(input, ref index, out result);

            // Throw an exception if necessary
            if (!success && throwIfError)
                throw new Exception("JSON parse error at index " + index);

            // Done
            return success;
        }

        /// <summary>
        /// Parses a JSON-encoded array into a collection of objects created with the specified delegate.
        /// </summary>
        /// <param name="input">The JSON-encoded character array to parse.</param>
        /// <param name="func">The delegate method used to create objects from the JSON array's contents.</param>
        /// <returns>A list of decoded objects.</returns>
        public static ArrayList ParseArrayToObjects(char[] input, ObjectCreationFunction func)
        {
            ArrayList result;
            TryParseArrayToObjects(input, func, out result, true);
            return result;
        }

        /// <summary>
        /// Attempts to parse a JSON-encoded array into a collection of objects created with the specified delegate.
/// </summary> /// <param name="input">The JSON-encoded character array to parse.</param> /// <param name="func">The delegate method used to create objects from the JSON array's contents.</param> /// <param name="result">Returns the list of decoded objects.</param> /// <returns>true if parsing was successful; otherwise, false.</returns> public static bool TryParseArrayToObjects(char[] input, ObjectCreationFunction func, out ArrayList result) { return TryParseArrayToObjects(input, func, out result, false); } private static bool TryParseArrayToObjects(char[] input, ObjectCreationFunction func, out ArrayList result, bool throwIfError) { int index = 0; object obj; bool success = TryParseArray(input, ref index, func, out obj); result = obj as ArrayList; if (throwIfError && (!success || result == null)) throw new Exception("JSON parse error at index " + index); // Done return success; } /// <summary> /// Skips ahead to the next non-whitespace character. /// </summary> private static void SkipWhitespace(char[] input, ref int index) { while (index < input.Length) { switch (input[index]) { case ' ': case '\t': case '\n': case '\r': index++; break; default: return; } } } /// <summary> /// Attempts to parse a JSON object, array, or value starting at the specified index. /// </summary> private static bool TryParseValue(char[] input, ref int index, out object result) { // Skip whitespace and make sure we're not at the end of the input SkipWhitespace(input, ref index); if (index >= input.Length) { result = null; return false; } // Detect the type of value at this position char c = input[index]; switch (c) { case '{': // Object return TryParseObject(input, ref index, out result); case '[': // Array return TryParseArray(input, ref index, null, out result); case '"': // String return TryParseString(input, ref index, out result); case 't': // true case 'f': // false case 'n': // null return TryParseLiteral(input, ref index, out result); } // Numeric value if ((c >= '0' && c <= '9') || c == '-') return TryParseNumber(input, ref index, out result); // Couldn't determine the type of value result = null; return false; } /// <summary> /// Attempts to parse a JSON object starting at the specified index. /// </summary> private static bool TryParseObject(char[] input, ref int index, out object result) { if (input[index++] != '{') { result = null; return false; } SkipWhitespace(input, ref index); string key; object value; char c; Hashtable hashtable = new Hashtable(); while (index < input.Length) { // Are we at the end of the object? if (input[index] == '}') { index++; result = hashtable; return true; } // Try to parse the key (as a string) if (!TryParseString(input, ref index, out value)) break; key = (string)value; // Skip ahead to the ':' delimiter SkipWhitespace(input, ref index); if (index >= input.Length) break; if (input[index++] != ':') break; // Try to parse the value if (!TryParseValue(input, ref index, out value)) break; // Success, add it to the result hashtable[key] = value; // Skip ahead to the next non-whitespace character SkipWhitespace(input, ref index); if (index >= input.Length) break; c = input[index]; if (c == '}') continue; if (c == ',') { index++; SkipWhitespace(input, ref index); continue; } // Error: there should be a ',' or '}' after each key/value pair break; } // An error occurred or we reached the end of the input stream before finishing result = null; return false; } /// <summary> /// Attempts to parse a JSON array starting at the specified index. 
/// </summary> private static bool TryParseArray(char[] input, ref int index, ObjectCreationFunction func, out object result) { if (input[index++] != '[') { result = null; return false; } SkipWhitespace(input, ref index); object value; char c; ArrayList arrayList = new ArrayList(); while (index < input.Length) { // Are we at the end of the array? if (input[index] == ']') { index++; result = arrayList; return true; } // Try to parse a value if (!TryParseValue(input, ref index, out value)) break; // Success, add it to the list if (func == null) arrayList.Add(value); else arrayList.Add(func(value)); // Skip ahead to the next non-whitespace character SkipWhitespace(input, ref index); if (index >= input.Length) break; c = input[index]; if (c == ']') continue; if (c == ',') { index++; SkipWhitespace(input, ref index); continue; } // Error: there should be a ',' or ']' after each value break; } // An error occurred or we reached the end of the input stream before finishing result = null; return false; } /// <summary> /// Attempts to parse a JSON-encoded string at the specified index. /// </summary> private static bool TryParseString(char[] input, ref int index, out object result) { if (input[index++] != '"') { result = null; return false; } char c; StringBuilder sb = new StringBuilder(); while (index < input.Length) { c = input[index++]; // Are we at the end of the string? if (c == '"') { result = sb.ToString(); return true; } // Is this an escape character? else if (c == '\\') { // Make sure we can read at least one more char if (index >= input.Length) break; c = input[index++]; // What kind of escape character is this? switch (c) { case '"': case '\\': case '/': sb.Append(c); continue; case 'b': sb.Append('\b'); continue; case 'f': sb.Append('\f'); continue; case 'n': sb.Append('\n'); continue; case 'r': sb.Append('\r'); continue; case 't': sb.Append('\t'); continue; // Arbitrary character (encoded as 4 hex characters) case 'u': // Make sure we can read at least four more chars if (index + 3 >= input.Length) break; try { // Parse the hexadecimal string into a char c = (char)Convert.ToInt32(new string(input, index, 4), 16); index += 4; sb.Append(c); continue; } catch { } break; } // Could not parse the escaped character break; } else { // Normal character, just add it to the result sb.Append(c); } } // An error occurred or we reached the end of the input stream before finishing result = null; return false; } /// <summary> /// Attempts to parse a JSON literal (true, false, or null) at the specified index. /// </summary> private static bool TryParseLiteral(char[] input, ref int index, out object result) { switch (input[index++]) { case 't': if (index + 2 < input.Length && input[index + 0] == 'r' && input[index + 1] == 'u' && input[index + 2] == 'e') { index += 3; result = true; return true; } break; case 'f': if (index + 3 < input.Length && input[index + 0] == 'a' && input[index + 1] == 'l' && input[index + 2] == 's' && input[index + 3] == 'e') { index += 4; result = false; return true; } break; case 'n': if (index + 2 < input.Length && input[index + 0] == 'u' && input[index + 1] == 'l' && input[index + 2] == 'l') { index += 3; result = null; return true; } break; } // Could not match "true", "false", or "null" or we reached the end of the input stream before finishing result = null; return false; } /// <summary> /// Attempts to parse a number at the specified index. The numeric type (long or double) is automatically chosen based on the value. 
/// </summary> private static bool TryParseNumber(char[] input, ref int index, out object result) { char c; StringBuilder sb = new StringBuilder(); // We'll need to know whether the number contains a decimal point (or is expressed in scientific notation) bool useDouble = false; while (index < input.Length) { c = input[index]; if (c >= '0' && c <= '9') { sb.Append(c); index++; continue; } if (c == '.' || c == 'e' || c == 'E' || c == '+' || c == '-') { useDouble = true; sb.Append(c); index++; continue; } // Reached a non-numeric character break; } // Attempt to parse the number if (useDouble) { double value; if (double.TryParse(sb.ToString(), out value)) { result = value; return true; } } else { try { long value = long.Parse(sb.ToString()); result = value; return true; } catch { } } // An error occurred or we reached the end of the input stream before finishing result = null; return false; } } }
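// ---------------------------------------------------------------------------
// Usage sketch (not part of MicroTweet): because Json is internal, a sample
// like this would live inside the MicroTweet assembly. The JsonUsageSketch
// class and the JSON field names below are illustrative only; they show how
// objects decode to Hashtable, arrays to ArrayList, and numbers to long/double.
// ---------------------------------------------------------------------------
using System.Collections;

namespace MicroTweet
{
    internal static class JsonUsageSketch
    {
        internal static void Demo()
        {
            // Objects decode to Hashtable, arrays to ArrayList, numbers to long/double,
            // strings to string, and true/false/null to their CLR equivalents.
            object root = Json.Parse("{\"name\":\"micro\",\"count\":3,\"tags\":[\"a\",\"b\"]}");
            Hashtable obj = (Hashtable)root;
            string name = (string)obj["name"];        // "micro"
            long count = (long)obj["count"];          // 3 (integral numbers come back as long)
            ArrayList tags = (ArrayList)obj["tags"];  // ["a", "b"]

            // TryParse reports failure instead of throwing the exception Parse would raise.
            object ignored;
            bool ok = Json.TryParse("{broken", out ignored);  // ok == false

            // ParseArrayToObjects runs each decoded element through a delegate,
            // e.g. projecting every object in the array down to its "name" value.
            ArrayList names = Json.ParseArrayToObjects(
                "[{\"name\":\"a\"},{\"name\":\"b\"}]".ToCharArray(),
                o => (string)((Hashtable)o)["name"]);
        }
    }
}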
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.ComponentModel; using System.Diagnostics; using System.Globalization; using System.Net.Security; using System.Runtime.InteropServices; namespace System.Net { internal static class SSPIWrapper { internal static SecurityPackageInfoClass[] EnumerateSecurityPackages(SSPIInterface secModule) { if (GlobalLog.IsEnabled) { GlobalLog.Enter("EnumerateSecurityPackages"); } if (secModule.SecurityPackages == null) { lock (secModule) { if (secModule.SecurityPackages == null) { int moduleCount = 0; SafeFreeContextBuffer arrayBaseHandle = null; try { int errorCode = secModule.EnumerateSecurityPackages(out moduleCount, out arrayBaseHandle); if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::arrayBase: " + (arrayBaseHandle.DangerousGetHandle().ToString("x"))); } if (errorCode != 0) { throw new Win32Exception(errorCode); } var securityPackages = new SecurityPackageInfoClass[moduleCount]; int i; for (i = 0; i < moduleCount; i++) { securityPackages[i] = new SecurityPackageInfoClass(arrayBaseHandle, i); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.EnumerateSecurityPackages(securityPackages[i].Name); } } secModule.SecurityPackages = securityPackages; } finally { if (arrayBaseHandle != null) { arrayBaseHandle.Dispose(); } } } } } if (GlobalLog.IsEnabled) { GlobalLog.Leave("EnumerateSecurityPackages"); } return secModule.SecurityPackages; } internal static SecurityPackageInfoClass GetVerifyPackageInfo(SSPIInterface secModule, string packageName) { return GetVerifyPackageInfo(secModule, packageName, false); } internal static SecurityPackageInfoClass GetVerifyPackageInfo(SSPIInterface secModule, string packageName, bool throwIfMissing) { SecurityPackageInfoClass[] supportedSecurityPackages = EnumerateSecurityPackages(secModule); if (supportedSecurityPackages != null) { for (int i = 0; i < supportedSecurityPackages.Length; i++) { if (string.Compare(supportedSecurityPackages[i].Name, packageName, StringComparison.OrdinalIgnoreCase) == 0) { return supportedSecurityPackages[i]; } } } if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.SspiPackageNotFound(packageName); } if (throwIfMissing) { throw new NotSupportedException(SR.net_securitypackagesupport); } return null; } public static SafeFreeCredentials AcquireDefaultCredential(SSPIInterface secModule, string package, Interop.SspiCli.CredentialUse intent) { if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireDefaultCredential(): using " + package); } if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.AcquireDefaultCredential(package, intent); } SafeFreeCredentials outCredential = null; int errorCode = secModule.AcquireDefaultCredential(package, intent, out outCredential); if (errorCode != 0) { #if TRACE_VERBOSE if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireDefaultCredential(): error " + Interop.MapSecurityStatus((uint)errorCode)); } #endif if (NetEventSource.Log.IsEnabled()) { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.net_log_operation_failed_with_error, "AcquireDefaultCredential()", String.Format(CultureInfo.CurrentCulture, "0X{0:X}", errorCode))); } throw new Win32Exception(errorCode); } return outCredential; } public static SafeFreeCredentials AcquireCredentialsHandle(SSPIInterface secModule, string package, 
Interop.SspiCli.CredentialUse intent, ref Interop.SspiCli.AuthIdentity authdata) { if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireCredentialsHandle#2(): using " + package); } if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.AcquireCredentialsHandle(package, intent, authdata); } SafeFreeCredentials credentialsHandle = null; int errorCode = secModule.AcquireCredentialsHandle(package, intent, ref authdata, out credentialsHandle); if (errorCode != 0) { #if TRACE_VERBOSE if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireCredentialsHandle#2(): error " + Interop.MapSecurityStatus((uint)errorCode)); } #endif if (NetEventSource.Log.IsEnabled()) { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.net_log_operation_failed_with_error, "AcquireCredentialsHandle()", String.Format(CultureInfo.CurrentCulture, "0X{0:X}", errorCode))); } throw new Win32Exception(errorCode); } return credentialsHandle; } public static SafeFreeCredentials AcquireCredentialsHandle(SSPIInterface secModule, string package, Interop.SspiCli.CredentialUse intent, ref SafeSspiAuthDataHandle authdata) { if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.AcquireCredentialsHandle(package, intent, authdata); } SafeFreeCredentials credentialsHandle = null; int errorCode = secModule.AcquireCredentialsHandle(package, intent, ref authdata, out credentialsHandle); if (errorCode != 0) { if (NetEventSource.Log.IsEnabled()) { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.net_log_operation_failed_with_error, "AcquireCredentialsHandle()", String.Format(CultureInfo.CurrentCulture, "0X{0:X}", errorCode))); } throw new Win32Exception(errorCode); } return credentialsHandle; } public static SafeFreeCredentials AcquireCredentialsHandle(SSPIInterface secModule, string package, Interop.SspiCli.CredentialUse intent, Interop.SspiCli.SecureCredential scc) { if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireCredentialsHandle#3(): using " + package); } if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.AcquireCredentialsHandle(package, intent, scc); } SafeFreeCredentials outCredential = null; int errorCode = secModule.AcquireCredentialsHandle( package, intent, ref scc, out outCredential); if (errorCode != 0) { #if TRACE_VERBOSE if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireCredentialsHandle#3(): error " + Interop.MapSecurityStatus((uint)errorCode)); } #endif if (NetEventSource.Log.IsEnabled()) { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.net_log_operation_failed_with_error, "AcquireCredentialsHandle()", String.Format(CultureInfo.CurrentCulture, "0X{0:X}", errorCode))); } throw new Win32Exception(errorCode); } #if TRACE_VERBOSE if (GlobalLog.IsEnabled) { GlobalLog.Print("SSPIWrapper::AcquireCredentialsHandle#3(): cred handle = " + outCredential.ToString()); } #endif return outCredential; } internal static int InitializeSecurityContext(SSPIInterface secModule, ref SafeFreeCredentials credential, ref SafeDeleteContext context, string targetName, Interop.SspiCli.ContextFlags inFlags, Interop.SspiCli.Endianness datarep, SecurityBuffer inputBuffer, SecurityBuffer outputBuffer, ref Interop.SspiCli.ContextFlags outFlags) { if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.InitializeSecurityContext(credential.ToString(), LoggingHash.ObjectToString(context), targetName, inFlags); } int errorCode = secModule.InitializeSecurityContext(ref credential, ref 
context, targetName, inFlags, datarep, inputBuffer, outputBuffer, ref outFlags); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.SecurityContextInputBuffer("InitializeSecurityContext", (inputBuffer == null ? 0 : inputBuffer.size), outputBuffer.size, (Interop.SecurityStatus)errorCode); } return errorCode; } internal static int InitializeSecurityContext(SSPIInterface secModule, SafeFreeCredentials credential, ref SafeDeleteContext context, string targetName, Interop.SspiCli.ContextFlags inFlags, Interop.SspiCli.Endianness datarep, SecurityBuffer[] inputBuffers, SecurityBuffer outputBuffer, ref Interop.SspiCli.ContextFlags outFlags) { if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.InitializeSecurityContext(credential.ToString(), LoggingHash.ObjectToString(context), targetName, inFlags); } int errorCode = secModule.InitializeSecurityContext(credential, ref context, targetName, inFlags, datarep, inputBuffers, outputBuffer, ref outFlags); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.SecurityContextInputBuffers("InitializeSecurityContext", (inputBuffers == null ? 0 : inputBuffers.Length), outputBuffer.size, (Interop.SecurityStatus)errorCode); } return errorCode; } internal static int AcceptSecurityContext(SSPIInterface secModule, ref SafeFreeCredentials credential, ref SafeDeleteContext context, Interop.SspiCli.ContextFlags inFlags, Interop.SspiCli.Endianness datarep, SecurityBuffer inputBuffer, SecurityBuffer outputBuffer, ref Interop.SspiCli.ContextFlags outFlags) { if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.AcceptSecurityContext(credential.ToString(), LoggingHash.ObjectToString(context), inFlags); } int errorCode = secModule.AcceptSecurityContext(ref credential, ref context, inputBuffer, inFlags, datarep, outputBuffer, ref outFlags); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.SecurityContextInputBuffer("AcceptSecurityContext", (inputBuffer == null ? 0 : inputBuffer.size), outputBuffer.size, (Interop.SecurityStatus)errorCode); } return errorCode; } internal static int AcceptSecurityContext(SSPIInterface secModule, SafeFreeCredentials credential, ref SafeDeleteContext context, Interop.SspiCli.ContextFlags inFlags, Interop.SspiCli.Endianness datarep, SecurityBuffer[] inputBuffers, SecurityBuffer outputBuffer, ref Interop.SspiCli.ContextFlags outFlags) { if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.AcceptSecurityContext(credential.ToString(), LoggingHash.ObjectToString(context), inFlags); } int errorCode = secModule.AcceptSecurityContext(credential, ref context, inputBuffers, inFlags, datarep, outputBuffer, ref outFlags); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.SecurityContextInputBuffers("AcceptSecurityContext", (inputBuffers == null ? 
0 : inputBuffers.Length), outputBuffer.size, (Interop.SecurityStatus)errorCode); } return errorCode; } internal static int CompleteAuthToken(SSPIInterface secModule, ref SafeDeleteContext context, SecurityBuffer[] inputBuffers) { int errorCode = secModule.CompleteAuthToken(ref context, inputBuffers); if (SecurityEventSource.Log.IsEnabled()) { SecurityEventSource.Log.OperationReturnedSomething("CompleteAuthToken()", (Interop.SecurityStatus)errorCode); } return errorCode; } public static int QuerySecurityContextToken(SSPIInterface secModule, SafeDeleteContext context, out SecurityContextTokenHandle token) { return secModule.QuerySecurityContextToken(context, out token); } public static int EncryptMessage(SSPIInterface secModule, SafeDeleteContext context, SecurityBuffer[] input, uint sequenceNumber) { return EncryptDecryptHelper(OP.Encrypt, secModule, context, input, sequenceNumber); } public static int DecryptMessage(SSPIInterface secModule, SafeDeleteContext context, SecurityBuffer[] input, uint sequenceNumber) { return EncryptDecryptHelper(OP.Decrypt, secModule, context, input, sequenceNumber); } internal static int MakeSignature(SSPIInterface secModule, SafeDeleteContext context, SecurityBuffer[] input, uint sequenceNumber) { return EncryptDecryptHelper(OP.MakeSignature, secModule, context, input, sequenceNumber); } public static int VerifySignature(SSPIInterface secModule, SafeDeleteContext context, SecurityBuffer[] input, uint sequenceNumber) { return EncryptDecryptHelper(OP.VerifySignature, secModule, context, input, sequenceNumber); } private enum OP { Encrypt = 1, Decrypt, MakeSignature, VerifySignature } private unsafe static int EncryptDecryptHelper(OP op, SSPIInterface secModule, SafeDeleteContext context, SecurityBuffer[] input, uint sequenceNumber) { Interop.SspiCli.SecurityBufferDescriptor sdcInOut = new Interop.SspiCli.SecurityBufferDescriptor(input.Length); var unmanagedBuffer = new Interop.SspiCli.SecurityBufferStruct[input.Length]; fixed (Interop.SspiCli.SecurityBufferStruct* unmanagedBufferPtr = unmanagedBuffer) { sdcInOut.UnmanagedPointer = unmanagedBufferPtr; GCHandle[] pinnedBuffers = new GCHandle[input.Length]; byte[][] buffers = new byte[input.Length][]; try { for (int i = 0; i < input.Length; i++) { SecurityBuffer iBuffer = input[i]; unmanagedBuffer[i].count = iBuffer.size; unmanagedBuffer[i].type = iBuffer.type; if (iBuffer.token == null || iBuffer.token.Length == 0) { unmanagedBuffer[i].token = IntPtr.Zero; } else { pinnedBuffers[i] = GCHandle.Alloc(iBuffer.token, GCHandleType.Pinned); unmanagedBuffer[i].token = Marshal.UnsafeAddrOfPinnedArrayElement(iBuffer.token, iBuffer.offset); buffers[i] = iBuffer.token; } } // The result is written in the input Buffer passed as type=BufferType.Data. int errorCode; switch (op) { case OP.Encrypt: errorCode = secModule.EncryptMessage(context, sdcInOut, sequenceNumber); break; case OP.Decrypt: errorCode = secModule.DecryptMessage(context, sdcInOut, sequenceNumber); break; case OP.MakeSignature: errorCode = secModule.MakeSignature(context, sdcInOut, sequenceNumber); break; case OP.VerifySignature: errorCode = secModule.VerifySignature(context, sdcInOut, sequenceNumber); break; default: if (GlobalLog.IsEnabled) { GlobalLog.Assert("SSPIWrapper::EncryptDecryptHelper", "Unknown OP: " + op); } Debug.Fail("SSPIWrapper::EncryptDecryptHelper", "Unknown OP: " + op); throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException); } // Marshalling back returned sizes / data. 
for (int i = 0; i < input.Length; i++) { SecurityBuffer iBuffer = input[i]; iBuffer.size = unmanagedBuffer[i].count; iBuffer.type = unmanagedBuffer[i].type; if (iBuffer.size == 0) { iBuffer.offset = 0; iBuffer.token = null; } else { checked { // Find the buffer this is inside of. Usually they all point inside buffer 0. int j; for (j = 0; j < input.Length; j++) { if (buffers[j] == null) { continue; } byte* bufferAddress = (byte*)Marshal.UnsafeAddrOfPinnedArrayElement(buffers[j], 0); if ((byte*)unmanagedBuffer[i].token >= bufferAddress && (byte*)unmanagedBuffer[i].token + iBuffer.size <= bufferAddress + buffers[j].Length) { iBuffer.offset = (int)((byte*)unmanagedBuffer[i].token - bufferAddress); iBuffer.token = buffers[j]; break; } } if (j >= input.Length) { if (GlobalLog.IsEnabled) { GlobalLog.Assert("SSPIWrapper::EncryptDecryptHelper", "Output buffer out of range."); } Debug.Fail("SSPIWrapper::EncryptDecryptHelper", "Output buffer out of range."); iBuffer.size = 0; iBuffer.offset = 0; iBuffer.token = null; } } } // Backup validate the new sizes. if (iBuffer.offset < 0 || iBuffer.offset > (iBuffer.token == null ? 0 : iBuffer.token.Length)) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("SSPIWrapper::EncryptDecryptHelper|'offset' out of range. [{0}]", iBuffer.offset); } Debug.Fail("SSPIWrapper::EncryptDecryptHelper|'offset' out of range. [" + iBuffer.offset + "]"); } if (iBuffer.size < 0 || iBuffer.size > (iBuffer.token == null ? 0 : iBuffer.token.Length - iBuffer.offset)) { if (GlobalLog.IsEnabled) { GlobalLog.AssertFormat("SSPIWrapper::EncryptDecryptHelper|'size' out of range. [{0}]", iBuffer.size); } Debug.Fail("SSPIWrapper::EncryptDecryptHelper|'size' out of range. [" + iBuffer.size + "]"); } } if (errorCode != 0 && NetEventSource.Log.IsEnabled()) { if (errorCode == Interop.SspiCli.SEC_I_RENEGOTIATE) { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.event_OperationReturnedSomething, op, "SEC_I_RENEGOTIATE")); } else { NetEventSource.PrintError(NetEventSource.ComponentType.Security, SR.Format(SR.net_log_operation_failed_with_error, op, String.Format(CultureInfo.CurrentCulture, "0X{0:X}", errorCode))); } } return errorCode; } finally { for (int i = 0; i < pinnedBuffers.Length; ++i) { if (pinnedBuffers[i].IsAllocated) { pinnedBuffers[i].Free(); } } } } } public static SafeFreeContextBufferChannelBinding QueryContextChannelBinding(SSPIInterface secModule, SafeDeleteContext securityContext, Interop.SspiCli.ContextAttribute contextAttribute) { if (GlobalLog.IsEnabled) { GlobalLog.Enter("QueryContextChannelBinding", contextAttribute.ToString()); } SafeFreeContextBufferChannelBinding result; int errorCode = secModule.QueryContextChannelBinding(securityContext, contextAttribute, out result); if (errorCode != 0) { if (GlobalLog.IsEnabled) { GlobalLog.Leave("QueryContextChannelBinding", "ERROR = " + ErrorDescription(errorCode)); } return null; } if (GlobalLog.IsEnabled) { GlobalLog.Leave("QueryContextChannelBinding", LoggingHash.HashString(result)); } return result; } public static object QueryContextAttributes(SSPIInterface secModule, SafeDeleteContext securityContext, Interop.SspiCli.ContextAttribute contextAttribute) { int errorCode; return QueryContextAttributes(secModule, securityContext, contextAttribute, out errorCode); } public static object QueryContextAttributes(SSPIInterface secModule, SafeDeleteContext securityContext, Interop.SspiCli.ContextAttribute contextAttribute, out int errorCode) { if (GlobalLog.IsEnabled) { 
GlobalLog.Enter("QueryContextAttributes", contextAttribute.ToString()); } int nativeBlockSize = IntPtr.Size; Type handleType = null; switch (contextAttribute) { case Interop.SspiCli.ContextAttribute.Sizes: nativeBlockSize = SecSizes.SizeOf; break; case Interop.SspiCli.ContextAttribute.StreamSizes: nativeBlockSize = StreamSizes.SizeOf; break; case Interop.SspiCli.ContextAttribute.Names: handleType = typeof(SafeFreeContextBuffer); break; case Interop.SspiCli.ContextAttribute.PackageInfo: handleType = typeof(SafeFreeContextBuffer); break; case Interop.SspiCli.ContextAttribute.NegotiationInfo: handleType = typeof(SafeFreeContextBuffer); nativeBlockSize = Marshal.SizeOf<NegotiationInfo>(); break; case Interop.SspiCli.ContextAttribute.ClientSpecifiedSpn: handleType = typeof(SafeFreeContextBuffer); break; case Interop.SspiCli.ContextAttribute.RemoteCertificate: handleType = typeof(SafeFreeCertContext); break; case Interop.SspiCli.ContextAttribute.LocalCertificate: handleType = typeof(SafeFreeCertContext); break; case Interop.SspiCli.ContextAttribute.IssuerListInfoEx: nativeBlockSize = Marshal.SizeOf<Interop.SspiCli.IssuerListInfoEx>(); handleType = typeof(SafeFreeContextBuffer); break; case Interop.SspiCli.ContextAttribute.ConnectionInfo: nativeBlockSize = Marshal.SizeOf<SslConnectionInfo>(); break; default: throw new ArgumentException(SR.Format(SR.net_invalid_enum, "ContextAttribute"), "contextAttribute"); } SafeHandle sspiHandle = null; object attribute = null; try { var nativeBuffer = new byte[nativeBlockSize]; errorCode = secModule.QueryContextAttributes(securityContext, contextAttribute, nativeBuffer, handleType, out sspiHandle); if (errorCode != 0) { if (GlobalLog.IsEnabled) { GlobalLog.Leave("Win32:QueryContextAttributes", "ERROR = " + ErrorDescription(errorCode)); } return null; } switch (contextAttribute) { case Interop.SspiCli.ContextAttribute.Sizes: attribute = new SecSizes(nativeBuffer); break; case Interop.SspiCli.ContextAttribute.StreamSizes: attribute = new StreamSizes(nativeBuffer); break; case Interop.SspiCli.ContextAttribute.Names: attribute = Marshal.PtrToStringUni(sspiHandle.DangerousGetHandle()); break; case Interop.SspiCli.ContextAttribute.PackageInfo: attribute = new SecurityPackageInfoClass(sspiHandle, 0); break; case Interop.SspiCli.ContextAttribute.NegotiationInfo: unsafe { fixed (void* ptr = nativeBuffer) { attribute = new NegotiationInfoClass(sspiHandle, Marshal.ReadInt32(new IntPtr(ptr), NegotiationInfo.NegotiationStateOffest)); } } break; case Interop.SspiCli.ContextAttribute.ClientSpecifiedSpn: attribute = Marshal.PtrToStringUni(sspiHandle.DangerousGetHandle()); break; case Interop.SspiCli.ContextAttribute.LocalCertificate: // Fall-through to RemoteCertificate is intentional. case Interop.SspiCli.ContextAttribute.RemoteCertificate: attribute = sspiHandle; sspiHandle = null; break; case Interop.SspiCli.ContextAttribute.IssuerListInfoEx: attribute = new Interop.SspiCli.IssuerListInfoEx(sspiHandle, nativeBuffer); sspiHandle = null; break; case Interop.SspiCli.ContextAttribute.ConnectionInfo: attribute = new SslConnectionInfo(nativeBuffer); break; default: // Will return null. 
break; } } finally { if (sspiHandle != null) { sspiHandle.Dispose(); } } if (GlobalLog.IsEnabled) { GlobalLog.Leave("QueryContextAttributes", LoggingHash.ObjectToString(attribute)); } return attribute; } public static string ErrorDescription(int errorCode) { if (errorCode == -1) { return "An exception when invoking Win32 API"; } switch ((Interop.SecurityStatus)errorCode) { case Interop.SecurityStatus.InvalidHandle: return "Invalid handle"; case Interop.SecurityStatus.InvalidToken: return "Invalid token"; case Interop.SecurityStatus.ContinueNeeded: return "Continue needed"; case Interop.SecurityStatus.IncompleteMessage: return "Message incomplete"; case Interop.SecurityStatus.WrongPrincipal: return "Wrong principal"; case Interop.SecurityStatus.TargetUnknown: return "Target unknown"; case Interop.SecurityStatus.PackageNotFound: return "Package not found"; case Interop.SecurityStatus.BufferNotEnough: return "Buffer not enough"; case Interop.SecurityStatus.MessageAltered: return "Message altered"; case Interop.SecurityStatus.UntrustedRoot: return "Untrusted root"; default: return "0x" + errorCode.ToString("x", NumberFormatInfo.InvariantInfo); } } } // class SSPIWrapper }
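// ---------------------------------------------------------------------------
// Illustration (not part of SSPIWrapper): EncryptDecryptHelper above pins each
// managed token array, passes raw pointers to the native SSPI layer, and then
// maps the pointers that come back to (array, offset) pairs. The standalone
// sketch below demonstrates only that pinning/offset-recovery technique with a
// plain byte array; it makes no SSPI calls and must be compiled with /unsafe.
// The PinnedBufferSketch class is hypothetical.
// ---------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;

internal static class PinnedBufferSketch
{
    // Pins 'buffer', simulates a native call returning a pointer somewhere inside it,
    // and recovers that pointer's offset relative to the start of the array.
    internal static unsafe int RecoverOffset(byte[] buffer, int interiorIndex)
    {
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            byte* baseAddress = (byte*)Marshal.UnsafeAddrOfPinnedArrayElement(buffer, 0);
            // A real SSPI call would write this pointer into a SecurityBufferStruct;
            // here we construct an interior pointer ourselves.
            byte* returned = baseAddress + interiorIndex;

            // The same range check EncryptDecryptHelper performs before trusting a pointer.
            if (returned >= baseAddress && returned < baseAddress + buffer.Length)
            {
                return (int)(returned - baseAddress);
            }
            return -1;  // pointer does not fall inside this buffer
        }
        finally
        {
            // Always unpin, mirroring the finally block in EncryptDecryptHelper.
            if (handle.IsAllocated)
            {
                handle.Free();
            }
        }
    }
}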
// // ViewActions.cs // // Author: // Jonathan Pobst <[email protected]> // // Copyright (c) 2010 Jonathan Pobst // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using Mono.Unix; using Gtk; namespace Pinta.Core { public class ViewActions { public Gtk.Action ZoomIn { get; private set; } public Gtk.Action ZoomOut { get; private set; } public Gtk.Action ZoomToWindow { get; private set; } public Gtk.Action ZoomToSelection { get; private set; } public Gtk.Action ActualSize { get; private set; } public Gtk.ToggleAction ToolBar { get; private set; } public Gtk.ToggleAction PixelGrid { get; private set; } public Gtk.ToggleAction Rulers { get; private set; } public Gtk.RadioAction Pixels { get; private set; } public Gtk.RadioAction Inches { get; private set; } public Gtk.RadioAction Centimeters { get; private set; } public Gtk.Action Fullscreen { get; private set; } public ToolBarComboBox ZoomComboBox { get; private set; } public string[] ZoomCollection { get; private set; } private string old_zoom_text = ""; private bool zoom_to_window_activated = false; public bool ZoomToWindowActivated { get { return zoom_to_window_activated; } set { zoom_to_window_activated = value; old_zoom_text = ZoomComboBox.ComboBox.ActiveText; } } public ViewActions () { Gtk.IconFactory fact = new Gtk.IconFactory (); fact.Add ("Menu.View.ActualSize.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.ActualSize.png"))); fact.Add ("Menu.View.Grid.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.Grid.png"))); fact.Add ("Menu.View.Rulers.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.Rulers.png"))); fact.Add ("Menu.View.ZoomIn.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.ZoomIn.png"))); fact.Add ("Menu.View.ZoomOut.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.ZoomOut.png"))); fact.Add ("Menu.View.ZoomToSelection.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.ZoomToSelection.png"))); fact.Add ("Menu.View.ZoomToWindow.png", new Gtk.IconSet (PintaCore.Resources.GetIcon ("Menu.View.ZoomToWindow.png"))); fact.AddDefault (); ZoomIn = new Gtk.Action ("ZoomIn", Catalog.GetString ("Zoom In"), null, Stock.ZoomIn); ZoomOut = new Gtk.Action ("ZoomOut", Catalog.GetString ("Zoom Out"), null, Stock.ZoomOut); ZoomToWindow = new Gtk.Action ("ZoomToWindow", Catalog.GetString ("Best Fit"), null, Stock.ZoomFit); ZoomToSelection = new Gtk.Action ("ZoomToSelection", Catalog.GetString ("Zoom to Selection"), null, 
"Menu.View.ZoomToSelection.png"); ActualSize = new Gtk.Action ("ActualSize", Catalog.GetString ("Normal Size"), null, Stock.Zoom100); ToolBar = new Gtk.ToggleAction ("Toolbar", Catalog.GetString ("Toolbar"), null, null); PixelGrid = new Gtk.ToggleAction ("PixelGrid", Catalog.GetString ("Pixel Grid"), null, "Menu.View.Grid.png"); Rulers = new Gtk.ToggleAction ("Rulers", Catalog.GetString ("Rulers"), null, "Menu.View.Rulers.png"); Pixels = new Gtk.RadioAction ("Pixels", Catalog.GetString ("Pixels"), null, null, 0); Inches = new Gtk.RadioAction ("Inches", Catalog.GetString ("Inches"), null, null, 1); Centimeters = new Gtk.RadioAction ("Centimeters", Catalog.GetString ("Centimeters"), null, null, 2); Fullscreen = new Gtk.Action ("Fullscreen", Catalog.GetString ("Fullscreen"), null, Stock.Fullscreen); ZoomCollection = new string[] { "3600%", "2400%", "1600%", "1200%", "800%", "700%", "600%", "500%", "400%", "300%", "200%", "175%", "150%", "125%", "100%", "66%", "50%", "33%", "25%", "16%", "12%", "8%", "5%", Catalog.GetString ("Window") }; ZoomComboBox = new ToolBarComboBox (75, DefaultZoomIndex(), true, ZoomCollection); // Make sure these are the same group so only one will be selected at a time Inches.Group = Pixels.Group; Centimeters.Group = Pixels.Group; } #region Initialization public void CreateMainMenu (Gtk.Menu menu) { MenuItem show_pad = (MenuItem)menu.Children[0]; menu.Remove (show_pad); menu.Append (ToolBar.CreateMenuItem ()); menu.Append (PixelGrid.CreateMenuItem ()); menu.Append (Rulers.CreateMenuItem ()); menu.AppendSeparator (); ImageMenuItem zoomin = ZoomIn.CreateAcceleratedMenuItem (Gdk.Key.plus, Gdk.ModifierType.ControlMask); zoomin.AddAccelerator ("activate", PintaCore.Actions.AccelGroup, new AccelKey (Gdk.Key.equal, Gdk.ModifierType.ControlMask, AccelFlags.Visible)); zoomin.AddAccelerator ("activate", PintaCore.Actions.AccelGroup, new AccelKey (Gdk.Key.KP_Add, Gdk.ModifierType.ControlMask, AccelFlags.Visible)); menu.Append (zoomin); ImageMenuItem zoomout = ZoomOut.CreateAcceleratedMenuItem (Gdk.Key.minus, Gdk.ModifierType.ControlMask); zoomout.AddAccelerator ("activate", PintaCore.Actions.AccelGroup, new AccelKey (Gdk.Key.underscore, Gdk.ModifierType.ControlMask, AccelFlags.Visible)); zoomout.AddAccelerator ("activate", PintaCore.Actions.AccelGroup, new AccelKey (Gdk.Key.KP_Subtract, Gdk.ModifierType.ControlMask, AccelFlags.Visible)); menu.Append (zoomout); ImageMenuItem actualsize = ActualSize.CreateAcceleratedMenuItem (Gdk.Key.Key_0, Gdk.ModifierType.ControlMask); actualsize.AddAccelerator ("activate", PintaCore.Actions.AccelGroup, new AccelKey (Gdk.Key.A, Gdk.ModifierType.ControlMask | Gdk.ModifierType.ShiftMask, AccelFlags.Visible)); menu.Append (actualsize); menu.Append (ZoomToWindow.CreateAcceleratedMenuItem (Gdk.Key.B, Gdk.ModifierType.ControlMask)); //menu.Append (ZoomToSelection.CreateAcceleratedMenuItem (Gdk.Key.B, Gdk.ModifierType.ControlMask | Gdk.ModifierType.ShiftMask)); menu.Append (Fullscreen.CreateAcceleratedMenuItem (Gdk.Key.F11, Gdk.ModifierType.None)); menu.AppendSeparator (); Gtk.Action unit_action = new Gtk.Action ("RulerUnits", Mono.Unix.Catalog.GetString ("Ruler Units"), null, null); Menu unit_menu = (Menu)menu.AppendItem (unit_action.CreateSubMenuItem ()).Submenu; unit_menu.Append (Pixels.CreateMenuItem ()); unit_menu.Append (Inches.CreateMenuItem ()); unit_menu.Append (Centimeters.CreateMenuItem ()); menu.AppendSeparator (); menu.Append (show_pad); } public void CreateToolBar (Gtk.Toolbar toolbar) { toolbar.AppendItem (new Gtk.SeparatorToolItem ()); 
toolbar.AppendItem (ZoomOut.CreateToolBarItem ()); toolbar.AppendItem (ZoomComboBox); toolbar.AppendItem (ZoomIn.CreateToolBarItem ()); } public void RegisterHandlers () { ZoomIn.Activated += HandlePintaCoreActionsViewZoomInActivated; ZoomOut.Activated += HandlePintaCoreActionsViewZoomOutActivated; ZoomComboBox.ComboBox.Changed += HandlePintaCoreActionsViewZoomComboBoxComboBoxChanged; (ZoomComboBox.ComboBox as Gtk.ComboBoxEntry).Entry.FocusOutEvent += new Gtk.FocusOutEventHandler (ComboBox_FocusOutEvent); (ZoomComboBox.ComboBox as Gtk.ComboBoxEntry).Entry.FocusInEvent += new Gtk.FocusInEventHandler (Entry_FocusInEvent); ActualSize.Activated += HandlePintaCoreActionsViewActualSizeActivated; PixelGrid.Toggled += delegate (object sender, EventArgs e) { PintaCore.Workspace.Invalidate (); }; var isFullscreen = false; Fullscreen.Activated += (foo, bar) => { if (!isFullscreen) PintaCore.Chrome.MainWindow.Fullscreen (); else PintaCore.Chrome.MainWindow.Unfullscreen (); isFullscreen = !isFullscreen; }; } private string temp_zoom; private bool suspend_zoom_change; private void Entry_FocusInEvent (object o, Gtk.FocusInEventArgs args) { temp_zoom = PintaCore.Actions.View.ZoomComboBox.ComboBox.ActiveText; } private void ComboBox_FocusOutEvent (object o, Gtk.FocusOutEventArgs args) { string text = PintaCore.Actions.View.ZoomComboBox.ComboBox.ActiveText; double percent; if (!TryParsePercent (text, out percent)) { (PintaCore.Actions.View.ZoomComboBox.ComboBox as Gtk.ComboBoxEntry).Entry.Text = temp_zoom; return; } if (percent > 3600) PintaCore.Actions.View.ZoomComboBox.ComboBox.Active = 0; } #endregion /// <summary> /// Converts the string representation of a percent (with or without a '%' sign) to a numeric value /// </summary> public static bool TryParsePercent (string text, out double percent) { return double.TryParse (text.Trim ('%'), out percent); } public void SuspendZoomUpdate () { suspend_zoom_change = true; } public void ResumeZoomUpdate () { suspend_zoom_change = false; } public void UpdateCanvasScale () { string text = PintaCore.Actions.View.ZoomComboBox.ComboBox.ActiveText; // stay in "Zoom to Window" mode if this function was called without the zoom level being changed by the user (e.g. 
if the // image was rotated or cropped) and "Zoom to Window" mode is active if (text == Catalog.GetString ("Window") || (ZoomToWindowActivated && old_zoom_text == text)) { PintaCore.Actions.View.ZoomToWindow.Activate (); ZoomToWindowActivated = true; return; } else { ZoomToWindowActivated = false; } double percent; if (!TryParsePercent (text, out percent)) return; percent = Math.Min (percent, 3600); percent = percent / 100.0; PintaCore.Workspace.Scale = percent; } #region Action Handlers private void HandlePintaCoreActionsViewActualSizeActivated (object sender, EventArgs e) { int default_zoom = DefaultZoomIndex (); if (ZoomComboBox.ComboBox.Active != default_zoom) { ZoomComboBox.ComboBox.Active = default_zoom; UpdateCanvasScale (); } } private void HandlePintaCoreActionsViewZoomComboBoxComboBoxChanged (object sender, EventArgs e) { if (suspend_zoom_change) return; PintaCore.Workspace.ActiveDocument.Workspace.ZoomManually (); } private void HandlePintaCoreActionsViewZoomOutActivated (object sender, EventArgs e) { PintaCore.Workspace.ActiveDocument.Workspace.ZoomOut (); } private void HandlePintaCoreActionsViewZoomInActivated (object sender, EventArgs e) { PintaCore.Workspace.ActiveDocument.Workspace.ZoomIn (); } #endregion /// <summary> /// Returns the index in the ZoomCollection of the default zoom level /// </summary> private int DefaultZoomIndex() { return Array.IndexOf(ZoomCollection, "100%"); } } }
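// Illustrative sketch (not part of the Pinta sources above): the zoom combo box accepts
// free-form text such as "150%" or "66", which TryParsePercent parses after stripping the
// '%' sign, and UpdateCanvasScale then clamps to the 3600% ceiling before converting it to
// a canvas scale factor. The names below (ZoomParsingExample, TryGetScale) are hypothetical
// and exist only to show that parse-trim-clamp sequence in isolation, without Gtk.
using System;
using System.Globalization;

namespace Pinta.Examples
{
	public static class ZoomParsingExample
	{
		// Mirrors ViewActions.TryParsePercent: strip a '%' sign and parse the number.
		public static bool TryParsePercent (string text, out double percent)
		{
			return double.TryParse (text.Trim ().Trim ('%'), NumberStyles.Float,
			                        CultureInfo.CurrentCulture, out percent);
		}

		// Converts user text to a canvas scale, clamping to the 3600% maximum.
		public static bool TryGetScale (string text, out double scale)
		{
			double percent;

			if (!TryParsePercent (text, out percent)) {
				scale = 1.0;
				return false;
			}

			percent = Math.Min (percent, 3600);
			scale = percent / 100.0;	// "100%" maps to a scale of 1.0
			return true;
		}
	}
}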
using System; using System.Drawing; using Microsoft.DirectX; namespace Voyage.Terraingine.DataCore { /// <summary> /// World data for a piece of terrain. /// </summary> public class TerrainPage { #region Data Members private string _name; private Vector3 _position; private Quaternion _rotation; private bool _renderable; private TerrainPatch _patch; private Vector3 _scale; private float _maxVertexHeight; #endregion #region Properties /// <summary> /// Accesses the name of the TerrainPage. /// </summary> public string Name { get { return _name; } set { _name = value; } } /// <summary> /// Get the local position of the TerrainPage. /// </summary> public Vector3 Position { get{ return _position; } set{ _position = value; } } /// <summary> /// Get the local rotation of the TerrainPage. /// </summary> public Quaternion Rotation { get{ return _rotation; } set{ _rotation = value; } } /// <summary> /// Is the TerrainPage renderable? /// </summary> public bool Renderable { get{ return _renderable; } set{ _renderable = value; } } /// <summary> /// Get the TerrainPatch of the TerrainPage. /// </summary> public TerrainPatch TerrainPatch { get{ return _patch; } set{ _patch = value; } } /// <summary> /// Gets or sets the scale of the TerrainPage. /// </summary> public Vector3 Scale { get { return _scale; } set { _scale = value; } } /// <summary> /// Gets or sets the maximum vertex height for the TerrainPatch. /// </summary> public float MaximumVertexHeight { get { return _maxVertexHeight; } set { Vector3 position; _maxVertexHeight = value; _patch.RefreshBuffers = true; for ( int i = 0; i < _patch.Rows; i++ ) { for ( int j = 0; j < _patch.Columns; j++ ) { position = _patch.Vertices[i * _patch.Rows + j].Position; if ( position.Y > _maxVertexHeight ) { position.Y = _maxVertexHeight; _patch.Vertices[i * _patch.Rows + j].Position = position; } } } _patch.CalculateNormals(); } } #endregion #region Basic Methods /// <summary> /// Initialize a TerrainPage object. /// </summary> public TerrainPage() { Dispose(); _patch = new TerrainPatch(); } /// <summary> /// Creates a member-wise copy of the specified TerrainPage. /// </summary> /// <param name="page">The TerrainPage to copy.</param> public TerrainPage( TerrainPage page ) { if ( page != null ) { _name = page._name; _position = page._position; _scale = page._scale; _rotation = page._rotation; _renderable = page._renderable; _maxVertexHeight = page._maxVertexHeight; _patch = new TerrainPatch( page._patch ); } else { Dispose(); _patch = new TerrainPatch(); } } /// <summary> /// Safely release the data held in the TerrainPage object. /// </summary> public void Dispose() { ResetData(); if ( _patch != null ) _patch.Dispose(); } /// <summary> /// Reset the data in the TerrainPage (not including the TerrainPatch). /// </summary> public void ResetData() { _name = null; _position = new Vector3( 0f, 0f, 0f ); _scale = new Vector3( 1.0f, 1.0f, 1.0f ); _rotation = Quaternion.Identity; _renderable = true; _maxVertexHeight = 1.0f; } /// <summary> /// Gets the model matrix for the TerrainPage. /// </summary> /// <returns>The model matrix for the TerrainPage.</returns> public Matrix TerrainModelMatrix() { return Matrix.Transformation( Vector3.Empty, Quaternion.Identity, _scale, Vector3.Empty, _rotation, _position ); } #endregion #region Vertex Selection And Distances /// <summary> /// Selects the nearest vertex to the picking ray. 
/// </summary> /// <param name="ray">Picking ray used to select a vertex.</param> /// <param name="origin">Origin of the picking ray.</param> /// <param name="endSelection">Whether to enable clearing selected vertices if one is not selected.</param> /// <param name="multiSelect">Whether to allow selection of multiple vertices.</param> public void SelectVertex( Vector3 ray, Vector3 origin, bool endSelection, bool multiSelect ) { bool previouslySelected = false; int selectedVertex = _patch.FindNearestVertex( ray, origin + _position ); float distance = VectorMath.Distance_PointToLine( ray, origin, _patch.Vertices[selectedVertex].Position + _position ); if ( distance > _patch.NearestVertices / 2 ) selectedVertex = -1; // If multiple-vertex selection is not allowed if ( !multiSelect ) { // Has the vertex been previously selected? if ( selectedVertex > -1 && _patch.SelectedVertices[selectedVertex] ) previouslySelected = true; // Is this a "begin" selection? if ( !endSelection ) { // If the vertex has not been previously selected, clear all selected vertices if ( !previouslySelected ) _patch.ResetSelectedVertices(); // If the vertex is valid, select it if ( selectedVertex > -1 ) _patch.SelectVertex( selectedVertex ); } } else if ( !endSelection ) { // Multiple-vertex selection is allowed and a vertex was selected // Only select new vertices on a "begin" selection _patch.SelectVertex( selectedVertex ); } _patch.RefreshVertices = true; } /// <summary> /// Selects the specified vertex. /// </summary> /// <param name="index">Index of the vertex.</param> /// <param name="endSelection">Whether to enable clearing selected vertices if one is not selected.</param> /// <param name="multiSelect">Whether to allow selection of multiple vertices.</param> public void SelectVertex( int index, bool endSelection, bool multiSelect ) { bool previouslySelected = false; // If multiple-vertex selection is not allowed if ( !multiSelect ) { // Has the vertex been previously selected? if ( index > -1 && _patch.SelectedVertices[index] ) previouslySelected = true; // Is this a "begin" selection? if ( !endSelection ) { // If the vertex has not been previously selected, clear all selected vertices if ( !previouslySelected ) _patch.ResetSelectedVertices(); // If the vertex is valid, select it if ( index > -1 ) _patch.SelectVertex( index ); } } else if ( !endSelection ) { // Multiple-vertex selection is allowed and a vertex was selected // Only select new vertices on a "begin" selection _patch.SelectVertex( index ); } _patch.RefreshVertices = true; } /// <summary> /// Gets the normal of the nearest vertex to the intersecting ray. /// </summary> /// <param name="ray">The intersecting ray.</param> /// <param name="point">The point from which to draw the ray.</param> /// <returns>The normal of the nearest vertex.</returns> public Vector3 GetNearestVertexNormal( Vector3 ray, Vector3 point ) { int vertex = _patch.FindNearestVertex( ray, point + _position ); return _patch.Vertices[vertex].Normal; } /// <summary> /// Gets the normal of the triangle where the ray intersects the terrain. 
/// </summary> /// <param name="ray">The intersecting ray.</param> /// <param name="point">The point from which to draw the ray.</param> /// <param name="threshold">The distance threshold under which to get the nearest vertex normal.</param> /// <returns>The normal of the intersected triangle.</returns> public Vector3 GetIntersectNormal( Vector3 ray, Vector3 point, float threshold ) { // Adjust ray to TerrainPage coordinate space ray.X += _position.X * _scale.X; ray.Y += _position.Y * _scale.Y; ray.Z += _position.Z * _scale.Z; Vector3 normal = Vector3.Empty; int vertex = _patch.FindNearestVertex( ray, point ); Vector3 length; float thresholdSquared = threshold * threshold; // Check if the nearest vertex is under the threshold distance length = ray - _patch.Vertices[vertex].Position; if ( length.LengthSq() < thresholdSquared ) normal = _patch.Vertices[vertex].Normal; return normal; } #endregion #region Vertex Movement /// <summary> /// Moves the selected vertices in the TerrainPage. /// </summary> /// <param name="distChange">The distance to move the selected vertices.</param> public void MoveSelectedVertices( float distChange ) { MoveSelectedVertices( false, distChange, false, 0f ); } /// <summary> /// Moves the selected vertices in the TerrainPage. /// </summary> /// <param name="enableSoftSelection">Whether to use soft selection.</param> /// <param name="distChange">The distance to move the selected vertices.</param> /// <param name="useFalloff">Whether to use falloff in blending the vertices.</param> /// <param name="softDistSquared">The squared soft selection term.</param> public void MoveSelectedVertices( bool enableSoftSelection, float distChange, bool useFalloff, float softDistSquared ) { Vector3 position; for ( int i = 0; i < _patch.NumVertices; i++ ) { if ( _patch.SelectedVertices[i] ) { position = _patch.Vertices[i].Position; position.Y += distChange; if ( position.Y < 0.0f ) position.Y = 0.0f; else if ( position.Y > _maxVertexHeight ) position.Y = _maxVertexHeight; _patch.Vertices[i].Position = position; } } if ( enableSoftSelection ) MoveSoftSelection( distChange, useFalloff, softDistSquared ); _patch.CalculateNormals(); } /// <summary> /// Moves the vertices selected using soft selection. /// </summary> /// <param name="distChange">The distance to move the selected vertices.</param> /// <param name="useFalloff">Whether to use falloff in blending the vertices.</param> /// <param name="softDistSquared">The squared soft selection term.</param> private void MoveSoftSelection( float distChange, bool useFalloff, float softDistSquared ) { Vector3 position; float distance; for ( int i = 0; i < _patch.NumVertices; i++ ) { if ( !_patch.SelectedVertices[i] ) { position = _patch.Vertices[i].Position; distance = _patch.FindShortestDistanceToSelectedVertex( position ); if ( distance <= softDistSquared ) { if ( useFalloff ) position.Y += distChange * ( 1 - distance / softDistSquared ); else position.Y += distChange; if ( position.Y < 0.0f ) position.Y = 0.0f; else if ( position.Y > _maxVertexHeight ) position.Y = _maxVertexHeight; _patch.Vertices[i].Position = position; } } } } /// <summary> /// Moves the selected vertices in the TerrainPage. Does not affect soft-selected vertices. 
/// </summary> /// <param name="enableSoftSelection">Whether to use soft selection.</param> /// <param name="height">The height to set the selected vertices to.</param> public void SetSelectedVerticesHeight( bool enableSoftSelection, float height ) { Vector3 position; for ( int i = 0; i < _patch.NumVertices; i++ ) { if ( _patch.SelectedVertices[i] ) { position = _patch.Vertices[i].Position; position.Y = height; if ( position.Y < 0.0f ) position.Y = 0.0f; else if ( position.Y > _maxVertexHeight ) position.Y = _maxVertexHeight; _patch.Vertices[i].Position = position; } } _patch.CalculateNormals(); } #endregion #region Other Methods /// <summary> /// Gets the plane on the terrain (defined by three points, /// with the fourth point as the 2D point elevated onto the plane) the given point is in. /// </summary> /// <param name="point">The point in the terrain.</param> /// <param name="v1">The first vertex in the plane.</param> /// <param name="v2">The second vertex in the plane.</param> /// <param name="v3">The third vertex in the plane.</param> /// <param name="point3d">The 3D elevated point in the terrain.</param> public bool GetPlane( Vector2 point, out int v1, out int v2, out int v3, out Vector3 point3d ) { v1 = -1; v2 = -1; v3 = -1; point3d = Vector3.Empty; bool result = false; // Check if the point is within the bounds of the terrain if ( point.X < _position.X || point.X > _patch.Width || point.Y < _position.Z || point.Y > _patch.Height ) return result; GetPlane( point.X, point.Y, out v1, out v2, out v3 ); // rows & cols determines the lower-left-hand corner of the quad // Check if the point is in the lower-right triangle of the quad point3d = _patch.GetPointOnPlane( v1, v2, v3, point ); if ( !VectorMath.IsPointInTriangle( _patch.Vertices[v1].Position, _patch.Vertices[v2].Position, _patch.Vertices[v3].Position, point3d ) ) { // The point is within the upper-left triangle // Return plane as counter-clockwise triangle, with the hypotenuse formed by the first // and last points v3 = v1; v2 = v1 + _patch.Columns; v1 = v1 + _patch.Columns + 1; if ( VectorMath.IsPointInTriangle( _patch.Vertices[v1].Position, _patch.Vertices[v2].Position, _patch.Vertices[v3].Position, point3d ) ) result = true; else result = false; } else result = true; return result; } /// <summary> /// Gets the plane on the terrain (defined by three points) the given point is in. 
/// </summary> /// <param name="point">The point in the terrain.</param> /// <param name="v1">The first vertex in the plane.</param> /// <param name="v2">The second vertex in the plane.</param> /// <param name="v3">The third vertex in the plane.</param> public bool GetPlane( Vector3 point, out int v1, out int v2, out int v3 ) { v1 = -1; v2 = -1; v3 = -1; bool result = false; // Check if the point is within the bounds of the terrain if ( point.X < _position.X || point.X > _patch.Width || point.Y < _position.Y || point.Z < _position.Z || point.Z > _patch.Height ) return false; GetPlane( point.X, point.Y, out v1, out v2, out v3 ); if ( !VectorMath.IsPointInTriangle( _patch.Vertices[v1].Position, _patch.Vertices[v2].Position, _patch.Vertices[v3].Position, point ) ) { // The point is within the upper-left triangle // Return plane as counter-clockwise triangle, with the hypotenuse formed by the first // and last points v3 = v1; v2 = v1 + _patch.Columns; v1 = v1 + _patch.Columns + 1; if ( VectorMath.IsPointInTriangle( _patch.Vertices[v1].Position, _patch.Vertices[v2].Position, _patch.Vertices[v3].Position, point ) ) result = true; else result = false; } else result = true; return result; } /// <summary> /// Gets the vertices that make up the plane at the specified point in the terrain. /// </summary> /// <param name="xPos">The X-coordinate in the terrain from which to find the plane.</param> /// <param name="zPos">The Z-coordinate in the terrain from which to find the plane.</param> /// <param name="v1">The first vertex of the plane.</param> /// <param name="v2">The second vertex of the plane.</param> /// <param name="v3">The third vertex of the plane.</param> /// <returns></returns> private bool GetPlane( float xPos, float zPos, out int v1, out int v2, out int v3 ) { bool result = false; // Determine the quad the point is in int rows = (int) ( zPos / _patch.RowHeight ); int cols = (int) ( xPos / _patch.ColumnWidth ); // If the point is right on the edge of the terrain, set to the last quad if ( rows == _patch.Rows ) rows--; if ( cols == _patch.Columns ) cols--; v1 = rows * _patch.Columns + cols; v2 = rows * _patch.Columns + cols + 1; v3 = (rows + 1) * _patch.Columns + cols + 1; return result; } #endregion } }
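// Illustrative sketch (not part of the Terraingine sources above): the soft-selection move in
// TerrainPage.MoveSoftSelection weights the height change of an unselected vertex by a linear
// falloff, weight = 1 - distance / softDistSquared, and then clamps the result into
// [0, maxVertexHeight], exactly as MoveSelectedVertices does for selected vertices. The helper
// below (SoftSelectionExample.MoveWithFalloff) is hypothetical and only restates that
// arithmetic without the DirectX vertex types.
using System;

namespace Voyage.Terraingine.Examples
{
	public static class SoftSelectionExample
	{
		/// <summary>
		/// Returns the new height of an unselected vertex after a soft-selection move.
		/// </summary>
		/// <param name="currentHeight">Current Y value of the vertex.</param>
		/// <param name="distChange">Height change applied to the selected vertices.</param>
		/// <param name="distance">Distance from this vertex to the nearest selected vertex.</param>
		/// <param name="softDistSquared">Soft-selection range term used as the falloff radius.</param>
		/// <param name="maxVertexHeight">Upper clamp for vertex heights.</param>
		/// <param name="useFalloff">Whether to scale the change by the linear falloff.</param>
		public static float MoveWithFalloff( float currentHeight, float distChange,
			float distance, float softDistSquared, float maxVertexHeight, bool useFalloff )
		{
			// Vertices outside the soft-selection range are left unchanged.
			if ( distance > softDistSquared )
				return currentHeight;

			float weight = useFalloff ? 1f - distance / softDistSquared : 1f;
			float height = currentHeight + distChange * weight;

			// Same clamping as MoveSelectedVertices / MoveSoftSelection.
			if ( height < 0f )
				height = 0f;
			else if ( height > maxVertexHeight )
				height = maxVertexHeight;

			return height;
		}
	}
}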
// *********************************************************************** // Copyright (c) 2009-2015 Charlie Poole, Rob Prouse // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // *********************************************************************** using System; using System.Collections; using System.Collections.Generic; using System.Linq; using NUnit.Framework; namespace NUnit.TestData.TestCaseSourceAttributeFixture { [TestFixture] public class TestCaseSourceAttributeFixture { #region Test Calling Assert.Ignore [TestCaseSource(nameof(source))] public void MethodCallsIgnore(int x, int y, int z) { Assert.Ignore("Ignore this"); } #pragma warning disable 414 private static object[] source = new object[] { new TestCaseData( 2, 3, 4 ) }; #pragma warning restore 414 #endregion #region Test With Ignored TestCaseData [TestCaseSource(nameof(ignored_source))] public void MethodWithIgnoredTestCases(int num) { } private static IEnumerable ignored_source { get { return new object[] { new TestCaseData(1), new TestCaseData(2).Ignore("Don't Run Me!") }; } } #endregion #region Test With Explicit TestCaseData [TestCaseSource(nameof(explicit_source))] public void MethodWithExplicitTestCases(int num) { } private static IEnumerable explicit_source { get { return new object[] { new TestCaseData(1), new TestCaseData(2).Explicit(), new TestCaseData(3).Explicit("Connection failing") }; } } #endregion #region Tests Using Instance Members as Source [Test, TestCaseSource(nameof(InstanceProperty))] public void MethodWithInstancePropertyAsSource(string source) { Assert.AreEqual("InstanceProperty", source); } IEnumerable InstanceProperty { get { return new object[] { new object[] { "InstanceProperty" } }; } } [Test, TestCaseSource(nameof(InstanceMethod))] public void MethodWithInstanceMethodAsSource(string source) { Assert.AreEqual("InstanceMethod", source); } IEnumerable InstanceMethod() { return new object[] { new object[] { "InstanceMethod" } }; } [Test, TestCaseSource(nameof(InstanceField))] public void MethodWithInstanceFieldAsSource(string source) { Assert.AreEqual("InstanceField", source); } #pragma warning disable 414 object[] InstanceField = { new object[] { "InstanceField" } }; #pragma warning restore 414 #endregion [Test, TestCaseSource(typeof(DivideDataProvider), nameof(DivideDataProvider.MyField), new object[] { 100, 4, 25 })] public void SourceInAnotherClassPassingParamsToField(int n, int d, int q) { } [Test, TestCaseSource(typeof(DivideDataProvider), nameof(DivideDataProvider.MyProperty), new 
object[] { 100, 4, 25 })] public void SourceInAnotherClassPassingParamsToProperty(int n, int d, int q) { } [Test, TestCaseSource(typeof(DivideDataProvider), nameof(DivideDataProvider.HereIsTheDataWithParameters), new object[] { 100, 4 })] public void SourceInAnotherClassPassingSomeDataToConstructorWrongNumberParam(int n, int d, int q) { } [TestCaseSource(nameof(exception_source))] public void MethodWithSourceThrowingException(string lhs, string rhs) { } [TestCaseSource("NonExistingSource")] public void MethodWithNonExistingSource(object param) { } [TestCaseSource(nameof(ComplexArrayBasedTestInputTestCases))] public void MethodWithArrayArguments(object o) { } static IEnumerable exception_source { get { yield return new TestCaseData("a", "a"); yield return new TestCaseData("b", "b"); throw new System.Exception("my message"); } } class DivideDataProvider { #pragma warning disable 0169, 0649 // x is never assigned static object[] myObject; public static string MyField; #pragma warning restore 0169, 0649 public static int MyProperty { get; set; } public static IEnumerable HereIsTheDataWithParameters(int inject1, int inject2, int inject3) { yield return new object[] { inject1, inject2, inject3 }; } public static IEnumerable HereIsTheData { get { yield return new object[] { 100, 20, 5 }; yield return new object[] { 100, 4, 25 }; } } } static object[] ComplexArrayBasedTestInput = new[] { new object[] { 1, "text", new object() }, new object[0], new object[] { 1, new int[] { 2, 3 }, 4 }, new object[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, new object[] { new byte[,] { { 1, 2 }, { 2, 3 } } } }; static IEnumerable<TestCaseData> ComplexArrayBasedTestInputTestCases() { foreach (var argumentValue in ComplexArrayBasedTestInput) yield return new TestCaseData(args: new object[] { argumentValue }); } #region Test name tests [TestCaseSource(nameof(TestCaseNameTestDataSource))] public static void TestCaseNameTestDataMethod(params object[] args) { } public static IEnumerable<TestCaseData> TestCaseNameTestDataSource() => from spec in TestDataSpec.Specs select new TestCaseData(spec.Arguments) .SetArgDisplayNames(spec.ArgDisplayNames) .SetProperty("ExpectedTestName", spec.GetTestCaseName(nameof(TestCaseNameTestDataMethod))); #endregion } }
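// Illustrative sketch (not part of the NUnit.TestData fixture above): a minimal, runnable
// example of the parameterized-source pattern exercised by the
// SourceInAnotherClassPassingParamsTo* tests — a static source method receives the object[]
// supplied to the TestCaseSource attribute and yields TestCaseData for the test method.
// The class and member names below (DivisionExamples, DivideCases) are hypothetical.
using System.Collections.Generic;
using NUnit.Framework;

namespace NUnit.TestData.Examples
{
    [TestFixture]
    public class DivisionExamples
    {
        // The object[] passed to the attribute becomes the arguments of DivideCases.
        [TestCaseSource(typeof(DivisionExamples), nameof(DivideCases), new object[] { 100, 4 })]
        public void DividesWithoutRemainder(int numerator, int denominator, int expected)
        {
            Assert.AreEqual(expected, numerator / denominator);
        }

        public static IEnumerable<TestCaseData> DivideCases(int numerator, int denominator)
        {
            // Yield one complete argument list per generated test case.
            yield return new TestCaseData(numerator, denominator, numerator / denominator)
                .SetArgDisplayNames($"{numerator}", $"{denominator}", "quotient");
        }
    }
}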
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace ApplicationGateway { using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// NetworkSecurityGroupsOperations operations. /// </summary> internal partial class NetworkSecurityGroupsOperations : IServiceOperations<NetworkClient>, INetworkSecurityGroupsOperations { /// <summary> /// Initializes a new instance of the NetworkSecurityGroupsOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal NetworkSecurityGroupsOperations(NetworkClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the NetworkClient /// </summary> public NetworkClient Client { get; private set; } /// <summary> /// Deletes the specified network security group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Send request AzureOperationResponse _response = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, customHeaders, cancellationToken).ConfigureAwait(false); return await Client.GetPostOrDeleteOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets the specified network security group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='expand'> /// Expands referenced resources. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<AzureOperationResponse<NetworkSecurityGroup>> GetWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (networkSecurityGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName); tracingParameters.Add("expand", expand); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (expand != null) { _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(expand))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<NetworkSecurityGroup>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<NetworkSecurityGroup>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, 
_result); } return _result; } /// <summary> /// Creates or updates a network security group in the specified resource /// group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='parameters'> /// Parameters supplied to the create or update network security group /// operation. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse<NetworkSecurityGroup>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, NetworkSecurityGroup parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Send Request AzureOperationResponse<NetworkSecurityGroup> _response = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, parameters, customHeaders, cancellationToken).ConfigureAwait(false); return await Client.GetPutOrPatchOperationResultAsync(_response, customHeaders, cancellationToken).ConfigureAwait(false); } /// <summary> /// Gets all network security groups in a subscription. /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListAll", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups").ToString(); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await 
_httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets all network security groups in a resource group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { 
ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Deletes the specified network security group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (networkSecurityGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("DELETE"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 202 && (int)_statusCode != 200 && (int)_statusCode != 204) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Creates or updates a network security group in the specified resource /// group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='parameters'> /// Parameters supplied to the create or update network security group /// operation. 
/// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<NetworkSecurityGroup>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string networkSecurityGroupName, NetworkSecurityGroup parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (networkSecurityGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "networkSecurityGroupName"); } if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("networkSecurityGroupName", networkSecurityGroupName); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{networkSecurityGroupName}", System.Uri.EscapeDataString(networkSecurityGroupName)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("PUT"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(parameters != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 201 && (int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<NetworkSecurityGroup>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 201) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = 
Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<NetworkSecurityGroup>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<NetworkSecurityGroup>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets all network security groups in a subscription. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListAllNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListAllNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { 
ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets all network security groups in a resource group. /// </summary> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IPage<NetworkSecurityGroup>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (nextPageLink == null) { throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("nextPageLink", nextPageLink); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters); } // Construct URL string _url = "{nextLink}"; _url = _url.Replace("{nextLink}", nextPageLink); List<string> _queryParameters = new List<string>(); if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? 
"&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IPage<NetworkSecurityGroup>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<NetworkSecurityGroup>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { 
ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
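// ---------------------------------------------------------------------------------------------
// Usage sketch (not part of the generated operations class above). A minimal, hedged example of
// driving the *WithHttpMessagesAsync methods shown above. The `NetworkManagementClient` type,
// its `NetworkSecurityGroups` property and the `Location`/`Name` members of the model are
// assumptions based on how AutoRest-generated Azure management SDKs are usually shaped; only
// the operation signatures themselves are taken from the code above.
// ---------------------------------------------------------------------------------------------
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest.Azure;

internal static class NetworkSecurityGroupUsageSketch
{
    public static async Task RunAsync(NetworkManagementClient client) // assumed client type
    {
        // Long-running PUT: the Begin* variant returns once the service accepts the request
        // (HTTP 200 or 201) instead of polling the operation to completion.
        var parameters = new NetworkSecurityGroup { Location = "westus" };
        AzureOperationResponse<NetworkSecurityGroup> created =
            await client.NetworkSecurityGroups.BeginCreateOrUpdateWithHttpMessagesAsync(
                "my-resource-group", "my-nsg", parameters,
                customHeaders: null, cancellationToken: CancellationToken.None);
        Console.WriteLine($"Created/updated: {created.Body?.Name}");

        // Paging: ListAllNext/ListNext take the NextPageLink returned by a previous List call
        // and deserialize the next Page<NetworkSecurityGroup>.
        string nextPageLink = "<NextPageLink from a previous List response>"; // placeholder
        AzureOperationResponse<IPage<NetworkSecurityGroup>> page =
            await client.NetworkSecurityGroups.ListNextWithHttpMessagesAsync(nextPageLink);
        foreach (NetworkSecurityGroup nsg in page.Body)
        {
            Console.WriteLine(nsg.Name);
        }
    }
}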
// // ButtonBackend.cs // // Author: // Alexander Bothe <[email protected]> // // Copyright (c) 2013 Alexander Bothe // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using Xwt.Backends; using Xwt.Drawing; namespace Xwt.Sdl { public class ButtonBackend : WidgetBackend, IButtonBackend { #region Properties ButtonStyle style; ButtonType type; Label label; Color? labelColor; bool isDefault; public Size LabelSize => label != null ? label.Size : new Size (); public Color LabelColor { get => labelColor != null ? labelColor.Value : Colors.Black; set { labelColor = value; Invalidate (); } } public bool IsDefault { get => isDefault; set { isDefault = value; Invalidate (); } } bool mnemonic; ImageDescription image; ContentPosition pos; public new IButtonEventSink EventSink {get{return base.EventSink as IButtonEventSink;}} bool clicked; #endregion protected override void SensitivityChanged () { if (!Sensitive) clicked = false; } const double imageToLabelSpace = 10.0; const double yPadding = 5; const double xPadding = 10; const double cornerRadius = 3; protected override void DrawInternally (CairoBackend.CairoContextBackend c,Rectangle dirtyRect, double X, double Y) { var style = WidgetStyles.Instance; // Border { c.MoveTo (X, Y + cornerRadius); // Top left corner c.Arc (X +cornerRadius, Y + cornerRadius, cornerRadius, -90, -45); // top edge c.Context.RelLineTo (Width - (cornerRadius*2), 0); // top right corner c.Arc (X + Width -cornerRadius, Y + cornerRadius, cornerRadius, -90, 0); // left edge c.Context.RelLineTo (0, Height-cornerRadius*2); // bottom right corner c.Arc (X + Width -cornerRadius, Y + Height - cornerRadius, cornerRadius, 0, 90); // bottom corner c.Context.RelLineTo (-Width+(cornerRadius*2), 0); // bottom left corner c.Arc (X + cornerRadius, Y + Height - cornerRadius, cornerRadius, 90, 180); c.Context.ClosePath (); c.SetColor (style.ButtonBorderColor); c.Context.LineWidth = style.ButtonBorderLineWidth; c.Context.StrokePreserve (); } // button background { double grey; if (!Sensitive) grey = style.ButtonInsensitiveGrey; else if (clicked) grey = style.ButtonClickedGrey; else if (MouseEntered) grey = style.ButtonHoveredGrey; else grey = style.ButtonDefaultGrey; var g = new Cairo.LinearGradient (X + Width / 2, Y, X + Width / 2, Y + Height); g.AddColorStop (0, new Cairo.Color (grey, grey, grey)); grey /= 1.2; g.AddColorStop (1, new Cairo.Color (grey, grey, grey)); c.Context.SetSource (g); c.Fill (); g.Dispose (); } // Focus dash border if (HasFocus) { c.Context.LineWidth = 
1; c.SetColor (style.FocusDashBorderColor); c.Context.SetDash (style.FocusDashLine, 0); c.Rectangle (X + xPadding/2, Y + yPadding/2, Width - xPadding, Height - yPadding); c.Context.Stroke (); } // Image if (!image.IsNull) { var imgBck = (image.Backend as ImageBackend).Bitmap; var data = imgBck.LockBits (new System.Drawing.Rectangle (0, 0, imgBck.Width, imgBck.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppPArgb); var imgSurf = new Cairo.ImageSurface (data.Scan0, Cairo.Format.Argb32, data.Width, data.Height, data.Stride); var imageWidth = (double)imgSurf.Width; var imgY = Y + Height / 2.0 - imgSurf.Height / 2; if (label == null) c.Context.SetSource (imgSurf, X + Width / 2 - imageWidth / 2, imgY); else { var contentWidth = (label != null ? (label.GetBackend() as LabelBackend).Width : 0) + imageToLabelSpace + imageWidth; if (contentWidth < Width) c.Context.SetSource (imgSurf, X + Width / 2 - contentWidth / 2, imgY); else c.Context.SetSource (imgSurf, X + xPadding, imgY); } c.Context.Paint (); imgSurf.Dispose (); imgBck.UnlockBits (data); } // Label if (label != null) { var labelBack = label.GetBackend () as LabelBackend; if (labelColor != null) { labelBack.textCol = labelColor.Value; } else { labelBack.textCol = Sensitive ? style.ButtonLabelColor : style.ButtonInsensitiveLabelColor; } labelBack.Draw (c, dirtyRect); } } public override Size GetPreferredSize (Cairo.Context c, double maxX, double maxY) { var ext = label != null ? (label.GetBackend () as LabelBackend).GetPreferredSize(c,maxX, maxY) : new Size(); var imgSz = image.Size; var x = ext.Width + imgSz.Width + cornerRadius/2; if (label != null && imgSz.Width > 0) x += imageToLabelSpace; var y = Math.Max(ext.Height, imgSz.Height); return new Size (x + xPadding, y + yPadding + cornerRadius/2); } internal override bool OnBoundsChanged (double x, double y, double width, double height) { if (!base.OnBoundsChanged (x, y, width, height)) return false; if (label != null) { var ll = (label.GetBackend () as LabelBackend); var imageWidth = image.IsNull ? 
0.0 : (image.Size.Width + imageToLabelSpace); var labelSize = ll.GetPreferredSize ( SizeConstraint.WithSize (width - imageWidth - 2 * cornerRadius), SizeConstraint.WithSize (height - 2* cornerRadius)); double movX; if (labelSize.Width + imageWidth < Width) movX = Width / 2.0 + (-labelSize.Width + imageWidth) / 2; else movX = xPadding + imageWidth; ll.OnBoundsChanged(movX, Height / 2d - labelSize.Height / 2d, Math.Max(0.0, Math.Min(labelSize.Width, Width - imageWidth)), Math.Max(0.0, Math.Min(labelSize.Height, Height - 2 * cornerRadius))); } return true; } internal override void FireMouseEnter () { clicked = false; base.FireMouseEnter (); Invalidate (); } internal override void FireMouseLeave () { clicked = false; base.FireMouseLeave (); Invalidate (); } internal override bool FireMouseButton (bool down, PointerButton butt, int x, int y, int multiplePress = 1) { var ret = base.FireMouseButton (down, butt, x, y, multiplePress); if (Sensitive) { if (down) { clicked = true; } else if(clicked) { clicked = false; EventSink.OnClicked (); } Invalidate (); } return ret; } internal override bool FireKeyDown (Key k, char ch, ModifierKeys mods, bool rep, uint timestamp) { if (Sensitive && mods == ModifierKeys.None && (k == Key.Space || k == Key.Return)) { clicked = true; Invalidate (); return true; } return base.FireKeyDown (k, ch, mods, rep, timestamp); } internal override bool FireKeyUp (Key k, char ch, ModifierKeys mods, bool rep, uint timestamp) { if (clicked) { clicked = false; EventSink.OnClicked (); Invalidate (); return true; } return base.FireKeyUp (k, ch, mods, rep, timestamp); } public override object Font { set { base.Font = value; if (label != null) label.Font = Frontend.Font; UpdateWidgetPreferredSize (); } } #region IButtonBackend implementation public void SetButtonStyle (ButtonStyle style) { this.style = style; Invalidate (); } public void SetButtonType (ButtonType type) { this.type = type; Invalidate (); } public void SetContent (string text, bool useMnemonic, ImageDescription image, ContentPosition position) { if (string.IsNullOrEmpty (text)) { if (label != null) { label.Dispose (); label = null; } } else if (label != null) label.Text = text; else { label = new Label (text); var back = label.GetBackend () as LabelBackend; back.Parent = this; back.havePadding = false; back.Font = Font; } this.mnemonic = useMnemonic; this.image = image; this.pos = position; Invalidate (); } #endregion } }
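// ---------------------------------------------------------------------------------------------
// Stand-alone sketch of the rounded-rectangle border that ButtonBackend traces above: four
// quarter arcs at the corners joined by the straight edges Cairo draws between them. This
// version talks to Mono.Cairo's Cairo.Context directly (angles in radians) rather than the
// Xwt CairoContextBackend wrapper used by the widget, and the colours/sizes are arbitrary.
// ---------------------------------------------------------------------------------------------
using System;
using Cairo;

internal static class RoundedButtonOutlineSketch
{
    // Builds the same path shape as the "Border" block in ButtonBackend.DrawInternally.
    static void AddRoundedRect(Context cr, double x, double y, double w, double h, double r)
    {
        const double deg = Math.PI / 180.0; // Cairo.Context.Arc expects radians
        cr.NewPath();
        cr.Arc(x + w - r, y + r,     r, -90 * deg,   0 * deg); // top-right corner
        cr.Arc(x + w - r, y + h - r, r,   0 * deg,  90 * deg); // bottom-right corner
        cr.Arc(x + r,     y + h - r, r,  90 * deg, 180 * deg); // bottom-left corner
        cr.Arc(x + r,     y + r,     r, 180 * deg, 270 * deg); // top-left corner
        cr.ClosePath();
    }

    static void Main()
    {
        using (var surface = new ImageSurface(Format.Argb32, 120, 40))
        using (var cr = new Context(surface))
        {
            AddRoundedRect(cr, 0.5, 0.5, 119, 39, 3); // 3 px corner radius, like cornerRadius above
            cr.LineWidth = 1;
            cr.SetSourceRGB(0.45, 0.45, 0.45);
            cr.StrokePreserve();                      // stroke the border, keep the path...
            cr.SetSourceRGB(0.85, 0.85, 0.85);
            cr.Fill();                                // ...so the same path can also be filled
            surface.WriteToPng("button-outline.png");
        }
    }
}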
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // DistinctQueryOperator.cs // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System.Collections.Generic; using System.Diagnostics; using System.Threading; namespace System.Linq.Parallel { /// <summary> /// This operator yields all of the distinct elements in a single data set. It works quite /// like the above set operations, with the obvious difference being that it only accepts /// a single data source as input. /// </summary> /// <typeparam name="TInputOutput"></typeparam> internal sealed class DistinctQueryOperator<TInputOutput> : UnaryQueryOperator<TInputOutput, TInputOutput> { private readonly IEqualityComparer<TInputOutput> _comparer; // An (optional) equality comparer. //--------------------------------------------------------------------------------------- // Constructs a new distinction operator. // internal DistinctQueryOperator(IEnumerable<TInputOutput> source, IEqualityComparer<TInputOutput> comparer) : base(source) { Debug.Assert(source != null, "child data source cannot be null"); _comparer = comparer; SetOrdinalIndexState(OrdinalIndexState.Shuffled); } //--------------------------------------------------------------------------------------- // Just opens the current operator, including opening the child and wrapping it with // partitions as needed. // internal override QueryResults<TInputOutput> Open(QuerySettings settings, bool preferStriping) { // We just open our child operator. Do not propagate the preferStriping value, but // instead explicitly set it to false. Regardless of whether the parent prefers striping or range // partitioning, the output will be hash-partititioned. QueryResults<TInputOutput> childResults = Child.Open(settings, false); return new UnaryQueryOperatorResults(childResults, this, settings, false); } internal override void WrapPartitionedStream<TKey>( PartitionedStream<TInputOutput, TKey> inputStream, IPartitionedStreamRecipient<TInputOutput> recipient, bool preferStriping, QuerySettings settings) { // Hash-repartion the source stream if (OutputOrdered) { WrapPartitionedStreamHelper<TKey>( ExchangeUtilities.HashRepartitionOrdered<TInputOutput, NoKeyMemoizationRequired, TKey>( inputStream, null, null, _comparer, settings.CancellationState.MergedCancellationToken), recipient, settings.CancellationState.MergedCancellationToken); } else { WrapPartitionedStreamHelper<int>( ExchangeUtilities.HashRepartition<TInputOutput, NoKeyMemoizationRequired, TKey>( inputStream, null, null, _comparer, settings.CancellationState.MergedCancellationToken), recipient, settings.CancellationState.MergedCancellationToken); } } //--------------------------------------------------------------------------------------- // This is a helper method. WrapPartitionedStream decides what type TKey is going // to be, and then call this method with that key as a generic parameter. 
// private void WrapPartitionedStreamHelper<TKey>( PartitionedStream<Pair, TKey> hashStream, IPartitionedStreamRecipient<TInputOutput> recipient, CancellationToken cancellationToken) { int partitionCount = hashStream.PartitionCount; PartitionedStream<TInputOutput, TKey> outputStream = new PartitionedStream<TInputOutput, TKey>(partitionCount, hashStream.KeyComparer, OrdinalIndexState.Shuffled); for (int i = 0; i < partitionCount; i++) { if (OutputOrdered) { outputStream[i] = new OrderedDistinctQueryOperatorEnumerator<TKey>(hashStream[i], _comparer, hashStream.KeyComparer, cancellationToken); } else { outputStream[i] = (QueryOperatorEnumerator<TInputOutput, TKey>)(object) new DistinctQueryOperatorEnumerator<TKey>(hashStream[i], _comparer, cancellationToken); } } recipient.Receive(outputStream); } //--------------------------------------------------------------------------------------- // Whether this operator performs a premature merge that would not be performed in // a similar sequential operation (i.e., in LINQ to Objects). // internal override bool LimitsParallelism { get { return false; } } //--------------------------------------------------------------------------------------- // This enumerator performs the distinct operation incrementally. It does this by // maintaining a history -- in the form of a set -- of all data already seen. It simply // then doesn't return elements it has already seen before. // class DistinctQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TInputOutput, int> { private QueryOperatorEnumerator<Pair, TKey> _source; // The data source. private Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the distinct set. private CancellationToken _cancellationToken; private Shared<int> _outputLoopCount; // Allocated in MoveNext to avoid false sharing. //--------------------------------------------------------------------------------------- // Instantiates a new distinction operator. // internal DistinctQueryOperatorEnumerator( QueryOperatorEnumerator<Pair, TKey> source, IEqualityComparer<TInputOutput> comparer, CancellationToken cancellationToken) { Debug.Assert(source != null); _source = source; _hashLookup = new Set<TInputOutput>(comparer); _cancellationToken = cancellationToken; } //--------------------------------------------------------------------------------------- // Walks the single data source, skipping elements it has already seen. // internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey) { Debug.Assert(_source != null); Debug.Assert(_hashLookup != null); // Iterate over this set's elements until we find a unique element. TKey keyUnused = default(TKey); Pair current = new Pair(default(TInputOutput), default(NoKeyMemoizationRequired)); if (_outputLoopCount == null) _outputLoopCount = new Shared<int>(0); while (_source.MoveNext(ref current, ref keyUnused)) { if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0) CancellationState.ThrowIfCanceled(_cancellationToken); // We ensure we never return duplicates by tracking them in our set. if (_hashLookup.Add((TInputOutput)current.First)) { #if DEBUG currentKey = unchecked((int)0xdeadbeef); #endif currentElement = (TInputOutput)current.First; return true; } } return false; } protected override void Dispose(bool disposing) { Debug.Assert(_source != null); _source.Dispose(); } } //--------------------------------------------------------------------------------------- // Returns an enumerable that represents the query executing sequentially. 
// internal override IEnumerable<TInputOutput> AsSequentialQuery(CancellationToken token) { IEnumerable<TInputOutput> wrappedChild = CancellableEnumerable.Wrap(Child.AsSequentialQuery(token), token); return wrappedChild.Distinct(_comparer); } class OrderedDistinctQueryOperatorEnumerator<TKey> : QueryOperatorEnumerator<TInputOutput, TKey> { private QueryOperatorEnumerator<Pair, TKey> _source; // The data source. private Dictionary<Wrapper<TInputOutput>, TKey> _hashLookup; // The hash lookup, used to produce the distinct set. private IComparer<TKey> _keyComparer; // Comparer to decide the key order. private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, TKey>> _hashLookupEnumerator; // Enumerates over _hashLookup. private CancellationToken _cancellationToken; //--------------------------------------------------------------------------------------- // Instantiates a new distinction operator. // internal OrderedDistinctQueryOperatorEnumerator( QueryOperatorEnumerator<Pair, TKey> source, IEqualityComparer<TInputOutput> comparer, IComparer<TKey> keyComparer, CancellationToken cancellationToken) { Debug.Assert(source != null); _source = source; _keyComparer = keyComparer; _hashLookup = new Dictionary<Wrapper<TInputOutput>, TKey>( new WrapperEqualityComparer<TInputOutput>(comparer)); _cancellationToken = cancellationToken; } //--------------------------------------------------------------------------------------- // Walks the single data source, skipping elements it has already seen. // internal override bool MoveNext(ref TInputOutput currentElement, ref TKey currentKey) { Debug.Assert(_source != null); Debug.Assert(_hashLookup != null); if (_hashLookupEnumerator == null) { Pair elem = new Pair(default(TInputOutput), default(NoKeyMemoizationRequired)); TKey orderKey = default(TKey); int i = 0; while (_source.MoveNext(ref elem, ref orderKey)) { if ((i++ & CancellationState.POLL_INTERVAL) == 0) CancellationState.ThrowIfCanceled(_cancellationToken); // For each element, we track the smallest order key for that element that we saw so far TKey oldEntry; Wrapper<TInputOutput> wrappedElem = new Wrapper<TInputOutput>((TInputOutput)elem.First); // If this is the first occurence of this element, or the order key is lower than all keys we saw previously, // update the order key for this element. if (!_hashLookup.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(orderKey, oldEntry) < 0) { // For each "elem" value, we store the smallest key, and the element value that had that key. // Note that even though two element values are "equal" according to the EqualityComparer, // we still cannot choose arbitrarily which of the two to yield. _hashLookup[wrappedElem] = orderKey; } } _hashLookupEnumerator = _hashLookup.GetEnumerator(); } if (_hashLookupEnumerator.MoveNext()) { KeyValuePair<Wrapper<TInputOutput>, TKey> currentPair = _hashLookupEnumerator.Current; currentElement = currentPair.Key.Value; currentKey = currentPair.Value; return true; } return false; } protected override void Dispose(bool disposing) { Debug.Assert(_source != null); _source.Dispose(); if (_hashLookupEnumerator != null) { _hashLookupEnumerator.Dispose(); } } } } }
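// ---------------------------------------------------------------------------------------------
// Small usage sketch (not part of the PLINQ sources above): the operator above is what backs
// ParallelEnumerable.Distinct. The unordered path corresponds to DistinctQueryOperatorEnumerator
// (a per-partition Set<T> of already-seen values), the AsOrdered path to
// OrderedDistinctQueryOperatorEnumerator (keep the smallest order key per distinct value).
// ---------------------------------------------------------------------------------------------
using System;
using System.Linq;

internal static class ParallelDistinctSketch
{
    static void Main()
    {
        int[] data = { 5, 1, 5, 3, 1, 3, 2, 2, 4 };

        // Unordered: output order is unspecified because partitions race to emit their results.
        int[] unordered = data.AsParallel().Distinct().ToArray();

        // Ordered: each distinct value keeps the position of its first occurrence in the source,
        // so the result matches sequential Enumerable.Distinct.
        int[] ordered = data.AsParallel().AsOrdered().Distinct().ToArray();

        Console.WriteLine(string.Join(", ", unordered)); // some permutation of 1..5
        Console.WriteLine(string.Join(", ", ordered));   // 5, 1, 3, 2, 4
    }
}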
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.IO; using System.Threading; using System.Collections; using System.Collections.Generic; using System.Security.Policy; using System.Reflection; using System.Globalization; using System.Xml; using OpenMetaverse; using log4net; using Nini.Config; using Amib.Threading; using OpenSim.Framework; using OpenSim.Region.CoreModules; using OpenSim.Region.Framework.Scenes; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.ScriptEngine.Shared; using OpenSim.Region.ScriptEngine.Shared.Api; using OpenSim.Region.ScriptEngine.Shared.ScriptBase; using OpenSim.Region.ScriptEngine.Shared.CodeTools; using OpenSim.Region.ScriptEngine.Interfaces; namespace OpenSim.Region.ScriptEngine.Shared.Instance { public class ScriptSerializer { public static string Serialize(ScriptInstance instance) { bool running = instance.Running; XmlDocument xmldoc = new XmlDocument(); XmlNode xmlnode = xmldoc.CreateNode(XmlNodeType.XmlDeclaration, "", ""); xmldoc.AppendChild(xmlnode); XmlElement rootElement = xmldoc.CreateElement("", "ScriptState", ""); xmldoc.AppendChild(rootElement); XmlElement state = xmldoc.CreateElement("", "State", ""); state.AppendChild(xmldoc.CreateTextNode(instance.State)); rootElement.AppendChild(state); XmlElement run = xmldoc.CreateElement("", "Running", ""); run.AppendChild(xmldoc.CreateTextNode( running.ToString())); rootElement.AppendChild(run); Dictionary<string, Object> vars = instance.GetVars(); XmlElement variables = xmldoc.CreateElement("", "Variables", ""); foreach (KeyValuePair<string, Object> var in vars) WriteTypedValue(xmldoc, variables, "Variable", var.Key, var.Value); rootElement.AppendChild(variables); XmlElement queue = xmldoc.CreateElement("", "Queue", ""); int count = instance.EventQueue.Count; while (count > 0) { EventParams ep = (EventParams)instance.EventQueue.Dequeue(); instance.EventQueue.Enqueue(ep); count--; XmlElement item = xmldoc.CreateElement("", "Item", ""); XmlAttribute itemEvent = xmldoc.CreateAttribute("", 
"event", ""); itemEvent.Value = ep.EventName; item.Attributes.Append(itemEvent); XmlElement parms = xmldoc.CreateElement("", "Params", ""); foreach (Object o in ep.Params) WriteTypedValue(xmldoc, parms, "Param", String.Empty, o); item.AppendChild(parms); XmlElement detect = xmldoc.CreateElement("", "Detected", ""); foreach (DetectParams det in ep.DetectParams) { XmlElement objectElem = xmldoc.CreateElement("", "Object", ""); XmlAttribute pos = xmldoc.CreateAttribute("", "pos", ""); pos.Value = det.OffsetPos.ToString(); objectElem.Attributes.Append(pos); XmlAttribute d_linkNum = xmldoc.CreateAttribute("", "linkNum", ""); d_linkNum.Value = det.LinkNum.ToString(); objectElem.Attributes.Append(d_linkNum); XmlAttribute d_group = xmldoc.CreateAttribute("", "group", ""); d_group.Value = det.Group.ToString(); objectElem.Attributes.Append(d_group); XmlAttribute d_name = xmldoc.CreateAttribute("", "name", ""); d_name.Value = det.Name.ToString(); objectElem.Attributes.Append(d_name); XmlAttribute d_owner = xmldoc.CreateAttribute("", "owner", ""); d_owner.Value = det.Owner.ToString(); objectElem.Attributes.Append(d_owner); XmlAttribute d_position = xmldoc.CreateAttribute("", "position", ""); d_position.Value = det.Position.ToString(); objectElem.Attributes.Append(d_position); XmlAttribute d_rotation = xmldoc.CreateAttribute("", "rotation", ""); d_rotation.Value = det.Rotation.ToString(); objectElem.Attributes.Append(d_rotation); XmlAttribute d_type = xmldoc.CreateAttribute("", "type", ""); d_type.Value = det.Type.ToString(); objectElem.Attributes.Append(d_type); XmlAttribute d_velocity = xmldoc.CreateAttribute("", "velocity", ""); d_velocity.Value = det.Velocity.ToString(); objectElem.Attributes.Append(d_velocity); objectElem.AppendChild( xmldoc.CreateTextNode(det.Key.ToString())); detect.AppendChild(objectElem); } item.AppendChild(detect); queue.AppendChild(item); } rootElement.AppendChild(queue); XmlNode plugins = xmldoc.CreateElement("", "Plugins", ""); DumpList(xmldoc, plugins, new LSL_Types.list(instance.PluginData)); rootElement.AppendChild(plugins); if (instance.ScriptTask != null) { if (instance.ScriptTask.PermsMask != 0 && instance.ScriptTask.PermsGranter != UUID.Zero) { XmlNode permissions = xmldoc.CreateElement("", "Permissions", ""); XmlAttribute granter = xmldoc.CreateAttribute("", "granter", ""); granter.Value = instance.ScriptTask.PermsGranter.ToString(); permissions.Attributes.Append(granter); XmlAttribute mask = xmldoc.CreateAttribute("", "mask", ""); mask.Value = instance.ScriptTask.PermsMask.ToString(); permissions.Attributes.Append(mask); rootElement.AppendChild(permissions); } } if (instance.MinEventDelay > 0.0) { XmlElement eventDelay = xmldoc.CreateElement("", "MinEventDelay", ""); eventDelay.AppendChild(xmldoc.CreateTextNode(instance.MinEventDelay.ToString())); rootElement.AppendChild(eventDelay); } return xmldoc.InnerXml; } public static void Deserialize(string xml, ScriptInstance instance) { XmlDocument doc = new XmlDocument(); Dictionary<string, object> vars = instance.GetVars(); instance.PluginData = new Object[0]; doc.LoadXml(xml); XmlNodeList rootL = doc.GetElementsByTagName("ScriptState"); if (rootL.Count != 1) { return; } XmlNode rootNode = rootL[0]; if (rootNode != null) { object varValue; XmlNodeList partL = rootNode.ChildNodes; foreach (XmlNode part in partL) { switch (part.Name) { case "State": instance.State=part.InnerText; break; case "Running": instance.Running=bool.Parse(part.InnerText); break; case "Variables": XmlNodeList varL = part.ChildNodes; foreach 
(XmlNode var in varL) { string varName; varValue=ReadTypedValue(var, out varName); if (vars.ContainsKey(varName)) vars[varName] = varValue; } instance.SetVars(vars); break; case "Queue": XmlNodeList itemL = part.ChildNodes; foreach (XmlNode item in itemL) { List<Object> parms = new List<Object>(); List<DetectParams> detected = new List<DetectParams>(); string eventName = item.Attributes.GetNamedItem("event").Value; XmlNodeList eventL = item.ChildNodes; foreach (XmlNode evt in eventL) { switch (evt.Name) { case "Params": XmlNodeList prms = evt.ChildNodes; foreach (XmlNode pm in prms) parms.Add(ReadTypedValue(pm)); break; case "Detected": XmlNodeList detL = evt.ChildNodes; foreach (XmlNode det in detL) { string vect = det.Attributes.GetNamedItem( "pos").Value; LSL_Types.Vector3 v = new LSL_Types.Vector3(vect); int d_linkNum=0; UUID d_group = UUID.Zero; string d_name = String.Empty; UUID d_owner = UUID.Zero; LSL_Types.Vector3 d_position = new LSL_Types.Vector3(); LSL_Types.Quaternion d_rotation = new LSL_Types.Quaternion(); int d_type = 0; LSL_Types.Vector3 d_velocity = new LSL_Types.Vector3(); try { string tmp; tmp = det.Attributes.GetNamedItem( "linkNum").Value; int.TryParse(tmp, out d_linkNum); tmp = det.Attributes.GetNamedItem( "group").Value; UUID.TryParse(tmp, out d_group); d_name = det.Attributes.GetNamedItem( "name").Value; tmp = det.Attributes.GetNamedItem( "owner").Value; UUID.TryParse(tmp, out d_owner); tmp = det.Attributes.GetNamedItem( "position").Value; d_position = new LSL_Types.Vector3(tmp); tmp = det.Attributes.GetNamedItem( "rotation").Value; d_rotation = new LSL_Types.Quaternion(tmp); tmp = det.Attributes.GetNamedItem( "type").Value; int.TryParse(tmp, out d_type); tmp = det.Attributes.GetNamedItem( "velocity").Value; d_velocity = new LSL_Types.Vector3(tmp); } catch (Exception) // Old version XML { } UUID uuid = new UUID(); UUID.TryParse(det.InnerText, out uuid); DetectParams d = new DetectParams(); d.Key = uuid; d.OffsetPos = v; d.LinkNum = d_linkNum; d.Group = d_group; d.Name = d_name; d.Owner = d_owner; d.Position = d_position; d.Rotation = d_rotation; d.Type = d_type; d.Velocity = d_velocity; detected.Add(d); } break; } } EventParams ep = new EventParams( eventName, parms.ToArray(), detected.ToArray()); instance.EventQueue.Enqueue(ep); } break; case "Plugins": instance.PluginData = ReadList(part).Data; break; case "Permissions": string tmpPerm; int mask = 0; tmpPerm = part.Attributes.GetNamedItem("mask").Value; if (tmpPerm != null) { int.TryParse(tmpPerm, out mask); if (mask != 0) { tmpPerm = part.Attributes.GetNamedItem("granter").Value; if (tmpPerm != null) { UUID granter = new UUID(); UUID.TryParse(tmpPerm, out granter); if (granter != UUID.Zero) { instance.ScriptTask.PermsMask = mask; instance.ScriptTask.PermsGranter = granter; } } } } break; case "MinEventDelay": double minEventDelay = 0.0; double.TryParse(part.InnerText, out minEventDelay); instance.MinEventDelay = minEventDelay; break; } } } } private static void DumpList(XmlDocument doc, XmlNode parent, LSL_Types.list l) { foreach (Object o in l.Data) WriteTypedValue(doc, parent, "ListItem", "", o); } private static LSL_Types.list ReadList(XmlNode parent) { List<Object> olist = new List<Object>(); XmlNodeList itemL = parent.ChildNodes; foreach (XmlNode item in itemL) olist.Add(ReadTypedValue(item)); return new LSL_Types.list(olist.ToArray()); } private static void WriteTypedValue(XmlDocument doc, XmlNode parent, string tag, string name, object value) { Type t=value.GetType(); XmlAttribute typ = 
doc.CreateAttribute("", "type", ""); XmlNode n = doc.CreateElement("", tag, ""); if (value is LSL_Types.list) { typ.Value = "list"; n.Attributes.Append(typ); DumpList(doc, n, (LSL_Types.list) value); if (name != String.Empty) { XmlAttribute nam = doc.CreateAttribute("", "name", ""); nam.Value = name; n.Attributes.Append(nam); } parent.AppendChild(n); return; } n.AppendChild(doc.CreateTextNode(value.ToString())); typ.Value = t.ToString(); n.Attributes.Append(typ); if (name != String.Empty) { XmlAttribute nam = doc.CreateAttribute("", "name", ""); nam.Value = name; n.Attributes.Append(nam); } parent.AppendChild(n); } private static object ReadTypedValue(XmlNode tag, out string name) { name = tag.Attributes.GetNamedItem("name").Value; return ReadTypedValue(tag); } private static object ReadTypedValue(XmlNode tag) { Object varValue; string assembly; string itemType = tag.Attributes.GetNamedItem("type").Value; if (itemType == "list") return ReadList(tag); if (itemType == "OpenMetaverse.UUID") { UUID val = new UUID(); UUID.TryParse(tag.InnerText, out val); return val; } Type itemT = Type.GetType(itemType); if (itemT == null) { Object[] args = new Object[] { tag.InnerText }; assembly = itemType+", OpenSim.Region.ScriptEngine.Shared"; itemT = Type.GetType(assembly); if (itemT == null) return null; varValue = Activator.CreateInstance(itemT, args); if (varValue == null) return null; } else { varValue = Convert.ChangeType(tag.InnerText, itemT); } return varValue; } } }
// // (C) Copyright 2003-2011 by Autodesk, Inc. // // Permission to use, copy, modify, and distribute this software in // object code form for any purpose and without fee is hereby granted, // provided that the above copyright notice appears in all copies and // that both that copyright notice and the limited warranty and // restricted rights notice below appear in all supporting // documentation. // // AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS. // AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF // MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC. // DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE // UNINTERRUPTED OR ERROR FREE. // // Use, duplication, or disclosure by the U.S. Government is subject to // restrictions set forth in FAR 52.227-19 (Commercial Computer // Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii) // (Rights in Technical Data and Computer Software), as applicable. // namespace Revit.SDK.Samples.FrameBuilder.CS { partial class CreateFrameForm { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.unitLabel = new System.Windows.Forms.Label(); this.cancelButton = new System.Windows.Forms.Button(); this.floornumberLabel = new System.Windows.Forms.Label(); this.XLabel = new System.Windows.Forms.Label(); this.YLabel = new System.Windows.Forms.Label(); this.DistanceLabel = new System.Windows.Forms.Label(); this.floorNumberTextBox = new System.Windows.Forms.TextBox(); this.distanceTextBox = new System.Windows.Forms.TextBox(); this.yNumberTextBox = new System.Windows.Forms.TextBox(); this.xNumberTextBox = new System.Windows.Forms.TextBox(); this.braceLabel = new System.Windows.Forms.Label(); this.beamLabel = new System.Windows.Forms.Label(); this.columnLabel = new System.Windows.Forms.Label(); this.braceTypeComboBox = new System.Windows.Forms.ComboBox(); this.beamTypeComboBox = new System.Windows.Forms.ComboBox(); this.columnTypeComboBox = new System.Windows.Forms.ComboBox(); this.OKButton = new System.Windows.Forms.Button(); this.columnDuplicateButton = new System.Windows.Forms.Button(); this.beamDuplicateButton = new System.Windows.Forms.Button(); this.braceDuplicateButton = new System.Windows.Forms.Button(); this.label1 = new System.Windows.Forms.Label(); this.levelHeightTextBox = new System.Windows.Forms.TextBox(); this.label2 = new System.Windows.Forms.Label(); this.originXtextBox = new System.Windows.Forms.TextBox(); this.originYtextBox = new System.Windows.Forms.TextBox(); this.label4 = new System.Windows.Forms.Label(); this.originAngletextBox = new System.Windows.Forms.TextBox(); this.groupBox1 = new System.Windows.Forms.GroupBox(); this.label3 = new System.Windows.Forms.Label(); this.label7 = new System.Windows.Forms.Label(); this.label6 = new System.Windows.Forms.Label(); this.label5 = new System.Windows.Forms.Label(); this.groupBox1.SuspendLayout(); this.SuspendLayout(); // // unitLabel // this.unitLabel.Location 
= new System.Drawing.Point(133, 36); this.unitLabel.Name = "unitLabel"; this.unitLabel.Size = new System.Drawing.Size(32, 18); this.unitLabel.TabIndex = 35; this.unitLabel.Text = "feet"; // // cancelButton // this.cancelButton.DialogResult = System.Windows.Forms.DialogResult.Cancel; this.cancelButton.Location = new System.Drawing.Point(516, 324); this.cancelButton.Name = "cancelButton"; this.cancelButton.Size = new System.Drawing.Size(75, 23); this.cancelButton.TabIndex = 27; this.cancelButton.Text = "&Cancel"; this.cancelButton.Click += new System.EventHandler(this.cancelButton_Click); // // floornumberLabel // this.floornumberLabel.Location = new System.Drawing.Point(12, 177); this.floornumberLabel.Name = "floornumberLabel"; this.floornumberLabel.Size = new System.Drawing.Size(144, 21); this.floornumberLabel.TabIndex = 34; this.floornumberLabel.Text = "Number of Floors:"; // // XLabel // this.XLabel.Location = new System.Drawing.Point(12, 65); this.XLabel.Name = "XLabel"; this.XLabel.Size = new System.Drawing.Size(200, 21); this.XLabel.TabIndex = 33; this.XLabel.Text = "Number of Columns in the X Direction:"; // // YLabel // this.YLabel.Location = new System.Drawing.Point(12, 121); this.YLabel.Name = "YLabel"; this.YLabel.Size = new System.Drawing.Size(200, 21); this.YLabel.TabIndex = 32; this.YLabel.Text = "Number of Columns in the Y Direction:"; // // DistanceLabel // this.DistanceLabel.Location = new System.Drawing.Point(12, 9); this.DistanceLabel.Name = "DistanceLabel"; this.DistanceLabel.Size = new System.Drawing.Size(152, 21); this.DistanceLabel.TabIndex = 31; this.DistanceLabel.Text = "Distance between Columns:"; // // floorNumberTextBox // this.floorNumberTextBox.Location = new System.Drawing.Point(15, 200); this.floorNumberTextBox.Name = "floorNumberTextBox"; this.floorNumberTextBox.Size = new System.Drawing.Size(112, 20); this.floorNumberTextBox.TabIndex = 22; this.floorNumberTextBox.Validating += new System.ComponentModel.CancelEventHandler(this.floorNumberTextBox_Validating); // // distanceTextBox // this.distanceTextBox.Location = new System.Drawing.Point(15, 33); this.distanceTextBox.Name = "distanceTextBox"; this.distanceTextBox.Size = new System.Drawing.Size(112, 20); this.distanceTextBox.TabIndex = 19; this.distanceTextBox.Validating += new System.ComponentModel.CancelEventHandler(this.distanceTextBox_Validating); // // yNumberTextBox // this.yNumberTextBox.Location = new System.Drawing.Point(15, 145); this.yNumberTextBox.Name = "yNumberTextBox"; this.yNumberTextBox.Size = new System.Drawing.Size(112, 20); this.yNumberTextBox.TabIndex = 21; this.yNumberTextBox.Validating += new System.ComponentModel.CancelEventHandler(this.yNumberTextBox_Validating); // // xNumberTextBox // this.xNumberTextBox.Location = new System.Drawing.Point(15, 89); this.xNumberTextBox.Name = "xNumberTextBox"; this.xNumberTextBox.Size = new System.Drawing.Size(112, 20); this.xNumberTextBox.TabIndex = 20; this.xNumberTextBox.Validating += new System.ComponentModel.CancelEventHandler(this.xNumberTextBox_Validating); // // braceLabel // this.braceLabel.Location = new System.Drawing.Point(214, 121); this.braceLabel.Name = "braceLabel"; this.braceLabel.Size = new System.Drawing.Size(120, 20); this.braceLabel.TabIndex = 30; this.braceLabel.Text = "Type of Braces:"; // // beamLabel // this.beamLabel.Location = new System.Drawing.Point(214, 64); this.beamLabel.Name = "beamLabel"; this.beamLabel.Size = new System.Drawing.Size(120, 21); this.beamLabel.TabIndex = 29; this.beamLabel.Text = "Type of Beams:"; // 
// columnLabel // this.columnLabel.Location = new System.Drawing.Point(214, 9); this.columnLabel.Name = "columnLabel"; this.columnLabel.Size = new System.Drawing.Size(120, 20); this.columnLabel.TabIndex = 28; this.columnLabel.Text = "Type of Columns:"; // // braceTypeComboBox // this.braceTypeComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.braceTypeComboBox.Location = new System.Drawing.Point(214, 144); this.braceTypeComboBox.MaxDropDownItems = 10; this.braceTypeComboBox.Name = "braceTypeComboBox"; this.braceTypeComboBox.Size = new System.Drawing.Size(281, 21); this.braceTypeComboBox.TabIndex = 25; this.braceTypeComboBox.SelectedIndexChanged += new System.EventHandler(this.braceTypeComboBox_SelectedIndexChanged); // // beamTypeComboBox // this.beamTypeComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.beamTypeComboBox.Location = new System.Drawing.Point(214, 88); this.beamTypeComboBox.MaxDropDownItems = 10; this.beamTypeComboBox.Name = "beamTypeComboBox"; this.beamTypeComboBox.Size = new System.Drawing.Size(281, 21); this.beamTypeComboBox.TabIndex = 24; this.beamTypeComboBox.SelectedIndexChanged += new System.EventHandler(this.beamTypeComboBox_SelectedIndexChanged); // // columnTypeComboBox // this.columnTypeComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.columnTypeComboBox.Location = new System.Drawing.Point(214, 32); this.columnTypeComboBox.MaxDropDownItems = 10; this.columnTypeComboBox.Name = "columnTypeComboBox"; this.columnTypeComboBox.Size = new System.Drawing.Size(281, 21); this.columnTypeComboBox.TabIndex = 23; this.columnTypeComboBox.SelectedIndexChanged += new System.EventHandler(this.columnTypeComboBox_SelectedIndexChanged); // // OKButton // this.OKButton.Location = new System.Drawing.Point(435, 324); this.OKButton.Name = "OKButton"; this.OKButton.Size = new System.Drawing.Size(75, 23); this.OKButton.TabIndex = 26; this.OKButton.Text = "&OK"; this.OKButton.Click += new System.EventHandler(this.OKButton_Click); // // columnDuplicateButton // this.columnDuplicateButton.Location = new System.Drawing.Point(501, 30); this.columnDuplicateButton.Name = "columnDuplicateButton"; this.columnDuplicateButton.Size = new System.Drawing.Size(102, 24); this.columnDuplicateButton.TabIndex = 36; this.columnDuplicateButton.Text = "&ColumnDuplicate"; this.columnDuplicateButton.UseVisualStyleBackColor = true; this.columnDuplicateButton.Click += new System.EventHandler(this.columnDuplicateButton_Click); // // beamDuplicateButton // this.beamDuplicateButton.Location = new System.Drawing.Point(501, 85); this.beamDuplicateButton.Name = "beamDuplicateButton"; this.beamDuplicateButton.Size = new System.Drawing.Size(102, 24); this.beamDuplicateButton.TabIndex = 37; this.beamDuplicateButton.Text = "&BeamDuplicate"; this.beamDuplicateButton.UseVisualStyleBackColor = true; this.beamDuplicateButton.Click += new System.EventHandler(this.beamDuplicateButton_Click); // // braceDuplicateButton // this.braceDuplicateButton.Location = new System.Drawing.Point(501, 142); this.braceDuplicateButton.Name = "braceDuplicateButton"; this.braceDuplicateButton.Size = new System.Drawing.Size(102, 23); this.braceDuplicateButton.TabIndex = 38; this.braceDuplicateButton.Text = "B&raceDuplicate"; this.braceDuplicateButton.UseVisualStyleBackColor = true; this.braceDuplicateButton.Click += new System.EventHandler(this.braceDuplicateButton_Click); // // label1 // this.label1.Location = new System.Drawing.Point(12, 233); this.label1.Name = 
"label1"; this.label1.Size = new System.Drawing.Size(175, 21); this.label1.TabIndex = 40; this.label1.Text = "Height of Auto Generated Levels:"; // // levelHeightTextBox // this.levelHeightTextBox.Enabled = false; this.levelHeightTextBox.Location = new System.Drawing.Point(15, 255); this.levelHeightTextBox.Name = "levelHeightTextBox"; this.levelHeightTextBox.Size = new System.Drawing.Size(112, 20); this.levelHeightTextBox.TabIndex = 39; this.levelHeightTextBox.Validating += new System.ComponentModel.CancelEventHandler(this.levelHeightTextBox_Validating); // // label2 // this.label2.Location = new System.Drawing.Point(132, 258); this.label2.Name = "label2"; this.label2.Size = new System.Drawing.Size(32, 18); this.label2.TabIndex = 41; this.label2.Text = "feet"; // // originXtextBox // this.originXtextBox.Location = new System.Drawing.Point(12, 45); this.originXtextBox.Name = "originXtextBox"; this.originXtextBox.Size = new System.Drawing.Size(100, 20); this.originXtextBox.TabIndex = 42; this.originXtextBox.Validating += new System.ComponentModel.CancelEventHandler(this.originXtextBox_Validating); // // originYtextBox // this.originYtextBox.Location = new System.Drawing.Point(156, 45); this.originYtextBox.Name = "originYtextBox"; this.originYtextBox.Size = new System.Drawing.Size(100, 20); this.originYtextBox.TabIndex = 43; this.originYtextBox.Validating += new System.ComponentModel.CancelEventHandler(this.originYtextBox_Validating); // // label4 // this.label4.AutoSize = true; this.label4.Location = new System.Drawing.Point(9, 21); this.label4.Name = "label4"; this.label4.Size = new System.Drawing.Size(50, 13); this.label4.TabIndex = 45; this.label4.Text = "Origin (X)"; // // originAngletextBox // this.originAngletextBox.Location = new System.Drawing.Point(12, 100); this.originAngletextBox.Name = "originAngletextBox"; this.originAngletextBox.Size = new System.Drawing.Size(100, 20); this.originAngletextBox.TabIndex = 46; this.originAngletextBox.Validating += new System.ComponentModel.CancelEventHandler(this.originAngletextBox_Validating); // // groupBox1 // this.groupBox1.Controls.Add(this.label3); this.groupBox1.Controls.Add(this.label7); this.groupBox1.Controls.Add(this.label6); this.groupBox1.Controls.Add(this.label5); this.groupBox1.Controls.Add(this.originYtextBox); this.groupBox1.Controls.Add(this.originAngletextBox); this.groupBox1.Controls.Add(this.originXtextBox); this.groupBox1.Controls.Add(this.label4); this.groupBox1.Location = new System.Drawing.Point(214, 179); this.groupBox1.Name = "groupBox1"; this.groupBox1.Size = new System.Drawing.Size(377, 139); this.groupBox1.TabIndex = 47; this.groupBox1.TabStop = false; this.groupBox1.Text = "Location"; // // label3 // this.label3.Location = new System.Drawing.Point(262, 48); this.label3.Name = "label3"; this.label3.Size = new System.Drawing.Size(32, 18); this.label3.TabIndex = 50; this.label3.Text = "feet"; // // label7 // this.label7.Location = new System.Drawing.Point(118, 48); this.label7.Name = "label7"; this.label7.Size = new System.Drawing.Size(32, 18); this.label7.TabIndex = 49; this.label7.Text = "feet,"; // // label6 // this.label6.AutoSize = true; this.label6.Location = new System.Drawing.Point(161, 21); this.label6.Name = "label6"; this.label6.Size = new System.Drawing.Size(20, 13); this.label6.TabIndex = 48; this.label6.Text = "(Y)"; // // label5 // this.label5.AutoSize = true; this.label5.Location = new System.Drawing.Point(12, 76); this.label5.Name = "label5"; this.label5.Size = new System.Drawing.Size(69, 13); 
this.label5.TabIndex = 47; this.label5.Text = "Rotate Angle"; // // CreateFrameForm // this.AcceptButton = this.OKButton; this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.CancelButton = this.cancelButton; this.ClientSize = new System.Drawing.Size(607, 353); this.Controls.Add(this.groupBox1); this.Controls.Add(this.label2); this.Controls.Add(this.label1); this.Controls.Add(this.levelHeightTextBox); this.Controls.Add(this.braceDuplicateButton); this.Controls.Add(this.beamDuplicateButton); this.Controls.Add(this.columnDuplicateButton); this.Controls.Add(this.unitLabel); this.Controls.Add(this.cancelButton); this.Controls.Add(this.floornumberLabel); this.Controls.Add(this.XLabel); this.Controls.Add(this.YLabel); this.Controls.Add(this.DistanceLabel); this.Controls.Add(this.floorNumberTextBox); this.Controls.Add(this.distanceTextBox); this.Controls.Add(this.yNumberTextBox); this.Controls.Add(this.xNumberTextBox); this.Controls.Add(this.braceLabel); this.Controls.Add(this.beamLabel); this.Controls.Add(this.columnLabel); this.Controls.Add(this.braceTypeComboBox); this.Controls.Add(this.beamTypeComboBox); this.Controls.Add(this.columnTypeComboBox); this.Controls.Add(this.OKButton); this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog; this.MaximizeBox = false; this.MinimizeBox = false; this.Name = "CreateFrameForm"; this.ShowInTaskbar = false; this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; this.Text = "Frame Builder"; this.Load += new System.EventHandler(this.CreateFramingForm_Load); this.groupBox1.ResumeLayout(false); this.groupBox1.PerformLayout(); this.ResumeLayout(false); this.PerformLayout(); } #endregion private System.Windows.Forms.Label unitLabel; private System.Windows.Forms.Button cancelButton; private System.Windows.Forms.Label floornumberLabel; private System.Windows.Forms.Label XLabel; private System.Windows.Forms.Label YLabel; private System.Windows.Forms.Label DistanceLabel; private System.Windows.Forms.TextBox floorNumberTextBox; private System.Windows.Forms.TextBox distanceTextBox; private System.Windows.Forms.TextBox yNumberTextBox; private System.Windows.Forms.TextBox xNumberTextBox; private System.Windows.Forms.Label braceLabel; private System.Windows.Forms.Label beamLabel; private System.Windows.Forms.Label columnLabel; private System.Windows.Forms.ComboBox braceTypeComboBox; private System.Windows.Forms.ComboBox beamTypeComboBox; private System.Windows.Forms.ComboBox columnTypeComboBox; private System.Windows.Forms.Button OKButton; private System.Windows.Forms.Button columnDuplicateButton; private System.Windows.Forms.Button beamDuplicateButton; private System.Windows.Forms.Button braceDuplicateButton; private System.Windows.Forms.Label label1; private System.Windows.Forms.TextBox levelHeightTextBox; private System.Windows.Forms.Label label2; private System.Windows.Forms.TextBox originXtextBox; private System.Windows.Forms.TextBox originYtextBox; private System.Windows.Forms.Label label4; private System.Windows.Forms.TextBox originAngletextBox; private System.Windows.Forms.GroupBox groupBox1; private System.Windows.Forms.Label label3; private System.Windows.Forms.Label label7; private System.Windows.Forms.Label label6; private System.Windows.Forms.Label label5; } }
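// ---------------------------------------------------------------------------------------------
// Hedged sketch of the kind of Validating handler the designer code above wires to its text
// boxes (distanceTextBox_Validating, xNumberTextBox_Validating, ...). The real handlers live in
// the other half of the partial class, which is not included here; the "positive number of
// feet" rule below is an assumption chosen purely for illustration.
// ---------------------------------------------------------------------------------------------
using System;
using System.ComponentModel;
using System.Windows.Forms;

public class ValidatingTextBoxSketch : Form
{
    private readonly TextBox distanceTextBox = new TextBox();
    private readonly ErrorProvider errorProvider = new ErrorProvider();

    public ValidatingTextBoxSketch()
    {
        distanceTextBox.Validating += DistanceTextBox_Validating;
        Controls.Add(distanceTextBox);
    }

    private void DistanceTextBox_Validating(object sender, CancelEventArgs e)
    {
        // Setting e.Cancel keeps focus on the box until the input parses as a positive number.
        if (!double.TryParse(distanceTextBox.Text, out double feet) || feet <= 0)
        {
            errorProvider.SetError(distanceTextBox, "Enter a positive number of feet.");
            e.Cancel = true;
        }
        else
        {
            errorProvider.SetError(distanceTextBox, string.Empty);
        }
    }
}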
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // // This file was autogenerated by a tool. // Do not modify it. // namespace Microsoft.Azure.Batch { using Models = Microsoft.Azure.Batch.Protocol.Models; using System; using System.Collections.Generic; using System.Linq; /// <summary> /// A inbound NAT pool that can be used to address specific ports on compute nodes in a Batch pool externally. /// </summary> public partial class InboundNatPool : ITransportObjectProvider<Models.InboundNATPool>, IPropertyMetadata { #region Constructors /// <summary> /// Initializes a new instance of the <see cref="InboundNatPool"/> class. /// </summary> /// <param name='name'>The name of the endpoint.</param> /// <param name='protocol'>The protocol of the endpoint.</param> /// <param name='backendPort'>The port number on the compute node.</param> /// <param name='frontendPortRangeStart'>The first port number in the range of external ports that will be used to provide inbound access to the backendPort /// on individual compute nodes.</param> /// <param name='frontendPortRangeEnd'>The last port number in the range of external ports that will be used to provide inbound access to the backendPort /// on individual compute nodes.</param> /// <param name='networkSecurityGroupRules'>A list of network security group rules that will be applied to the endpoint.</param> public InboundNatPool( string name, Common.InboundEndpointProtocol protocol, int backendPort, int frontendPortRangeStart, int frontendPortRangeEnd, IReadOnlyList<NetworkSecurityGroupRule> networkSecurityGroupRules = default(IReadOnlyList<NetworkSecurityGroupRule>)) { this.Name = name; this.Protocol = protocol; this.BackendPort = backendPort; this.FrontendPortRangeStart = frontendPortRangeStart; this.FrontendPortRangeEnd = frontendPortRangeEnd; this.NetworkSecurityGroupRules = networkSecurityGroupRules; } internal InboundNatPool(Models.InboundNATPool protocolObject) { this.BackendPort = protocolObject.BackendPort; this.FrontendPortRangeEnd = protocolObject.FrontendPortRangeEnd; this.FrontendPortRangeStart = protocolObject.FrontendPortRangeStart; this.Name = protocolObject.Name; this.NetworkSecurityGroupRules = NetworkSecurityGroupRule.ConvertFromProtocolCollectionReadOnly(protocolObject.NetworkSecurityGroupRules); this.Protocol = UtilitiesInternal.MapEnum<Models.InboundEndpointProtocol, Common.InboundEndpointProtocol>(protocolObject.Protocol); } #endregion Constructors #region InboundNatPool /// <summary> /// Gets the port number on the compute node. /// </summary> /// <remarks> /// This must be unique within a Batch pool. Acceptable values are between 1 and 65535 except for 22, 3389, 29876 /// and 29877 as these are reserved. /// </remarks> public int BackendPort { get; } /// <summary> /// Gets the last port number in the range of external ports that will be used to provide inbound access to the backendPort /// on individual compute nodes. /// </summary> /// <remarks> /// Acceptable values range between 1 and 65534 except ports from 50000 to 55000 which are reserved by the Batch /// service. All ranges within a pool must be distinct and cannot overlap. 
/// </remarks> public int FrontendPortRangeEnd { get; } /// <summary> /// Gets the first port number in the range of external ports that will be used to provide inbound access to the /// backendPort on individual compute nodes. /// </summary> /// <remarks> /// Acceptable values range between 1 and 65534 except ports from 50000 to 55000 which are reserved. All ranges within /// a pool must be distinct and cannot overlap. /// </remarks> public int FrontendPortRangeStart { get; } /// <summary> /// Gets the name of the endpoint. /// </summary> /// <remarks> /// The name must be unique within a Batch pool, can contain letters, numbers, underscores, periods, and hyphens. /// Names must start with a letter or number, must end with a letter, number, or underscore, and cannot exceed 77 /// characters. /// </remarks> public string Name { get; } /// <summary> /// Gets a list of network security group rules that will be applied to the endpoint. /// </summary> /// <remarks> /// The maximum number of rules that can be specified across all the endpoints on a pool is 25. If no network security /// group rules are specified, a default rule will be created to allow inbound access to the specified backendPort. /// </remarks> public IReadOnlyList<NetworkSecurityGroupRule> NetworkSecurityGroupRules { get; } /// <summary> /// Gets the protocol of the endpoint. /// </summary> public Common.InboundEndpointProtocol Protocol { get; } #endregion // InboundNatPool #region IPropertyMetadata bool IModifiable.HasBeenModified { //This class is compile time readonly so it cannot have been modified get { return false; } } bool IReadOnly.IsReadOnly { get { return true; } set { // This class is compile time readonly already } } #endregion // IPropertyMetadata #region Internal/private methods /// <summary> /// Return a protocol object of the requested type. /// </summary> /// <returns>The protocol object of the requested type.</returns> Models.InboundNATPool ITransportObjectProvider<Models.InboundNATPool>.GetTransportObject() { Models.InboundNATPool result = new Models.InboundNATPool() { BackendPort = this.BackendPort, FrontendPortRangeEnd = this.FrontendPortRangeEnd, FrontendPortRangeStart = this.FrontendPortRangeStart, Name = this.Name, NetworkSecurityGroupRules = UtilitiesInternal.ConvertToProtocolCollection(this.NetworkSecurityGroupRules), Protocol = UtilitiesInternal.MapEnum<Common.InboundEndpointProtocol, Models.InboundEndpointProtocol>(this.Protocol), }; return result; } /// <summary> /// Converts a collection of protocol layer objects to object layer collection objects. /// </summary> internal static IList<InboundNatPool> ConvertFromProtocolCollection(IEnumerable<Models.InboundNATPool> protoCollection) { ConcurrentChangeTrackedModifiableList<InboundNatPool> converted = UtilitiesInternal.CollectionToThreadSafeCollectionIModifiable( items: protoCollection, objectCreationFunc: o => new InboundNatPool(o)); return converted; } /// <summary> /// Converts a collection of protocol layer objects to object layer collection objects, in a frozen state. 
/// </summary> internal static IList<InboundNatPool> ConvertFromProtocolCollectionAndFreeze(IEnumerable<Models.InboundNATPool> protoCollection) { ConcurrentChangeTrackedModifiableList<InboundNatPool> converted = UtilitiesInternal.CollectionToThreadSafeCollectionIModifiable( items: protoCollection, objectCreationFunc: o => new InboundNatPool(o).Freeze()); converted = UtilitiesInternal.CreateObjectWithNullCheck(converted, o => o.Freeze()); return converted; } /// <summary> /// Converts a collection of protocol layer objects to object layer collection objects, with each object marked readonly /// and returned as a readonly collection. /// </summary> internal static IReadOnlyList<InboundNatPool> ConvertFromProtocolCollectionReadOnly(IEnumerable<Models.InboundNATPool> protoCollection) { IReadOnlyList<InboundNatPool> converted = UtilitiesInternal.CreateObjectWithNullCheck( UtilitiesInternal.CollectionToNonThreadSafeCollection( items: protoCollection, objectCreationFunc: o => new InboundNatPool(o).Freeze()), o => o.AsReadOnly()); return converted; } #endregion // Internal/private methods } }
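// ---------------------------------------------------------------------------------------------
// A minimal usage sketch, not part of the generated file above. It constructs an InboundNatPool
// via the public constructor declared above, picking values that respect the documented
// constraints (backend port not 22, 3389, 29876 or 29877; frontend range between 1 and 65534 and
// clear of the reserved 50000-55000 block; name of 77 characters or fewer). The namespace, class
// name, pool name, port numbers, and the assumption that Common.InboundEndpointProtocol exposes a
// Tcp value are all illustrative.
// ---------------------------------------------------------------------------------------------
namespace Microsoft.Azure.Batch.Sketches
{
    internal static class InboundNatPoolSketch
    {
        public static Microsoft.Azure.Batch.InboundNatPool CreateSshPool()
        {
            return new Microsoft.Azure.Batch.InboundNatPool(
                name: "ssh-pool",
                protocol: Microsoft.Azure.Batch.Common.InboundEndpointProtocol.Tcp,
                backendPort: 3022,               // any non-reserved port on the compute node
                frontendPortRangeStart: 15000,   // external range; must not overlap other pools
                frontendPortRangeEnd: 15100);    // and must stay clear of 50000-55000
        }
    }
}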
// <copyright file="KeyFilterTests.cs" company="Basho Technologies, Inc."> // Copyright 2011 - OJ Reeves & Jeremiah Peschka // Copyright 2014 - Basho Technologies, Inc. // // This file is provided to you under the Apache License, // Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain // a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // </copyright> namespace RiakClientTests.KeyFilters { using System.Collections.Generic; using NUnit.Framework; using RiakClient.Models.MapReduce.KeyFilters; [TestFixture, UnitTest] public abstract class KeyFilterTests { internal const string IntToStringJson = @"[""int_to_string""]"; internal const string StringToIntJson = @"[""string_to_int""]"; internal const string FloatToStringJson = @"[""float_to_string""]"; internal const string StringToFloatJson = @"[""string_to_float""]"; internal const string ToUpperJson = @"[""to_upper""]"; internal const string ToLowerJson = @"[""to_lower""]"; internal const string TokenizeJson = @"[""tokenize"",""/"",4]"; internal const string UrlDecodeJson = @"[""urldecode""]"; internal const string GreaterThanJson = @"[""greater_than"",50]"; internal const string LessThanJson = @"[""less_than"",10]"; internal const string GreaterThanOrEqualToJson = @"[""greater_than_eq"",2000]"; internal const string LessThanOrEqualToJson = @"[""less_than_eq"",-2]"; internal const string BetweenJson = @"[""between"",10,20,false]"; internal const string MatchesJson = @"[""matches"",""solutions""]"; internal const string NotEqualJson = @"[""neq"",""foo""]"; internal const string EqualJson = @"[""eq"",""basho""]"; internal const string SetMemberJson = @"[""set_member"",""basho"",""google"",""yahoo""]"; internal const string SimilarToJson = @"[""similar_to"",""newyork"",3]"; internal const string StartsWithJson = @"[""starts_with"",""closed""]"; internal const string EndsWithJson = @"[""ends_with"",""0603""]"; internal const string AndJson = @"[""and"",[[""ends_with"",""0603""]],[[""starts_with"",""basho""]]]"; internal const string OrJson = @"[""or"",[[""eq"",""google""]],[[""less_than"",""g""]]]"; internal const string NotJson = @"[""not"",[[""matches"",""solution""]]]"; } public class WhenConstructingSimpleKeyFilters : KeyFilterTests { [Test] public void IntToStringCorrectlyConvertsToJson() { var its = new IntToString(); its.ToString().ShouldEqual(IntToStringJson); } [Test] public void StringToIntCorrectlyConvertsToJson() { var sti = new StringToInt(); sti.ToString().ShouldEqual(StringToIntJson); } [Test] public void FloatToStringCorrectlyConvertsToJson() { var fts = new FloatToString(); fts.ToString().ShouldEqual(FloatToStringJson); } [Test] public void StringToFloatCorrectlyConvertsToJson() { var stf = new StringToFloat(); stf.ToString().ShouldEqual(StringToFloatJson); } [Test] public void ToUpperCorrectlyConvertsToJson() { var tu = new ToUpper(); tu.ToString().ShouldEqual(ToUpperJson); } [Test] public void ToLowerCorrectlyConvertsToJson() { var tl = new ToLower(); tl.ToString().ShouldEqual(ToLowerJson); } [Test] public void TokenizeCorrectlyConvertsToJson() { var tokenize = new Tokenize("/", 4); 
tokenize.ToString().ShouldEqual(TokenizeJson); } [Test] public void UrlDecodeCorrectlyConvertsToJson() { var ud = new UrlDecode(); ud.ToString().ShouldEqual(UrlDecodeJson); } } public class WhenConstructingSimplePredicates : KeyFilterTests { [Test] public void GreaterThanCorrectlyConvertsToJson() { var gt = new GreaterThan<int>(50); gt.ToString().ShouldEqual(GreaterThanJson); } [Test] public void LessThanCorrectlyConvertsToJson() { var lt = new LessThan<int>(10); lt.ToString().ShouldEqual(LessThanJson); } [Test] public void GreaterThanOrEqualCorrectlyConvertsToJson() { var gte = new GreaterThanOrEqualTo<int>(2000); gte.ToString().ShouldEqual(GreaterThanOrEqualToJson); } [Test] public void LessThanOrEqualCorrectlyConvertsToJson() { var lte = new LessThanOrEqualTo<int>(-2); lte.ToString().ShouldEqual(LessThanOrEqualToJson); } [Test] public void BetweenCorrectlyConvertsToJson() { var between = new Between<int>(10, 20, false); between.ToString().ShouldEqual(BetweenJson); } [Test] public void MatchesCorrectlyConvertsToJson() { var matches = new Matches("solutions"); matches.ToString().ShouldEqual(MatchesJson); } [Test] public void NotEqualCorrectlyConvertsToJson() { var neq = new NotEqual<string>("foo"); neq.ToString().ShouldEqual(NotEqualJson); } [Test] public void EqualCorrectlyConvertsToJson() { var eq = new Equal<string>("basho"); eq.ToString().ShouldEqual(EqualJson); } [Test] public void SetMemberCorrectlyConvertsToJson() { var setMember = new SetMember<string>(new List<string> { "basho", "google", "yahoo" }); setMember.ToString().ShouldEqual(SetMemberJson); } [Test] public void SimilarToCorrectlyConvertsToJson() { var st = new SimilarTo<string>("newyork", 3); st.ToString().ShouldEqual(SimilarToJson); } [Test] public void StartsWithCorrectlyConvertsToJson() { var sw = new StartsWith("closed"); sw.ToString().ShouldEqual(StartsWithJson); } [Test] public void EndsWithCorrectlyConvertsToJson() { var ew = new EndsWith("0603"); ew.ToString().ShouldEqual(EndsWithJson); } } public class WhenConstructingComplexPredicates : KeyFilterTests { [Test] [Ignore] public void AndCorrectlyConvertsToJson() { //var and = new And(new EndsWith("0603"), new StartsWith("basho")); //and.ToString().ShouldEqual(AndJson); } [Test] [Ignore] public void OrCorrectlyConvertsToJson() { //var or = new Or(new Equal<string>("google"), new LessThan<string>("g")); //or.ToString().ShouldEqual(OrJson); } [Test] [Ignore] public void NotCorrectlyConvertsToJson() { //var not = new Not(new Matches("solution")); //not.ToString().ShouldEqual(NotJson); } } }
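// ---------------------------------------------------------------------------------------------
// Illustrative sketch, not one of the tests above. The constants in KeyFilterTests show that each
// key filter serializes to a JSON array of the form [name, arg1, arg2, ...] (for example
// TokenizeJson is ["tokenize","/",4]). By that pattern, a Tokenize over "-" taking the 2nd token
// would be expected to render as ["tokenize","-",2]; that expected output is an inference from
// the constants, not something verified against RiakClient here.
// ---------------------------------------------------------------------------------------------
namespace RiakClientTests.KeyFilters.Sketches
{
    internal static class KeyFilterJsonSketch
    {
        public static string TokenizeOnDash()
        {
            var tokenize = new RiakClient.Models.MapReduce.KeyFilters.Tokenize("-", 2);
            return tokenize.ToString();   // expected by the pattern above: ["tokenize","-",2]
        }
    }
}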
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using Lucene.Net.Documents; using FieldInvertState = Lucene.Net.Index.FieldInvertState; using Term = Lucene.Net.Index.Term; using SmallFloat = Lucene.Net.Util.SmallFloat; using IDFExplanation = Lucene.Net.Search.Explanation.IDFExplanation; namespace Lucene.Net.Search { /// <summary>Expert: Scoring API. /// <p/>Subclasses implement search scoring. /// /// <p/>The score of query <c>q</c> for document <c>d</c> correlates to the /// cosine-distance or dot-product between document and query vectors in a /// <a href="http://en.wikipedia.org/wiki/Vector_Space_Model"> /// Vector Space Model (VSM) of Information Retrieval</a>. /// A document whose vector is closer to the query vector in that model is scored higher. /// /// The score is computed as follows: /// /// <p/> /// <table cellpadding="1" cellspacing="0" border="1" align="center"> /// <tr><td> /// <table cellpadding="1" cellspacing="0" border="0" align="center"> /// <tr> /// <td valign="middle" align="right" rowspan="1"> /// score(q,d) &#160; = &#160; /// <A HREF="#formula_coord">coord(q,d)</A> &#160;&#183;&#160; /// <A HREF="#formula_queryNorm">queryNorm(q)</A> &#160;&#183;&#160; /// </td> /// <td valign="bottom" align="center" rowspan="1"> /// <big><big><big>&#8721;</big></big></big> /// </td> /// <td valign="middle" align="right" rowspan="1"> /// <big><big>(</big></big> /// <A HREF="#formula_tf">tf(t in d)</A> &#160;&#183;&#160; /// <A HREF="#formula_idf">idf(t)</A><sup>2</sup> &#160;&#183;&#160; /// <A HREF="#formula_termBoost">t.Boost</A>&#160;&#183;&#160; /// <A HREF="#formula_norm">norm(t,d)</A> /// <big><big>)</big></big> /// </td> /// </tr> /// <tr valigh="top"> /// <td></td> /// <td align="center"><small>t in q</small></td> /// <td></td> /// </tr> /// </table> /// </td></tr> /// </table> /// /// <p/> where /// <list type="bullet"> /// <item> /// <A NAME="formula_tf"></A> /// <b>tf(t in d)</b> /// correlates to the term's <i>frequency</i>, /// defined as the number of times term <i>t</i> appears in the currently scored document <i>d</i>. /// Documents that have more occurrences of a given term receive a higher score. 
/// The default computation for <i>tf(t in d)</i> in /// <see cref="Lucene.Net.Search.DefaultSimilarity.Tf(float)">DefaultSimilarity</see> is: /// /// <br/>&#160;<br/> /// <table cellpadding="2" cellspacing="2" border="0" align="center"> /// <tr> /// <td valign="middle" align="right" rowspan="1"> /// <see cref="Lucene.Net.Search.DefaultSimilarity.Tf(float)">tf(t in d)</see> &#160; = &#160; /// </td> /// <td valign="top" align="center" rowspan="1"> /// frequency<sup><big>&#189;</big></sup> /// </td> /// </tr> /// </table> /// <br/>&#160;<br/> /// </item> /// /// <item> /// <A NAME="formula_idf"></A> /// <b>idf(t)</b> stands for Inverse Document Frequency. This value /// correlates to the inverse of <i>docFreq</i> /// (the number of documents in which the term <i>t</i> appears). /// This means rarer terms give higher contribution to the total score. /// The default computation for <i>idf(t)</i> in /// <see cref="Lucene.Net.Search.DefaultSimilarity.Idf(int, int)">DefaultSimilarity</see> is: /// /// <br/>&#160;<br/> /// <table cellpadding="2" cellspacing="2" border="0" align="center"> /// <tr> /// <td valign="middle" align="right"> /// <see cref="Lucene.Net.Search.DefaultSimilarity.Idf(int, int)">idf(t)</see>&#160; = &#160; /// </td> /// <td valign="middle" align="center"> /// 1 + log <big>(</big> /// </td> /// <td valign="middle" align="center"> /// <table> /// <tr><td align="center"><small>numDocs</small></td></tr> /// <tr><td align="center">&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;</td></tr> /// <tr><td align="center"><small>docFreq+1</small></td></tr> /// </table> /// </td> /// <td valign="middle" align="center"> /// <big>)</big> /// </td> /// </tr> /// </table> /// <br/>&#160;<br/> /// </item> /// /// <item> /// <A NAME="formula_coord"></A> /// <b>coord(q,d)</b> /// is a score factor based on how many of the query terms are found in the specified document. /// Typically, a document that contains more of the query's terms will receive a higher score /// than another document with fewer query terms. /// This is a search time factor computed in /// <see cref="Coord(int, int)">coord(q,d)</see> /// by the Similarity in effect at search time. /// <br/>&#160;<br/> /// </item> /// /// <item><b> /// <A NAME="formula_queryNorm"></A> /// queryNorm(q) /// </b> /// is a normalizing factor used to make scores between queries comparable. /// This factor does not affect document ranking (since all ranked documents are multiplied by the same factor), /// but rather just attempts to make scores from different queries (or even different indexes) comparable. /// This is a search time factor computed by the Similarity in effect at search time. 
/// /// The default computation in /// <see cref="Lucene.Net.Search.DefaultSimilarity.QueryNorm(float)">DefaultSimilarity</see> /// is: /// <br/>&#160;<br/> /// <table cellpadding="1" cellspacing="0" border="0" align="center"> /// <tr> /// <td valign="middle" align="right" rowspan="1"> /// queryNorm(q) &#160; = &#160; /// <see cref="Lucene.Net.Search.DefaultSimilarity.QueryNorm(float)">queryNorm(sumOfSquaredWeights)</see> /// &#160; = &#160; /// </td> /// <td valign="middle" align="center" rowspan="1"> /// <table> /// <tr><td align="center"><big>1</big></td></tr> /// <tr><td align="center"><big> /// &#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211;&#8211; /// </big></td></tr> /// <tr><td align="center">sumOfSquaredWeights<sup><big>&#189;</big></sup></td></tr> /// </table> /// </td> /// </tr> /// </table> /// <br/>&#160;<br/> /// /// The sum of squared weights (of the query terms) is /// computed by the query <see cref="Lucene.Net.Search.Weight" /> object. /// For example, a <see cref="Lucene.Net.Search.BooleanQuery">boolean query</see> /// computes this value as: /// /// <br/>&#160;<br/> /// <table cellpadding="1" cellspacing="0" border="0" align="center"> /// <tr> /// <td valign="middle" align="right" rowspan="1"> /// <see cref="Lucene.Net.Search.Weight.GetSumOfSquaredWeights">GetSumOfSquaredWeights</see> &#160; = &#160; /// <see cref="Lucene.Net.Search.Query.Boost">q.Boost</see> <sup><big>2</big></sup> /// &#160;&#183;&#160; /// </td> /// <td valign="bottom" align="center" rowspan="1"> /// <big><big><big>&#8721;</big></big></big> /// </td> /// <td valign="middle" align="right" rowspan="1"> /// <big><big>(</big></big> /// <A HREF="#formula_idf">idf(t)</A> &#160;&#183;&#160; /// <A HREF="#formula_termBoost">t.Boost</A> /// <big><big>) <sup>2</sup> </big></big> /// </td> /// </tr> /// <tr valigh="top"> /// <td></td> /// <td align="center"><small>t in q</small></td> /// <td></td> /// </tr> /// </table> /// <br/>&#160;<br/> /// /// </item> /// /// <item> /// <A NAME="formula_termBoost"></A> /// <b>t.Boost</b> /// is a search time boost of term <i>t</i> in the query <i>q</i> as /// specified in the query text /// (see <A HREF="../../../../../../queryparsersyntax.html#Boosting a Term">query syntax</A>), /// or as set by application calls to /// <see cref="Lucene.Net.Search.Query.Boost" />. /// Notice that there is really no direct API for accessing a boost of one term in a multi term query, /// but rather multi terms are represented in a query as multi /// <see cref="Lucene.Net.Search.TermQuery">TermQuery</see> objects, /// and so the boost of a term in the query is accessible by calling the sub-query /// <see cref="Lucene.Net.Search.Query.Boost" />. /// <br/>&#160;<br/> /// </item> /// /// <item> /// <A NAME="formula_norm"></A> /// <b>norm(t,d)</b> encapsulates a few (indexing time) boost and length factors: /// /// <list type="bullet"> /// <item><b>Document boost</b> - set by calling /// <see cref="Lucene.Net.Documents.Document.Boost">doc.Boost</see> /// before adding the document to the index. /// </item> /// <item><b>Field boost</b> - set by calling /// <see cref="IFieldable.Boost">field.Boost</see> /// before adding the field to a document. /// </item> /// <item><see cref="LengthNorm(String, int)">LengthNorm(field)</see> - computed /// when the document is added to the index in accordance with the number of tokens /// of this field in the document, so that shorter fields contribute more to the score. 
/// LengthNorm is computed by the Similarity class in effect at indexing. /// </item> /// </list> /// /// <p/> /// When a document is added to the index, all the above factors are multiplied. /// If the document has multiple fields with the same name, all their boosts are multiplied together: /// /// <br/>&#160;<br/> /// <table cellpadding="1" cellspacing="0" border="0" align="center"> /// <tr> /// <td valign="middle" align="right" rowspan="1"> /// norm(t,d) &#160; = &#160; /// <see cref="Lucene.Net.Documents.Document.Boost">doc.Boost</see> /// &#160;&#183;&#160; /// <see cref="LengthNorm(String, int)">LengthNorm(field)</see> /// &#160;&#183;&#160; /// </td> /// <td valign="bottom" align="center" rowspan="1"> /// <big><big><big>&#8719;</big></big></big> /// </td> /// <td valign="middle" align="right" rowspan="1"> /// <see cref="IFieldable.Boost">field.Boost</see> /// </td> /// </tr> /// <tr valigh="top"> /// <td></td> /// <td align="center"><small>field <i><b>f</b></i> in <i>d</i> named as <i><b>t</b></i></small></td> /// <td></td> /// </tr> /// </table> /// <br/>&#160;<br/> /// However the resulted <i>norm</i> value is <see cref="EncodeNorm(float)">encoded</see> as a single byte /// before being stored. /// At search time, the norm byte value is read from the index /// <see cref="Lucene.Net.Store.Directory">directory</see> and /// <see cref="DecodeNorm(byte)">decoded</see> back to a float <i>norm</i> value. /// This encoding/decoding, while reducing index size, comes with the price of /// precision loss - it is not guaranteed that decode(encode(x)) = x. /// For instance, decode(encode(0.89)) = 0.75. /// Also notice that search time is too late to modify this <i>norm</i> part of scoring, e.g. by /// using a different <see cref="Similarity" /> for search. 
/// <br/>&#160;<br/> /// </item> /// </list> /// /// </summary> /// <seealso cref="Default"> /// </seealso> /// <seealso cref="Lucene.Net.Index.IndexWriter.Similarity"> /// </seealso> /// <seealso cref="Searcher.Similarity"> /// </seealso> //[Serializable] //Disabled for https://github.com/dotnet/standard/issues/300 public abstract class Similarity { protected Similarity() { InitBlock(); } //[Serializable] //Disabled for https://github.com/dotnet/standard/issues/300 private class AnonymousClassIDFExplanation1:IDFExplanation { public AnonymousClassIDFExplanation1(int df, int max, float idf, Similarity enclosingInstance) { InitBlock(df, max, idf, enclosingInstance); } private void InitBlock(int df, int max, float idf, Similarity enclosingInstance) { this.df = df; this.max = max; this.idf = idf; this.enclosingInstance = enclosingInstance; } private int df; private int max; private float idf; private Similarity enclosingInstance; public Similarity Enclosing_Instance { get { return enclosingInstance; } } //@Override public override System.String Explain() { return "idf(docFreq=" + df + ", maxDocs=" + max + ")"; } //@Override public override float Idf { get { return idf; } } } //[Serializable] //Disabled for https://github.com/dotnet/standard/issues/300 private class AnonymousClassIDFExplanation3:IDFExplanation { public AnonymousClassIDFExplanation3(float fIdf, System.Text.StringBuilder exp, Similarity enclosingInstance) { InitBlock(fIdf, exp, enclosingInstance); } private void InitBlock(float fIdf, System.Text.StringBuilder exp, Similarity enclosingInstance) { this.fIdf = fIdf; this.exp = exp; this.enclosingInstance = enclosingInstance; } private float fIdf; private System.Text.StringBuilder exp; private Similarity enclosingInstance; public Similarity Enclosing_Instance { get { return enclosingInstance; } } //@Override public override float Idf { get { return fIdf; } } //@Override public override System.String Explain() { return exp.ToString(); } } private void InitBlock() { } /// <summary>The Similarity implementation used by default.</summary> private static Similarity defaultImpl = new DefaultSimilarity(); public const int NO_DOC_ID_PROVIDED = - 1; /// <summary>Gets or sets the default Similarity implementation /// used by indexing and search code. /// <p/>This is initially an instance of <see cref="DefaultSimilarity" />. /// </summary> /// <seealso cref="Searcher.Similarity"> /// </seealso> /// <seealso cref="Lucene.Net.Index.IndexWriter.SetSimilarity(Similarity)"> /// </seealso> public static Similarity Default { get { return defaultImpl; } set { defaultImpl = value; } } /// <summary>Cache of decoded bytes. </summary> private static readonly float[] NORM_TABLE = new float[256]; /// <summary>Decodes a normalization factor stored in an index.</summary> /// <seealso cref="EncodeNorm(float)"> /// </seealso> public static float DecodeNorm(byte b) { return NORM_TABLE[b & 0xFF]; // & 0xFF maps negative bytes to positive above 127 } /// <summary>Returns a table for decoding normalization bytes.</summary> /// <seealso cref="EncodeNorm(float)"> /// </seealso> public static float[] GetNormDecoder() { return NORM_TABLE; } /// <summary> Compute the normalization value for a field, given the accumulated /// state of term processing for this field (see <see cref="FieldInvertState" />). /// /// <p/>Implementations should calculate a float value based on the field /// state and then return that value. 
/// /// <p/>For backward compatibility this method by default calls /// <see cref="LengthNorm(String, int)" /> passing /// <see cref="FieldInvertState.Length" /> as the second argument, and /// then multiplies this value by <see cref="FieldInvertState.Boost" />.<p/> /// /// <p/><b>WARNING</b>: This API is new and experimental and may /// suddenly change.<p/> /// /// </summary> /// <param name="field">field name /// </param> /// <param name="state">current processing state for this field /// </param> /// <returns> the calculated float norm /// </returns> public virtual float ComputeNorm(System.String field, FieldInvertState state) { return (float) (state.Boost * LengthNorm(field, state.Length)); } /// <summary>Computes the normalization value for a field given the total number of /// terms contained in a field. These values, together with field boosts, are /// stored in an index and multipled into scores for hits on each field by the /// search code. /// /// <p/>Matches in longer fields are less precise, so implementations of this /// method usually return smaller values when <c>numTokens</c> is large, /// and larger values when <c>numTokens</c> is small. /// /// <p/>Note that the return values are computed under /// <see cref="Lucene.Net.Index.IndexWriter.AddDocument(Lucene.Net.Documents.Document)" /> /// and then stored using /// <see cref="EncodeNorm(float)" />. /// Thus they have limited precision, and documents /// must be re-indexed if this method is altered. /// /// </summary> /// <param name="fieldName">the name of the field /// </param> /// <param name="numTokens">the total number of tokens contained in fields named /// <i>fieldName</i> of <i>doc</i>. /// </param> /// <returns> a normalization factor for hits on this field of this document /// /// </returns> /// <seealso cref="Lucene.Net.Documents.AbstractField.Boost" /> public abstract float LengthNorm(System.String fieldName, int numTokens); /// <summary>Computes the normalization value for a query given the sum of the squared /// weights of each of the query terms. This value is then multipled into the /// weight of each query term. /// /// <p/>This does not affect ranking, but rather just attempts to make scores /// from different queries comparable. /// /// </summary> /// <param name="sumOfSquaredWeights">the sum of the squares of query term weights /// </param> /// <returns> a normalization factor for query weights /// </returns> public abstract float QueryNorm(float sumOfSquaredWeights); /// <summary>Encodes a normalization factor for storage in an index. /// /// <p/>The encoding uses a three-bit mantissa, a five-bit exponent, and /// the zero-exponent point at 15, thus /// representing values from around 7x10^9 to 2x10^-9 with about one /// significant decimal digit of accuracy. Zero is also represented. /// Negative numbers are rounded up to zero. Values too large to represent /// are rounded down to the largest representable value. Positive values too /// small to represent are rounded up to the smallest positive representable /// value. /// /// </summary> /// <seealso cref="Lucene.Net.Documents.AbstractField.Boost" /> /// <seealso cref="Lucene.Net.Util.SmallFloat" /> public static byte EncodeNorm(float f) { return (byte) SmallFloat.FloatToByte315(f); } /// <summary>Computes a score factor based on a term or phrase's frequency in a /// document. This value is multiplied by the <see cref="Idf(int, int)" /> /// factor for each term in the query and these products are then summed to /// form the initial score for a document. 
/// /// <p/>Terms and phrases repeated in a document indicate the topic of the /// document, so implementations of this method usually return larger values /// when <c>freq</c> is large, and smaller values when <c>freq</c> /// is small. /// /// <p/>The default implementation calls <see cref="Tf(float)" />. /// /// </summary> /// <param name="freq">the frequency of a term within a document /// </param> /// <returns> a score factor based on a term's within-document frequency /// </returns> public virtual float Tf(int freq) { return Tf((float) freq); } /// <summary>Computes the amount of a sloppy phrase match, based on an edit distance. /// This value is summed for each sloppy phrase match in a document to form /// the frequency that is passed to <see cref="Tf(float)" />. /// /// <p/>A phrase match with a small edit distance to a document passage more /// closely matches the document, so implementations of this method usually /// return larger values when the edit distance is small and smaller values /// when it is large. /// /// </summary> /// <seealso cref="PhraseQuery.Slop" /> /// <param name="distance">the edit distance of this sloppy phrase match </param> /// <returns> the frequency increment for this match </returns> public abstract float SloppyFreq(int distance); /// <summary>Computes a score factor based on a term or phrase's frequency in a /// document. This value is multiplied by the <see cref="Idf(int, int)" /> /// factor for each term in the query and these products are then summed to /// form the initial score for a document. /// /// <p/>Terms and phrases repeated in a document indicate the topic of the /// document, so implementations of this method usually return larger values /// when <c>freq</c> is large, and smaller values when <c>freq</c> /// is small. /// /// </summary> /// <param name="freq">the frequency of a term within a document /// </param> /// <returns> a score factor based on a term's within-document frequency /// </returns> public abstract float Tf(float freq); /// <summary> Computes a score factor for a simple term and returns an explanation /// for that score factor. /// /// <p/> /// The default implementation uses: /// /// <code> /// idf(searcher.docFreq(term), searcher.MaxDoc); /// </code> /// /// Note that <see cref="Searcher.MaxDoc" /> is used instead of /// <see cref="Lucene.Net.Index.IndexReader.NumDocs()" /> because it is /// proportional to <see cref="Searcher.DocFreq(Term)" /> , i.e., when one is /// inaccurate, so is the other, and in the same direction. /// /// </summary> /// <param name="term">the term in question /// </param> /// <param name="searcher">the document collection being searched /// </param> /// <returns> an IDFExplain object that includes both an idf score factor /// and an explanation for the term. /// </returns> /// <throws> IOException </throws> public virtual IDFExplanation IdfExplain(Term term, Searcher searcher) { int df = searcher.DocFreq(term); int max = searcher.MaxDoc; float idf2 = Idf(df, max); return new AnonymousClassIDFExplanation1(df, max, idf2, this); } /// <summary> Computes a score factor for a phrase. /// /// <p/> /// The default implementation sums the idf factor for /// each term in the phrase. /// /// </summary> /// <param name="terms">the terms in the phrase /// </param> /// <param name="searcher">the document collection being searched /// </param> /// <returns> an IDFExplain object that includes both an idf /// score factor for the phrase and an explanation /// for each term. 
/// </returns> /// <throws> IOException </throws> public virtual IDFExplanation IdfExplain(ICollection<Term> terms, Searcher searcher) { int max = searcher.MaxDoc; float idf2 = 0.0f; System.Text.StringBuilder exp = new System.Text.StringBuilder(); foreach (Term term in terms) { int df = searcher.DocFreq(term); idf2 += Idf(df, max); exp.Append(" "); exp.Append(term.Text); exp.Append("="); exp.Append(df); } float fIdf = idf2; return new AnonymousClassIDFExplanation3(fIdf, exp, this); } /// <summary>Computes a score factor based on a term's document frequency (the number /// of documents which contain the term). This value is multiplied by the /// <see cref="Tf(int)" /> factor for each term in the query and these products are /// then summed to form the initial score for a document. /// /// <p/>Terms that occur in fewer documents are better indicators of topic, so /// implementations of this method usually return larger values for rare terms, /// and smaller values for common terms. /// /// </summary> /// <param name="docFreq">the number of documents which contain the term /// </param> /// <param name="numDocs">the total number of documents in the collection /// </param> /// <returns> a score factor based on the term's document frequency /// </returns> public abstract float Idf(int docFreq, int numDocs); /// <summary>Computes a score factor based on the fraction of all query terms that a /// document contains. This value is multiplied into scores. /// /// <p/>The presence of a large portion of the query terms indicates a better /// match with the query, so implementations of this method usually return /// larger values when the ratio between these parameters is large and smaller /// values when the ratio between them is small. /// /// </summary> /// <param name="overlap">the number of query terms matched in the document /// </param> /// <param name="maxOverlap">the total number of terms in the query /// </param> /// <returns> a score factor based on term overlap with the query /// </returns> public abstract float Coord(int overlap, int maxOverlap); /// <summary> Calculate a scoring factor based on the data in the payload. Overriding implementations /// are responsible for interpreting what is in the payload. Lucene makes no assumptions about /// what is in the byte array. /// <p/> /// The default implementation returns 1. /// /// </summary> /// <param name="docId">The docId currently being scored. If this value is <see cref="NO_DOC_ID_PROVIDED" />, then it should be assumed that the PayloadQuery implementation does not provide document information /// </param> /// <param name="fieldName">The fieldName of the term this payload belongs to /// </param> /// <param name="start">The start position of the payload /// </param> /// <param name="end">The end position of the payload /// </param> /// <param name="payload">The payload byte array to be scored /// </param> /// <param name="offset">The offset into the payload array /// </param> /// <param name="length">The length in the array /// </param> /// <returns> An implementation dependent float to be used as a scoring factor /// /// </returns> public virtual float ScorePayload(int docId, System.String fieldName, int start, int end, byte[] payload, int offset, int length) { return 1; } static Similarity() { { for (int i = 0; i < 256; i++) NORM_TABLE[i] = SmallFloat.Byte315ToFloat((byte) i); } } } }
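// ---------------------------------------------------------------------------------------------
// A minimal sketch of a concrete Similarity, not part of the file above. It fills in the abstract
// members using the formulas the documentation above attributes to DefaultSimilarity
// (tf = freq^0.5, idf = 1 + log(numDocs / (docFreq + 1)), queryNorm = 1 / sumOfSquaredWeights^0.5)
// plus a 1/sqrt(numTokens) length norm so that shorter fields score higher. The class name and
// the particular Coord and SloppyFreq choices are assumptions not spelled out in the
// documentation above.
// ---------------------------------------------------------------------------------------------
namespace Lucene.Net.Search
{
    public class SketchSimilarity : Similarity
    {
        public override float LengthNorm(string fieldName, int numTokens)
        {
            // Shorter fields contribute more, as described in the norm(t,d) section above.
            return numTokens <= 0 ? 1.0f : (float)(1.0 / System.Math.Sqrt(numTokens));
        }

        public override float QueryNorm(float sumOfSquaredWeights)
        {
            // Normalizes scores across queries without changing ranking.
            return (float)(1.0 / System.Math.Sqrt(sumOfSquaredWeights));
        }

        public override float Tf(float freq)
        {
            // More occurrences of a term in a document raise the score, sub-linearly.
            return (float)System.Math.Sqrt(freq);
        }

        public override float SloppyFreq(int distance)
        {
            // Closer sloppy-phrase matches contribute more; this particular falloff is an assumption.
            return 1.0f / (distance + 1);
        }

        public override float Idf(int docFreq, int numDocs)
        {
            // Rarer terms contribute more to the score.
            return (float)(System.Math.Log(numDocs / (double)(docFreq + 1)) + 1.0);
        }

        public override float Coord(int overlap, int maxOverlap)
        {
            // Reward documents that match a larger fraction of the query terms.
            return overlap / (float)maxOverlap;
        }
    }
}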
using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Net; using System.Windows.Forms; using Bloom.Api; using Bloom.Book; using Bloom.Collection; using Bloom.CollectionTab; using Bloom.MiscUI; using Bloom.Registration; using Bloom.Utils; using DesktopAnalytics; using L10NSharp; using Newtonsoft.Json; using Sentry; using SIL.Reporting; namespace Bloom.TeamCollection { // Implements functions used by the HTML/Typescript parts of the Team Collection code. // Review: should this be in web/controllers with all the other API classes, or here with all the other sharing code? public class TeamCollectionApi { private ITeamCollectionManager _tcManager; private BookSelection _bookSelection; // configured by autofac, tells us what book is selected private BookServer _bookServer; private string CurrentUser => TeamCollectionManager.CurrentUser; private BloomWebSocketServer _socketServer; private readonly CurrentEditableCollectionSelection _currentBookCollectionSelection; private CollectionSettings _settings; private LibraryModel _libraryModel; public static TeamCollectionApi TheOneInstance { get; private set; } // Called by autofac, which creates the one instance and registers it with the server. public TeamCollectionApi(CurrentEditableCollectionSelection currentBookCollectionSelection, CollectionSettings settings, BookSelection bookSelection, ITeamCollectionManager tcManager, BookServer bookServer, BloomWebSocketServer socketServer, LibraryModel libraryModel) { _currentBookCollectionSelection = currentBookCollectionSelection; _settings = settings; _tcManager = tcManager; _tcManager.CurrentCollection?.SetupMonitoringBehavior(); _bookSelection = bookSelection; _socketServer = socketServer; _bookServer = bookServer; _libraryModel = libraryModel; TheOneInstance = this; } public void RegisterWithApiHandler(BloomApiHandler apiHandler) { apiHandler.RegisterEndpointHandlerExact("teamCollection/repoFolderPath", HandleRepoFolderPath, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/isTeamCollectionEnabled", HandleIsTeamCollectionEnabled, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/bookStatus", HandleBookStatus, false, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/selectedBookStatus", HandleSelectedBookStatus, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/attemptLockOfCurrentBook", HandleAttemptLockOfCurrentBook, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/checkInCurrentBook", HandleCheckInCurrentBook, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/forgetChangesInSelectedBook", HandleForgetChangesInSelectedBook, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/chooseFolderLocation", HandleChooseFolderLocation, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/createTeamCollection", HandleCreateTeamCollection, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/joinTeamCollection", HandleJoinTeamCollection, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/getLog", HandleGetLog, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/getCollectionName", HandleGetCollectionName, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/showCreateTeamCollectionDialog", HandleShowCreateTeamCollectionDialog, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/reportBadZip", HandleReportBadZip, true); 
apiHandler.RegisterEndpointHandlerExact("teamCollection/showRegistrationDialog", HandleShowRegistrationDialog, true, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/getHistory", HandleGetHistory, true); apiHandler.RegisterEndpointHandlerExact("teamCollection/checkinMessage", HandleCheckinMessage, false); apiHandler.RegisterEndpointHandlerExact("teamCollection/forceUnlock", HandleForceUnlock, false); } private void HandleForceUnlock(ApiRequest request) { if (!_tcManager.CheckConnection()) { request.Failed(); return; } try { var bookStatus = _tcManager.CurrentCollection.GetStatus(BookFolderName); var lockedBy = bookStatus.lockedByFirstName; if (string.IsNullOrEmpty(lockedBy)) lockedBy = bookStatus.lockedBy; // Could be a problem if there's no current book or it's not in the collection folder. // But in that case, we don't show the UI that leads to this being called. _tcManager.CurrentCollection.ForceUnlock(BookFolderName); BookHistory.AddEvent(_bookSelection.CurrentSelection, BookHistoryEventType.ForcedUnlock, $"Admin force-unlocked while checked out to {lockedBy}."); UpdateUiForBook(); Analytics.Track("TeamCollectionRevertOtherCheckout", new Dictionary<string, string>() { { "CollectionId", _settings?.CollectionId }, { "CollectionName", _settings?.CollectionName }, { "Backend", _tcManager?.CurrentCollection?.GetBackendType() }, { "User", CurrentUser }, { "BookId", _bookSelection?.CurrentSelection?.ID }, { "BookName", _bookSelection?.CurrentSelection?.Title } }); request.PostSucceeded(); } catch (Exception e) { NonFatalProblem.Report(ModalIf.All, PassiveIf.All, "Could not force unlock", null, e, true); request.Failed("could not unlock"); } } /// <summary> /// When the user edits the pending checkin message, save it away in the book history database. /// </summary> /// <param name="request"></param> private void HandleCheckinMessage(ApiRequest request) { var message = request.GetPostStringOrNull() ?? ""; BookHistory.SetPendingCheckinMessage(request.CurrentBook, message); request.PostSucceeded(); } public static string BadZipPath; private void HandleReportBadZip(ApiRequest request) { var fileEncoded = request.Parameters["file"]; var file = UrlPathString.CreateFromUrlEncodedString(fileEncoded).NotEncoded; NonFatalProblem.Report(ModalIf.All, PassiveIf.All, (_tcManager.CurrentCollection as FolderTeamCollection) .GetSimpleBadZipFileMessage(Path.GetFileNameWithoutExtension(file)),additionalFilesToInclude: new[] { file }); request.PostSucceeded(); } private void HandleShowRegistrationDialog(ApiRequest request) { using (var dlg = new RegistrationDialog(false, _tcManager.UserMayChangeEmail)) { dlg.ShowDialog(); } request.PostSucceeded(); } private void HandleShowCreateTeamCollectionDialog(ApiRequest request) { ReactDialog.ShowOnIdle("createTeamCollectionDialogBundle", new { defaultRepoFolder = DropboxUtils.GetDropboxFolderPath() }, 600, 580, null, null, "Create Team Collection"); request.PostSucceeded(); } private void HandleGetCollectionName(ApiRequest request) { request.ReplyWithText(_settings.CollectionName); } private void HandleGetLog(ApiRequest request) { /* keeping this around as a comment to make it easier to work on the display _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.History, "", "blah blah blah blah"); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.History, "", "Another message. I just simplified this English, but the surrounding code would lead me to think. 
I just simplified this English, but the surrounding code would lead me to think."); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.Error, "", "An error of some sort. I just simplified this English, but the surrounding code would lead me to think. I just simplified this English, but the surrounding code would lead me to think."); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.Error, "", "An error of some sort. I just simplified this English, but the surrounding code would lead me to think. I just simplified this English, but the surrounding code would lead me to think."); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.History, "", "Another message."); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.NewStuff, "", "a new stuff message."); _tcManager.MessageLog.WriteMessage(MessageAndMilestoneType.History, "", "Another message."); */ try { if (_tcManager.MessageLog == null) { request.Failed(); return; } request.ReplyWithJson(JsonConvert.SerializeObject(_tcManager.MessageLog.GetProgressMessages())); } catch (Exception e) { // Not sure what to do here: getting the log should never crash. Logger.WriteError("TeamCollectionApi.HandleGetLog() crashed", e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("get log failed"); } } public void HandleRepoFolderPath(ApiRequest request) { try { Debug.Assert(request.HttpMethod == HttpMethods.Get, "only get is implemented for the teamCollection/repoFolderPath api endpoint"); request.ReplyWithText(_tcManager.CurrentCollectionEvenIfDisconnected?.RepoDescription ?? ""); } catch (Exception e) { // Not sure what to do here: getting the repo's folder path should never crash. Logger.WriteError("TeamCollectionApi.HandleRepoFolderPath() crashed", e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("get repo folder path failed"); } } private void HandleJoinTeamCollection(ApiRequest request) { try { FolderTeamCollection.JoinCollectionTeam(); ReactDialog.CloseCurrentModal(); Analytics.Track("TeamCollectionJoin", new Dictionary<string, string>(){ {"CollectionId", _settings?.CollectionId}, {"CollectionName", _settings?.CollectionName}, {"Backend", _tcManager?.CurrentCollection?.GetBackendType()}, {"User", CurrentUser} }); request.PostSucceeded(); } catch (Exception e) { // Not sure what to do here: joining the collection crashed. Logger.WriteError("TeamCollectionApi.HandleJoinTeamCollection() crashed", e); var msg = LocalizationManager.GetString("TeamCollection.ErrorJoining", "Could not join Team Collection"); ErrorReport.NotifyUserOfProblem(e, msg); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); // Since we have already informed the user above, it is better to just report a success here. // Otherwise, they will also get a toast. request.PostSucceeded(); } } public void HandleIsTeamCollectionEnabled(ApiRequest request) { try { // We don't need any of the Sharing UI if the selected book isn't in the editable // collection (or if the collection doesn't have a Team Collection at all). request.ReplyWithBoolean(_tcManager.CurrentCollectionEvenIfDisconnected != null && (_bookSelection.CurrentSelection == null || _bookSelection.CurrentSelection.IsEditable)); } catch (Exception e) { // Not sure what to do here: checking whether TeamCollection is enabled should never crash. 
Logger.WriteError("TeamCollectionApi.HandleIsTeamCollectionEnabled() crashed", e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("checking if Team Collections are enabled failed"); } } // needs to be thread-safe public void HandleBookStatus(ApiRequest request) { try { if (!TeamCollectionManager.IsRegistrationSufficient()) { request.Failed(HttpStatusCode.ServiceUnavailable, "Team Collection not active"); return; } var bookFolderName = request.RequiredParam("folderName"); request.ReplyWithJson(GetBookStatusJson(bookFolderName, null)); } catch (Exception e) { // Not sure what to do here: getting the current book status crashed. Logger.WriteError("TeamCollectionApi.HandleCurrentBookStatus() crashed", e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("getting the book status failed"); } } // Needs to be thread-safe private string GetBookStatusJson(string bookFolderName, Book.Book book) { string whoHasBookLocked = null; DateTime whenLocked = DateTime.MaxValue; bool problem = false; // bookFolderName may be null when no book is selected, e.g., after deleting one. var status = bookFolderName == null ? null :_tcManager.CurrentCollection?.GetStatus(bookFolderName); // At this level, we know this is the path to the .bloom file in the repo // (though if we implement another backend, we'll have to generalize the notion somehow). // For the Javascript, it's just an argument to pass to // CommonMessages.GetPleaseClickHereForHelpMessage(). It's only used if hasInvalidRepoData is non-empty. string clickHereArg = ""; var folderTC = _tcManager.CurrentCollection as FolderTeamCollection; if (folderTC != null && bookFolderName != null) { clickHereArg = UrlPathString.CreateFromUnencodedString(folderTC.GetPathToBookFileInRepo(bookFolderName)) .UrlEncoded; } string hasInvalidRepoData = (status?.hasInvalidRepoData ?? false) ? (folderTC)?.GetCouldNotOpenCorruptZipMessage() : ""; if (bookFolderName == null) { return JsonConvert.SerializeObject( new { // Keep this in sync with IBookTeamCollectionStatus defined in TeamCollectionApi.tsx who = "", whoFirstName = "", whoSurname = "", when = DateTime.Now.ToShortDateString(), where = "", currentUser = CurrentUser, currentUserName = TeamCollectionManager.CurrentUserFirstName, currentMachine = TeamCollectionManager.CurrentMachine, problem = "", hasInvalidRepoData = false, clickHereArg = "", changedRemotely = false, disconnected = false, newLocalBook = true, checkinMessage = "", isUserAdmin = _tcManager.OkToEditCollectionSettings }); } bool newLocalBook = false; try { whoHasBookLocked = _tcManager.CurrentCollectionEvenIfDisconnected?.WhoHasBookLocked(bookFolderName); // It's debatable whether to use CurrentCollectionEvenIfDisconnected everywhere. For now, I've only changed // it for the two bits of information actually needed by the status panel when disconnected. whenLocked = _tcManager.CurrentCollection?.WhenWasBookLocked(bookFolderName) ?? DateTime.MaxValue; newLocalBook = whoHasBookLocked == TeamCollection.FakeUserIndicatingNewBook; if (newLocalBook) whoHasBookLocked = CurrentUser; problem = _tcManager.CurrentCollection?.HasLocalChangesThatMustBeClobbered(bookFolderName) ?? 
false; } catch (Exception e) when (e is ICSharpCode.SharpZipLib.Zip.ZipException || e is IOException) { hasInvalidRepoData = (_tcManager.CurrentCollection as FolderTeamCollection)?.GetCouldNotOpenCorruptZipMessage(); } // If the request asked for the book by name, we don't have an actual Book object. // However, it happens that those requests don't need the checkinMessage. var checkinMessage = book == null ? "" : BookHistory.GetPendingCheckinMessage(book); return JsonConvert.SerializeObject( new { // Keep this in sync with IBookTeamCollectionStatus defined in TeamCollectionApi.tsx who = whoHasBookLocked, whoFirstName = _tcManager.CurrentCollection?.WhoHasBookLockedFirstName(bookFolderName), whoSurname = _tcManager.CurrentCollection?.WhoHasBookLockedSurname(bookFolderName), when = whenLocked.ToLocalTime().ToShortDateString(), where = _tcManager.CurrentCollectionEvenIfDisconnected?.WhatComputerHasBookLocked(bookFolderName), currentUser = CurrentUser, currentUserName = TeamCollectionManager.CurrentUserFirstName, currentMachine = TeamCollectionManager.CurrentMachine, problem, hasInvalidRepoData, clickHereArg, changedRemotely = _tcManager.CurrentCollection?.HasBeenChangedRemotely(bookFolderName), disconnected = _tcManager.CurrentCollectionEvenIfDisconnected?.IsDisconnected, newLocalBook, checkinMessage, isUserAdmin = _tcManager.OkToEditCollectionSettings }); } public void HandleSelectedBookStatus(ApiRequest request) { try { if (!TeamCollectionManager.IsRegistrationSufficient()) { request.Failed("not registered"); return; } request.ReplyWithJson(GetBookStatusJson(BookFolderName, request.CurrentBook)); } catch (Exception e) { // Not sure what to do here: getting the current book status crashed. Logger.WriteError("TeamCollectionApi.HandleSelectedBookStatus() crashed", e); SentrySdk.AddBreadcrumb(string.Format("Something went wrong for {0}", request.LocalPath())); SentrySdk.CaptureException(e); request.Failed("getting the current book status failed"); } } public void HandleGetHistory(ApiRequest request) { var x = CollectionHistory.GetAllEvents(_currentBookCollectionSelection.CurrentSelection) .OrderByDescending(b => b.When).ToArray(); request.ReplyWithJson(JsonConvert.SerializeObject( x )); } private string BookFolderName => Path.GetFileName(_bookSelection.CurrentSelection?.FolderPath); public void HandleAttemptLockOfCurrentBook(ApiRequest request) { if (!_tcManager.CheckConnection()) { request.Failed(); return; } try { // Could be a problem if there's no current book or it's not in the collection folder. // But in that case, we don't show the UI that leads to this being called. var success = _tcManager.CurrentCollection.AttemptLock(BookFolderName); if (success) { UpdateUiForBook(); Analytics.Track("TeamCollectionCheckoutBook", new Dictionary<string, string>() { {"CollectionId", _settings?.CollectionId}, {"CollectionName", _settings?.CollectionName}, {"Backend", _tcManager?.CurrentCollection?.GetBackendType()}, {"User", CurrentUser}, {"BookId", _bookSelection?.CurrentSelection?.ID}, {"BookName", _bookSelection?.CurrentSelection?.Title} }); } request.ReplyWithBoolean(success); } catch (Exception e) { var msg = MakeLockFailedMessageFromException(e, BookFolderName); // Pushing an error into the log will show the Reload Collection button. It's not obvious this // is useful here, since we don't know exactly what went wrong. However, it at least gives the user // the option to try it. 
var log = _tcManager?.CurrentCollection?.MessageLog; if (log != null) log.WriteMessage(msg); Logger.WriteError(msg.TextForDisplay, e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("lock failed"); } } // internal, and taking bookFolder (which is always this.BookFolderName in production) for ease of testing. internal TeamCollectionMessage MakeLockFailedMessageFromException(Exception e, string bookFolder) { var msgId = "TeamCollection.CheckoutError"; var msgEnglish = "Bloom was not able to check out \"{0}\"."; var syncAgent = ""; // becomes SyncAgent for longer versions of message that need it if (e is FolderTeamCollection.CannotLockException cannotLockException) { var msgTryAgain = LocalizationManager.GetString("Common.TryAgainOrRestart", "Please try again later. If the problem continues, restart your computer."); msgId = null; // this branch uses a 3-part message which can't be relocalized later. string part2; if (cannotLockException.SyncAgent != "Unknown") { part2 = string.Format(LocalizationManager.GetString("TeamCollection.AgentSynchronizing", "Some other program may be busy with it. This may just be {0} synchronizing the file."), cannotLockException.SyncAgent); } else { part2 = LocalizationManager.GetString("TeamCollection.SomethingSynchronizing", "Some other program may be busy with it. This may just be something synchronizing the file."); } msgEnglish += " " + part2 + " " + msgTryAgain; } var msg = new TeamCollectionMessage(MessageAndMilestoneType.Error, msgId, msgEnglish, Path.GetFileName(bookFolder), syncAgent); return msg; } public void HandleForgetChangesInSelectedBook(ApiRequest request) { try { if (!_tcManager.CheckConnection()) { request.Failed(); return; } // Enhance: do we need progress here? var bookName = Path.GetFileName(_bookSelection.CurrentSelection.FolderPath); // Todo before 5.1: forgetting changes might involve undoing a rename. // If so, ForgetChanges will return a list of folders affected (up to 3). // We need to notify the new collection tab to update its book list // and also possibly update the current selection, and in case we undid // things in the book, we should update the preview. var modifiedBookFolders =_tcManager.CurrentCollection.ForgetChangesCheckin(bookName); string updatedBookFolder = null; var finalBookName = bookName; if (modifiedBookFolders.Count > 0) { updatedBookFolder = modifiedBookFolders[0]; finalBookName = Path.GetFileName(updatedBookFolder); } if (finalBookName != bookName) { _bookSelection.CurrentSelection.Storage.RestoreBookName(finalBookName); } // We've restored an old meta.json, things might be different...book titles for one. // This needs to come AFTER RestoreBookName, which fixes the book's FolderPath // so it knows where to load the restored meta.json from. But BEFORE // UpdateLabelOfBookInEditableCollection, which wants to use the restored BookInfo // to get a name (and fix the one in the Model). _bookSelection.CurrentSelection.UpdateBookInfoFromDisk(); // We need to do this as early as possible so that as notifications start to // go to the UI and it starts to request things from our server the answers are // up to date. 
_bookSelection.CurrentSelection.ReloadFromDisk(updatedBookFolder); if (finalBookName != bookName) { _libraryModel.UpdateLabelOfBookInEditableCollection(_bookSelection.CurrentSelection); } BookHistory.SetPendingCheckinMessage(_bookSelection.CurrentSelection, ""); UpdateUiForBook(reloadFromDisk:false, renamedTo: updatedBookFolder); // We need to do this after updating the rest of the UI, so the button we're // looking for has been adjusted. _tcManager.CurrentCollection.UpdateBookStatus(finalBookName, true); request.PostSucceeded(); } catch (Exception ex) { var msgId = "TeamCollection.ErrorForgettingChanges"; var msgEnglish = "Error forgetting changes for {0}: {1}"; var log = _tcManager?.CurrentCollection?.MessageLog; // Pushing an error into the log will show the Reload Collection button. It's not obvious this // is useful here, since we don't know exactly what went wrong. However, it at least gives the user // the option to try it. if (log != null) log.WriteMessage(MessageAndMilestoneType.Error, msgId, msgEnglish, _bookSelection?.CurrentSelection?.FolderPath, ex.Message); Logger.WriteError(String.Format(msgEnglish, _bookSelection?.CurrentSelection?.FolderPath, ex.Message), ex); request.Failed("forget changes failed"); } } public void HandleCheckInCurrentBook(ApiRequest request) { Action<float> reportCheckinProgress = (fraction) => { dynamic messageBundle = new DynamicJson(); messageBundle.fraction = fraction; _socketServer.SendBundle("checkinProgress", "progress", messageBundle); // The status panel is supposed to be showing a progress bar in response to getting the bundle, // but since we're doing the checkin on the UI thread, it doesn't get painted without this. Application.DoEvents(); }; try { // Right before calling this API, the status panel makes a change that // should make the progress bar visible. But this method is running on // the UI thread so without this call it won't appear until later, when // we have Application.DoEvents() as part of reporting progress. We do // quite a bit on large books before the first file is written to the // zip, so one more DoEvents() here lets the bar appear at once. Application.DoEvents(); _bookSelection.CurrentSelection.Save(); if (!_tcManager.CheckConnection()) { request.Failed(); return; } var bookName = Path.GetFileName(_bookSelection.CurrentSelection.FolderPath); if (_tcManager.CurrentCollection.OkToCheckIn(bookName)) { // review: not super happy about this being here in the api. Was stymied by // PutBook not knowing about the actual book object, but maybe that could be passed in. // It's important that this is done BEFORE the checkin: we want other users to see the // comment, and NOT see the pending comment as if it was their own if they check out. 
var message = BookHistory.GetPendingCheckinMessage(_bookSelection.CurrentSelection); BookHistory.AddEvent(_bookSelection.CurrentSelection, BookHistoryEventType.CheckIn, message); BookHistory.SetPendingCheckinMessage(_bookSelection.CurrentSelection, ""); _tcManager.CurrentCollection.PutBook(_bookSelection.CurrentSelection.FolderPath, true, false, reportCheckinProgress); reportCheckinProgress(0); // hides the progress bar (important if a different book has been selected that is still checked out) Analytics.Track("TeamCollectionCheckinBook", new Dictionary<string, string>(){ {"CollectionId", _settings?.CollectionId}, {"CollectionName", _settings?.CollectionName}, {"Backend", _tcManager?.CurrentCollection?.GetBackendType()}, {"User", CurrentUser}, {"BookId", _bookSelection?.CurrentSelection.ID }, {"BookName", _bookSelection?.CurrentSelection.Title } }); } else { // We can't check in! The system has broken down...perhaps conflicting checkouts while offline. // Save our version in Lost-and-Found _tcManager.CurrentCollection.PutBook(_bookSelection.CurrentSelection.FolderPath, false, true, reportCheckinProgress); reportCheckinProgress(0); // cleans up panel for next time // overwrite it with the current repo version. _tcManager.CurrentCollection.CopyBookFromRepoToLocal(bookName, dialogOnError:true); // Force a full reload of the book from disk and update the UI to match. _bookSelection.SelectBook(_bookServer.GetBookFromBookInfo(_bookSelection.CurrentSelection.BookInfo, true)); var msg = LocalizationManager.GetString("TeamCollection.ConflictingEditOrCheckout", "Someone else has edited this book or checked it out even though you were editing it! Your changes have been saved to Lost and Found"); ErrorReport.NotifyUserOfProblem(msg); Analytics.Track("TeamCollectionConflictingEditOrCheckout", new Dictionary<string, string>() { {"CollectionId", _settings?.CollectionId}, {"CollectionName", _settings?.CollectionName}, {"Backend", _tcManager?.CurrentCollection?.GetBackendType()}, {"User", CurrentUser}, {"BookId", _bookSelection?.CurrentSelection?.ID}, {"BookName", _bookSelection?.CurrentSelection?.Title} }); } UpdateUiForBook(); request.PostSucceeded(); Application.Idle += OnIdleConnectionCheck; } catch (Exception e) { reportCheckinProgress(0); // cleans up panel progress indicator var msgId = "TeamCollection.ErrorCheckingBookIn"; var msgEnglish = "Error checking in {0}: {1}"; var log = _tcManager?.CurrentCollection?.MessageLog; // Pushing an error into the log will show the Reload Collection button. It's not obvious this // is useful here, since we don't know exactly what went wrong. However, it at least gives the user // the option to try it. if (log != null) log.WriteMessage(MessageAndMilestoneType.Error, msgId, msgEnglish, _bookSelection?.CurrentSelection?.FolderPath, e.Message); Logger.WriteError(String.Format(msgEnglish, _bookSelection?.CurrentSelection?.FolderPath, e.Message), e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()} ({_bookSelection?.CurrentSelection?.FolderPath})"); request.Failed("checkin failed"); } } private void OnIdleConnectionCheck(object sender, EventArgs e) { Application.Idle -= OnIdleConnectionCheck; // BL-10704: In case the Internet went away while we were trying to CheckIn a book... // This will at least signal to the user in the Dropbox case, that while his checkin // may have succeeded, his colleagues won't know about it until the Internet is up again. 
// If we don't do it "OnIdle", the book status pane doesn't reflect that we actually did // (probably, assuming we are on Dropbox, anyway) complete the checkin. _tcManager.CheckConnection(); } // Tell the CollectionSettingsDialog that we should reopen the collection now private Action _callbackToReopenCollection; public void SetCallbackToReopenCollection(Action callback) { _callbackToReopenCollection = callback; } public void HandleChooseFolderLocation(ApiRequest request) { try { string sharedFolder; // One of the few places that knows we're using a particular implementation // of TeamRepo. But we have to know that to create it. And of course the user // has to chose a folder to get things started. // We'll need a different API or something similar if we ever want to create // some other kind of repo. using (var dlg = new FolderBrowserDialog()) { // Default to the Dropbox folder if one is found. var dropboxFolder = DropboxUtils.GetDropboxFolderPath(); if (!String.IsNullOrEmpty(dropboxFolder)) dlg.SelectedPath = dropboxFolder; dlg.ShowNewFolderButton = true; dlg.Description = LocalizationManager.GetString("TeamCollection.SelectFolder", "Select or create the folder where this collection will be shared"); if (DialogResult.OK != dlg.ShowDialog()) { request.Failed(); return; } sharedFolder = dlg.SelectedPath; } // We send the result through a websocket rather than simply returning it because // if the user is very slow (one site said FF times out after 90s) the browser may // abandon the request before it completes. The POST result is ignored and the // browser simply listens to the socket. // We'd prefer this request to return immediately and set a callback to run // when the dialog closes and handle the results, but FolderBrowserDialog // does not offer such an API. Instead, we just ignore any timeout // in our Javascript code. dynamic messageBundle = new DynamicJson(); messageBundle.repoFolderPath = sharedFolder; messageBundle.problem = ProblemsWithLocation(sharedFolder); // This clientContext must match what is being listened for in CreateTeamCollection.tsx _socketServer.SendBundle("teamCollectionCreate", "shared-folder-path", messageBundle); request.PostSucceeded(); } catch (Exception e) { // Not sure what to do here: choosing the collection folder should never crash. Logger.WriteError("TeamCollectionApi.HandleChooseFolderLocation() crashed", e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); request.Failed("choose folder location failed"); } } internal string ProblemsWithLocation(string sharedFolder) { // For now we use this generic message, because it's too hard to come up with concise // understandable messages explaining why these locations are a problem. var defaultMessage = LocalizationManager.GetString("TeamCollection.ProblemLocation", "There is a problem with this location"); try { if (Directory.EnumerateFiles(sharedFolder, "*.JoinBloomTC").Any()) { return defaultMessage; //return LocalizationManager.GetString("TeamCollection.AlreadyTC", // "This folder appears to already be in use as a Team Collection"); } if (Directory.EnumerateFiles(sharedFolder, "*.bloomCollection").Any()) { return defaultMessage; //return LocalizationManager.GetString("TeamCollection.LocalCollection", // "This appears to be a local Bloom collection. 
The Team Collection must be created in a distinct place."); } if (Directory.Exists(_tcManager.PlannedRepoFolderPath(sharedFolder))) { return defaultMessage; //return LocalizationManager.GetString("TeamCollection.TCExists", // "There is already a Folder in that location with the same name as this collection"); } // We're not in a big hurry here, and the most decisive test that we can actually put things in this // folder is to do it. var testFolder = Path.Combine(sharedFolder, "test"); Directory.CreateDirectory(testFolder); File.WriteAllText(Path.Combine(testFolder, "test"), "This is a test"); SIL.IO.RobustIO.DeleteDirectoryAndContents(testFolder); } catch (Exception ex) { // This might also catch errors such as not having permission to enumerate things // in the directory. return LocalizationManager.GetString("TeamCollection.NoWriteAccess", "Bloom does not have permission to write to the selected folder. The system reported " + ex.Message); } return ""; } public void HandleCreateTeamCollection(ApiRequest request) { string repoFolderParentPath = null; try { if (!TeamCollection.PromptForSufficientRegistrationIfNeeded()) { request.PostSucceeded(); return; } repoFolderParentPath = request.RequiredPostString(); _tcManager.ConnectToTeamCollection(repoFolderParentPath, _settings.CollectionId); _callbackToReopenCollection?.Invoke(); Analytics.Track("TeamCollectionCreate", new Dictionary<string, string>() { {"CollectionId", _settings?.CollectionId}, {"CollectionName", _settings?.CollectionName}, {"Backend", _tcManager?.CurrentCollection?.GetBackendType()}, {"User", CurrentUser} }); request.PostSucceeded(); } catch (Exception e) { var msgEnglish = "Error creating Team Collection {0}: {1}"; var msgFmt = LocalizationManager.GetString("TeamCollection.ErrorCreating", msgEnglish); ErrorReport.NotifyUserOfProblem(e, msgFmt, repoFolderParentPath, e.Message); Logger.WriteError(String.Format(msgEnglish, repoFolderParentPath, e.Message), e); NonFatalProblem.ReportSentryOnly(e, $"Something went wrong for {request.LocalPath()}"); // Since we have already informed the user above, it is better to just report a success here. // Otherwise, they will also get a toast. request.PostSucceeded(); } } // Called when we cause the book's status to change, so things outside the HTML world, like visibility of the // "Edit this book" button, can change appropriately. Pretending the user chose a different book seems to // do all the necessary stuff for now. private void UpdateUiForBook(bool reloadFromDisk = false, string renamedTo = null) { // Todo: This is not how we want to do this. Probably the UI should listen for changes to the status of books, // whether selected or not, talking to the repo directly. if (Form.ActiveForm == null) { // On Linux (at least for Bionic), Form.ActiveForm can sometimes be null when // this executes. The following loop seems to be as simple a fix as possible. foreach (var form in Application.OpenForms) { if (form is Shell shell) { shell.Invoke((Action)(() => _bookSelection.InvokeSelectionChanged(false))); return; } } } Form.ActiveForm.Invoke((Action) (() => { if (reloadFromDisk) _bookSelection.CurrentSelection.ReloadFromDisk(renamedTo); _bookSelection.InvokeSelectionChanged(false); })); } } }
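// --- Illustrative sketch, not part of the Bloom sources above -------------------------------
// GetBookStatusJson serializes an anonymous object whose property names must stay in sync with
// IBookTeamCollectionStatus in TeamCollectionApi.tsx. The helper below is a hypothetical example
// of how a test might read one of those status strings back, using only Newtonsoft.Json (which
// the code above already uses); the property names are taken from the anonymous object above,
// and nothing here is part of the real Bloom API surface.
internal static class BookStatusJsonSketch
{
    public static bool IsCheckedOutToSomeoneElse(string statusJson)
    {
        var status = Newtonsoft.Json.Linq.JObject.Parse(statusJson);
        var who = (string)status["who"];                  // user who has the book locked in the repo
        var currentUser = (string)status["currentUser"];  // user registered on this machine
        return !string.IsNullOrEmpty(who) && who != currentUser;
    }
}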
using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Text; using System.Xml; namespace FileHelpers.Dynamic { /// <summary>Base class for the field converters</summary> [EditorBrowsable(EditorBrowsableState.Never)] public abstract class FieldBuilder { [DebuggerBrowsable(DebuggerBrowsableState.Never)] private string mFieldName; [DebuggerBrowsable(DebuggerBrowsableState.Never)] private string mFieldType; /// <summary> /// Create a field of Name with type /// </summary> /// <param name="fieldName">name of the field</param> /// <param name="fieldType">Type of the field</param> internal FieldBuilder(string fieldName, Type fieldType) { fieldName = fieldName.Trim(); if (ValidIdentifierValidator.ValidIdentifier(fieldName) == false) { throw new FileHelpersException(Messages.Errors.InvalidIdentifier .Identifier(fieldName) .Text); } mFieldName = fieldName; mFieldType = ClassBuilder.TypeToString(fieldType); } /// <summary> /// Create a field of name and type /// </summary> /// <param name="fieldName">Name of the field</param> /// <param name="fieldType">Type of the field</param> internal FieldBuilder(string fieldName, string fieldType) { fieldName = fieldName.Trim(); if (ValidIdentifierValidator.ValidIdentifier(fieldName) == false) { throw new FileHelpersException(Messages.Errors.InvalidIdentifier .Identifier(fieldName) .Text); } if (ValidIdentifierValidator.ValidIdentifier(fieldType, true) == false) { throw new FileHelpersException(Messages.Errors.InvalidIdentifier .Identifier(fieldType) .Text); } mFieldName = fieldName; mFieldType = fieldType; } #region TrimMode [DebuggerBrowsable(DebuggerBrowsableState.Never)] private TrimMode mTrimMode = TrimMode.None; /// <summary>Indicates the TrimMode for the field.</summary> public TrimMode TrimMode { get { return mTrimMode; } set { mTrimMode = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private string mTrimChars = " \t"; /// <summary>Indicates the trim chars used if TrimMode is set.</summary> public string TrimChars { get { return mTrimChars; } set { mTrimChars = value; } } #endregion [DebuggerBrowsable(DebuggerBrowsableState.Never)] internal int mFieldIndex = -1; /// <summary>The position index inside the class.</summary> public int FieldIndex { get { return mFieldIndex; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private bool mFieldInNewLine = false; /// <summary>Indicates that this field is at the beginning of a new line.</summary> public bool FieldInNewLine { get { return mFieldInNewLine; } set { mFieldInNewLine = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private bool mFieldHidden = false; /// <summary>Indicates that this field must be ignored by the engine.</summary> public bool FieldHidden { get { return mFieldHidden; } set { mFieldHidden = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private bool mFieldValueDiscarded = false; /// <summary>Discards the values for the target field.</summary> public bool FieldValueDiscarded { get { return mFieldValueDiscarded; } set { mFieldValueDiscarded = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private bool mFieldOptional = false; /// <summary>Indicates that this field is optional.</summary> public bool FieldOptional { get { return mFieldOptional; } set { mFieldOptional = value; } } /// <summary>Used to create the converter for the current field.</summary> public ConverterBuilder Converter { get { return mConverter; } } /// <summary>The name of the 
field.</summary> public string FieldName { get { return mFieldName; } set { mFieldName = value; } } /// <summary>The Type of the field</summary> public string FieldType { get { return mFieldType; } set { mFieldType = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private object mFieldNullValue = null; /// <summary>The null value of the field when their value not is in the file.</summary> public object FieldNullValue { get { return mFieldNullValue; } set { mFieldNullValue = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private bool mFieldValidateIsNotEmpty = false; /// <summary>Indicates that the field cannot be empty.</summary> public bool FieldValidateIsNotEmpty { get { return mFieldValidateIsNotEmpty; } set { mFieldValidateIsNotEmpty = value; } } [DebuggerBrowsable(DebuggerBrowsableState.Never)] private readonly ConverterBuilder mConverter = new ConverterBuilder(); /// <summary> /// Create the field with attributes so that it can be added to the class /// </summary> /// <param name="lang">Language C# of Visual Basic</param> /// <returns>Field as text</returns> internal string GetFieldCode(NetLanguage lang) { var sb = new StringBuilder(100); var attbs = new AttributesBuilder(lang); AddAttributesInternal(attbs, lang); AddAttributesCode(attbs, lang); sb.Append(attbs.GetAttributesCode()); NetVisibility visi = mVisibility; string currentName = mFieldName; if (mClassBuilder.GenerateProperties) { visi = NetVisibility.Private; currentName = "m" + mFieldName; } switch (lang) { case NetLanguage.VbNet: sb.Append(ClassBuilder.GetVisibility(lang, visi) + currentName + " As " + mFieldType); break; case NetLanguage.CSharp: sb.Append(ClassBuilder.GetVisibility(lang, visi) + mFieldType + " " + currentName + ";"); break; default: break; } sb.Append(StringHelper.NewLine); if (mClassBuilder.GenerateProperties) { sb.Append(StringHelper.NewLine); switch (lang) { case NetLanguage.VbNet: sb.Append("Public Property " + mFieldName + " As " + mFieldType); sb.Append(StringHelper.NewLine); sb.Append(" Get"); sb.Append(StringHelper.NewLine); sb.Append(" Return m" + mFieldName); sb.Append(StringHelper.NewLine); sb.Append(" End Get"); sb.Append(StringHelper.NewLine); sb.Append(" Set (value As " + mFieldType + ")"); sb.Append(StringHelper.NewLine); sb.Append(" m" + mFieldName + " = value"); sb.Append(StringHelper.NewLine); sb.Append(" End Set"); sb.Append(StringHelper.NewLine); sb.Append("End Property"); break; case NetLanguage.CSharp: sb.Append("public " + mFieldType + " " + mFieldName); sb.Append(StringHelper.NewLine); sb.Append("{"); sb.Append(StringHelper.NewLine); sb.Append(" get { return m" + mFieldName + "; }"); sb.Append(StringHelper.NewLine); sb.Append(" set { m" + mFieldName + " = value; }"); sb.Append(StringHelper.NewLine); sb.Append("}"); break; default: break; } sb.Append(StringHelper.NewLine); sb.Append(StringHelper.NewLine); } return sb.ToString(); } /// <summary> /// Allow child classes to add attributes at the right spot /// </summary> /// <param name="attbs">Attributes added here</param> /// <param name="lang">Language C# or Visual Basic</param> internal abstract void AddAttributesCode(AttributesBuilder attbs, NetLanguage lang); /// <summary> /// Add the general attributes to the field /// </summary> /// <param name="attbs">Attributes added here</param> /// <param name="lang">Language C# or Visual Basic</param> private void AddAttributesInternal(AttributesBuilder attbs, NetLanguage lang) { if (mFieldOptional) attbs.AddAttribute("FieldOptional()"); if (mFieldHidden) 
attbs.AddAttribute("FieldHidden()"); if (mFieldValueDiscarded) attbs.AddAttribute("FieldValueDiscarded()"); if (mFieldInNewLine) attbs.AddAttribute("FieldInNewLine()"); if (mFieldValidateIsNotEmpty) attbs.AddAttribute("FieldValidateIsNotEmpty()"); if (mFieldNullValue != null) { if (mFieldNullValue is string) attbs.AddAttribute("FieldNullValue(\"" + mFieldNullValue.ToString() + "\")"); else { string t = ClassBuilder.TypeToString(mFieldNullValue.GetType()); string gt = string.Empty; if (lang == NetLanguage.CSharp) gt = "typeof(" + t + ")"; else if (lang == NetLanguage.VbNet) gt = "GetType(" + t + ")"; attbs.AddAttribute("FieldNullValue(" + gt + ", \"" + mFieldNullValue.ToString() + "\")"); } } attbs.AddAttribute(mConverter.GetConverterCode(lang)); if (mTrimMode != TrimMode.None) { if (" \t" == mTrimChars) attbs.AddAttribute("FieldTrim(TrimMode." + mTrimMode.ToString() + ")"); else { attbs.AddAttribute("FieldTrim(TrimMode." + mTrimMode.ToString() + ", \"" + mTrimChars.ToString() + "\")"); } } } /// <summary> /// Parent class of this field /// </summary> [DebuggerBrowsable(DebuggerBrowsableState.Never)] internal ClassBuilder mClassBuilder; [DebuggerBrowsable(DebuggerBrowsableState.Never)] private NetVisibility mVisibility = NetVisibility.Public; /// <summary> /// Gets or sets the visibility of the field. /// </summary> public NetVisibility Visibility { get { return mVisibility; } set { mVisibility = value; } } /// <summary> /// Serialise the FiledBuilder to XML /// </summary> /// <param name="writer">writer to add XML to</param> internal void SaveToXml(XmlHelper writer) { writer.Writer.WriteStartElement("Field"); writer.Writer.WriteStartAttribute("Name", ""); writer.Writer.WriteString(mFieldName); writer.Writer.WriteEndAttribute(); writer.Writer.WriteStartAttribute("Type", ""); writer.Writer.WriteString(mFieldType); writer.Writer.WriteEndAttribute(); WriteHeaderAttributes(writer); Converter.WriteXml(writer); writer.WriteElement("Visibility", this.Visibility.ToString(), "Public"); writer.WriteElement("FieldHidden", this.FieldHidden); writer.WriteElement("FieldOptional", this.FieldOptional); writer.WriteElement("FieldValueDiscarded", this.FieldValueDiscarded); writer.WriteElement("FieldInNewLine", this.FieldInNewLine); writer.WriteElement("TrimChars", this.TrimChars, " \t"); writer.WriteElement("TrimMode", this.TrimMode.ToString(), "None"); if (FieldNullValue != null) { writer.Writer.WriteStartElement("FieldNullValue"); writer.Writer.WriteStartAttribute("Type", ""); writer.Writer.WriteString(ClassBuilder.TypeToString(mFieldNullValue.GetType())); writer.Writer.WriteEndAttribute(); writer.Writer.WriteString(mFieldNullValue.ToString()); writer.Writer.WriteEndElement(); } WriteExtraElements(writer); writer.Writer.WriteEndElement(); } /// <summary> /// Write any attributes to the first element /// </summary> /// <param name="writer"></param> internal abstract void WriteHeaderAttributes(XmlHelper writer); /// <summary> /// Write any extra fields to the end of the XML /// </summary> /// <param name="writer">Writer to output XML to</param> internal abstract void WriteExtraElements(XmlHelper writer); /// <summary> /// Read the generic XML elements and store in the field details /// </summary> /// <param name="node"></param> internal void ReadField(XmlNode node) { XmlNode ele; ele = node["Visibility"]; if (ele != null) Visibility = (NetVisibility) Enum.Parse(typeof (NetVisibility), ele.InnerText); FieldHidden = node["FieldHidden"] != null || node["FieldNotInFile"] != null || node["FieldIgnored"] != null; 
FieldValueDiscarded = node["FieldValueDiscarded"] != null; FieldOptional = node["FieldOptional"] != null; FieldInNewLine = node["FieldInNewLine"] != null; ele = node["TrimChars"]; if (ele != null) TrimChars = ele.InnerText; ele = node["TrimMode"]; if (ele != null) TrimMode = (TrimMode) Enum.Parse(typeof (TrimMode), ele.InnerText); ele = node["FieldNullValue"]; if (ele != null) FieldNullValue = Convert.ChangeType(ele.InnerText, Type.GetType(ele.Attributes["Type"].InnerText)); ele = node["Converter"]; if (ele != null) Converter.LoadXml(ele); ReadFieldInternal(node); } /// <summary> /// Read field details from the main XML element /// </summary> /// <param name="node">Node to read</param> internal abstract void ReadFieldInternal(XmlNode node); } }
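// --- Illustrative sketch, not part of the FileHelpers sources above -------------------------
// FieldBuilder's constructors are internal; callers normally obtain field builders through a
// ClassBuilder subclass. A minimal sketch of that flow, assuming the public FileHelpers.Dynamic
// API (DelimitedClassBuilder, AddField, LastField, CreateRecordClass) and the core
// FileHelperEngine; the class and field names here are made up for the example.
using System;
using FileHelpers;
using FileHelpers.Dynamic;

internal static class DynamicRecordSketch
{
    public static Type BuildCustomerRecord()
    {
        var cb = new DelimitedClassBuilder("Customer", ",");
        cb.AddField("Name", typeof(string));
        cb.LastField.TrimMode = TrimMode.Both;   // emitted as a [FieldTrim(TrimMode.Both)] attribute by AddAttributesInternal above
        cb.AddField("Age", typeof(int));
        cb.LastField.FieldNullValue = 0;         // emitted as a [FieldNullValue(...)] attribute with the type and string value
        return cb.CreateRecordClass();           // compiles the generated class at runtime
    }
}
// Usage: var engine = new FileHelperEngine(DynamicRecordSketch.BuildCustomerRecord());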
using System; using System.Collections.Generic; using Retrospector.DataStorage.Models; using Retrospector.Search; using Retrospector.Search.Interfaces; using Retrospector.Search.Models; using Retrospector.Tests.Utilities; using Xunit; namespace Retrospector.Tests.Tests.Search { public class LeafExpressionBuilderTests { private ILeafExpressionBuilder _builder; public LeafExpressionBuilderTests() { _builder = new LeafExpressionBuilder(); } [Fact] public void build_handles_null() { var function = _builder.BuildExpression(null); Assert.NotNull(function); } [Fact] public void returns_valid_delegate() { var function = _builder.BuildExpression(new QueryLeaf()); function.Compile().Invoke(new Media(), new Review(), new Factoid()); } [Fact] public void delegate_handles_nulls() { var query = ArrangeQuery(); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, null, null); Assert.False(isMatch); } [Theory] [CombinationData(nameof(MediaAttributes), nameof(GreaterThanComparator), nameof(GreaterThanStringData))] [CombinationData(nameof(MediaAttributes), nameof(LessThanComparator), nameof(LessThanStringData))] [CombinationData(nameof(MediaAttributes), nameof(ContainsComparator), nameof(ContainsStringData))] [CombinationData(nameof(MediaAttributes), nameof(EqualComparator), nameof(EqualStringData))] public void creates_expression_that_filters_out_media( RetrospectorAttribute attribute, Comparator comparator, string actualValue, string searchValue, bool shouldMatch ) { var query = ArrangeQuery(attribute, comparator, searchValue); var media = ArrangeMedia(attribute, actualValue); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(media, null, null); Assert.Equal(shouldMatch, isMatch); } [Theory] [CombinationData(nameof(ReviewAttributes), nameof(GreaterThanComparator), nameof(GreaterThanStringData))] [CombinationData(nameof(ReviewAttributes), nameof(LessThanComparator), nameof(LessThanStringData))] [CombinationData(nameof(ReviewAttributes), nameof(ContainsComparator), nameof(ContainsStringData))] [CombinationData(nameof(ReviewAttributes), nameof(EqualComparator), nameof(EqualStringData))] public void creates_expression_that_filters_out_review( RetrospectorAttribute attribute, Comparator comparator, string actualValue, string searchValue, bool shouldMatch ) { var query = ArrangeQuery(attribute, comparator, searchValue); var review = ArrangeReview(attribute, actualValue); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, review, null); Assert.Equal(shouldMatch, isMatch); } [Theory] [InlineData(Comparator.LessThan, "2021-02-11", "2021-02-10", false)] [InlineData(Comparator.LessThan, "2021-02-11", "2021-02-12", true)] [InlineData(Comparator.GreaterThan, "2021-02-11", "2021-02-12", false)] [InlineData(Comparator.GreaterThan, "2021-02-11", "2021-02-10", true)] [InlineData(Comparator.Equal, "2021-02-11", "2021-02-11", true)] [InlineData(Comparator.Equal, "2021-02-11", "2021-03-11", false)] [InlineData(Comparator.Contains, "2021-02-11", "2021-02-11", true)] [InlineData(Comparator.Contains, "2021-02-11", "2021-03-11", false)] public void creates_expression_that_filters_out_review_based_on_date(Comparator comparator, string actualDate, string searchValue, bool shouldMatch) { var query = ArrangeQuery(RetrospectorAttribute.ReviewDate, comparator, searchValue); var review = new Review { Date = DateTime.Parse(actualDate) }; var function = _builder.BuildExpression(query); var isMatch = 
function.Compile().Invoke(null, review, null); Assert.Equal(shouldMatch, isMatch); } [Theory] [InlineData(Comparator.LessThan)] [InlineData(Comparator.GreaterThan)] [InlineData(Comparator.Equal)] [InlineData(Comparator.Contains)] public void handles_non_integer_when_searching_on_review_date(Comparator comparator) { var query = ArrangeQuery(RetrospectorAttribute.ReviewDate, comparator, "not date"); var review = new Review(); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, review, null); Assert.False(isMatch); } [Theory] [InlineData(Comparator.LessThan, 6, "5", false)] [InlineData(Comparator.LessThan, 5, "6", true)] [InlineData(Comparator.GreaterThan, 5, "6", false)] [InlineData(Comparator.GreaterThan, 6, "5", true)] [InlineData(Comparator.Equal, 5, "5", true)] [InlineData(Comparator.Equal, 5, "6", false)] [InlineData(Comparator.Contains, 5, "5", true)] [InlineData(Comparator.Contains, 5, "6", false)] public void creates_expression_that_filters_out_review_based_on_rating(Comparator comparator, int actualRating, string searchValue, bool shouldMatch) { var query = ArrangeQuery(RetrospectorAttribute.ReviewRating, comparator, searchValue); var review = new Review { Rating = actualRating }; var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, review, null); Assert.Equal(shouldMatch, isMatch); } [Theory] [InlineData(Comparator.LessThan)] [InlineData(Comparator.GreaterThan)] [InlineData(Comparator.Equal)] [InlineData(Comparator.Contains)] public void handles_non_integer_when_searching_on_review_rating(Comparator comparator) { var query = ArrangeQuery(RetrospectorAttribute.ReviewRating, comparator, "not int"); var review = new Review(); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, review, null); Assert.False(isMatch); } [Theory] [CombinationData(nameof(FactoidAttributes), nameof(GreaterThanComparator), nameof(GreaterThanStringData))] [CombinationData(nameof(FactoidAttributes), nameof(LessThanComparator), nameof(LessThanStringData))] [CombinationData(nameof(FactoidAttributes), nameof(ContainsComparator), nameof(ContainsStringData))] [CombinationData(nameof(FactoidAttributes), nameof(EqualComparator), nameof(EqualStringData))] public void creates_expression_that_filters_out_factoid( RetrospectorAttribute attribute, Comparator comparator, string actualValue, string searchValue, bool shouldMatch ) { var query = ArrangeQuery(attribute, comparator, searchValue); var factoid = ArrangeFactoid(attribute, actualValue); var function = _builder.BuildExpression(query); var isMatch = function.Compile().Invoke(null, null, factoid); Assert.Equal(shouldMatch, isMatch); } public static IEnumerable<object[]> MediaAttributes = new[] { new object[] {RetrospectorAttribute.MediaTitle}, new object[] {RetrospectorAttribute.MediaCreator}, new object[] {RetrospectorAttribute.MediaSeason}, new object[] {RetrospectorAttribute.MediaEpisode}, new object[] {RetrospectorAttribute.MediaCategory}, new object[] {RetrospectorAttribute.MediaDescription} }; public static IEnumerable<object[]> ReviewAttributes = new[] { new object[] {RetrospectorAttribute.ReviewContent}, new object[] {RetrospectorAttribute.ReviewUser} }; public static IEnumerable<object[]> FactoidAttributes = new[] { new object[] {RetrospectorAttribute.FactoidTitle}, new object[] {RetrospectorAttribute.FactoidContent} }; public static IEnumerable<object[]> ContainsComparator = new[] { new object[] {Comparator.Contains} }; public static 
IEnumerable<object[]> EqualComparator = new[] { new object[] {Comparator.Equal} }; public static IEnumerable<object[]> GreaterThanComparator = new[] { new object[] {Comparator.GreaterThan} }; public static IEnumerable<object[]> LessThanComparator = new[] { new object[] {Comparator.LessThan} }; public static IEnumerable<object[]> EqualStringData = new[] { new object[] {"Equal", "Equal", true}, new object[] {"EQUAL", "Equal", true}, new object[] {"Equal", "EQUAL", true}, new object[] {"Equal", "Not Equal", false}, new object[] {"Not Equal", "Equal", false} }; public static IEnumerable<object[]> ContainsStringData = new[] { new object[] {"Contains", "Con", true}, new object[] {"CONTAINS", "Con", true}, new object[] {"Contains", "CON", true}, new object[] {"No Contains", "Yogurt", false} }; public static IEnumerable<object[]> LessThanStringData = new[] { new object[] {"A", "B", false}, new object[] {"a", "B", false}, new object[] {"A", "b", false}, new object[] {"B", "A", true}, new object[] {"b", "A", true}, new object[] {"B", "a", true} }; public static IEnumerable<object[]> GreaterThanStringData = new[] { new object[] {"A", "B", true}, new object[] {"a", "B", true}, new object[] {"A", "b", true}, new object[] {"B", "A", false}, new object[] {"b", "A", false}, new object[] {"B", "a", false} }; private QueryLeaf ArrangeQuery( RetrospectorAttribute attribute = RetrospectorAttribute.FactoidContent, Comparator comparator = Comparator.Contains, string value = null ) => new QueryLeaf { Attribute = attribute, Comparator = comparator, SearchValue = value }; private Media ArrangeMedia(RetrospectorAttribute attribute, string value) { var media = new Media(); switch (attribute) { case RetrospectorAttribute.MediaTitle: media.Title = value; break; case RetrospectorAttribute.MediaCreator: media.Creator = value; break; case RetrospectorAttribute.MediaSeason: media.SeasonId = value; break; case RetrospectorAttribute.MediaEpisode: media.EpisodeId = value; break; case RetrospectorAttribute.MediaCategory: media.Category = value; break; case RetrospectorAttribute.MediaDescription: media.Description = value; break; } return media; } private Review ArrangeReview(RetrospectorAttribute attribute, string value) { var review = new Review(); switch (attribute) { case RetrospectorAttribute.ReviewUser: review.User = value; break; case RetrospectorAttribute.ReviewContent: review.Content = value; break; } return review; } private Factoid ArrangeFactoid(RetrospectorAttribute attribute, string value) { var factoid = new Factoid(); switch (attribute) { case RetrospectorAttribute.FactoidTitle: factoid.Title = value; break; case RetrospectorAttribute.FactoidContent: factoid.Content = value; break; } return factoid; } } }
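// --- Illustrative sketch, not Retrospector's actual implementation --------------------------
// The tests above compile an Expression<Func<Media, Review, Factoid, bool>> produced by
// LeafExpressionBuilder and assert, among other things, that string comparison is
// case-insensitive. A minimal sketch of the underlying technique for the Contains comparator,
// built directly with System.Linq.Expressions over a single string for brevity:
using System;
using System.Linq.Expressions;

internal static class LeafExpressionSketch
{
    // Builds: actual => actual != null && actual.IndexOf(search, StringComparison.OrdinalIgnoreCase) >= 0
    public static Expression<Func<string, bool>> ContainsIgnoreCase(string search)
    {
        var actual = Expression.Parameter(typeof(string), "actual");
        var notNull = Expression.NotEqual(actual, Expression.Constant(null, typeof(string)));
        var indexOf = Expression.Call(
            actual,
            typeof(string).GetMethod("IndexOf", new[] { typeof(string), typeof(StringComparison) }),
            Expression.Constant(search),
            Expression.Constant(StringComparison.OrdinalIgnoreCase));
        var found = Expression.GreaterThanOrEqual(indexOf, Expression.Constant(0));
        return Expression.Lambda<Func<string, bool>>(Expression.AndAlso(notNull, found), actual);
    }
}
// e.g. ContainsIgnoreCase("Con").Compile()("CONTAINS") returns true, matching the
// ContainsStringData rows above.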
//------------------------------------------------------------------------------ // <copyright file="DefaultTraceListener.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ #define DEBUG #define TRACE namespace System.Diagnostics { using System; using System.IO; using System.Text; using System.Collections; using System.Reflection; using System.Runtime.InteropServices; using System.Security.Permissions; using System.Security; using Microsoft.Win32; using System.Globalization; using System.Runtime.Versioning; using Runtime.CompilerServices; /// <devdoc> /// <para>Provides /// the default output methods and behavior for tracing.</para> /// </devdoc> [HostProtection(Synchronization=true)] public class DefaultTraceListener : TraceListener { //////bool assertUIEnabled; string logFileName; bool settingsInitialized; const int internalWriteSize = 16384; /// <devdoc> /// <para>Initializes a new instance of the <see cref='System.Diagnostics.DefaultTraceListener'/> class with /// Default as its <see cref='System.Diagnostics.TraceListener.Name'/>.</para> /// </devdoc> public DefaultTraceListener() : base("Default") { } ///////// <devdoc> ///////// <para>[To be supplied.]</para> ///////// </devdoc> //////public bool AssertUiEnabled { ////// get { ////// if (!settingsInitialized) InitializeSettings(); ////// return assertUIEnabled; ////// } ////// set { ////// if (!settingsInitialized) InitializeSettings(); ////// assertUIEnabled = value; ////// } //////} /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public string LogFileName { //////[ResourceExposure(ResourceScope.Machine)] //////[ResourceConsumption(ResourceScope.Machine)] get { if (!settingsInitialized) InitializeSettings(); return logFileName; } //////[ResourceExposure(ResourceScope.Machine)] //////[ResourceConsumption(ResourceScope.Machine)] set { if (!settingsInitialized) InitializeSettings(); logFileName = value; } } /// <devdoc> /// <para> /// Emits or displays a message /// and a stack trace for an assertion that /// always fails. /// </para> /// </devdoc> public override void Fail(string message) { Fail(message, null); } /// <devdoc> /// <para> /// Emits or displays messages and a stack trace for an assertion that /// always fails. /// </para> /// </devdoc> public override void Fail(string message, string detailMessage) { //////StackTrace stack = new StackTrace(0, true); //////int userStackFrameIndex = 0; string stackTrace; //////bool uiPermission = UiPermission; //////try { ////// stackTrace = stack.ToString(); //////} //////catch { ////// stackTrace = ""; //////} stackTrace = ""; WriteAssert(stackTrace, message, detailMessage); //////if (AssertUiEnabled && uiPermission) { ////// AssertWrapper.ShowAssert(stackTrace, stack.GetFrame(userStackFrameIndex), message, detailMessage); //////} } //////[ResourceExposure(ResourceScope.None)] //////[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] private void InitializeSettings() { // don't use the property setters here to avoid infinite recursion. 
//////assertUIEnabled = DiagnosticsConfiguration.AssertUIEnabled; //////logFileName = DiagnosticsConfiguration.LogFileName; settingsInitialized = true; } private void WriteAssert(string stackTrace, string message, string detailMessage) { //////string assertMessage = SR.GetString(SR.DebugAssertBanner) + Environment.NewLine ////// + SR.GetString(SR.DebugAssertShortMessage) + Environment.NewLine ////// + message + Environment.NewLine ////// + SR.GetString(SR.DebugAssertLongMessage) + Environment.NewLine + ////// detailMessage + Environment.NewLine ////// + stackTrace; string assertMessage = "Assert!" + Environment.NewLine + "Message:" + Environment.NewLine + message + Environment.NewLine + "Details:" + Environment.NewLine + detailMessage + Environment.NewLine + stackTrace; WriteLine( assertMessage); } //////[ResourceExposure(ResourceScope.None)] //////[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] private void WriteToLogFile( string message, bool useWriteLine ) { try { FileInfo file = new FileInfo(LogFileName); using(Stream stream = file.Open( FileMode.OpenOrCreate )) { using(StreamWriter writer = new StreamWriter( stream )) { stream.Position = stream.Length; if(useWriteLine) writer.WriteLine( message ); else writer.Write( message ); } } } catch /*(Exception e)*/ { //////WriteLine(SR.GetString(SR.ExceptionOccurred, LogFileName, e.ToString()), false); } } /// <devdoc> /// <para> /// Writes the output to the OutputDebugString /// API and /// to System.Diagnostics.Debugger.Log. /// </para> /// </devdoc> public override void Write(string message) { Write(message, true); } //////[ResourceExposure(ResourceScope.None)] //////[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] private void Write(string message, bool useLogFile) { if (NeedIndent) WriteIndent(); // really huge messages mess up both VS and dbmon, so we chop it up into // reasonable chunks if it's too big if (message == null || message.Length <= internalWriteSize) { internalWrite(message); } else { int offset; for (offset = 0; offset < message.Length - internalWriteSize; offset += internalWriteSize) { internalWrite(message.Substring(offset, internalWriteSize)); } internalWrite(message.Substring(offset)); } //////if (useLogFile && LogFileName.Length != 0) ////// WriteToLogFile(message, false); } void internalWrite(string message) { //////if (Debugger.IsLogging()) { ////// Debugger.Log(0, null, message); //////} else { if (message == null) /*SafeNativeMethods.*/OutputDebugString(String.Empty); else /*SafeNativeMethods.*/OutputDebugString(message); //////} } [MethodImpl( MethodImplOptions.InternalCall )] public static extern void OutputDebugString(String message); /// <devdoc> /// <para> /// Writes the output to the OutputDebugString /// API and to System.Diagnostics.Debugger.Log /// followed by a line terminator. /// </para> /// </devdoc> public override void WriteLine(string message) { WriteLine(message, true); } private void WriteLine(string message, bool useLogFile) { if (NeedIndent) WriteIndent(); // I do the concat here to make sure it goes as one call to the output. // we would save a stringbuilder operation by calling Write twice. Write(message + Environment.NewLine, useLogFile); NeedIndent = true; } ///////// <devdoc> ///////// It returns true if the current permission set allows an assert dialog to be displayed. 
///////// </devdoc> //////private static bool UiPermission { ////// get { ////// bool uiPermission = false; ////// try { ////// new UIPermission(UIPermissionWindow.SafeSubWindows).Demand(); ////// uiPermission = true; ////// } ////// catch { ////// } ////// return uiPermission; ////// } //////} } }
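// --- Illustrative sketch, not part of the sources above -------------------------------------
// A minimal example of how the standard System.Diagnostics facade reaches the listener defined
// above, assuming the usual Trace class with its default "Default" listener registration:
// Trace.WriteLine and Trace.Fail funnel into Write/WriteLine/Fail, which chop long messages
// into 16 KB chunks (internalWriteSize) before calling OutputDebugString. In this trimmed
// build the LogFileName file output is commented out, unlike the full framework implementation.
using System.Diagnostics;

internal static class DefaultListenerUsageSketch
{
    public static void Emit()
    {
        var listener = (DefaultTraceListener)Trace.Listeners["Default"];
        listener.IndentLevel = 1;                         // indentation is applied via NeedIndent/WriteIndent
        Trace.WriteLine("startup complete");              // -> WriteLine -> Write -> OutputDebugString
        Trace.Fail("unexpected state", "detail message"); // -> Fail -> WriteAssert -> WriteLine
    }
}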
namespace android.media { [global::MonoJavaBridge.JavaClass()] public partial class MediaRecorder : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static MediaRecorder() { InitJNI(); } protected MediaRecorder(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } [global::MonoJavaBridge.JavaClass()] public sealed partial class AudioEncoder : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static AudioEncoder() { InitJNI(); } internal AudioEncoder(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } public static int DEFAULT { get { return 0; } } public static int AMR_NB { get { return 1; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.AudioEncoder.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$AudioEncoder")); } } [global::MonoJavaBridge.JavaClass()] public sealed partial class AudioSource : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static AudioSource() { InitJNI(); } internal AudioSource(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } public static int DEFAULT { get { return 0; } } public static int MIC { get { return 1; } } public static int VOICE_UPLINK { get { return 2; } } public static int VOICE_DOWNLINK { get { return 3; } } public static int VOICE_CALL { get { return 4; } } public static int CAMCORDER { get { return 5; } } public static int VOICE_RECOGNITION { get { return 6; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.AudioSource.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$AudioSource")); } } [global::MonoJavaBridge.JavaInterface(typeof(global::android.media.MediaRecorder.OnErrorListener_))] public interface OnErrorListener : global::MonoJavaBridge.IJavaObject { void onError(android.media.MediaRecorder arg0, int arg1, int arg2); } [global::MonoJavaBridge.JavaProxy(typeof(global::android.media.MediaRecorder.OnErrorListener))] public sealed partial class OnErrorListener_ : java.lang.Object, OnErrorListener { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static OnErrorListener_() { InitJNI(); } internal OnErrorListener_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } internal static global::MonoJavaBridge.MethodId _onError5024; void android.media.MediaRecorder.OnErrorListener.onError(android.media.MediaRecorder arg0, int arg1, int arg2) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.OnErrorListener_._onError5024, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.OnErrorListener_.staticClass, global::android.media.MediaRecorder.OnErrorListener_._onError5024, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; 
global::android.media.MediaRecorder.OnErrorListener_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$OnErrorListener")); global::android.media.MediaRecorder.OnErrorListener_._onError5024 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.OnErrorListener_.staticClass, "onError", "(Landroid/media/MediaRecorder;II)V"); } } [global::MonoJavaBridge.JavaInterface(typeof(global::android.media.MediaRecorder.OnInfoListener_))] public interface OnInfoListener : global::MonoJavaBridge.IJavaObject { void onInfo(android.media.MediaRecorder arg0, int arg1, int arg2); } [global::MonoJavaBridge.JavaProxy(typeof(global::android.media.MediaRecorder.OnInfoListener))] public sealed partial class OnInfoListener_ : java.lang.Object, OnInfoListener { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static OnInfoListener_() { InitJNI(); } internal OnInfoListener_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } internal static global::MonoJavaBridge.MethodId _onInfo5025; void android.media.MediaRecorder.OnInfoListener.onInfo(android.media.MediaRecorder arg0, int arg1, int arg2) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.OnInfoListener_._onInfo5025, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.OnInfoListener_.staticClass, global::android.media.MediaRecorder.OnInfoListener_._onInfo5025, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.OnInfoListener_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$OnInfoListener")); global::android.media.MediaRecorder.OnInfoListener_._onInfo5025 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.OnInfoListener_.staticClass, "onInfo", "(Landroid/media/MediaRecorder;II)V"); } } [global::MonoJavaBridge.JavaClass()] public sealed partial class OutputFormat : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static OutputFormat() { InitJNI(); } internal OutputFormat(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } public static int DEFAULT { get { return 0; } } public static int THREE_GPP { get { return 1; } } public static int MPEG_4 { get { return 2; } } public static int RAW_AMR { get { return 3; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.OutputFormat.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$OutputFormat")); } } [global::MonoJavaBridge.JavaClass()] public sealed partial class VideoEncoder : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static VideoEncoder() { InitJNI(); } internal VideoEncoder(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } public static int DEFAULT { get { return 0; } } public static int H263 { get { return 1; } } public static int H264 { get { return 2; } } public static int MPEG_4_SP { get { 
return 3; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.VideoEncoder.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$VideoEncoder")); } } [global::MonoJavaBridge.JavaClass()] public sealed partial class VideoSource : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static VideoSource() { InitJNI(); } internal VideoSource(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } public static int DEFAULT { get { return 0; } } public static int CAMERA { get { return 1; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.VideoSource.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder$VideoSource")); } } internal static global::MonoJavaBridge.MethodId _finalize5026; protected override void finalize() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._finalize5026); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._finalize5026); } internal static global::MonoJavaBridge.MethodId _start5027; public virtual void start() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._start5027); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._start5027); } internal static global::MonoJavaBridge.MethodId _stop5028; public virtual void stop() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._stop5028); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._stop5028); } internal static global::MonoJavaBridge.MethodId _reset5029; public virtual void reset() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._reset5029); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._reset5029); } internal static global::MonoJavaBridge.MethodId _release5030; public virtual void release() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._release5030); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._release5030); } internal static global::MonoJavaBridge.MethodId _setOutputFormat5031; public virtual void setOutputFormat(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setOutputFormat5031, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, 
global::android.media.MediaRecorder._setOutputFormat5031, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _prepare5032; public virtual void prepare() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._prepare5032); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._prepare5032); } internal static global::MonoJavaBridge.MethodId _setPreviewDisplay5033; public virtual void setPreviewDisplay(android.view.Surface arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setPreviewDisplay5033, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setPreviewDisplay5033, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setOnErrorListener5034; public virtual void setOnErrorListener(android.media.MediaRecorder.OnErrorListener arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setOnErrorListener5034, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setOnErrorListener5034, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setOnInfoListener5035; public virtual void setOnInfoListener(android.media.MediaRecorder.OnInfoListener arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setOnInfoListener5035, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setOnInfoListener5035, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setCamera5036; public virtual void setCamera(android.hardware.Camera arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setCamera5036, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setCamera5036, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setAudioSource5037; public virtual void setAudioSource(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setAudioSource5037, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setAudioSource5037, 
global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _getAudioSourceMax5038; public static int getAudioSourceMax() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; return @__env.CallStaticIntMethod(android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._getAudioSourceMax5038); } internal static global::MonoJavaBridge.MethodId _setVideoSource5039; public virtual void setVideoSource(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setVideoSource5039, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setVideoSource5039, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setProfile5040; public virtual void setProfile(android.media.CamcorderProfile arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setProfile5040, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setProfile5040, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setVideoSize5041; public virtual void setVideoSize(int arg0, int arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setVideoSize5041, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setVideoSize5041, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _setVideoFrameRate5042; public virtual void setVideoFrameRate(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setVideoFrameRate5042, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setVideoFrameRate5042, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setMaxDuration5043; public virtual void setMaxDuration(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setMaxDuration5043, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setMaxDuration5043, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setMaxFileSize5044; public virtual void setMaxFileSize(long 
arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setMaxFileSize5044, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setMaxFileSize5044, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setAudioEncoder5045; public virtual void setAudioEncoder(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setAudioEncoder5045, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setAudioEncoder5045, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setVideoEncoder5046; public virtual void setVideoEncoder(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setVideoEncoder5046, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setVideoEncoder5046, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setAudioSamplingRate5047; public virtual void setAudioSamplingRate(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setAudioSamplingRate5047, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setAudioSamplingRate5047, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setAudioChannels5048; public virtual void setAudioChannels(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setAudioChannels5048, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setAudioChannels5048, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setAudioEncodingBitRate5049; public virtual void setAudioEncodingBitRate(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setAudioEncodingBitRate5049, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setAudioEncodingBitRate5049, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setVideoEncodingBitRate5050; 
public virtual void setVideoEncodingBitRate(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setVideoEncodingBitRate5050, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setVideoEncodingBitRate5050, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setOutputFile5051; public virtual void setOutputFile(java.io.FileDescriptor arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setOutputFile5051, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setOutputFile5051, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setOutputFile5052; public virtual void setOutputFile(java.lang.String arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.MediaRecorder._setOutputFile5052, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._setOutputFile5052, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _getMaxAmplitude5053; public virtual int getMaxAmplitude() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallIntMethod(this.JvmHandle, global::android.media.MediaRecorder._getMaxAmplitude5053); else return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._getMaxAmplitude5053); } internal static global::MonoJavaBridge.MethodId _MediaRecorder5054; public MediaRecorder() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.media.MediaRecorder.staticClass, global::android.media.MediaRecorder._MediaRecorder5054); Init(@__env, handle); } public static int MEDIA_RECORDER_ERROR_UNKNOWN { get { return 1; } } public static int MEDIA_RECORDER_INFO_UNKNOWN { get { return 1; } } public static int MEDIA_RECORDER_INFO_MAX_DURATION_REACHED { get { return 800; } } public static int MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED { get { return 801; } } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.MediaRecorder.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/MediaRecorder")); global::android.media.MediaRecorder._finalize5026 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "finalize", "()V"); global::android.media.MediaRecorder._start5027 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "start", "()V"); global::android.media.MediaRecorder._stop5028 = 
@__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "stop", "()V"); global::android.media.MediaRecorder._reset5029 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "reset", "()V"); global::android.media.MediaRecorder._release5030 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "release", "()V"); global::android.media.MediaRecorder._setOutputFormat5031 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setOutputFormat", "(I)V"); global::android.media.MediaRecorder._prepare5032 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "prepare", "()V"); global::android.media.MediaRecorder._setPreviewDisplay5033 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setPreviewDisplay", "(Landroid/view/Surface;)V"); global::android.media.MediaRecorder._setOnErrorListener5034 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setOnErrorListener", "(Landroid/media/MediaRecorder$OnErrorListener;)V"); global::android.media.MediaRecorder._setOnInfoListener5035 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setOnInfoListener", "(Landroid/media/MediaRecorder$OnInfoListener;)V"); global::android.media.MediaRecorder._setCamera5036 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setCamera", "(Landroid/hardware/Camera;)V"); global::android.media.MediaRecorder._setAudioSource5037 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setAudioSource", "(I)V"); global::android.media.MediaRecorder._getAudioSourceMax5038 = @__env.GetStaticMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "getAudioSourceMax", "()I"); global::android.media.MediaRecorder._setVideoSource5039 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setVideoSource", "(I)V"); global::android.media.MediaRecorder._setProfile5040 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setProfile", "(Landroid/media/CamcorderProfile;)V"); global::android.media.MediaRecorder._setVideoSize5041 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setVideoSize", "(II)V"); global::android.media.MediaRecorder._setVideoFrameRate5042 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setVideoFrameRate", "(I)V"); global::android.media.MediaRecorder._setMaxDuration5043 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setMaxDuration", "(I)V"); global::android.media.MediaRecorder._setMaxFileSize5044 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setMaxFileSize", "(J)V"); global::android.media.MediaRecorder._setAudioEncoder5045 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setAudioEncoder", "(I)V"); global::android.media.MediaRecorder._setVideoEncoder5046 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setVideoEncoder", "(I)V"); global::android.media.MediaRecorder._setAudioSamplingRate5047 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setAudioSamplingRate", "(I)V"); global::android.media.MediaRecorder._setAudioChannels5048 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setAudioChannels", "(I)V"); global::android.media.MediaRecorder._setAudioEncodingBitRate5049 = 
@__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setAudioEncodingBitRate", "(I)V"); global::android.media.MediaRecorder._setVideoEncodingBitRate5050 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setVideoEncodingBitRate", "(I)V"); global::android.media.MediaRecorder._setOutputFile5051 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setOutputFile", "(Ljava/io/FileDescriptor;)V"); global::android.media.MediaRecorder._setOutputFile5052 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "setOutputFile", "(Ljava/lang/String;)V"); global::android.media.MediaRecorder._getMaxAmplitude5053 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "getMaxAmplitude", "()I"); global::android.media.MediaRecorder._MediaRecorder5054 = @__env.GetMethodIDNoThrow(global::android.media.MediaRecorder.staticClass, "<init>", "()V"); } } }
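// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated binding above). It shows how the
// MediaRecorder proxy might be driven through MonoJavaBridge once the host has
// initialised the bridge/JVM. The numeric arguments stand in for Android SDK
// constants (e.g. AudioSource.MIC, OutputFormat.THREE_GPP, AudioEncoder.AMR_NB)
// and are assumptions to verify; prepare/start/stop/release and the remaining
// setters are the generated proxies whose method ids are registered in InitJNI.
// ---------------------------------------------------------------------------
namespace MediaRecorderSamples
{
    public static class AudioCaptureSample
    {
        public static void RecordClip(java.lang.String outputPath)
        {
            var recorder = new android.media.MediaRecorder();
            try
            {
                recorder.setAudioSource(1);      // assumed AudioSource.MIC
                recorder.setOutputFormat(1);     // assumed OutputFormat.THREE_GPP
                recorder.setAudioEncoder(1);     // assumed AudioEncoder.AMR_NB
                recorder.setMaxDuration(10000);  // stop automatically after 10 s
                recorder.setOutputFile(outputPath);
                recorder.prepare();
                recorder.start();
                // ... record until the caller decides to stop ...
                recorder.stop();
            }
            finally
            {
                recorder.release();              // always release the native recorder
            }
        }
    }
}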
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Reflection; using System.Diagnostics.Contracts; using System.IO; using System.Runtime.Versioning; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Security; using System.Threading; namespace System.Runtime.Loader { public abstract class AssemblyLoadContext { [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern bool CanUseAppPathAssemblyLoadContextInCurrentDomain(); [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern IntPtr InitializeAssemblyLoadContext(IntPtr ptrAssemblyLoadContext, bool fRepresentsTPALoadContext); [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern IntPtr LoadFromStream(IntPtr ptrNativeAssemblyLoadContext, IntPtr ptrAssemblyArray, int iAssemblyArrayLen, IntPtr ptrSymbols, int iSymbolArrayLen, ObjectHandleOnStack retAssembly); [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] internal static extern void InternalSetProfileRoot(string directoryPath); [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] internal static extern void InternalStartProfile(string profile, IntPtr ptrNativeAssemblyLoadContext); protected AssemblyLoadContext() { // Initialize the ALC representing non-TPA LoadContext InitializeLoadContext(false); } internal AssemblyLoadContext(bool fRepresentsTPALoadContext) { // Initialize the ALC representing TPA LoadContext InitializeLoadContext(fRepresentsTPALoadContext); } private void InitializeLoadContext(bool fRepresentsTPALoadContext) { // Initialize the VM side of AssemblyLoadContext if not already done. GCHandle gchALC = GCHandle.Alloc(this); IntPtr ptrALC = GCHandle.ToIntPtr(gchALC); m_pNativeAssemblyLoadContext = InitializeAssemblyLoadContext(ptrALC, fRepresentsTPALoadContext); // Initialize event handlers to be null by default Resolving = null; Unloading = null; // Since unloading an AssemblyLoadContext is not yet implemented, this is a temporary solution to raise the // Unloading event on process exit. Register for the current AppDomain's ProcessExit event, and the handler will in // turn raise the Unloading event. AppContext.Unloading += OnAppContextUnloading; } [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern void LoadFromPath(IntPtr ptrNativeAssemblyLoadContext, string ilPath, string niPath, ObjectHandleOnStack retAssembly); [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern void GetLoadedAssembliesInternal(ObjectHandleOnStack assemblies); public static Assembly[] GetLoadedAssemblies() { Assembly[] assemblies = null; GetLoadedAssembliesInternal(JitHelpers.GetObjectHandleOnStack(ref assemblies)); return assemblies; } // These are helpers that can be used by AssemblyLoadContext derivations. // They are used to load assemblies in DefaultContext. 
public Assembly LoadFromAssemblyPath(string assemblyPath) { if (assemblyPath == null) { throw new ArgumentNullException(nameof(assemblyPath)); } if (PathInternal.IsPartiallyQualified(assemblyPath)) { throw new ArgumentException(Environment.GetResourceString("Argument_AbsolutePathRequired"), nameof(assemblyPath)); } RuntimeAssembly loadedAssembly = null; LoadFromPath(m_pNativeAssemblyLoadContext, assemblyPath, null, JitHelpers.GetObjectHandleOnStack(ref loadedAssembly)); return loadedAssembly; } public Assembly LoadFromNativeImagePath(string nativeImagePath, string assemblyPath) { if (nativeImagePath == null) { throw new ArgumentNullException(nameof(nativeImagePath)); } if (PathInternal.IsPartiallyQualified(nativeImagePath)) { throw new ArgumentException(Environment.GetResourceString("Argument_AbsolutePathRequired"), nameof(nativeImagePath)); } if (assemblyPath != null && PathInternal.IsPartiallyQualified(assemblyPath)) { throw new ArgumentException(Environment.GetResourceString("Argument_AbsolutePathRequired"), nameof(assemblyPath)); } // Basic validation has succeeded - lets try to load the NI image. // Ask LoadFile to load the specified assembly in the DefaultContext RuntimeAssembly loadedAssembly = null; LoadFromPath(m_pNativeAssemblyLoadContext, assemblyPath, nativeImagePath, JitHelpers.GetObjectHandleOnStack(ref loadedAssembly)); return loadedAssembly; } public Assembly LoadFromStream(Stream assembly) { return LoadFromStream(assembly, null); } public Assembly LoadFromStream(Stream assembly, Stream assemblySymbols) { if (assembly == null) { throw new ArgumentNullException(nameof(assembly)); } int iAssemblyStreamLength = (int)assembly.Length; int iSymbolLength = 0; // Allocate the byte[] to hold the assembly byte[] arrAssembly = new byte[iAssemblyStreamLength]; // Copy the assembly to the byte array assembly.Read(arrAssembly, 0, iAssemblyStreamLength); // Get the symbol stream in byte[] if provided byte[] arrSymbols = null; if (assemblySymbols != null) { iSymbolLength = (int)assemblySymbols.Length; arrSymbols = new byte[iSymbolLength]; assemblySymbols.Read(arrSymbols, 0, iSymbolLength); } RuntimeAssembly loadedAssembly = null; unsafe { fixed (byte* ptrAssembly = arrAssembly, ptrSymbols = arrSymbols) { LoadFromStream(m_pNativeAssemblyLoadContext, new IntPtr(ptrAssembly), iAssemblyStreamLength, new IntPtr(ptrSymbols), iSymbolLength, JitHelpers.GetObjectHandleOnStack(ref loadedAssembly)); } } return loadedAssembly; } // Custom AssemblyLoadContext implementations can override this // method to perform custom processing and use one of the protected // helpers above to load the assembly. protected abstract Assembly Load(AssemblyName assemblyName); // This method is invoked by the VM when using the host-provided assembly load context // implementation. private static Assembly Resolve(IntPtr gchManagedAssemblyLoadContext, AssemblyName assemblyName) { AssemblyLoadContext context = (AssemblyLoadContext)(GCHandle.FromIntPtr(gchManagedAssemblyLoadContext).Target); return context.ResolveUsingLoad(assemblyName); } // This method is invoked by the VM to resolve an assembly reference using the Resolving event // after trying assembly resolution via Load override and TPA load context without success. 
private static Assembly ResolveUsingResolvingEvent(IntPtr gchManagedAssemblyLoadContext, AssemblyName assemblyName) { AssemblyLoadContext context = (AssemblyLoadContext)(GCHandle.FromIntPtr(gchManagedAssemblyLoadContext).Target); // Invoke the AssemblyResolve event callbacks if wired up return context.ResolveUsingEvent(assemblyName); } private Assembly GetFirstResolvedAssembly(AssemblyName assemblyName) { Assembly resolvedAssembly = null; Func<AssemblyLoadContext, AssemblyName, Assembly> assemblyResolveHandler = Resolving; if (assemblyResolveHandler != null) { // Loop through the event subscribers and return the first non-null Assembly instance Delegate[] arrSubscribers = assemblyResolveHandler.GetInvocationList(); for (int i = 0; i < arrSubscribers.Length; i++) { resolvedAssembly = ((Func<AssemblyLoadContext, AssemblyName, Assembly>)arrSubscribers[i])(this, assemblyName); if (resolvedAssembly != null) { break; } } } return resolvedAssembly; } private Assembly ValidateAssemblyNameWithSimpleName(Assembly assembly, string requestedSimpleName) { // Get the name of the loaded assembly string loadedSimpleName = null; // Derived type's Load implementation is expected to use one of the LoadFrom* methods to get the assembly // which is a RuntimeAssembly instance. However, since Assembly type can be used build any other artifact (e.g. AssemblyBuilder), // we need to check for RuntimeAssembly. RuntimeAssembly rtLoadedAssembly = assembly as RuntimeAssembly; if (rtLoadedAssembly != null) { loadedSimpleName = rtLoadedAssembly.GetSimpleName(); } // The simple names should match at the very least if (String.IsNullOrEmpty(loadedSimpleName) || (!requestedSimpleName.Equals(loadedSimpleName, StringComparison.InvariantCultureIgnoreCase))) throw new InvalidOperationException(Environment.GetResourceString("Argument_CustomAssemblyLoadContextRequestedNameMismatch")); return assembly; } private Assembly ResolveUsingLoad(AssemblyName assemblyName) { string simpleName = assemblyName.Name; Assembly assembly = Load(assemblyName); if (assembly != null) { assembly = ValidateAssemblyNameWithSimpleName(assembly, simpleName); } return assembly; } private Assembly ResolveUsingEvent(AssemblyName assemblyName) { string simpleName = assemblyName.Name; // Invoke the AssemblyResolve event callbacks if wired up Assembly assembly = GetFirstResolvedAssembly(assemblyName); if (assembly != null) { assembly = ValidateAssemblyNameWithSimpleName(assembly, simpleName); } // Since attempt to resolve the assembly via Resolving event is the last option, // throw an exception if we do not find any assembly. if (assembly == null) { throw new FileNotFoundException(Environment.GetResourceString("IO.FileLoad"), simpleName); } return assembly; } public Assembly LoadFromAssemblyName(AssemblyName assemblyName) { // Attempt to load the assembly, using the same ordering as static load, in the current load context. Assembly loadedAssembly = Assembly.Load(assemblyName, m_pNativeAssemblyLoadContext); return loadedAssembly; } [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern IntPtr InternalLoadUnmanagedDllFromPath(string unmanagedDllPath); // This method provides a way for overriders of LoadUnmanagedDll() to load an unmanaged DLL from a specific path in a // platform-independent way. The DLL is loaded with default load flags. 
protected IntPtr LoadUnmanagedDllFromPath(string unmanagedDllPath) { if (unmanagedDllPath == null) { throw new ArgumentNullException(nameof(unmanagedDllPath)); } if (unmanagedDllPath.Length == 0) { throw new ArgumentException(Environment.GetResourceString("Argument_EmptyPath"), nameof(unmanagedDllPath)); } if (PathInternal.IsPartiallyQualified(unmanagedDllPath)) { throw new ArgumentException(Environment.GetResourceString("Argument_AbsolutePathRequired"), nameof(unmanagedDllPath)); } return InternalLoadUnmanagedDllFromPath(unmanagedDllPath); } // Custom AssemblyLoadContext implementations can override this // method to perform the load of unmanaged native dll // This function needs to return the HMODULE of the dll it loads protected virtual IntPtr LoadUnmanagedDll(String unmanagedDllName) { //defer to default coreclr policy of loading unmanaged dll return IntPtr.Zero; } // This method is invoked by the VM when using the host-provided assembly load context // implementation. private static IntPtr ResolveUnmanagedDll(String unmanagedDllName, IntPtr gchManagedAssemblyLoadContext) { AssemblyLoadContext context = (AssemblyLoadContext)(GCHandle.FromIntPtr(gchManagedAssemblyLoadContext).Target); return context.LoadUnmanagedDll(unmanagedDllName); } public static AssemblyLoadContext Default { get { if (s_DefaultAssemblyLoadContext == null) { // Try to initialize the default assembly load context with apppath one if we are allowed to if (AssemblyLoadContext.CanUseAppPathAssemblyLoadContextInCurrentDomain()) { // Synchronize access to initializing Default ALC lock (s_initLock) { if (s_DefaultAssemblyLoadContext == null) { s_DefaultAssemblyLoadContext = new AppPathAssemblyLoadContext(); } } } } return s_DefaultAssemblyLoadContext; } } // This call opens and closes the file, but does not add the // assembly to the domain. [MethodImplAttribute(MethodImplOptions.InternalCall)] static internal extern AssemblyName nGetFileInformation(String s); // Helper to return AssemblyName corresponding to the path of an IL assembly public static AssemblyName GetAssemblyName(string assemblyPath) { if (assemblyPath == null) { throw new ArgumentNullException(nameof(assemblyPath)); } string fullPath = Path.GetFullPath(assemblyPath); return nGetFileInformation(fullPath); } [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] private static extern IntPtr GetLoadContextForAssembly(RuntimeAssembly assembly); // Returns the load context in which the specified assembly has been loaded public static AssemblyLoadContext GetLoadContext(Assembly assembly) { if (assembly == null) { throw new ArgumentNullException(nameof(assembly)); } AssemblyLoadContext loadContextForAssembly = null; RuntimeAssembly rtAsm = assembly as RuntimeAssembly; // We only support looking up load context for runtime assemblies. if (rtAsm != null) { IntPtr ptrAssemblyLoadContext = GetLoadContextForAssembly(rtAsm); if (ptrAssemblyLoadContext == IntPtr.Zero) { // If the load context is returned null, then the assembly was bound using the TPA binder // and we shall return reference to the active "Default" binder - which could be the TPA binder // or an overridden CLRPrivBinderAssemblyLoadContext instance. loadContextForAssembly = AssemblyLoadContext.Default; } else { loadContextForAssembly = (AssemblyLoadContext)(GCHandle.FromIntPtr(ptrAssemblyLoadContext).Target); } } return loadContextForAssembly; } // Set the root directory path for profile optimization. 
public void SetProfileOptimizationRoot(string directoryPath) { InternalSetProfileRoot(directoryPath); } // Start profile optimization for the specified profile name. public void StartProfileOptimization(string profile) { InternalStartProfile(profile, m_pNativeAssemblyLoadContext); } private void OnAppContextUnloading(object sender, EventArgs e) { var unloading = Unloading; if (unloading != null) { unloading(this); } } public event Func<AssemblyLoadContext, AssemblyName, Assembly> Resolving; public event Action<AssemblyLoadContext> Unloading; // Contains the reference to VM's representation of the AssemblyLoadContext private IntPtr m_pNativeAssemblyLoadContext; // Each AppDomain contains the reference to its AssemblyLoadContext instance, if one is // specified by the host. By having the field as a static, we are // making it an AppDomain-wide field. private static volatile AssemblyLoadContext s_DefaultAssemblyLoadContext; // Synchronization primitive for controlling initialization of Default load context private static readonly object s_initLock = new Object(); // Occurs when an Assembly is loaded public static event AssemblyLoadEventHandler AssemblyLoad { add { AppDomain.CurrentDomain.AssemblyLoad += value; } remove { AppDomain.CurrentDomain.AssemblyLoad -= value; } } // Occurs when resolution of type fails public static event ResolveEventHandler TypeResolve { add { AppDomain.CurrentDomain.TypeResolve += value; } remove { AppDomain.CurrentDomain.TypeResolve -= value; } } // Occurs when resolution of resource fails public static event ResolveEventHandler ResourceResolve { add { AppDomain.CurrentDomain.ResourceResolve += value; } remove { AppDomain.CurrentDomain.ResourceResolve -= value; } } // Occurs when resolution of assembly fails // This event is fired after resolve events of AssemblyLoadContext fails public static event ResolveEventHandler AssemblyResolve { add { AppDomain.CurrentDomain.AssemblyResolve += value; } remove { AppDomain.CurrentDomain.AssemblyResolve -= value; } } } internal class AppPathAssemblyLoadContext : AssemblyLoadContext { internal AppPathAssemblyLoadContext() : base(true) { } protected override Assembly Load(AssemblyName assemblyName) { // We were loading an assembly into TPA ALC that was not found on TPA list. As a result we are here. // Returning null will result in the AssemblyResolve event subscribers to be invoked to help resolve the assembly. return null; } } internal class IndividualAssemblyLoadContext : AssemblyLoadContext { internal IndividualAssemblyLoadContext() : base(false) { } protected override Assembly Load(AssemblyName assemblyName) { return null; } } }
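// ---------------------------------------------------------------------------
// Usage sketch (not part of the runtime source above): a minimal custom
// AssemblyLoadContext that probes a single directory, assuming a plug-in
// folder layout. Returning null from Load() defers to the Default context and,
// failing that, the Resolving event, as described in the comments above.
// ---------------------------------------------------------------------------
using System.IO;
using System.Reflection;
using System.Runtime.Loader;

internal sealed class DirectoryLoadContext : AssemblyLoadContext
{
    private readonly string _baseDirectory;

    public DirectoryLoadContext(string baseDirectory)
    {
        _baseDirectory = baseDirectory;
    }

    protected override Assembly Load(AssemblyName assemblyName)
    {
        // Probe <baseDirectory>\<SimpleName>.dll; fall back to default resolution
        // (TPA list / Resolving event) when the file is not there.
        string candidate = Path.Combine(_baseDirectory, assemblyName.Name + ".dll");
        return File.Exists(candidate) ? LoadFromAssemblyPath(candidate) : null;
    }
}

// Example (hypothetical paths):
//   var alc = new DirectoryLoadContext(@"C:\plugins");
//   Assembly plugin = alc.LoadFromAssemblyPath(@"C:\plugins\MyPlugin.dll");
//   // AssemblyLoadContext.GetLoadContext(plugin) now returns alc.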
using GalaSoft.MvvmLight; using GalaSoft.MvvmLight.CommandWpf; using MahApps.Metro.Controls.Dialogs; using pistha.universal.lettercounter.core; using pistha.universal.lettercounter.core.Models; using System; using System.Collections.Generic; using System.Linq; namespace pistha.universal.lettercounter.desktop.ViewModel { /// <summary> /// This class contains properties that the main View can data bind to. /// <para> /// Use the <strong>mvvminpc</strong> snippet to add bindable properties to this ViewModel. /// </para> /// <para> /// You can also use Blend to data bind with the tool's support. /// </para> /// <para> /// See http://www.galasoft.ch/mvvm /// </para> /// </summary> public class MainViewModel : ViewModelBase { private TextModel _model; public String Text { get { return this._model == null ? String.Empty : this._model.Text; } set { this.ResetModel(value, this.IgnoreCase); } } public Int32 TextLength { get { return this._model == null ? 0 : this._model.TextLength; } //set //{ // if (this._model != null) // { // this._model.TextLength = value; // this.RaisePropertyChanged("TextLength"); // } //} } public Dictionary<Char, Int32> LettersCount { get { return this._model == null ? null : this._model.LettersCount; } } public Int32 WordsCount { get { return this._model == null ? 0 : this._model.WordsCount; } //set //{ // if (this._model != null) // { // this._model.WordsCount = value; // this.RaisePropertyChanged("WordsCount"); // } //} } public Int32 SentencesCount { get { return this._model == null ? 0 : this._model.SentencesCount; } } public Boolean AreNextTabsEnabled { get { return this._model != null && !String.IsNullOrEmpty(this._model.Text); } } public String JsonFormat { get { if (this.AreNextTabsEnabled) { return this._model != null ? this._model.JsonFormat : String.Empty; } else return String.Empty; } } public String InlineFormat { get { if (this.AreNextTabsEnabled) { return this._model != null ? this._model.InlineFormat : String.Empty; } else return String.Empty; } } public String TextFilePath { get { if (this._model != null) { return this._model.TextFilePath; } else return String.Empty; } } public Boolean IgnoreCase { get { if (this._model != null) return this._model.IgnoreCase; else return true; } set { this._model.IgnoreCase = value; this.ResetModel(this.Text, value); //this.RaisePropertyChanged("IgnoreCase"); //this.RaisePropertyChanged("Text"); } } /// <summary> /// Initializes a new instance of the MainViewModel class. 
/// </summary> public MainViewModel() { this.OpenTextFileButtonCommand = new RelayCommand(this.DoOpenTextFileButtonCommand); this.JsonToClipboardCommand = new RelayCommand(this.DoJsonToClipboardCommand); this.InlineToClipboardCommand = new RelayCommand(this.DoInlineToClipboardCommand); } private void ResetModel(String text, Boolean ignoreCase) { this.ResetModel(text, ignoreCase, String.Empty); } private void ResetModel(String text, Boolean ignoreCase, String path) { if (this._model != null) this._model.Dispose(); this._model = LetterCounterCore.Instance.GetModelByString(text, ignoreCase); this._model.TextFilePath = path; this.RiseAllPropertyChange(); } //private void ResetModelByPath(String path, Boolean ignoreCase) //{ // if (this._model != null) // this._model.Dispose(); // this._model = LetterCounterCore.Instance.GetModelFromFile(path, ignoreCase); // this.RiseAllPropertyChange(); //} private void RiseAllPropertyChange() { this.RaisePropertyChanged(nameof(this.Text)); this.RaisePropertyChanged(nameof(this.TextLength)); this.RaisePropertyChanged(nameof(this.LettersCount)); this.RaisePropertyChanged(nameof(this.WordsCount)); this.RaisePropertyChanged(nameof(this.SentencesCount)); this.RaisePropertyChanged(nameof(this.AreNextTabsEnabled)); this.RaisePropertyChanged(nameof(this.JsonFormat)); this.RaisePropertyChanged(nameof(this.InlineFormat)); this.RaisePropertyChanged(nameof(this.IgnoreCase)); this.RaisePropertyChanged(nameof(this.TextFilePath)); } private void DoInlineToClipboardCommand() { try { if (!String.IsNullOrEmpty(this.InlineFormat)) { System.Windows.Clipboard.SetText(this.InlineFormat); } } catch (Exception ex) { //log this App.Instance.Logger.Error("Cannot set text to clipboard", ex); App.Instance.MetroWindowShowMessage(this, new Events.MessageDialogEventArgs("Error", ex.Message, MessageDialogStyle.Affirmative)); } } private void DoJsonToClipboardCommand() { try { if (!String.IsNullOrEmpty(this.JsonFormat)) { System.Windows.Clipboard.SetText(this.JsonFormat); } } catch (Exception ex) { //log this App.Instance.Logger.Error("Cannot set text to clipboard", ex); App.Instance.MetroWindowShowMessage(this, new Events.MessageDialogEventArgs("Error", ex.Message, MessageDialogStyle.Affirmative)); } } private void DoOpenTextFileButtonCommand() { try { Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".txt"; dlg.Filter = String.Format("Text document ({0})|*{1}", dlg.DefaultExt, dlg.DefaultExt); Boolean? result = dlg.ShowDialog(); if (result.HasValue && result.Value == true) { String filename = dlg.FileName; App.Instance.Logger.Info("Try to load text file: " + filename); String text = System.IO.File.ReadAllText(filename); this.ResetModel(text, this.IgnoreCase, filename); } } catch (Exception ex) { //log this App.Instance.Logger.Error("Cannot load text file", ex); App.Instance.MetroWindowShowMessage(this, new Events.MessageDialogEventArgs("Error", ex.Message, MessageDialogStyle.Affirmative)); } } public RelayCommand OpenTextFileButtonCommand { get; private set; } public RelayCommand JsonToClipboardCommand { get; private set; } public RelayCommand InlineToClipboardCommand { get; private set; } } }
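// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the view model above): how a view or a
// quick smoke test might drive MainViewModel. It assumes LetterCounterCore
// is available in the process; the class and method names below are examples.
// ---------------------------------------------------------------------------
using System;
using pistha.universal.lettercounter.desktop.ViewModel;

internal static class MainViewModelSmokeTest
{
    public static void Run()
    {
        var vm = new MainViewModel();

        // Setting Text rebuilds the underlying TextModel via ResetModel(...)
        // and raises PropertyChanged for every derived property.
        vm.Text = "Hello world. Hello again!";

        Console.WriteLine("Length:    " + vm.TextLength);
        Console.WriteLine("Words:     " + vm.WordsCount);
        Console.WriteLine("Sentences: " + vm.SentencesCount);
        Console.WriteLine("Inline:    " + vm.InlineFormat);

        // Toggling IgnoreCase recounts the letters with the new setting.
        vm.IgnoreCase = false;
        Console.WriteLine("Distinct letters (case sensitive): " + vm.LettersCount.Count);
    }
}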
using System; using System.Linq; using System.IO; using System.Text; using System.Collections; using System.Collections.Generic; using System.Runtime.Serialization; using Newtonsoft.Json; namespace IO.Swagger.Model { /// <summary> /// Unique deed, consisting of property, borrower and charge information as well as clauses for the deed. /// </summary> [DataContract] public class OperativeDeedDeed : IEquatable<OperativeDeedDeed> { /// <summary> /// Initializes a new instance of the <see cref="OperativeDeedDeed" /> class. /// </summary> public OperativeDeedDeed() { } /// <summary> /// Unique Land Registry identifier for the registered estate. /// </summary> /// <value>Unique Land Registry identifier for the registered estate.</value> [DataMember(Name="title_number", EmitDefaultValue=false)] public string TitleNumber { get; set; } /// <summary> /// Gets or Sets Lender /// </summary> [DataMember(Name="lender", EmitDefaultValue=false)] public Lender Lender { get; set; } /// <summary> /// The address of property that the deed relates. This should be supplied in a comma separated format e.g. 30 wakefield rd, plymouth, PL6 3WA /// </summary> /// <value>The address of property that the deed relates. This should be supplied in a comma separated format e.g. 30 wakefield rd, plymouth, PL6 3WA</value> [DataMember(Name="property_address", EmitDefaultValue=false)] public string PropertyAddress { get; set; } /// <summary> /// Gets or Sets AdditionalProvisions /// </summary> [DataMember(Name="additional_provisions", EmitDefaultValue=false)] public AdditionalProvisions AdditionalProvisions { get; set; } /// <summary> /// Land Registry assigned number for a Mortgage Deed (MD). If you wish to use an existing MD reference please prefix it with e- to comply with our system (eg e-MD12345) /// </summary> /// <value>Land Registry assigned number for a Mortgage Deed (MD). 
If you wish to use an existing MD reference please prefix it with e- to comply with our system (eg e-MD12345)</value> [DataMember(Name="md_ref", EmitDefaultValue=false)] public string MdRef { get; set; } /// <summary> /// Gets or Sets Borrowers /// </summary> [DataMember(Name="borrowers", EmitDefaultValue=false)] public Borrowers Borrowers { get; set; } /// <summary> /// Gets or Sets ChargeClause /// </summary> [DataMember(Name="charge_clause", EmitDefaultValue=false)] public ChargeClause ChargeClause { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class OperativeDeedDeed {\n"); sb.Append(" TitleNumber: ").Append(TitleNumber).Append("\n"); sb.Append(" Lender: ").Append(Lender).Append("\n"); sb.Append(" PropertyAddress: ").Append(PropertyAddress).Append("\n"); sb.Append(" AdditionalProvisions: ").Append(AdditionalProvisions).Append("\n"); sb.Append(" MdRef: ").Append(MdRef).Append("\n"); sb.Append(" Borrowers: ").Append(Borrowers).Append("\n"); sb.Append(" ChargeClause: ").Append(ChargeClause).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="obj">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object obj) { // credit: http://stackoverflow.com/a/10454552/677735 return this.Equals(obj as OperativeDeedDeed); } /// <summary> /// Returns true if OperativeDeedDeed instances are equal /// </summary> /// <param name="other">Instance of OperativeDeedDeed to be compared</param> /// <returns>Boolean</returns> public bool Equals(OperativeDeedDeed other) { // credit: http://stackoverflow.com/a/10454552/677735 if (other == null) return false; return ( this.TitleNumber == other.TitleNumber || this.TitleNumber != null && this.TitleNumber.Equals(other.TitleNumber) ) && ( this.Lender == other.Lender || this.Lender != null && this.Lender.Equals(other.Lender) ) && ( this.PropertyAddress == other.PropertyAddress || this.PropertyAddress != null && this.PropertyAddress.Equals(other.PropertyAddress) ) && ( this.AdditionalProvisions == other.AdditionalProvisions || this.AdditionalProvisions != null && this.AdditionalProvisions.Equals(other.AdditionalProvisions) ) && ( this.MdRef == other.MdRef || this.MdRef != null && this.MdRef.Equals(other.MdRef) ) && ( this.Borrowers == other.Borrowers || this.Borrowers != null && this.Borrowers.Equals(other.Borrowers) ) && ( this.ChargeClause == other.ChargeClause || this.ChargeClause != null && this.ChargeClause.Equals(other.ChargeClause) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { // credit: http://stackoverflow.com/a/263416/677735 unchecked // Overflow is fine, just wrap { int hash = 41; // Suitable nullity checks etc, of course :) if (this.TitleNumber != null) hash = hash * 57 + this.TitleNumber.GetHashCode(); if (this.Lender != null) hash = hash * 57 + this.Lender.GetHashCode(); if (this.PropertyAddress != null) hash = hash * 57 + this.PropertyAddress.GetHashCode(); if (this.AdditionalProvisions != null) hash = hash * 57 + 
this.AdditionalProvisions.GetHashCode(); if (this.MdRef != null) hash = hash * 57 + this.MdRef.GetHashCode(); if (this.Borrowers != null) hash = hash * 57 + this.Borrowers.GetHashCode(); if (this.ChargeClause != null) hash = hash * 57 + this.ChargeClause.GetHashCode(); return hash; } } } }
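// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated model above): building a deed payload
// and round-tripping it through JSON. The property values are placeholders;
// nested members (Lender, Borrowers, ChargeClause, ...) are left null here.
// ---------------------------------------------------------------------------
using IO.Swagger.Model;
using Newtonsoft.Json;

internal static class OperativeDeedDeedExample
{
    public static void Run()
    {
        var deed = new OperativeDeedDeed
        {
            TitleNumber = "TT12345",                                // placeholder title number
            PropertyAddress = "30 wakefield rd, plymouth, PL6 3WA", // comma separated, as documented
            MdRef = "e-MD12345"                                     // e- prefixed MD reference
        };

        // Serialise with the model's own helper (indented JSON honouring DataMember names).
        string json = deed.ToJson();

        // Deserialise and compare using the strongly typed Equals override.
        var roundTripped = JsonConvert.DeserializeObject<OperativeDeedDeed>(json);
        bool same = deed.Equals(roundTripped);                      // expected: true
    }
}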
//----------------------------------------------------------------------- // <copyright file="Api.cs" company="Google LLC"> // Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> //----------------------------------------------------------------------- namespace Google.XR.Cardboard { using System; using System.Collections; using System.Collections.Generic; using System.Runtime.InteropServices; using UnityEngine; /// <summary> /// Cardboard XR Plugin API. /// </summary> public static class Api { private static int _deviceParamsCount = -1; private static Rect _cachedSafeArea; private static ScreenOrientation _cachedScreenOrientation; /// <summary> /// Whether a trigger touch started. /// </summary> private static bool _touchStarted = false; /// <summary> /// Tracks when the trigger touch sequence begins. /// </summary> private static double _startTouchStamp = 0.0; /// <summary> /// See MinTriggerHeldPressedTime property. /// </summary> private static double _minTriggerHeldPressedTime = 3.0; /// <summary> /// Gets a value indicating whether the close button is pressed this frame. /// </summary> public static bool IsCloseButtonPressed { get { if (!XRLoader._isStarted || Input.touchCount == 0) { return false; } Touch touch = Input.GetTouch(0); Vector2Int touchPosition = Vector2Int.RoundToInt(touch.position); return touch.phase == TouchPhase.Began && Widget.CloseButtonRect.Contains(touchPosition); } } /// <summary> /// Gets a value indicating whether the gear button is pressed this frame. /// </summary> public static bool IsGearButtonPressed { get { if (!XRLoader._isStarted || Input.touchCount == 0) { return false; } Touch touch = Input.GetTouch(0); Vector2Int touchPosition = Vector2Int.RoundToInt(touch.position); return touch.phase == TouchPhase.Began && Widget.GearButtonRect.Contains(touchPosition); } } /// <summary> /// Gets a value indicating whether the Cardboard trigger button is pressed this frame. /// </summary> public static bool IsTriggerPressed { get { if (!XRLoader._isStarted || Input.touchCount == 0) { return false; } Touch touch = Input.GetTouch(0); Vector2Int touchPosition = Vector2Int.RoundToInt(touch.position); return touch.phase == TouchPhase.Began && !Widget.CloseButtonRect.Contains(touchPosition) && !Widget.GearButtonRect.Contains(touchPosition); } } /// <summary> /// Gets or sets the amount of time the trigger must be held active to be held pressed. /// </summary> public static double MinTriggerHeldPressedTime { get { return _minTriggerHeldPressedTime; } set { if (value <= 0.0) { Debug.LogError( "[CardboardApi] Trying to set a negative value to " + "MinTriggerHeldPressedTime."); } else { _minTriggerHeldPressedTime = value; } } } /// <summary> /// Gets a value indicating whether Cardboard trigger button is held pressed. 
/// </summary> public static bool IsTriggerHeldPressed { get { if (!XRLoader._isStarted || Input.touchCount == 0) { return false; } Touch touch = Input.GetTouch(0); Vector2Int touchPosition = Vector2Int.RoundToInt(touch.position); bool retVal = false; if (touch.phase == TouchPhase.Began && !Widget.CloseButtonRect.Contains(touchPosition) && !Widget.GearButtonRect.Contains(touchPosition)) { _startTouchStamp = Time.time; _touchStarted = true; } else if (touch.phase == TouchPhase.Ended && _touchStarted) { if ((Time.time - _startTouchStamp) > MinTriggerHeldPressedTime) { retVal = true; } _touchStarted = false; } else if (touch.phase == TouchPhase.Moved || touch.phase == TouchPhase.Canceled) { // Any other phase to the touch sequence would cause to reset the time count, // except Stationary which means the touch remains in the same position. _touchStarted = false; } return retVal; } } /// <summary> /// Evaluates whether or not device params are saved in the storage. /// </summary> /// /// <returns>Whether or not device parameters are found.</returns> public static bool HasDeviceParams() { if (!XRLoader._isInitialized) { return false; } IntPtr encodedDeviceParams; int size; CardboardQrCode_getSavedDeviceParams(out encodedDeviceParams, out size); if (size == 0) { Debug.Log("[CardboardApi] No device params found."); return false; } Debug.Log("[CardboardApi] Device params found."); CardboardQrCode_destroy(encodedDeviceParams); _deviceParamsCount = CardboardQrCode_getDeviceParamsChangedCount(); return true; } /// <summary> /// Starts QR Code scanning activity. /// </summary> public static void ScanDeviceParams() { if (!XRLoader._isInitialized) { return; } _deviceParamsCount = CardboardQrCode_getDeviceParamsChangedCount(); Debug.Log("[CardboardApi] QR Code scanning activity launched."); CardboardQrCode_scanQrCodeAndSaveDeviceParams(); } /// <summary> /// Saves the encoded device parameters provided by an URI. /// /// Expected URI format for: /// - Cardboard Viewer v1: https://g.co/cardboard /// - Cardboard Viewer v2: https://google.com/cardboard/cfd?p=deviceParams (for example, /// https://google.com/cardboard/cfg?p=CgZHb29nbGUSEkNhcmRib2FyZCBJL08gMjAxNR0rGBU9JQHegj0qEAAASEIAAEhCAABIQgAASEJYADUpXA89OggeZnc-Ej6aPlAAYAM). /// /// Redirection is also supported up to a maximum of 5 possible redirects before reaching /// the proper pattern. /// /// Only URIs using HTTPS protocol are supported. /// </summary> /// /// <param name="uri"> /// The URI string. See above for supported formats. /// </param> public static void SaveDeviceParams(string uri) { if (!XRLoader._isInitialized) { Debug.LogError( "Please initialize Cardboard XR loader before calling this function."); return; } IntPtr rawUri = Marshal.StringToHGlobalAuto(uri); CardboardQrCode_saveDeviceParams(rawUri, uri.Length); Marshal.FreeHGlobal(rawUri); } /// <summary> /// Evaluates if device parameters changed from last time they were reloaded. /// </summary> /// /// <returns>true when device parameters changed.</returns> public static bool HasNewDeviceParams() { // TODO(b/156501367): Move this logic to the XR display provider. if (!XRLoader._isInitialized || _deviceParamsCount == -1) { return false; } return _deviceParamsCount != CardboardQrCode_getDeviceParamsChangedCount(); } /// <summary> /// Enables device parameter reconfiguration on next frame update. /// </summary> public static void ReloadDeviceParams() { if (!XRLoader._isInitialized) { return; } // TODO(b/156501367): Move this logic to the XR display provider. 
Debug.Log("[CardboardApi] Reload device parameters."); _deviceParamsCount = CardboardQrCode_getDeviceParamsChangedCount(); CardboardUnity_setDeviceParametersChanged(); } /// <summary> /// Updates screen parameters. This method must be called at framerate to ensure the current /// screen orientation is properly taken into account by the head tracker. This method also /// ensures that the safe area size is properly set on iOS devices (see note below). /// /// Note: The safe area size check is a workaround for /// <a href=https://fogbugz.unity3d.com/default.asp?1288515_t9gqdh39urj13div>Issue #1288515</a> /// in Unity. /// </summary> public static void UpdateScreenParams() { if (!XRLoader._isInitialized) { Debug.LogError( "Please initialize Cardboard XR loader before calling this function."); return; } // Only set viewport orientation if it has changed since the last check. if (_cachedScreenOrientation != Screen.orientation) { _cachedScreenOrientation = Screen.orientation; XRLoader.SetViewportOrientation(_cachedScreenOrientation); } // TODO(b/171702321): Remove this block once the safe area size can be properly // fetched by the XRLoader. // Only recalculate rectangles if safe area size has changed since last check. if (_cachedSafeArea != Screen.safeArea) { _cachedSafeArea = Screen.safeArea; XRLoader.RecalculateRectangles(_cachedSafeArea); ReloadDeviceParams(); } } /// <summary> /// Recenters the head tracker. /// </summary> public static void Recenter() { if (!XRLoader._isInitialized) { Debug.LogError( "Please initialize Cardboard XR loader before calling this function."); return; } CardboardUnity_recenterHeadTracker(); } [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardQrCode_scanQrCodeAndSaveDeviceParams(); [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardQrCode_saveDeviceParams( IntPtr uri, int size); [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardQrCode_getSavedDeviceParams( out IntPtr encodedDeviceParams, out int size); [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardQrCode_destroy(IntPtr encodedDeviceParams); [DllImport(ApiConstants.CardboardApi)] private static extern int CardboardQrCode_getDeviceParamsChangedCount(); [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardUnity_setDeviceParametersChanged(); [DllImport(ApiConstants.CardboardApi)] private static extern void CardboardUnity_recenterHeadTracker(); } }
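// ---------------------------------------------------------------------------
// Usage sketch (not part of the plugin source above): a minimal MonoBehaviour
// that polls the Api class once per frame, mirroring the calls documented in
// the comments above. The component name and the reactions chosen for each
// button are illustrative only.
// ---------------------------------------------------------------------------
namespace Google.XR.Cardboard.Samples
{
    using UnityEngine;

    public class CardboardInputPoller : MonoBehaviour
    {
        public void Update()
        {
            // Keep orientation / safe-area data current (must run at framerate).
            Api.UpdateScreenParams();

            // Apply lens parameters after a successful QR scan.
            if (Api.HasNewDeviceParams())
            {
                Api.ReloadDeviceParams();
            }

            if (Api.IsGearButtonPressed)
            {
                Api.ScanDeviceParams();   // launch the QR code scanning activity
            }

            if (Api.IsTriggerPressed)
            {
                Api.Recenter();           // example reaction to the Cardboard trigger
            }

            if (Api.IsCloseButtonPressed)
            {
                Application.Quit();       // leave the VR experience
            }
        }
    }
}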
using System; using System.Data; using System.Configuration; using System.Collections; using System.Web; using System.Web.Security; using System.Web.UI; using System.Web.UI.WebControls; using System.Web.UI.WebControls.WebParts; using System.Web.UI.HtmlControls; public partial class Backoffice_Referensi_Spesialis_List : System.Web.UI.Page { public int NoKe = 0; protected string dsReportSessionName = "dsListRefSpesialis"; protected void Page_Load(object sender, EventArgs e) { if (!Page.IsPostBack) { if (Session["SIMRS.UserId"] == null) { Response.Redirect(Request.ApplicationPath + "/Backoffice/login.aspx"); } int UserId = (int)Session["SIMRS.UserId"]; if (Session["SpesialisManagement"] == null) { Response.Redirect(Request.ApplicationPath + "/Backoffice/UnAuthorize.aspx"); } else { btnNew.Text = "<img alt=\"New\" src=\"" + Request.ApplicationPath + "/images/new_f2.gif\" align=\"middle\" border=\"0\" name=\"new\" value=\"new\">" + Resources.GetString("Referensi", "AddSpesialis"); } btnSearch.Text = Resources.GetString("", "Search"); ImageButtonFirst.ImageUrl = Request.ApplicationPath + "/images/navigator/nbFirst.gif"; ImageButtonPrev.ImageUrl = Request.ApplicationPath + "/images/navigator/nbPrevpage.gif"; ImageButtonNext.ImageUrl = Request.ApplicationPath + "/images/navigator/nbNextpage.gif"; ImageButtonLast.ImageUrl = Request.ApplicationPath + "/images/navigator/nbLast.gif"; UpdateDataView(true); } } #region .Update View Data ////////////////////////////////////////////////////////////////////// // PhysicalDataRead // ------------------------------------------------------------------ /// <summary> /// This function is responsible for loading data from database. /// </summary> /// <returns>DataSet</returns> public DataSet PhysicalDataRead() { // Local variables DataSet oDS = new DataSet(); // Get Data SIMRS.DataAccess.RS_Spesialis myObj = new SIMRS.DataAccess.RS_Spesialis(); DataTable myData = myObj.SelectAll(); oDS.Tables.Add(myData); return oDS; } /// <summary> /// This function is responsible for binding data to Datagrid. /// </summary> /// <param name="dv"></param> private void BindData(DataView dv) { // Sets the sorting order dv.Sort = DataGridList.Attributes["SortField"]; if (DataGridList.Attributes["SortAscending"] == "no") dv.Sort += " DESC"; if (dv.Count > 0) { DataGridList.ShowFooter = false; int intRowCount = dv.Count; int intPageSaze = DataGridList.PageSize; int intPageCount = intRowCount / intPageSaze; if (intRowCount - (intPageCount * intPageSaze) > 0) intPageCount = intPageCount + 1; if (DataGridList.CurrentPageIndex >= intPageCount) DataGridList.CurrentPageIndex = intPageCount - 1; } else { DataGridList.ShowFooter = true; DataGridList.CurrentPageIndex = 0; } // Re-binds the grid NoKe = DataGridList.PageSize * DataGridList.CurrentPageIndex; DataGridList.DataSource = dv; DataGridList.DataBind(); int CurrentPage = DataGridList.CurrentPageIndex + 1; lblCurrentPage.Text = CurrentPage.ToString(); lblTotalPage.Text = DataGridList.PageCount.ToString(); lblTotalRecord.Text = dv.Count.ToString(); } /// <summary> /// This function is responsible for loading data from database and store to Session. /// </summary> /// <param name="strDataSessionName"></param> public void DataFromSourceToMemory(String strDataSessionName) { // Gets rows from the data source DataSet oDS = PhysicalDataRead(); // Stores it in the session cache Session[strDataSessionName] = oDS; } /// <summary> /// This function is responsible for update data view from datagrid. 
/// </summary> /// <param name="requery">true = get data from database, false= get data from session</param> public void UpdateDataView(bool requery) { // Retrieves the data if ((Session[dsReportSessionName] == null) || (requery)) { if (Request.QueryString["CurrentPage"] != null && Request.QueryString["CurrentPage"].ToString() != "") DataGridList.CurrentPageIndex = int.Parse(Request.QueryString["CurrentPage"].ToString()); DataFromSourceToMemory(dsReportSessionName); } DataSet ds = (DataSet)Session[dsReportSessionName]; BindData(ds.Tables[0].DefaultView); } public void UpdateDataView() { // Retrieves the data if ((Session[dsReportSessionName] == null)) { DataFromSourceToMemory(dsReportSessionName); } DataSet ds = (DataSet)Session[dsReportSessionName]; BindData(ds.Tables[0].DefaultView); } #endregion #region .Event DataGridList ////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////// // HANDLERs // ////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////// /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a new page. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void PageChanged(Object sender, DataGridPageChangedEventArgs e) { DataGridList.CurrentPageIndex = e.NewPageIndex; DataGridList.SelectedIndex = -1; UpdateDataView(); } /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a new page. /// </summary> /// <param name="sender"></param> /// <param name="nPageIndex"></param> public void GoToPage(Object sender, int nPageIndex) { DataGridPageChangedEventArgs evPage; evPage = new DataGridPageChangedEventArgs(sender, nPageIndex); PageChanged(sender, evPage); } /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a first page. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void GoToFirst(Object sender, ImageClickEventArgs e) { GoToPage(sender, 0); } /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a previous page. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void GoToPrev(Object sender, ImageClickEventArgs e) { if (DataGridList.CurrentPageIndex > 0) { GoToPage(sender, DataGridList.CurrentPageIndex - 1); } else { GoToPage(sender, 0); } } /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a next page. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void GoToNext(Object sender, System.Web.UI.ImageClickEventArgs e) { if (DataGridList.CurrentPageIndex < (DataGridList.PageCount - 1)) { GoToPage(sender, DataGridList.CurrentPageIndex + 1); } } /// <summary> /// This function is responsible for loading the content of the new /// page when you click on the pager to move to a last page. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void GoToLast(Object sender, ImageClickEventArgs e) { GoToPage(sender, DataGridList.PageCount - 1); } /// <summary> /// This function is invoked when you click on a column's header to /// sort by that. 
It just saves the current sort field name and /// refreshes the grid. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void SortByColumn(Object sender, DataGridSortCommandEventArgs e) { String strSortBy = DataGridList.Attributes["SortField"]; String strSortAscending = DataGridList.Attributes["SortAscending"]; // Sets the new sorting field DataGridList.Attributes["SortField"] = e.SortExpression; // Sets the order (defaults to ascending). If you click on the // already-sorted column, the order is reversed. DataGridList.Attributes["SortAscending"] = "yes"; if (e.SortExpression == strSortBy) DataGridList.Attributes["SortAscending"] = (strSortAscending == "yes" ? "no" : "yes"); // Refreshes the view OnClearSelection(null, null); UpdateDataView(); }
/// <summary> /// The function gets invoked when a new item is being created in /// the datagrid. This applies to pager, header, footer, regular /// and alternating items. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void PageItemCreated(Object sender, DataGridItemEventArgs e) { // Get the newly created item ListItemType itemType = e.Item.ItemType; ////////////////////////////////////////////////////////// // Is it the HEADER? if (itemType == ListItemType.Header) { for (int i = 0; i < DataGridList.Columns.Count; i++) { // Redraw the header of the sorted column if (DataGridList.Attributes["SortField"] == DataGridList.Columns[i].SortExpression) { ////////////////////////////////////////////// // Should be much easier this way: // ------------------------------------------ // TableCell cell = e.Item.Cells[i]; // Label lblSorted = new Label(); // lblSorted.Font = "webdings"; // lblSorted.Text = strOrder; // cell.Controls.Add(lblSorted); // // but it seems it doesn't work <g> ////////////////////////////////////////////// // Add a non-clickable triangle to mean desc or asc. // The </a> ensures that what follows is non-clickable TableCell cell = e.Item.Cells[i]; LinkButton lb = (LinkButton)cell.Controls[0]; //lb.Text += "</a>&nbsp;<span style=font-family:webdings;>" + GetOrderSymbol() + "</span>"; lb.Text += "</a>&nbsp;<img src=" + Request.ApplicationPath + "/images/icons/" + GetOrderSymbol() + " >"; } } } ////////////////////////////////////////////////////////// // Is it the PAGER? if (itemType == ListItemType.Pager) { // There's just one control in the list... TableCell pager = (TableCell)e.Item.Controls[0]; // Enumerates all the items in the pager... for (int i = 0; i < pager.Controls.Count; i += 2) { // It can be either a Label or a Link button try { Label l = (Label)pager.Controls[i]; l.Text = "Hal " + l.Text; l.CssClass = "CurrentPage"; } catch { LinkButton h = (LinkButton)pager.Controls[i]; h.Text = "[ " + h.Text + " ]"; h.CssClass = "HotLink"; } } } }
/// <summary> /// Checks whether the current sort is ascending or descending and /// returns the matching arrow image file name. /// </summary> /// <returns></returns> private String GetOrderSymbol() { bool bDescending = (bool)(DataGridList.Attributes["SortAscending"] == "no"); //return (bDescending ? " 6" : " 5"); return (bDescending ? "downbr.gif" : "upbr.gif"); }
/// <summary> /// When clicked, clears the current selection, if any. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void OnClearSelection(Object sender, EventArgs e) { DataGridList.SelectedIndex = -1; } #endregion #region .Event Button
/// <summary> /// When clicked, redirects to the Add form to insert a new record into the database. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void OnNewRecord(Object sender, EventArgs e) { string CurrentPage = DataGridList.CurrentPageIndex.ToString(); Response.Redirect("Add.aspx?CurrentPage=" + CurrentPage); }
/// <summary> /// When clicked, filters the data by the selected column and search text. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> public void OnSearch(Object sender, System.EventArgs e) { if ((Session[dsReportSessionName] == null)) { DataFromSourceToMemory(dsReportSessionName); } DataSet ds = (DataSet)Session[dsReportSessionName]; DataView dv = ds.Tables[0].DefaultView; // NOTE: the filters below splice the raw search text into the RowFilter; values containing ', %, * or [ should be escaped (e.g. ' -> '') before being used here. if (cmbFilterBy.Items[cmbFilterBy.SelectedIndex].Value == "Kode") dv.RowFilter = " Kode LIKE '%" + txtSearch.Text + "%'"; else if (cmbFilterBy.Items[cmbFilterBy.SelectedIndex].Value == "Nama") dv.RowFilter = " Nama LIKE '%" + txtSearch.Text + "%'"; else if (cmbFilterBy.Items[cmbFilterBy.SelectedIndex].Value == "Keterangan") dv.RowFilter = " Keterangan LIKE '%" + txtSearch.Text + "%'"; else dv.RowFilter = ""; BindData(dv); } #endregion #region .Update Link Item Button
/// <summary> /// Builds the Edit/Delete link markup for a row. /// </summary> /// <param name="Id"></param> /// <param name="Nama"></param> /// <param name="CurrentPage"></param> /// <returns></returns> public string GetLinkButton(string Id, string Nama, string CurrentPage) { string szResult = ""; if (Session["SpesialisManagement"] != null) { szResult += "<a class=\"toolbar\" href=\"Edit.aspx?CurrentPage=" + CurrentPage + "&Id=" + Id + "\" "; szResult += ">" + Resources.GetString("", "Edit") + "</a>"; szResult += "<a class=\"toolbar\" href=\"Delete.aspx?CurrentPage=" + CurrentPage + "&Id=" + Id + "\" "; szResult += ">" + Resources.GetString("", "Delete") + "</a>"; } return szResult; } #endregion }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using Xunit; namespace System.Linq.Parallel.Tests { public class AggregateTests { private const int ResultFuncModifier = 17; public static IEnumerable<object[]> AggregateExceptionData(int[] counts) { foreach (object[] results in UnorderedSources.Ranges(counts.Cast<int>())) { Labeled<ParallelQuery<int>> query = (Labeled<ParallelQuery<int>>)results[0]; if (query.ToString().StartsWith("Partitioner")) { yield return new object[] { Labeled.Label(query.ToString(), Partitioner.Create(UnorderedSources.GetRangeArray(0, (int)results[1]), false).AsParallel()), results[1] }; } else if (query.ToString().StartsWith("Enumerable.Range")) { yield return new object[] { Labeled.Label(query.ToString(), new StrictPartitioner<int>(Partitioner.Create(Enumerable.Range(0, (int)results[1]), EnumerablePartitionerOptions.None), (int)results[1]).AsParallel()), results[1] }; } else { yield return results; } } } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; if (count == 0) { Assert.Throws<InvalidOperationException>(() => query.Aggregate((x, y) => x + y)); } else { // The operation will overflow for long-running sizes, but that's okay: // The helper is overflowing too! Assert.Equal(Functions.SumRange(0, count), query.Aggregate((x, y) => x + y)); } } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 1024 * 1024, 1024 * 1024 * 4 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Sum(labeled, count); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Seed(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; Assert.Equal(Functions.SumRange(0, count), query.Aggregate(0, (x, y) => x + y)); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 1024 * 1024, 1024 * 1024 * 4 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Seed_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Sum_Seed(labeled, count); } [Theory] [MemberData("Ranges", 1, new int[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Seed(Labeled<ParallelQuery<int>> labeled, int count, int start) { ParallelQuery<int> query = labeled.Item; // The operation will overflow for long-running sizes, but that's okay: // The helper is overflowing too! 
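// Note: the 1L seed makes the accumulator an Int64, and the ProductRange helper is
// expected to accumulate in the same width, so both sides of the assertion wrap the
// same way when the product overflows.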
Assert.Equal(Functions.ProductRange(start, count), query.Aggregate(1L, (x, y) => x * y)); } [Theory] [OuterLoop] [MemberData("Ranges", 1, new int[] { 1024 * 1024, 1024 * 1024 * 4 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Seed_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, int start) { Aggregate_Product_Seed(labeled, count, start); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Seed(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; Assert.Equal(Enumerable.Range(0, count), query.Aggregate((IList<int>)new List<int>(), (l, x) => l.AddToCopy(x)).OrderBy(x => x)); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 512, 1024 * 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Seed_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Collection_Seed(labeled, count); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Result(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; Assert.Equal(Functions.SumRange(0, count) + ResultFuncModifier, query.Aggregate(0, (x, y) => x + y, result => result + ResultFuncModifier)); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 1024 * 1024, 1024 * 1024 * 4 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Result_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Sum_Result(labeled, count); } [Theory] [MemberData("Ranges", 1, new int[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Result(Labeled<ParallelQuery<int>> labeled, int count, int start) { ParallelQuery<int> query = labeled.Item; Assert.Equal(Functions.ProductRange(start, count) + ResultFuncModifier, query.Aggregate(1L, (x, y) => x * y, result => result + ResultFuncModifier)); } [Theory] [OuterLoop] [MemberData("Ranges", 1, new int[] { 1024 * 1024, 1024 * 1024 * 4 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Results_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, int start) { Aggregate_Product_Result(labeled, count, start); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Results(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; Assert.Equal(Enumerable.Range(0, count), query.Aggregate((IList<int>)new List<int>(), (l, x) => l.AddToCopy(x), l => l.OrderBy(x => x))); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 512, 1024 * 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Results_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Collection_Results(labeled, count); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Accumulator(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; int actual = query.Aggregate( 0, (accumulator, x) => accumulator + x, (left, right) => left + right, result => result + ResultFuncModifier); Assert.Equal(Functions.SumRange(0, count) + ResultFuncModifier, actual); } [Theory] [OuterLoop] 
[MemberData("Ranges", (object)(new int[] { 1024 * 1024, 1024 * 1024 * 4 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_Accumulator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Sum_Accumulator(labeled, count); } [Theory] [MemberData("Ranges", 1, new int[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Accumulator(Labeled<ParallelQuery<int>> labeled, int count, int start) { ParallelQuery<int> query = labeled.Item; long actual = query.Aggregate( 1L, (accumulator, x) => accumulator * x, (left, right) => left * right, result => result + ResultFuncModifier); Assert.Equal(Functions.ProductRange(start, count) + ResultFuncModifier, actual); } [Theory] [OuterLoop] [MemberData("Ranges", 1, new int[] { 1024 * 1024, 1024 * 1024 * 4 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_Accumulator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, int start) { Aggregate_Product_Accumulator(labeled, count, start); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Accumulator(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; IList<int> actual = query.Aggregate( (IList<int>)new List<int>(), (accumulator, x) => accumulator.AddToCopy(x), (left, right) => left.ConcatCopy(right), result => result.OrderBy(x => x).ToList()); Assert.Equal(Enumerable.Range(0, count), actual); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 512, 1024 * 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_Accumulator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Collection_Accumulator(labeled, count); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_SeedFunction(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; int actual = query.Aggregate( () => 0, (accumulator, x) => accumulator + x, (left, right) => left + right, result => result + ResultFuncModifier); Assert.Equal(Functions.SumRange(0, count) + ResultFuncModifier, actual); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 1024 * 1024, 1024 * 1024 * 4 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Sum_SeedFunction_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Sum_SeedFunction(labeled, count); } [Theory] [MemberData("Ranges", 1, new int[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_SeedFunction(Labeled<ParallelQuery<int>> labeled, int count, int start) { ParallelQuery<int> query = labeled.Item; long actual = query.Aggregate( () => 1L, (accumulator, x) => accumulator * x, (left, right) => left * right, result => result + ResultFuncModifier); Assert.Equal(Functions.ProductRange(start, count) + ResultFuncModifier, actual); } [Theory] [OuterLoop] [MemberData("Ranges", 1, new int[] { 1024 * 1024, 1024 * 1024 * 4 }, MemberType = typeof(UnorderedSources))] public static void Aggregate_Product_SeedFunction_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, int start) { Aggregate_Product_SeedFunction(labeled, count, start); } [Theory] [MemberData("Ranges", (object)(new int[] { 0, 1, 2, 16 }), MemberType = typeof(UnorderedSources))] public static void 
Aggregate_Collection_SeedFunction(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; IList<int> actual = query.Aggregate( () => (IList<int>)new List<int>(), (accumulator, x) => accumulator.AddToCopy(x), (left, right) => left.ConcatCopy(right), result => result.OrderBy(x => x).ToList()); Assert.Equal(Enumerable.Range(0, count), actual); } [Theory] [OuterLoop] [MemberData("Ranges", (object)(new int[] { 512, 1024 * 16 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_Collection_SeedFunction_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Aggregate_Collection_SeedFunction(labeled, count); } [Fact] public static void Aggregate_InvalidOperationException() { Assert.Throws<InvalidOperationException>(() => ParallelEnumerable.Empty<int>().Aggregate((i, j) => i)); // All other invocations return the seed value. Assert.Equal(-1, ParallelEnumerable.Empty<int>().Aggregate(-1, (i, j) => i + j)); Assert.Equal(-1, ParallelEnumerable.Empty<int>().Aggregate(-1, (i, j) => i + j, i => i)); Assert.Equal(-1, ParallelEnumerable.Empty<int>().Aggregate(-1, (i, j) => i + j, (i, j) => i + j, i => i)); Assert.Equal(-1, ParallelEnumerable.Empty<int>().Aggregate(() => -1, (i, j) => i + j, (i, j) => i + j, i => i)); } [Theory] [MemberData("Ranges", (object)(new int[] { 2 }), MemberType = typeof(UnorderedSources))] public static void Aggregate_OperationCanceledException_PreCanceled(Labeled<ParallelQuery<int>> labeled, int count) { CancellationTokenSource cs = new CancellationTokenSource(); cs.Cancel(); Functions.AssertIsCanceled(cs, () => labeled.Item.WithCancellation(cs.Token).Aggregate((i, j) => i)); Functions.AssertIsCanceled(cs, () => labeled.Item.WithCancellation(cs.Token).Aggregate(0, (i, j) => i)); Functions.AssertIsCanceled(cs, () => labeled.Item.WithCancellation(cs.Token).Aggregate(0, (i, j) => i, i => i)); Functions.AssertIsCanceled(cs, () => labeled.Item.WithCancellation(cs.Token).Aggregate(0, (i, j) => i, (i, j) => i, i => i)); } [Theory] [MemberData("AggregateExceptionData", (object)(new int[] { 2 }))] public static void Aggregate_AggregateException(Labeled<ParallelQuery<int>> labeled, int count) { Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate((i, j) => { throw new DeliberateTestException(); })); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(0, (i, j) => { throw new DeliberateTestException(); })); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(0, (i, j) => { throw new DeliberateTestException(); }, i => i)); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate<int, int, int>(0, (i, j) => i, i => { throw new DeliberateTestException(); })); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(0, (i, j) => { throw new DeliberateTestException(); }, (i, j) => i, i => i)); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate<int, int, int>(0, (i, j) => i, (i, j) => i, i => { throw new DeliberateTestException(); })); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate<int, int, int>(() => { throw new DeliberateTestException(); }, (i, j) => i, (i, j) => i, i => i)); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(() => 0, (i, j) => { throw new DeliberateTestException(); }, (i, j) => i, i => i)); Functions.AssertThrowsWrapped<DeliberateTestException>(() => 
labeled.Item.Aggregate<int, int, int>(() => 0, (i, j) => i, (i, j) => i, i => { throw new DeliberateTestException(); })); if (Environment.ProcessorCount >= 2) { Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(0, (i, j) => i, (i, j) => { throw new DeliberateTestException(); }, i => i)); Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.Aggregate(() => 0, (i, j) => i, (i, j) => { throw new DeliberateTestException(); }, i => i)); } } [Fact] public static void Aggregate_ArgumentNullException() { Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).Aggregate((i, j) => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(null)); Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).Aggregate(0, (i, j) => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(0, null)); Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).Aggregate(0, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(0, null, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate<int, int, int>(0, (i, j) => i, null)); Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).Aggregate(0, (i, j) => i, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(0, null, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(0, (i, j) => i, null, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate<int, int, int>(0, (i, j) => i, (i, j) => i, null)); Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).Aggregate(() => 0, (i, j) => i, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate<int, int, int>(null, (i, j) => i, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(() => 0, null, (i, j) => i, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate(() => 0, (i, j) => i, null, i => i)); Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Range(0, 1).Aggregate<int, int, int>(() => 0, (i, j) => i, (i, j) => i, null)); } } internal static class ListHelper { // System.Collections.Immutable.ImmutableList wasn't available. public static IList<int> AddToCopy(this IList<int> collection, int element) { collection = new List<int>(collection); collection.Add(element); return collection; } public static IList<int> ConcatCopy(this IList<int> left, IList<int> right) { List<int> results = new List<int>(left); results.AddRange(right); return results; } } }
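// --- Stand-alone sketch (hand-written, not part of the test suite above) ---
// A minimal example of the seed-factory Aggregate overload that the *_SeedFunction
// tests exercise: one seed per partition, a per-partition fold, a combiner for the
// partial results, and a final projection. Assumes `using System;` and
// `using System.Linq;`; the literal 17 simply mirrors ResultFuncModifier.
internal static class AggregateUsageSketch
{
    public static void Run()
    {
        int sum = ParallelEnumerable.Range(0, 1000).Aggregate(
            () => 0,                        // seed factory, invoked once per partition
            (acc, x) => acc + x,            // fold elements within a partition
            (left, right) => left + right,  // combine the partition subtotals
            result => result + 17);         // final projection over the combined total

        Console.WriteLine(sum);             // 499500 + 17 = 499517
    }
}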
// // GtkTheme.cs // // Author: // Aaron Bockover <[email protected]> // // Copyright (C) 2007-2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using Cairo; using Gtk; namespace Hyena.Gui.Theming { public class GtkTheme : Theme { private Cairo.Color rule_color; private Cairo.Color border_color; public GtkTheme (Widget widget) : base (widget) { } public static Cairo.Color GetCairoTextMidColor (Widget widget) { Cairo.Color text_color = CairoExtensions.GdkColorToCairoColor (widget.Style.Foreground (StateType.Normal)); Cairo.Color background_color = CairoExtensions.GdkColorToCairoColor (widget.Style.Background (StateType.Normal)); return CairoExtensions.AlphaBlend (text_color, background_color, 0.5); } public static Gdk.Color GetGdkTextMidColor (Widget widget) { Cairo.Color color = GetCairoTextMidColor (widget); Gdk.Color gdk_color = new Gdk.Color ((byte)(color.R * 255), (byte)(color.G * 255), (byte)(color.B * 255)); Gdk.Colormap.System.AllocColor (ref gdk_color, true, true); return gdk_color; } protected override void OnColorsRefreshed () { base.OnColorsRefreshed (); rule_color = CairoExtensions.ColorShade (ViewFill, 0.95); // On Windows we use Normal b/c Active incorrectly returns black (at least on XP) border_color = Colors.GetWidgetColor (GtkColorClass.Dark, Hyena.PlatformDetection.IsWindows ? 
StateType.Normal : StateType.Active ); } public override void DrawPie (double fraction) { // Calculate the pie path fraction = Theme.Clamp (0.0, 1.0, fraction); double a1 = 3.0 * Math.PI / 2.0; double a2 = a1 + 2.0 * Math.PI * fraction; if (fraction == 0.0) { return; } Context.Cairo.MoveTo (Context.X, Context.Y); Context.Cairo.Arc (Context.X, Context.Y, Context.Radius, a1, a2); Context.Cairo.LineTo (Context.X, Context.Y); // Fill the pie Color color_a = Colors.GetWidgetColor (GtkColorClass.Background, StateType.Selected); Color color_b = CairoExtensions.ColorShade (color_a, 1.4); RadialGradient fill = new RadialGradient (Context.X, Context.Y, 0, Context.X, Context.Y, 2.0 * Context.Radius); fill.AddColorStop (0, color_a); fill.AddColorStop (1, color_b); Context.Cairo.SetSource (fill); Context.Cairo.FillPreserve (); fill.Dispose (); // Stroke the pie Context.Cairo.SetSourceColor (CairoExtensions.ColorShade (color_a, 0.8)); Context.Cairo.LineWidth = Context.LineWidth; Context.Cairo.Stroke (); } public override void DrawArrow (Context cr, Gdk.Rectangle alloc, double rotation) { rotation -= Math.PI / 2.0; double x1 = alloc.X; double x2 = alloc.Right; double x3 = alloc.X + alloc.Width / 2.0; double y1 = alloc.Y; double y2 = alloc.Bottom; double cx = x3; double cy = alloc.Y + alloc.Height / 2.0; if (rotation != 0) { // Rotate about the center of the arrow cr.Translate (cx, cy); cr.Rotate (rotation); cr.Translate (-cx, -cy); } cr.LineWidth = 1.0; bool hz = (rotation % (Math.PI / 2.0)) == 0; double dx = hz ? 0 : 0.5; double dy = hz ? 0.5 : 0.5; cr.Translate (dx, dy); cr.MoveTo (x1, y1); cr.LineTo (x2, y1); cr.LineTo (x3, y2); cr.LineTo (x1, y1); cr.SetSourceColor (Colors.GetWidgetColor (GtkColorClass.Base, StateType.Normal)); cr.FillPreserve (); cr.SetSourceColor (Colors.GetWidgetColor (GtkColorClass.Text, StateType.Normal)); cr.Stroke (); cr.Translate (-dx, -dy); if (rotation != 0) { cr.Translate (cx, cy); cr.Rotate (-rotation); cr.Translate (-cx, -cy); } } public override void DrawFrameBackground (Cairo.Context cr, Gdk.Rectangle alloc, Cairo.Color color, Cairo.Pattern pattern) { color.A = Context.FillAlpha; if (pattern != null) { cr.SetSource (pattern); } else { cr.SetSourceColor (color); } CairoExtensions.RoundedRectangle (cr, alloc.X, alloc.Y, alloc.Width, alloc.Height, Context.Radius, CairoCorners.All); cr.Fill (); } public override void DrawFrameBorder (Cairo.Context cr, Gdk.Rectangle alloc) { var corners = CairoCorners.All; double top_extend = 0; double bottom_extend = 0; double left_extend = 0; double right_extend = 0; if (Context.ToplevelBorderCollapse) { if (Widget.Allocation.Top <= Widget.Toplevel.Allocation.Top) { corners &= ~(CairoCorners.TopLeft | CairoCorners.TopRight); top_extend = cr.LineWidth; } if (Widget.Allocation.Bottom >= Widget.Toplevel.Allocation.Bottom) { corners &= ~(CairoCorners.BottomLeft | CairoCorners.BottomRight); bottom_extend = cr.LineWidth; } if (Widget.Allocation.Left <= Widget.Toplevel.Allocation.Left) { corners &= ~(CairoCorners.BottomLeft | CairoCorners.TopLeft); left_extend = cr.LineWidth; } if (Widget.Allocation.Right >= Widget.Toplevel.Allocation.Right) { corners &= ~(CairoCorners.BottomRight | CairoCorners.TopRight); right_extend = cr.LineWidth; } } // FIXME Windows; shading the color by .8 makes it blend into the bg if (Widget.HasFocus && !Hyena.PlatformDetection.IsWindows) { cr.LineWidth = BorderWidth * 1.5; cr.SetSourceColor (CairoExtensions.ColorShade (border_color, 0.8)); } else { cr.LineWidth = BorderWidth; cr.SetSourceColor (border_color); } double 
offset = (double)cr.LineWidth / 2.0; CairoExtensions.RoundedRectangle (cr, alloc.X + offset - left_extend, alloc.Y + offset - top_extend, alloc.Width - cr.LineWidth + left_extend + right_extend, alloc.Height - cr.LineWidth - top_extend + bottom_extend, Context.Radius, corners); cr.Stroke (); } public override void DrawColumnHighlight (Cairo.Context cr, Gdk.Rectangle alloc, Cairo.Color color) { Cairo.Color light_color = CairoExtensions.ColorShade (color, 1.6); Cairo.Color dark_color = CairoExtensions.ColorShade (color, 1.3); LinearGradient grad = new LinearGradient (alloc.X, alloc.Y, alloc.X, alloc.Bottom - 1); grad.AddColorStop (0, light_color); grad.AddColorStop (1, dark_color); cr.SetSource (grad); cr.Rectangle (alloc.X + 1.5, alloc.Y + 1.5, alloc.Width - 3, alloc.Height - 2); cr.Fill (); grad.Dispose (); } public override void DrawHeaderBackground (Cairo.Context cr, Gdk.Rectangle alloc) { Cairo.Color gtk_background_color = Colors.GetWidgetColor (GtkColorClass.Background, StateType.Normal); Cairo.Color light_color = CairoExtensions.ColorShade (gtk_background_color, 1.1); Cairo.Color dark_color = CairoExtensions.ColorShade (gtk_background_color, 0.95); CairoCorners corners = CairoCorners.TopLeft | CairoCorners.TopRight; LinearGradient grad = new LinearGradient (alloc.X, alloc.Y, alloc.X, alloc.Bottom); grad.AddColorStop (0, light_color); grad.AddColorStop (0.75, dark_color); grad.AddColorStop (0, light_color); cr.SetSource (grad); CairoExtensions.RoundedRectangle (cr, alloc.X, alloc.Y, alloc.Width, alloc.Height, Context.Radius, corners); cr.Fill (); cr.SetSourceColor (border_color); cr.Rectangle (alloc.X, alloc.Bottom, alloc.Width, BorderWidth); cr.Fill (); grad.Dispose (); } public override void DrawColumnHeaderFocus (Cairo.Context cr, Gdk.Rectangle alloc) { double top_offset = 2.0; double right_offset = 2.0; double margin = 0.5; double line_width = 0.7; Cairo.Color stroke_color = CairoExtensions.ColorShade ( Colors.GetWidgetColor (GtkColorClass.Background, StateType.Selected), 0.8); stroke_color.A = 0.1; cr.SetSourceColor (stroke_color); CairoExtensions.RoundedRectangle (cr, alloc.X + margin + line_width + right_offset, alloc.Y + margin + line_width + top_offset, alloc.Width - (margin + line_width)*2.0 - right_offset, alloc.Height - (margin + line_width)*2.0 - top_offset, Context.Radius/2.0, CairoCorners.None); cr.Fill (); stroke_color.A = 1.0; cr.LineWidth = line_width; cr.SetSourceColor (stroke_color); CairoExtensions.RoundedRectangle (cr, alloc.X + margin + line_width + right_offset, alloc.Y + margin + line_width + top_offset, alloc.Width - (line_width + margin)*2.0 - right_offset, alloc.Height - (line_width + margin)*2.0 - right_offset, Context.Radius/2.0, CairoCorners.All); cr.Stroke (); } public override void DrawHeaderSeparator (Cairo.Context cr, Gdk.Rectangle alloc, int x) { Cairo.Color gtk_background_color = Colors.GetWidgetColor (GtkColorClass.Background, StateType.Normal); Cairo.Color dark_color = CairoExtensions.ColorShade (gtk_background_color, 0.80); Cairo.Color light_color = CairoExtensions.ColorShade (gtk_background_color, 1.1); int y_1 = alloc.Top + 4; int y_2 = alloc.Bottom - 3; cr.LineWidth = 1; cr.Antialias = Cairo.Antialias.None; cr.SetSourceColor (dark_color); cr.MoveTo (x, y_1); cr.LineTo (x, y_2); cr.Stroke (); cr.SetSourceColor (light_color); cr.MoveTo (x + 1, y_1); cr.LineTo (x + 1, y_2); cr.Stroke (); cr.Antialias = Cairo.Antialias.Default; } public override void DrawListBackground (Context cr, Gdk.Rectangle alloc, Color color) { color.A = Context.FillAlpha; 
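// Flat fill: honor the theme's fill alpha, then paint the entire allocation with it.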
cr.SetSourceColor (color); cr.Rectangle (alloc.X, alloc.Y, alloc.Width, alloc.Height); cr.Fill (); } public override void DrawRowCursor (Cairo.Context cr, int x, int y, int width, int height, Cairo.Color color, CairoCorners corners) { cr.LineWidth = 1.25; cr.SetSourceColor (color); CairoExtensions.RoundedRectangle (cr, x + cr.LineWidth/2.0, y + cr.LineWidth/2.0, width - cr.LineWidth, height - cr.LineWidth, Context.Radius, corners, true); cr.Stroke (); } public override void DrawRowSelection (Cairo.Context cr, int x, int y, int width, int height, bool filled, bool stroked, Cairo.Color color, CairoCorners corners) { DrawRowSelection (cr, x, y, width, height, filled, stroked, color, corners, false); } public void DrawRowSelection (Cairo.Context cr, int x, int y, int width, int height, bool filled, bool stroked, Cairo.Color color, CairoCorners corners, bool flat_fill) { Cairo.Color selection_color = color; Cairo.Color selection_highlight = CairoExtensions.ColorShade (selection_color, 1.24); Cairo.Color selection_stroke = CairoExtensions.ColorShade (selection_color, 0.85); selection_highlight.A = 0.5; selection_stroke.A = color.A; LinearGradient grad = null; if (filled) { if (flat_fill) { cr.SetSourceColor (selection_color); } else { Cairo.Color selection_fill_light = CairoExtensions.ColorShade (selection_color, 1.12); Cairo.Color selection_fill_dark = selection_color; selection_fill_light.A = color.A; selection_fill_dark.A = color.A; grad = new LinearGradient (x, y, x, y + height); grad.AddColorStop (0, selection_fill_light); grad.AddColorStop (0.4, selection_fill_dark); grad.AddColorStop (1, selection_fill_light); cr.SetSource (grad); } CairoExtensions.RoundedRectangle (cr, x, y, width, height, Context.Radius, corners, true); cr.Fill (); if (grad != null) { grad.Dispose (); } } if (filled && stroked) { cr.LineWidth = 1.0; cr.SetSourceColor (selection_highlight); CairoExtensions.RoundedRectangle (cr, x + 1.5, y + 1.5, width - 3, height - 3, Context.Radius - 1, corners, true); cr.Stroke (); } if (stroked) { cr.LineWidth = 1.0; cr.SetSourceColor (selection_stroke); CairoExtensions.RoundedRectangle (cr, x + 0.5, y + 0.5, width - 1, height - 1, Context.Radius, corners, true); cr.Stroke (); } } public override void DrawRowRule (Cairo.Context cr, int x, int y, int width, int height) { cr.SetSourceColor (new Cairo.Color (rule_color.R, rule_color.G, rule_color.B, Context.FillAlpha)); cr.Rectangle (x, y, width, height); cr.Fill (); } } }
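// --- Illustrative sketch (hand-written, not part of GtkTheme above) ---
// GetCairoTextMidColor relies on CairoExtensions.AlphaBlend(text, background, 0.5).
// The hypothetical helper below shows the intended result as a plain 50/50 channel
// blend of two opaque colors; the real extension method may differ in detail.
internal static class MidColorSketch
{
    public static Cairo.Color Midpoint(Cairo.Color a, Cairo.Color b)
    {
        return new Cairo.Color(
            (a.R + b.R) / 2.0,
            (a.G + b.G) / 2.0,
            (a.B + b.B) / 2.0,
            1.0); // fully opaque, like a text color drawn over the background
    }
}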
// This file was created automatically, do not modify the contents of this file. // ReSharper disable InvalidXmlDocComment // ReSharper disable InconsistentNaming // ReSharper disable CheckNamespace // ReSharper disable MemberCanBePrivate.Global using System; using System.Runtime.InteropServices; // Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\Engine\Engine.h:295 namespace UnrealEngine { public partial class FWorldContext : NativeStructWrapper { public FWorldContext(IntPtr NativePointer, bool IsRef = false) : base(NativePointer, IsRef) { } public FWorldContext() : base(E_CreateStruct_FWorldContext(), false) { } [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern bool E_PROP_FWorldContext_bWaitingOnOnlineSubsystem_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_bWaitingOnOnlineSubsystem_SET(IntPtr Ptr, bool Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern StringWrapper E_PROP_FWorldContext_ContextHandle_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_ContextHandle_SET(IntPtr Ptr, string Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern ObjectPointerDescription E_PROP_FWorldContext_GameViewport_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_GameViewport_SET(IntPtr Ptr, IntPtr Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern IntPtr E_PROP_FWorldContext_LastRemoteURL_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_LastRemoteURL_SET(IntPtr Ptr, IntPtr Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern IntPtr E_PROP_FWorldContext_LastURL_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_LastURL_SET(IntPtr Ptr, IntPtr Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern StringWrapper E_PROP_FWorldContext_PendingMapChangeFailureDescription_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_PendingMapChangeFailureDescription_SET(IntPtr Ptr, string Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern int E_PROP_FWorldContext_PIEInstance_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_PIEInstance_SET(IntPtr Ptr, int Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern StringWrapper E_PROP_FWorldContext_PIEPrefix_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_PIEPrefix_SET(IntPtr Ptr, string Value); 
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern bool E_PROP_FWorldContext_RunAsDedicated_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_RunAsDedicated_SET(IntPtr Ptr, bool Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern byte E_PROP_FWorldContext_TravelType_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_TravelType_SET(IntPtr Ptr, byte Value); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern StringWrapper E_PROP_FWorldContext_TravelURL_GET(IntPtr Ptr); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_PROP_FWorldContext_TravelURL_SET(IntPtr Ptr, string Value); #region DLLInmport [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern IntPtr E_CreateStruct_FWorldContext(); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E_FWorldContext_SetCurrentWorld(IntPtr self, IntPtr world); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern ObjectPointerDescription E_FWorldContext_World(IntPtr self); #endregion #region Property /// <summary> /// Is this world context waiting for an online login to complete (for PIE) /// </summary> public bool bWaitingOnOnlineSubsystem { get => E_PROP_FWorldContext_bWaitingOnOnlineSubsystem_GET(NativePointer); set => E_PROP_FWorldContext_bWaitingOnOnlineSubsystem_SET(NativePointer, value); } public string ContextHandle { get => E_PROP_FWorldContext_ContextHandle_GET(NativePointer); set => E_PROP_FWorldContext_ContextHandle_SET(NativePointer, value); } public UGameViewportClient GameViewport { get => E_PROP_FWorldContext_GameViewport_GET(NativePointer); set => E_PROP_FWorldContext_GameViewport_SET(NativePointer, value); } public FURL LastRemoteURL { get => E_PROP_FWorldContext_LastRemoteURL_GET(NativePointer); set => E_PROP_FWorldContext_LastRemoteURL_SET(NativePointer, value); } public FURL LastURL { get => E_PROP_FWorldContext_LastURL_GET(NativePointer); set => E_PROP_FWorldContext_LastURL_SET(NativePointer, value); } /// <summary> /// Human readable error string for any failure during a map change request. Empty if there were no failures. 
/// </summary> public string PendingMapChangeFailureDescription { get => E_PROP_FWorldContext_PendingMapChangeFailureDescription_GET(NativePointer); set => E_PROP_FWorldContext_PendingMapChangeFailureDescription_SET(NativePointer, value); } /// <summary> /// The PIE instance of this world, -1 is default /// </summary> public int PIEInstance { get => E_PROP_FWorldContext_PIEInstance_GET(NativePointer); set => E_PROP_FWorldContext_PIEInstance_SET(NativePointer, value); } /// <summary> /// The Prefix in front of PIE level names, empty is default /// </summary> public string PIEPrefix { get => E_PROP_FWorldContext_PIEPrefix_GET(NativePointer); set => E_PROP_FWorldContext_PIEPrefix_SET(NativePointer, value); } /// <summary> /// Is this running as a dedicated server /// </summary> public bool RunAsDedicated { get => E_PROP_FWorldContext_RunAsDedicated_GET(NativePointer); set => E_PROP_FWorldContext_RunAsDedicated_SET(NativePointer, value); } /// <summary> /// TravelType for pending client connects /// </summary> public byte TravelType { get => E_PROP_FWorldContext_TravelType_GET(NativePointer); set => E_PROP_FWorldContext_TravelType_SET(NativePointer, value); } /// <summary> /// URL to travel to for pending client connect /// </summary> public string TravelURL { get => E_PROP_FWorldContext_TravelURL_GET(NativePointer); set => E_PROP_FWorldContext_TravelURL_SET(NativePointer, value); } #endregion #region ExternMethods /// <summary> /// Set CurrentWorld and update external reference pointers to reflect this /// </summary> public void SetCurrentWorld(UWorld world) => E_FWorldContext_SetCurrentWorld(this, world); public UWorld World() => E_FWorldContext_World(this); #endregion public static implicit operator IntPtr(FWorldContext self) { return self?.NativePointer ?? IntPtr.Zero; } public static implicit operator FWorldContext(IntPtr adress) { return adress == IntPtr.Zero ? null : new FWorldContext(adress, false); } } }
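// --- Usage sketch (hand-written, not generated) ---
// How an FWorldContext instance is obtained is not shown in this file, so `context`
// is assumed to be supplied by the host engine; the generated wrapper above then
// exposes the native struct as plain C# properties and methods.
internal static class WorldContextUsageSketch
{
    public static void Describe(UnrealEngine.FWorldContext context)
    {
        System.Console.WriteLine(
            $"Context '{context.ContextHandle}', PIE instance {context.PIEInstance}, dedicated: {context.RunAsDedicated}");

        // World() marshals the native pointer back into a managed UWorld wrapper.
        UnrealEngine.UWorld world = context.World();
        if (world != null)
        {
            // Re-assigning the same world is a no-op in practice; shown only to
            // illustrate the call shape of SetCurrentWorld.
            context.SetCurrentWorld(world);
        }
    }
}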
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. #nullable disable using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Routing.Patterns; using Microsoft.AspNetCore.Routing.Template; using Microsoft.Extensions.Logging; namespace Microsoft.AspNetCore.Routing.Matching { internal class DfaMatcherBuilder : MatcherBuilder { private readonly List<RouteEndpoint> _endpoints = new List<RouteEndpoint>(); private readonly ILoggerFactory _loggerFactory; private readonly ParameterPolicyFactory _parameterPolicyFactory; private readonly EndpointSelector _selector; private readonly IEndpointSelectorPolicy[] _endpointSelectorPolicies; private readonly INodeBuilderPolicy[] _nodeBuilders; private readonly EndpointComparer _comparer; // These collections are reused when building candidates private readonly Dictionary<string, int> _assignments; private readonly List<KeyValuePair<string, object>> _slots; private readonly List<(string parameterName, int segmentIndex, int slotIndex)> _captures; private readonly List<(RoutePatternPathSegment pathSegment, int segmentIndex)> _complexSegments; private readonly List<KeyValuePair<string, IRouteConstraint>> _constraints; private int _stateIndex; public DfaMatcherBuilder( ILoggerFactory loggerFactory, ParameterPolicyFactory parameterPolicyFactory, EndpointSelector selector, IEnumerable<MatcherPolicy> policies) { _loggerFactory = loggerFactory; _parameterPolicyFactory = parameterPolicyFactory; _selector = selector; var (nodeBuilderPolicies, endpointComparerPolicies, endpointSelectorPolicies) = ExtractPolicies(policies.OrderBy(p => p.Order)); _endpointSelectorPolicies = endpointSelectorPolicies; _nodeBuilders = nodeBuilderPolicies; _comparer = new EndpointComparer(endpointComparerPolicies); _assignments = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase); _slots = new List<KeyValuePair<string, object>>(); _captures = new List<(string parameterName, int segmentIndex, int slotIndex)>(); _complexSegments = new List<(RoutePatternPathSegment pathSegment, int segmentIndex)>(); _constraints = new List<KeyValuePair<string, IRouteConstraint>>(); } // Used in tests internal EndpointComparer Comparer => _comparer; public override void AddEndpoint(RouteEndpoint endpoint) { _endpoints.Add(endpoint); } public DfaNode BuildDfaTree(bool includeLabel = false) { // Since we're doing a BFS we will process each 'level' of the tree in stages // this list will hold the set of items we need to process at the current // stage. var work = new List<DfaBuilderWorkerWorkItem>(_endpoints.Count); var root = new DfaNode() { PathDepth = 0, Label = includeLabel ? "/" : null }; // To prepare for this we need to compute the max depth, as well as // a seed list of items to process (entry, root). var maxDepth = 0; for (var i = 0; i < _endpoints.Count; i++) { var endpoint = _endpoints[i]; var precedenceDigit = GetPrecedenceDigitAtDepth(endpoint, depth: 0); work.Add(new DfaBuilderWorkerWorkItem(endpoint, precedenceDigit, new List<DfaNode>() { root, })); maxDepth = Math.Max(maxDepth, endpoint.RoutePattern.PathSegments.Count); } // Sort work at each level by *PRECEDENCE OF THE CURRENT SEGMENT*. // // We build the tree by doing a BFS over the list of entries. This is important // because a 'parameter' node can also traverse the same paths that literal nodes // traverse. 
This means that we need to order the entries first, or else we will // miss possible edges in the DFA. // // We'll sort the matches again later using the *real* comparer once building the // precedence part of the DFA is over. var precedenceDigitComparer = Comparer<DfaBuilderWorkerWorkItem>.Create((x, y) => { return x.PrecedenceDigit.CompareTo(y.PrecedenceDigit); }); var dfaWorker = new DfaBuilderWorker(work, precedenceDigitComparer, includeLabel, _parameterPolicyFactory); // Now we process the entries a level at a time. for (var depth = 0; depth <= maxDepth; depth++) { dfaWorker.ProcessLevel(depth); } // Build the trees of policy nodes (like HTTP methods). Post-order traversal // means that we won't have infinite recursion. root.Visit(ApplyPolicies); return root; } private class DfaBuilderWorker { private List<DfaBuilderWorkerWorkItem> _previousWork; private List<DfaBuilderWorkerWorkItem> _work; private int _workCount; private readonly Comparer<DfaBuilderWorkerWorkItem> _precedenceDigitComparer; private readonly bool _includeLabel; private readonly ParameterPolicyFactory _parameterPolicyFactory; public DfaBuilderWorker( List<DfaBuilderWorkerWorkItem> work, Comparer<DfaBuilderWorkerWorkItem> precedenceDigitComparer, bool includeLabel, ParameterPolicyFactory parameterPolicyFactory) { _work = work; _previousWork = new List<DfaBuilderWorkerWorkItem>(); _workCount = work.Count; _precedenceDigitComparer = precedenceDigitComparer; _includeLabel = includeLabel; _parameterPolicyFactory = parameterPolicyFactory; } // Each time we process a level of the DFA we keep a list of work items consisting on the nodes we need to evaluate // their precendence and their parent nodes. We sort nodes by precedence on each level, which means that nodes are // evaluated in the following order: (literals, constrained parameters/complex segments, parameters, constrainted catch-alls and catch-alls) // When we process a stage we build a list of the next set of workitems we need to evaluate. We also keep around the // list of workitems from the previous level so that we can reuse all the nested lists while we are evaluating the current level. internal void ProcessLevel(int depth) { // As we process items, collect the next set of items. var nextWork = _previousWork; var nextWorkCount = 0; // See comments on precedenceDigitComparer _work.Sort(0, _workCount, _precedenceDigitComparer); for (var i = 0; i < _workCount; i++) { var (endpoint, _, parents) = _work[i]; if (!HasAdditionalRequiredSegments(endpoint, depth)) { for (var j = 0; j < parents.Count; j++) { var parent = parents[j]; parent.AddMatch(endpoint); } } // Find the parents of this edge at the current depth List<DfaNode> nextParents; if (nextWorkCount < nextWork.Count) { nextParents = nextWork[nextWorkCount].Parents; nextParents.Clear(); var nextPrecedenceDigit = GetPrecedenceDigitAtDepth(endpoint, depth + 1); nextWork[nextWorkCount] = new DfaBuilderWorkerWorkItem(endpoint, nextPrecedenceDigit, nextParents); } else { nextParents = new List<DfaNode>(); // Add to the next set of work now so the list will be reused // even if there are no parents var nextPrecedenceDigit = GetPrecedenceDigitAtDepth(endpoint, depth + 1); nextWork.Add(new DfaBuilderWorkerWorkItem(endpoint, nextPrecedenceDigit, nextParents)); } var segment = GetCurrentSegment(endpoint, depth); if (segment == null) { continue; } ProcessSegment(endpoint, parents, nextParents, segment); if (nextParents.Count > 0) { nextWorkCount++; } } // Prepare to process the next stage. 
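// Swap the buffers: the list we just built becomes the current work for the next depth,
// and the old list is kept so its nested parent lists can be reused.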
_previousWork = _work; _work = nextWork; _workCount = nextWorkCount; } private void ProcessSegment( RouteEndpoint endpoint, List<DfaNode> parents, List<DfaNode> nextParents, RoutePatternPathSegment segment) { for (var i = 0; i < parents.Count; i++) { var parent = parents[i]; var part = segment.Parts[0]; var parameterPart = part as RoutePatternParameterPart; if (segment.IsSimple && part is RoutePatternLiteralPart literalPart) { AddLiteralNode(_includeLabel, nextParents, parent, literalPart.Content); } else if (segment.IsSimple && parameterPart != null && parameterPart.IsCatchAll) { // A catch all should traverse all literal nodes as well as parameter nodes // we don't need to create the parameter node here because of ordering // all catchalls will be processed after all parameters. if (parent.Literals != null) { nextParents.AddRange(parent.Literals.Values); } if (parent.Parameters != null) { nextParents.Add(parent.Parameters); } // We also create a 'catchall' here. We don't do further traversals // on the catchall node because only catchalls can end up here. The // catchall node allows us to capture an unlimited amount of segments // and also to match a zero-length segment, which a parameter node // doesn't allow. if (parent.CatchAll == null) { parent.CatchAll = new DfaNode() { PathDepth = parent.PathDepth + 1, Label = _includeLabel ? parent.Label + "{*...}/" : null, }; // The catchall node just loops. parent.CatchAll.Parameters = parent.CatchAll; parent.CatchAll.CatchAll = parent.CatchAll; } parent.CatchAll.AddMatch(endpoint); } else if (segment.IsSimple && parameterPart != null && TryGetRequiredValue(endpoint.RoutePattern, parameterPart, out var requiredValue)) { // If the parameter has a matching required value, replace the parameter with the required value // as a literal. This should use the parameter's transformer (if present) // e.g. Template: Home/{action}, Required values: { action = "Index" }, Result: Home/Index AddRequiredLiteralValue(endpoint, nextParents, parent, parameterPart, requiredValue); } else if (segment.IsSimple && parameterPart != null) { if (parent.Parameters == null) { parent.Parameters = new DfaNode() { PathDepth = parent.PathDepth + 1, Label = _includeLabel ? parent.Label + "{...}/" : null, }; } if (parent.Literals != null) { // If the parameter contains constraints, we can be smarter about it and evaluate them while we build the tree. // If the literal doesn't match any of the constraints, we can prune the branch. // For example, for a parameter in a route {lang:length(2)} and a parent literal "ABC", we can check that "ABC" // doesn't meet the parameter constraint (length(2)) when building the tree, and avoid the extra nodes. if (endpoint.RoutePattern.ParameterPolicies.TryGetValue(parameterPart.Name, out var parameterPolicyReferences)) { // We filter out sibling literals that don't match one of the constraints in the segment to avoid adding nodes to the DFA // that will never match a route and which will result in a much higher memory usage. AddParentsWithMatchingLiteralConstraints(nextParents, parent, parameterPart, parameterPolicyReferences); } else { // This means the current parameter we are evaluating doesn't contain any constraint, so we need to // traverse all literal nodes as well as the parameter node. nextParents.AddRange(parent.Literals.Values); } } nextParents.Add(parent.Parameters); } else { // Complex segment - we treat these are parameters here and do the // expensive processing later. 
We don't want to spend time processing // complex segments unless they are the best match, and treating them // like parameters in the DFA allows us to do just that. if (parent.Parameters == null) { parent.Parameters = new DfaNode() { PathDepth = parent.PathDepth + 1, Label = _includeLabel ? parent.Label + "{...}/" : null, }; } if (parent.Literals != null) { // For a complex segment like this, we can evaluate the literals and avoid adding extra nodes to // the tree on cases where the literal won't ever be able to match the complex parameter. // For example, if we have a complex parameter {a}-{b}.{c?} and a literal "Hello" we can guarantee // that it will never be a match. // We filter out sibling literals that don't match the complex parameter segment to avoid adding nodes to the DFA // that will never match a route and which will result in a much higher memory usage. AddParentsMatchingComplexSegment(endpoint, nextParents, segment, parent, parameterPart); } nextParents.Add(parent.Parameters); } } } private void AddParentsMatchingComplexSegment(RouteEndpoint endpoint, List<DfaNode> nextParents, RoutePatternPathSegment segment, DfaNode parent, RoutePatternParameterPart parameterPart) { var routeValues = new RouteValueDictionary(); foreach (var literal in parent.Literals.Keys) { if (RoutePatternMatcher.MatchComplexSegment(segment, literal, routeValues)) { // If we got here (rare) it means that the literal matches the complex segment (for example the literal is something A-B) // there is another thing we can try here, which is to evaluate the policies for the parts in case they have one (for example {a:length(4)}-{b:regex(\d+)}) // so that even if it maps closely to a complex parameter we have a chance to discard it and avoid adding the extra branches. var passedAllPolicies = true; for (var i = 0; i < segment.Parts.Count; i++) { var segmentPart = segment.Parts[i]; if (segmentPart is not RoutePatternParameterPart partParameter) { // We skip over the literals and the separator since we already checked against them continue; } if (!routeValues.TryGetValue(partParameter.Name, out var parameterValue)) { // We have a pattern like {a}-{b}.{part?} and a literal "a-b". Since we've matched the complex segment it means that the optional // parameter was not specified, so we skip it. Debug.Assert(i == segment.Parts.Count - 1 && partParameter.IsOptional); continue; } if (endpoint.RoutePattern.ParameterPolicies.TryGetValue(partParameter.Name, out var parameterPolicyReferences)) { for (var j = 0; j < parameterPolicyReferences.Count; j++) { var reference = parameterPolicyReferences[j]; var parameterPolicy = _parameterPolicyFactory.Create(parameterPart, reference); if (parameterPolicy is IParameterLiteralNodeMatchingPolicy constraint && !constraint.MatchesLiteral(partParameter.Name, (string)parameterValue)) { passedAllPolicies = false; break; } } } } if (passedAllPolicies) { nextParents.Add(parent.Literals[literal]); } } routeValues.Clear(); } } private void AddParentsWithMatchingLiteralConstraints(List<DfaNode> nextParents, DfaNode parent, RoutePatternParameterPart parameterPart, IReadOnlyList<RoutePatternParameterPolicyReference> parameterPolicyReferences) { // The list of parameters that fail to meet at least one IParameterLiteralNodeMatchingPolicy. var hasFailingPolicy = parent.Literals.Keys.Count < 32 ? (stackalloc bool[32]).Slice(0, parent.Literals.Keys.Count) : new bool[parent.Literals.Keys.Count]; // Whether or not all parameters have failed to meet at least one constraint. 
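// For each literal-matching policy, mark the sibling literals it rules out; once every
// literal has failed at least one policy we can return without adding any parents.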
for (var i = 0; i < parameterPolicyReferences.Count; i++) { var reference = parameterPolicyReferences[i]; var parameterPolicy = _parameterPolicyFactory.Create(parameterPart, reference); if (parameterPolicy is IParameterLiteralNodeMatchingPolicy constraint) { var literalIndex = 0; var allFailed = true; foreach (var literal in parent.Literals.Keys) { if (!hasFailingPolicy[literalIndex] && !constraint.MatchesLiteral(parameterPart.Name, literal)) { hasFailingPolicy[literalIndex] = true; } allFailed &= hasFailingPolicy[literalIndex]; literalIndex++; } if (allFailed) { // If we get here it means that all literals have failed at least one policy, which means we can skip checking policies // and return early. This will be a very common case when your constraints are things like "int,length or a regex". return; } } } var k = 0; foreach (var literal in parent.Literals.Values) { if (!hasFailingPolicy[k]) { nextParents.Add(literal); } k++; } } private void AddRequiredLiteralValue(RouteEndpoint endpoint, List<DfaNode> nextParents, DfaNode parent, RoutePatternParameterPart parameterPart, object requiredValue) { if (endpoint.RoutePattern.ParameterPolicies.TryGetValue(parameterPart.Name, out var parameterPolicyReferences)) { for (var k = 0; k < parameterPolicyReferences.Count; k++) { var reference = parameterPolicyReferences[k]; var parameterPolicy = _parameterPolicyFactory.Create(parameterPart, reference); if (parameterPolicy is IOutboundParameterTransformer parameterTransformer) { requiredValue = parameterTransformer.TransformOutbound(requiredValue); break; } } } var literalValue = requiredValue?.ToString() ?? throw new InvalidOperationException($"Required value for literal '{parameterPart.Name}' must evaluate to a non-null string."); AddLiteralNode(_includeLabel, nextParents, parent, literalValue); } } private static void AddLiteralNode(bool includeLabel, List<DfaNode> nextParents, DfaNode parent, string literal) { DfaNode next = null; if (parent.Literals == null || !parent.Literals.TryGetValue(literal, out next)) { next = new DfaNode() { PathDepth = parent.PathDepth + 1, Label = includeLabel ? parent.Label + literal + "/" : null, }; parent.AddLiteral(literal, next); } nextParents.Add(next); } private static RoutePatternPathSegment GetCurrentSegment(RouteEndpoint endpoint, int depth) { if (depth < endpoint.RoutePattern.PathSegments.Count) { return endpoint.RoutePattern.PathSegments[depth]; } if (endpoint.RoutePattern.PathSegments.Count == 0) { return null; } var lastSegment = endpoint.RoutePattern.PathSegments[endpoint.RoutePattern.PathSegments.Count - 1]; if (lastSegment.IsSimple && lastSegment.Parts[0] is RoutePatternParameterPart parameterPart && parameterPart.IsCatchAll) { return lastSegment; } return null; } private static int GetPrecedenceDigitAtDepth(RouteEndpoint endpoint, int depth) { var segment = GetCurrentSegment(endpoint, depth); if (segment is null) { // Treat "no segment" as high priority. it won't effect the algorithm, but we need to define a sort-order. return 0; } return RoutePrecedence.ComputeInboundPrecedenceDigit(endpoint.RoutePattern, segment); } public override Matcher Build() { #if DEBUG var includeLabel = true; #else var includeLabel = false; #endif var root = BuildDfaTree(includeLabel); // State count is the number of nodes plus an exit state var stateCount = 1; var maxSegmentCount = 0; root.Visit((node) => { stateCount++; maxSegmentCount = Math.Max(maxSegmentCount, node.PathDepth); }); _stateIndex = 0; // The max segment count is the maximum path-node-depth +1. 
We need // the +1 to capture any additional content after the 'last' segment. maxSegmentCount++; var states = new DfaState[stateCount]; var exitDestination = stateCount - 1; AddNode(root, states, exitDestination); // The root state only has a jump table. states[exitDestination] = new DfaState( Array.Empty<Candidate>(), Array.Empty<IEndpointSelectorPolicy>(), JumpTableBuilder.Build(exitDestination, exitDestination, null), null); return new DfaMatcher(_loggerFactory.CreateLogger<DfaMatcher>(), _selector, states, maxSegmentCount); } private int AddNode( DfaNode node, DfaState[] states, int exitDestination) { node.Matches?.Sort(_comparer); var currentStateIndex = _stateIndex; var currentDefaultDestination = exitDestination; var currentExitDestination = exitDestination; (string text, int destination)[] pathEntries = null; PolicyJumpTableEdge[] policyEntries = null; if (node.Literals != null) { pathEntries = new (string text, int destination)[node.Literals.Count]; var index = 0; foreach (var kvp in node.Literals) { var transition = Transition(kvp.Value); pathEntries[index++] = (kvp.Key, transition); } } if (node.Parameters != null && node.CatchAll != null && ReferenceEquals(node.Parameters, node.CatchAll)) { // This node has a single transition to but it should accept zero-width segments // this can happen when a node only has catchall parameters. currentExitDestination = currentDefaultDestination = Transition(node.Parameters); } else if (node.Parameters != null && node.CatchAll != null) { // This node has a separate transition for zero-width segments // this can happen when a node has both parameters and catchall parameters. currentDefaultDestination = Transition(node.Parameters); currentExitDestination = Transition(node.CatchAll); } else if (node.Parameters != null) { // This node has parameters but no catchall. currentDefaultDestination = Transition(node.Parameters); } else if (node.CatchAll != null) { // This node has a catchall but no parameters currentExitDestination = currentDefaultDestination = Transition(node.CatchAll); } if (node.PolicyEdges != null && node.PolicyEdges.Count > 0) { policyEntries = new PolicyJumpTableEdge[node.PolicyEdges.Count]; var index = 0; foreach (var kvp in node.PolicyEdges) { policyEntries[index++] = new PolicyJumpTableEdge(kvp.Key, Transition(kvp.Value)); } } var candidates = CreateCandidates(node.Matches); // Perf: most of the time there aren't any endpoint selector policies, create // this lazily. List<IEndpointSelectorPolicy> endpointSelectorPolicies = null; if (node.Matches?.Count > 0) { for (var i = 0; i < _endpointSelectorPolicies.Length; i++) { var endpointSelectorPolicy = _endpointSelectorPolicies[i]; if (endpointSelectorPolicy.AppliesToEndpoints(node.Matches)) { if (endpointSelectorPolicies == null) { endpointSelectorPolicies = new List<IEndpointSelectorPolicy>(); } endpointSelectorPolicies.Add(endpointSelectorPolicy); } } } states[currentStateIndex] = new DfaState( candidates, endpointSelectorPolicies?.ToArray() ?? Array.Empty<IEndpointSelectorPolicy>(), JumpTableBuilder.Build(currentDefaultDestination, currentExitDestination, pathEntries), // Use the final exit destination when building the policy state. // We don't want to use either of the current destinations because they refer routing states, // and a policy state should never transition back to a routing state. 
BuildPolicy(exitDestination, node.NodeBuilder, policyEntries)); return currentStateIndex; int Transition(DfaNode next) { // Break cycles if (ReferenceEquals(node, next)) { return _stateIndex; } else { _stateIndex++; return AddNode(next, states, exitDestination); } } } private static PolicyJumpTable BuildPolicy(int exitDestination, INodeBuilderPolicy nodeBuilder, PolicyJumpTableEdge[] policyEntries) { if (policyEntries == null) { return null; } return nodeBuilder.BuildJumpTable(exitDestination, policyEntries); } // Builds an array of candidates for a node, assigns a 'score' for each // endpoint. internal Candidate[] CreateCandidates(IReadOnlyList<Endpoint> endpoints) { if (endpoints == null || endpoints.Count == 0) { return Array.Empty<Candidate>(); } var candidates = new Candidate[endpoints.Count]; var score = 0; var exemplar = endpoints[0]; candidates[0] = CreateCandidate(exemplar, score); for (var i = 1; i < endpoints.Count; i++) { var endpoint = endpoints[i]; if (!_comparer.Equals(exemplar, endpoint)) { // This endpoint doesn't have the same priority. exemplar = endpoint; score++; } candidates[i] = CreateCandidate(endpoint, score); } return candidates; } // internal for tests internal Candidate CreateCandidate(Endpoint endpoint, int score) { (string parameterName, int segmentIndex, int slotIndex) catchAll = default; if (endpoint is RouteEndpoint routeEndpoint) { _assignments.Clear(); _slots.Clear(); _captures.Clear(); _complexSegments.Clear(); _constraints.Clear(); foreach (var kvp in routeEndpoint.RoutePattern.Defaults) { _assignments.Add(kvp.Key, _assignments.Count); _slots.Add(kvp); } for (var i = 0; i < routeEndpoint.RoutePattern.PathSegments.Count; i++) { var segment = routeEndpoint.RoutePattern.PathSegments[i]; if (!segment.IsSimple) { continue; } var parameterPart = segment.Parts[0] as RoutePatternParameterPart; if (parameterPart == null) { continue; } if (!_assignments.TryGetValue(parameterPart.Name, out var slotIndex)) { slotIndex = _assignments.Count; _assignments.Add(parameterPart.Name, slotIndex); // A parameter can have a required value, default value/catch all, or be a normal parameter // Add the required value or default value as the slot's initial value if (TryGetRequiredValue(routeEndpoint.RoutePattern, parameterPart, out var requiredValue)) { _slots.Add(new KeyValuePair<string, object>(parameterPart.Name, requiredValue)); } else { var hasDefaultValue = parameterPart.Default != null || parameterPart.IsCatchAll; _slots.Add(hasDefaultValue ? 
new KeyValuePair<string, object>(parameterPart.Name, parameterPart.Default) : default); } } if (TryGetRequiredValue(routeEndpoint.RoutePattern, parameterPart, out _)) { // Don't capture a parameter if it has a required value // There is no need because a parameter with a required value is matched as a literal } else if (parameterPart.IsCatchAll) { catchAll = (parameterPart.Name, i, slotIndex); } else { _captures.Add((parameterPart.Name, i, slotIndex)); } } for (var i = 0; i < routeEndpoint.RoutePattern.PathSegments.Count; i++) { var segment = routeEndpoint.RoutePattern.PathSegments[i]; if (segment.IsSimple) { continue; } _complexSegments.Add((segment, i)); } foreach (var kvp in routeEndpoint.RoutePattern.ParameterPolicies) { var parameter = routeEndpoint.RoutePattern.GetParameter(kvp.Key); // may be null, that's ok var parameterPolicyReferences = kvp.Value; for (var i = 0; i < parameterPolicyReferences.Count; i++) { var reference = parameterPolicyReferences[i]; var parameterPolicy = _parameterPolicyFactory.Create(parameter, reference); if (parameterPolicy is IRouteConstraint routeConstraint) { _constraints.Add(new KeyValuePair<string, IRouteConstraint>(kvp.Key, routeConstraint)); } } } return new Candidate( endpoint, score, _slots.ToArray(), _captures.ToArray(), catchAll, _complexSegments.ToArray(), _constraints.ToArray()); } else { return new Candidate( endpoint, score, Array.Empty<KeyValuePair<string, object>>(), Array.Empty<(string parameterName, int segmentIndex, int slotIndex)>(), catchAll, Array.Empty<(RoutePatternPathSegment pathSegment, int segmentIndex)>(), Array.Empty<KeyValuePair<string, IRouteConstraint>>()); } } private static bool HasAdditionalRequiredSegments(RouteEndpoint endpoint, int depth) { for (var i = depth; i < endpoint.RoutePattern.PathSegments.Count; i++) { var segment = endpoint.RoutePattern.PathSegments[i]; if (!segment.IsSimple) { // Complex segments always require more processing return true; } var parameterPart = segment.Parts[0] as RoutePatternParameterPart; if (parameterPart == null) { // It's a literal return true; } if (!parameterPart.IsOptional && !parameterPart.IsCatchAll && parameterPart.Default == null) { return true; } } return false; } private void ApplyPolicies(DfaNode node) { if (node.Matches == null || node.Matches.Count == 0) { return; } // We're done with the precedence based work. Sort the endpoints // before applying policies for simplicity in policy-related code. node.Matches.Sort(_comparer); // Start with the current node as the root. var work = new List<DfaNode>() { node, }; List<DfaNode> previousWork = null; for (var i = 0; i < _nodeBuilders.Length; i++) { var nodeBuilder = _nodeBuilders[i]; // Build a list of each List<DfaNode> nextWork; if (previousWork == null) { nextWork = new List<DfaNode>(); } else { // Reuse previous collection for the next collection previousWork.Clear(); nextWork = previousWork; } for (var j = 0; j < work.Count; j++) { var parent = work[j]; if (!nodeBuilder.AppliesToEndpoints(parent.Matches ?? (IReadOnlyList<Endpoint>)Array.Empty<Endpoint>())) { // This node-builder doesn't care about this node, so add it to the list // to be processed by the next node-builder. nextWork.Add(parent); continue; } // This node-builder does apply to this node, so we need to create new nodes for each edge, // and then attach them to the parent. var edges = nodeBuilder.GetEdges(parent.Matches ?? 
(IReadOnlyList<Endpoint>)Array.Empty<Endpoint>()); for (var k = 0; k < edges.Count; k++) { var edge = edges[k]; var next = new DfaNode() { // If parent label is null then labels are not being included Label = (parent.Label != null) ? parent.Label + " " + edge.State.ToString() : null, }; if (edge.Endpoints.Count > 0) { next.AddMatches(edge.Endpoints); } nextWork.Add(next); parent.AddPolicyEdge(edge.State, next); } // Associate the node-builder so we can build a jump table later. parent.NodeBuilder = nodeBuilder; // The parent no longer has matches, it's not considered a terminal node. parent.Matches?.Clear(); } previousWork = work; work = nextWork; } } private static (INodeBuilderPolicy[] nodeBuilderPolicies, IEndpointComparerPolicy[] endpointComparerPolicies, IEndpointSelectorPolicy[] endpointSelectorPolicies) ExtractPolicies(IEnumerable<MatcherPolicy> policies) { var nodeBuilderPolicies = new List<INodeBuilderPolicy>(); var endpointComparerPolicies = new List<IEndpointComparerPolicy>(); var endpointSelectorPolicies = new List<IEndpointSelectorPolicy>(); foreach (var policy in policies) { if (policy is INodeBuilderPolicy nodeBuilderPolicy) { nodeBuilderPolicies.Add(nodeBuilderPolicy); } if (policy is IEndpointComparerPolicy endpointComparerPolicy) { endpointComparerPolicies.Add(endpointComparerPolicy); } if (policy is IEndpointSelectorPolicy endpointSelectorPolicy) { endpointSelectorPolicies.Add(endpointSelectorPolicy); } } return (nodeBuilderPolicies.ToArray(), endpointComparerPolicies.ToArray(), endpointSelectorPolicies.ToArray()); } private static bool TryGetRequiredValue(RoutePattern routePattern, RoutePatternParameterPart parameterPart, out object value) { if (!routePattern.RequiredValues.TryGetValue(parameterPart.Name, out value)) { return false; } return !RouteValueEqualityComparer.Default.Equals(value, string.Empty); } private record struct DfaBuilderWorkerWorkItem(RouteEndpoint Endpoint, int PrecedenceDigit, List<DfaNode> Parents); } }
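// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the routing sources above): a minimal,
// self-contained version of the score grouping performed by CreateCandidates.
// Endpoints that the comparer treats as equal share a score; whenever the
// comparer sees a "different" endpoint, the score increases. The string-based
// CandidateScoreSketch type below is hypothetical - the real builder compares
// Endpoint instances with its endpoint comparer - but the control flow mirrors
// the method above.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;

internal static class CandidateScoreSketch
{
    // Assumes the input is already sorted so that "equal" items are adjacent,
    // just as CreateCandidates assumes its endpoints were sorted by _comparer.
    internal static int[] AssignScores(IReadOnlyList<string> sortedEndpoints, IEqualityComparer<string> comparer)
    {
        if (sortedEndpoints == null || sortedEndpoints.Count == 0)
        {
            return Array.Empty<int>();
        }

        var scores = new int[sortedEndpoints.Count];
        var score = 0;
        var exemplar = sortedEndpoints[0];
        scores[0] = score;

        for (var i = 1; i < sortedEndpoints.Count; i++)
        {
            if (!comparer.Equals(exemplar, sortedEndpoints[i]))
            {
                // This item doesn't have the same priority; start a new group.
                exemplar = sortedEndpoints[i];
                score++;
            }

            scores[i] = score;
        }

        return scores;
    }
}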
// // Copyright (c) 2004-2017 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace NLog.Targets.Wrappers { using System; using System.Collections.Generic; using Common; using Conditions; using Config; using Internal; /// <summary> /// Filters buffered log entries based on a set of conditions that are evaluated on a group of events. /// </summary> /// <seealso href="https://github.com/nlog/nlog/wiki/PostFilteringWrapper-target">Documentation on NLog Wiki</seealso> /// <remarks> /// PostFilteringWrapper must be used with some type of buffering target or wrapper, such as /// AsyncTargetWrapper, BufferingWrapper or ASPNetBufferingWrapper. /// </remarks> /// <example> /// <p> /// This example works like this. If there are no Warn,Error or Fatal messages in the buffer /// only Info messages are written to the file, but if there are any warnings or errors, /// the output includes detailed trace (levels &gt;= Debug). You can plug in a different type /// of buffering wrapper (such as ASPNetBufferingWrapper) to achieve different /// functionality. /// </p> /// <p> /// To set up the target in the <a href="config.html">configuration file</a>, /// use the following syntax: /// </p> /// <code lang="XML" source="examples/targets/Configuration File/PostFilteringWrapper/NLog.config" /> /// <p> /// The above examples assume just one target and a single rule. See below for /// a programmatic configuration that's equivalent to the above config file: /// </p> /// <code lang="C#" source="examples/targets/Configuration API/PostFilteringWrapper/Simple/Example.cs" /> /// </example> [Target("PostFilteringWrapper", IsWrapper = true)] public class PostFilteringTargetWrapper : WrapperTargetBase { private static object boxedTrue = true; /// <summary> /// Initializes a new instance of the <see cref="PostFilteringTargetWrapper" /> class. 
/// </summary> public PostFilteringTargetWrapper() : this(null) { Rules = new List<FilteringRule>(); } /// <summary> /// Initializes a new instance of the <see cref="PostFilteringTargetWrapper" /> class. /// </summary> public PostFilteringTargetWrapper(Target wrappedTarget) { Rules = new List<FilteringRule>(); WrappedTarget = wrappedTarget; } /// <summary> /// Initializes a new instance of the <see cref="PostFilteringTargetWrapper" /> class. /// </summary> /// <param name="name">Name of the target.</param> /// <param name="wrappedTarget">The wrapped target.</param> public PostFilteringTargetWrapper(string name, Target wrappedTarget) : this(wrappedTarget) { Name = name; } /// <summary> /// Gets or sets the default filter to be applied when no specific rule matches. /// </summary> /// <docgen category='Filtering Options' order='10' /> public ConditionExpression DefaultFilter { get; set; } /// <summary> /// Gets the collection of filtering rules. The rules are processed top-down /// and the first rule that matches determines the filtering condition to /// be applied to log events. /// </summary> /// <docgen category='Filtering Rules' order='10' /> [ArrayParameter(typeof(FilteringRule), "when")] public IList<FilteringRule> Rules { get; private set; } /// <summary> /// NOTE! Obsolete, instead override Write(IList{AsyncLogEventInfo} logEvents) /// /// Writes an array of logging events to the log target. By default it iterates on all /// events and passes them to "Write" method. Inheriting classes can use this method to /// optimize batch writes. /// </summary> /// <param name="logEvents">Logging events to be written out.</param> [Obsolete("Instead override Write(IList<AsyncLogEventInfo> logEvents. Marked obsolete on NLog 4.5")] protected override void Write(AsyncLogEventInfo[] logEvents) { Write((IList<AsyncLogEventInfo>)logEvents); } /// <summary> /// Evaluates all filtering rules to find the first one that matches. /// The matching rule determines the filtering condition to be applied /// to all items in a buffer. If no condition matches, default filter /// is applied to the array of log events. /// </summary> /// <param name="logEvents">Array of log events to be post-filtered.</param> protected override void Write(IList<AsyncLogEventInfo> logEvents) { ConditionExpression resultFilter = null; InternalLogger.Trace("Running {0} on {1} events", this, logEvents.Count); // evaluate all the rules to get the filtering condition for (int i = 0; i < logEvents.Count; ++i) { foreach (FilteringRule rule in Rules) { object v = rule.Exists.Evaluate(logEvents[i].LogEvent); if (boxedTrue.Equals(v)) { InternalLogger.Trace("Rule matched: {0}", rule.Exists); resultFilter = rule.Filter; break; } } if (resultFilter != null) { break; } } if (resultFilter == null) { resultFilter = DefaultFilter; } if (resultFilter == null) { WrappedTarget.WriteAsyncLogEvents(logEvents); } else { InternalLogger.Trace("Filter to apply: {0}", resultFilter); // apply the condition to the buffer var resultBuffer = new List<AsyncLogEventInfo>(); for (int i = 0; i < logEvents.Count; ++i) { object v = resultFilter.Evaluate(logEvents[i].LogEvent); if (boxedTrue.Equals(v)) { resultBuffer.Add(logEvents[i]); } else { // anything not passed down will be notified about successful completion logEvents[i].Continuation(null); } } InternalLogger.Trace("After filtering: {0} events.", resultBuffer.Count); if (resultBuffer.Count > 0) { InternalLogger.Trace("Sending to {0}", WrappedTarget); WrappedTarget.WriteAsyncLogEvents(resultBuffer); } } } } }
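// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the NLog sources above): a programmatic
// configuration in the spirit of the XML example referenced in the class
// documentation. The target names and condition strings are illustrative, a
// plain FileTarget stands in for whatever buffering target/wrapper the
// PostFilteringWrapper is normally combined with, and it assumes FilteringRule
// exposes settable Exists/Filter condition properties, as read by Write(...)
// above.
// ---------------------------------------------------------------------------
using NLog.Conditions;
using NLog.Targets;
using NLog.Targets.Wrappers;

internal static class PostFilteringUsageSketch
{
    internal static PostFilteringTargetWrapper Build()
    {
        var file = new FileTarget { Name = "file", FileName = "${basedir}/log.txt" };

        var wrapper = new PostFilteringTargetWrapper("postFilter", file)
        {
            // When no rule matches, keep only Info and above.
            DefaultFilter = ConditionParser.ParseExpression("level >= LogLevel.Info"),
        };

        // If any Warn (or higher) event exists in the buffer, keep the
        // detailed trace (Debug and above) for the whole buffer.
        wrapper.Rules.Add(new FilteringRule
        {
            Exists = ConditionParser.ParseExpression("level >= LogLevel.Warn"),
            Filter = ConditionParser.ParseExpression("level >= LogLevel.Debug"),
        });

        return wrapper;
    }
}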
//----------------------------------------------------------------------- // <copyright file="SocialApi.cs" company="LoginRadius"> // Created by LoginRadius Development Team // Copyright 2019 LoginRadius Inc. All rights reserved. // </copyright> //----------------------------------------------------------------------- using System; using System.Collections.Generic; using LoginRadiusSDK.V2.Common; using System.Threading.Tasks; using LoginRadiusSDK.V2.Util; using LoginRadiusSDK.V2.Models.ResponseModels; using LoginRadiusSDK.V2.Models.ResponseModels.OtherObjects; using LoginRadiusSDK.V2.Models.RequestModels; using LoginRadiusSDK.V2.Models.ResponseModels.UserProfile; namespace LoginRadiusSDK.V2.Api.Social { public class SocialApi : LoginRadiusResource { /// <summary> /// This API Is used to translate the Request Token returned during authentication into an Access Token that can be used with other API calls. /// </summary> /// <param name="token">Token generated from a successful oauth from social platform</param> /// <returns>Response containing Definition of Complete Token data</returns> /// 20.1 public async Task<ApiResponse<AccessToken>> ExchangeAccessToken(string token) { if (string.IsNullOrWhiteSpace(token)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(token)); } var queryParameters = new QueryParameters { { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] }, { "token", token } }; var resourcePath = "api/v2/access_token"; return await ConfigureAndExecute<AccessToken>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Refresh Access Token API is used to refresh the provider access token after authentication. It will be valid for up to 60 days on LoginRadius depending on the provider. In order to use the access token in other APIs, always refresh the token using this API.<br><br><b>Supported Providers :</b> Facebook,Yahoo,Google,Twitter, Linkedin.<br><br> Contact LoginRadius support team to enable this API. /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="expiresIn">Allows you to specify a desired expiration time in minutes for the newly issued access token.</param> /// <param name="isWeb">Is web or not.</param> /// <returns>Response containing Definition of Complete Token data</returns> /// 20.2 public async Task<ApiResponse<AccessToken>> RefreshAccessToken(string accessToken, int? expiresIn = 0, bool isWeb = false) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; if (expiresIn != null) { queryParameters.Add("expiresIn", expiresIn.ToString()); } if (isWeb != false) { queryParameters.Add("isWeb", isWeb.ToString()); } var resourcePath = "api/v2/access_token/refresh"; return await ConfigureAndExecute<AccessToken>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// This API validates access token, if valid then returns a response with its expiry otherwise error. 
/// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response containing Definition of Complete Token data</returns> /// 20.9 public async Task<ApiResponse<AccessToken>> ValidateAccessToken(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "key", ConfigDictionary[LRConfigConstants.LoginRadiusApiKey] }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; var resourcePath = "api/v2/access_token/validate"; return await ConfigureAndExecute<AccessToken>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// This API invalidates the active access token or expires an access token's validity. /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response containing Definition for Complete Validation data</returns> /// 20.10 public async Task<ApiResponse<PostMethodResponse>> InValidateAccessToken(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "key", ConfigDictionary[LRConfigConstants.LoginRadiusApiKey] }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; var resourcePath = "api/v2/access_token/invalidate"; return await ConfigureAndExecute<PostMethodResponse>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// This API is used to get all active sessions by Access Token. /// </summary> /// <param name="token">Token generated from a successful oauth from social platform</param> /// <returns>Response containing Definition for Complete active sessions</returns> /// 20.11.1 public async Task<ApiResponse<UserActiveSession>> GetActiveSession(string token) { if (string.IsNullOrWhiteSpace(token)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(token)); } var queryParameters = new QueryParameters { { "key", ConfigDictionary[LRConfigConstants.LoginRadiusApiKey] }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] }, { "token", token } }; var resourcePath = "api/v2/access_token/activesession"; return await ConfigureAndExecute<UserActiveSession>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// This API is used to get all active sessions by AccountID(UID). 
/// </summary> /// <param name="accountId">UID, the unified identifier for each user account</param> /// <returns>Response containing Definition for Complete active sessions</returns> /// 20.11.2 public async Task<ApiResponse<UserActiveSession>> GetActiveSessionByAccountID(string accountId) { if (string.IsNullOrWhiteSpace(accountId)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accountId)); } var queryParameters = new QueryParameters { { "accountId", accountId }, { "key", ConfigDictionary[LRConfigConstants.LoginRadiusApiKey] }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; var resourcePath = "api/v2/access_token/activesession"; return await ConfigureAndExecute<UserActiveSession>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// This API is used to get all active sessions by ProfileId. /// </summary> /// <param name="profileId">Social Provider Id</param> /// <returns>Response containing Definition for Complete active sessions</returns> /// 20.11.3 public async Task<ApiResponse<UserActiveSession>> GetActiveSessionByProfileID(string profileId) { if (string.IsNullOrWhiteSpace(profileId)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(profileId)); } var queryParameters = new QueryParameters { { "key", ConfigDictionary[LRConfigConstants.LoginRadiusApiKey] }, { "profileId", profileId }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; var resourcePath = "api/v2/access_token/activesession"; return await ConfigureAndExecute<UserActiveSession>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// <b>Supported Providers:</b> Facebook, Google, Live, Vkontakte.<br><br> This API returns the photo albums associated with the passed-in access token's Social Profile. /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Album Data</returns> /// 22.2.1 public async Task<ApiResponse<List<Album>>> GetAlbums(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/album"; return await ConfigureAndExecute<List<Album>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// <b>Supported Providers:</b> Facebook, Google, Live, Vkontakte.<br><br> This API returns the photo albums associated with the passed-in access token's Social Profile. 
/// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Albums with next cursor</returns> /// 22.2.2 public async Task<ApiResponse<CursorResponse<Album>>> GetAlbumsWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/album"; return await ConfigureAndExecute<CursorResponse<Album>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Audio API is used to get audio files data from the user's social account.<br><br><b>Supported Providers:</b> Live, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Audio Data</returns> /// 24.2.1 public async Task<ApiResponse<List<Audio>>> GetAudios(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/audio"; return await ConfigureAndExecute<List<Audio>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Audio API is used to get audio files data from the user's social account.<br><br><b>Supported Providers:</b> Live, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Audio with next cursor</returns> /// 24.2.2 public async Task<ApiResponse<CursorResponse<Audio>>> GetAudiosWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/audio"; return await ConfigureAndExecute<CursorResponse<Audio>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Check In API is used to get check Ins data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Foursquare, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of CheckIn Data</returns> /// 25.2.1 public async Task<ApiResponse<List<CheckIn>>> GetCheckIns(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var 
resourcePath = "api/v2/checkin"; return await ConfigureAndExecute<List<CheckIn>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Check In API is used to get check Ins data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Foursquare, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Checkins with next cursor</returns> /// 25.2.2 public async Task<ApiResponse<CursorResponse<CheckIn>>> GetCheckInsWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/checkin"; return await ConfigureAndExecute<CursorResponse<CheckIn>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Contact API is used to get contacts/friends/connections data from the user's social account. This is one of the APIs that makes up the LoginRadius Friend Invite System. The data will be normalized into LoginRadius' standard data format. This API requires setting permissions in your LoginRadius Dashboard. <br><br><b>Note:</b> Facebook restricts access to the list of friends that is returned. When using the Contacts API with Facebook you will only receive friends that have accepted some permissions with your app. 
<br><br><b>Supported Providers:</b> Facebook, Foursquare, Google, LinkedIn, Live, Twitter, Vkontakte, Yahoo /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response containing Definition of Contact Data with Cursor</returns> /// 27.1 public async Task<ApiResponse<CursorResponse<Contact>>> GetContacts(string accessToken, string nextCursor = "") { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; if (!string.IsNullOrWhiteSpace(nextCursor)) { queryParameters.Add("nextCursor", nextCursor); } var resourcePath = "api/v2/contact"; return await ConfigureAndExecute<CursorResponse<Contact>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Event API is used to get the event data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Live /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Events Data</returns> /// 28.2.1 public async Task<ApiResponse<List<Events>>> GetEvents(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/event"; return await ConfigureAndExecute<List<Events>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Event API is used to get the event data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Live /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Events with next cursor</returns> /// 28.2.2 public async Task<ApiResponse<CursorResponse<Events>>> GetEventsWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/event"; return await ConfigureAndExecute<CursorResponse<Events>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// Get the following user list from the user's social account.<br><br><b>Supported Providers:</b> Twitter /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Contacts Data</returns> /// 29.2.1 public async Task<ApiResponse<List<Contact>>> GetFollowings(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken 
} }; var resourcePath = "api/v2/following"; return await ConfigureAndExecute<List<Contact>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// Get the following user list from the user's social account.<br><br><b>Supported Providers:</b> Twitter /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response containing Definition of Contact Data with Cursor</returns> /// 29.2.2 public async Task<ApiResponse<CursorResponse<Contact>>> GetFollowingsWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/following"; return await ConfigureAndExecute<CursorResponse<Contact>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Group API is used to get group data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Groups Data</returns> /// 30.2.1 public async Task<ApiResponse<List<Group>>> GetGroups(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/group"; return await ConfigureAndExecute<List<Group>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Group API is used to get group data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Groups with next cursor</returns> /// 30.2.2 public async Task<ApiResponse<CursorResponse<Group>>> GetGroupsWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/group"; return await ConfigureAndExecute<CursorResponse<Group>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Like API is used to get likes data from the user's social account.<br><br><b>Supported Providers:</b> Facebook /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Likes Data</returns> /// 31.2.1 public async Task<ApiResponse<List<Like>>> 
GetLikes(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/like"; return await ConfigureAndExecute<List<Like>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Like API is used to get likes data from the user's social account.<br><br><b>Supported Providers:</b> Facebook /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response Model containing Likes with next cursor</returns> /// 31.2.2 public async Task<ApiResponse<CursorResponse<Like>>> GetLikesWithCursor(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/like"; return await ConfigureAndExecute<CursorResponse<Like>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Mention API is used to get mentions data from the user's social account.<br><br><b>Supported Providers:</b> Twitter /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Status Data</returns> /// 32.1 public async Task<ApiResponse<List<Status>>> GetMentions(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/mention"; return await ConfigureAndExecute<List<Status>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// Post Message API is used to post messages to the user's contacts.<br><br><b>Supported Providers:</b> Twitter, LinkedIn <br><br>The Message API is used to post messages to the user's contacts. This is one of the APIs that makes up the LoginRadius Friend Invite System. After using the Contact API, you can send messages to the retrieved contacts. 
This API requires setting permissions in your LoginRadius Dashboard.<br><br>GET & POST Message API work the same way except the API method is different /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="message">Body of your message</param> /// <param name="subject">Subject of your message</param> /// <param name="to">Recipient's social provider's id</param> /// <returns>Response containing Definition for Complete Validation data</returns> /// 33.1 public async Task<ApiResponse<PostMethodResponse>> PostMessage(string accessToken, string message, string subject, string to) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(message)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(message)); } if (string.IsNullOrWhiteSpace(subject)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(subject)); } if (string.IsNullOrWhiteSpace(to)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(to)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "message", message }, { "subject", subject }, { "to", to } }; var resourcePath = "api/v2/message"; return await ConfigureAndExecute<PostMethodResponse>(HttpMethod.POST, resourcePath, queryParameters, null); } /// <summary> /// The Page API is used to get the page data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, LinkedIn /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="pageName">Name of the page you want to retrieve info from</param> /// <returns>Response containing Definition of Complete page data</returns> /// 34.1 public async Task<ApiResponse<Page>> GetPage(string accessToken, string pageName) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(pageName)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(pageName)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "pageName", pageName } }; var resourcePath = "api/v2/page"; return await ConfigureAndExecute<Page>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Photo API is used to get photo data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Foursquare, Google, Live, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="albumId">The id of the album you want to retrieve info from</param> /// <returns>Response Containing List of Photos Data</returns> /// 35.1 public async Task<ApiResponse<List<Photo>>> GetPhotos(string accessToken, string albumId) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(albumId)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(albumId)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "albumId", albumId } }; var resourcePath = "api/v2/photo"; return await ConfigureAndExecute<List<Photo>>(HttpMethod.GET, 
resourcePath, queryParameters, null); } /// <summary> /// The Post API is used to get post message data from the user's social account.<br><br><b>Supported Providers:</b> Facebook /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <returns>Response Containing List of Posts Data</returns> /// 36.1 public async Task<ApiResponse<List<Post>>> GetPosts(string accessToken) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/post"; return await ConfigureAndExecute<List<Post>>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Status API is used to update the status on the user's wall.<br><br><b>Supported Providers:</b> Facebook, Twitter, LinkedIn /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="caption">Message displayed below the description(Requires URL, Under 70 Characters).</param> /// <param name="description">Description of the displayed URL and Image(Requires URL)</param> /// <param name="imageUrl">Image to be displayed in the share(Requires URL).</param> /// <param name="status">Main body of the Status update.</param> /// <param name="title">Title of Linked URL</param> /// <param name="url">URL to be included when clicking on the share.</param> /// <param name="shorturl">short url</param> /// <returns>Response containing Definition of Validation and Short URL data</returns> /// 37.2 public async Task<ApiResponse<PostMethodResponse<ShortUrlResponse>>> StatusPosting(string accessToken, string caption, string description, string imageUrl, string status, string title, string url, string shorturl = "0") { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(caption)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(caption)); } if (string.IsNullOrWhiteSpace(description)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(description)); } if (string.IsNullOrWhiteSpace(imageUrl)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(imageUrl)); } if (string.IsNullOrWhiteSpace(status)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(status)); } if (string.IsNullOrWhiteSpace(title)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(title)); } if (string.IsNullOrWhiteSpace(url)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(url)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "caption", caption }, { "description", description }, { "imageurl", imageUrl }, { "status", status }, { "title", title }, { "url", url } }; if (!string.IsNullOrWhiteSpace(shorturl)) { queryParameters.Add("shorturl", shorturl); } var resourcePath = "api/v2/status"; return await ConfigureAndExecute<PostMethodResponse<ShortUrlResponse>>(HttpMethod.POST, resourcePath, queryParameters, null); } /// <summary> /// The Trackable status API works very similar to the Status API but it returns a Post id that you can use to track the stats(shares, likes, comments) for a specific share/post/status update. 
This API requires setting permissions in your LoginRadius Dashboard.<br><br> The Trackable Status API is used to update the status on the user's wall and return an Post ID value. It is commonly referred to as Permission based sharing or Push notifications.<br><br> POST Input Parameter Format: application/x-www-form-urlencoded /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="statusModel">Model Class containing Definition of payload for Status API</param> /// <returns>Response containing Definition for Complete status data</returns> /// 37.6 public async Task<ApiResponse<StatusUpdateResponse>> TrackableStatusPosting(string accessToken, StatusModel statusModel) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (statusModel == null) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(statusModel)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; var resourcePath = "api/v2/status/trackable"; return await ConfigureAndExecute<StatusUpdateResponse>(HttpMethod.POST, resourcePath, queryParameters, ConvertToJson(statusModel)); } /// <summary> /// The Trackable status API works very similar to the Status API but it returns a Post id that you can use to track the stats(shares, likes, comments) for a specific share/post/status update. This API requires setting permissions in your LoginRadius Dashboard.<br><br> The Trackable Status API is used to update the status on the user's wall and return an Post ID value. It is commonly referred to as Permission based sharing or Push notifications. /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="caption">Message displayed below the description(Requires URL, Under 70 Characters).</param> /// <param name="description">Description of the displayed URL and Image(Requires URL)</param> /// <param name="imageUrl">Image to be displayed in the share(Requires URL).</param> /// <param name="status">Main body of the Status update.</param> /// <param name="title">Title of Linked URL</param> /// <param name="url">URL to be included when clicking on the share.</param> /// <returns>Response containing Definition for Complete status data</returns> /// 37.7 public async Task<ApiResponse<StatusUpdateResponse>> GetTrackableStatusStats(string accessToken, string caption, string description, string imageUrl, string status, string title, string url) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(caption)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(caption)); } if (string.IsNullOrWhiteSpace(description)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(description)); } if (string.IsNullOrWhiteSpace(imageUrl)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(imageUrl)); } if (string.IsNullOrWhiteSpace(status)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(status)); } if (string.IsNullOrWhiteSpace(title)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(title)); } if (string.IsNullOrWhiteSpace(url)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(url)); } var 
queryParameters = new QueryParameters { { "access_token", accessToken }, { "caption", caption }, { "description", description }, { "imageurl", imageUrl }, { "status", status }, { "title", title }, { "url", url } }; var resourcePath = "api/v2/status/trackable/js"; return await ConfigureAndExecute<StatusUpdateResponse>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Trackable status API works very similar to the Status API but it returns a Post id that you can use to track the stats(shares, likes, comments) for a specific share/post/status update. This API requires setting permissions in your LoginRadius Dashboard.<br><br> This API is used to retrieve a tracked post based on the passed in post ID value. This API requires setting permissions in your LoginRadius Dashboard.<br><br> <b>Note:</b> To utilize this API you need to find the ID for the post you want to track, which might require using Trackable Status Posting API first. /// </summary> /// <param name="postId">Post ID value</param> /// <returns>Response containing Definition of Complete Status Update data</returns> /// 37.8 public async Task<ApiResponse<StatusUpdateStats>> TrackableStatusFetching(string postId) { if (string.IsNullOrWhiteSpace(postId)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(postId)); } var queryParameters = new QueryParameters { { "postId", postId }, { "secret", ConfigDictionary[LRConfigConstants.LoginRadiusApiSecret] } }; var resourcePath = "api/v2/status/trackable"; return await ConfigureAndExecute<StatusUpdateStats>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The User Profile API is used to get the latest updated social profile data from the user's social account after authentication. The social profile will be retrieved via oAuth and OpenID protocols. The data is normalized into LoginRadius' standard data format. This API should be called using the access token retrieved from the refresh access token API. 
/// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="fields">The fields parameter filters the API response so that the response only includes a specific set of fields</param> /// <returns>Response containing Definition for Complete UserProfile data</returns> /// 38.2 public async Task<ApiResponse<UserProfile>> GetRefreshedSocialUserProfile(string accessToken, string fields = "") { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } var queryParameters = new QueryParameters { { "access_token", accessToken } }; if (!string.IsNullOrWhiteSpace(fields)) { queryParameters.Add("fields", fields); } var resourcePath = "api/v2/userprofile/refresh"; return await ConfigureAndExecute<UserProfile>(HttpMethod.GET, resourcePath, queryParameters, null); } /// <summary> /// The Video API is used to get video files data from the user's social account.<br><br><b>Supported Providers:</b> Facebook, Google, Live, Vkontakte /// </summary> /// <param name="accessToken">Uniquely generated identifier key by LoginRadius that is activated after successful authentication.</param> /// <param name="nextCursor">Cursor value if not all contacts can be retrieved once.</param> /// <returns>Response containing Definition of Video Data with Cursor</returns> /// 39.2 public async Task<ApiResponse<CursorResponse<Video>>> GetVideos(string accessToken, string nextCursor) { if (string.IsNullOrWhiteSpace(accessToken)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(accessToken)); } if (string.IsNullOrWhiteSpace(nextCursor)) { throw new ArgumentException(BaseConstants.ValidationMessage, nameof(nextCursor)); } var queryParameters = new QueryParameters { { "access_token", accessToken }, { "nextCursor", nextCursor } }; var resourcePath = "api/v2/video"; return await ConfigureAndExecute<CursorResponse<Video>>(HttpMethod.GET, resourcePath, queryParameters, null); } } }
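// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the SDK source above): exchanging a request
// token for an access token (20.1) and fetching the first page of contacts
// (27.1). The token strings are placeholders, it assumes the SDK's API
// key/secret have already been configured (ConfigDictionary), and it assumes
// SocialApi can be instantiated with a parameterless constructor. Inspection
// of the ApiResponse<T> result is intentionally omitted because its shape is
// defined elsewhere in the SDK.
// ---------------------------------------------------------------------------
using System.Threading.Tasks;
using LoginRadiusSDK.V2.Api.Social;

internal static class SocialApiUsageSketch
{
    internal static async Task RunAsync()
    {
        var socialApi = new SocialApi();

        // 20.1: translate the request token returned by the social login flow
        // into a LoginRadius access token.
        var tokenResponse = await socialApi.ExchangeAccessToken("<request-token>");

        // 27.1: fetch the first page of contacts; pass the cursor returned in
        // the response back into GetContacts to continue paging (not shown).
        var contactsResponse = await socialApi.GetContacts("<access-token>");
    }
}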
/* * HandlerCollection.cs - Implementation of the * "I18N.Common.HandlerCollection" class. * * Copyright (c) 2003 Southern Storm Software, Pty Ltd * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace I18N.Common { using System; using System.Collections; // This class manages a collection of handler names and the // assemblies that implement them. Previously we used a hash // table, but it incurred a large application startup cost. public sealed class HandlerCollection : IDictionary { // Internal state. private String[] names; private String[] namespaces; private int numHandlers; // Estimate of the number of handlers (should be >= the number // of lines in the "I18N-handlers.def" file). private const int HandlerCountEstimate = 600; // Constructor. public HandlerCollection() { names = new String [HandlerCountEstimate]; namespaces = new String [HandlerCountEstimate]; numHandlers = 0; } // Find the index of a specific name. public int IndexOf(String name) { int posn; for(posn = 0; posn < numHandlers; ++posn) { if(names[posn] == name) { return posn; } } return -1; } // Implement the IDictionary interface. public void Add(Object key, Object value) { if(numHandlers >= names.Length) { String[] newNames = new String [names.Length + 32]; String[] newNamespaces = new String [names.Length + 32]; Array.Copy(names, 0, newNames, 0, names.Length); Array.Copy(namespaces, 0, newNamespaces, 0, names.Length); names = newNames; namespaces = newNamespaces; } names[numHandlers] = (String)key; namespaces[numHandlers] = (String)value; ++numHandlers; } public void Clear() { numHandlers = 0; } public bool Contains(Object key) { return (IndexOf((String)key) != -1); } public IDictionaryEnumerator GetEnumerator() { return new HandlerCollectionEnumerator(this); } public void Remove(Object key) { // Not used in this implementation. throw new InvalidOperationException(); } public bool IsFixedSize { get { return false; } } public bool IsReadOnly { get { return false; } } public Object this[Object key] { get { int index = IndexOf((String)key); if(index != -1) { return namespaces[index]; } else { return null; } } set { Add(key, value); } } public ICollection Keys { get { // Not used in this implementation. throw new InvalidOperationException(); } } public ICollection Values { get { // Not used in this implementation. throw new InvalidOperationException(); } } // Implement the ICollection interface. public void CopyTo(Array array, int index) { // Not used in this implementation. throw new InvalidOperationException(); } public int Count { get { return numHandlers; } } public bool IsSynchronized { get { return false; } } public Object SyncRoot { get { return this; } } // Implement the IEnumerable interface. IEnumerator IEnumerable.GetEnumerator() { return new HandlerCollectionEnumerator(this); } // Enumerator class for this collection. 
private sealed class HandlerCollectionEnumerator : IDictionaryEnumerator { // Internal state. private HandlerCollection coll; private int posn; // Constructor. public HandlerCollectionEnumerator(HandlerCollection coll) { this.coll = coll; this.posn = -1; } // Implement the IEnumerator interface. public bool MoveNext() { ++posn; return (posn < coll.numHandlers); } public void Reset() { posn = -1; } public Object Current { get { if(posn >= 0 && posn < coll.numHandlers) { return new DictionaryEntry (coll.names[posn], coll.namespaces[posn]); } throw new InvalidOperationException(); } } // Implement the IDictionaryEnumerator interface. public DictionaryEntry Entry { get { if(posn >= 0 && posn < coll.numHandlers) { return new DictionaryEntry (coll.names[posn], coll.namespaces[posn]); } throw new InvalidOperationException(); } } public Object Key { get { if(posn >= 0 && posn < coll.numHandlers) { return coll.names[posn]; } throw new InvalidOperationException(); } } public Object Value { get { if(posn >= 0 && posn < coll.numHandlers) { return coll.namespaces[posn]; } throw new InvalidOperationException(); } } }; // class HandlerCollectionEnumerator }; // class HandlerCollection }; // namespace I18N.Common
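// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the I18N sources above): exercising the
// IDictionary surface that HandlerCollection actually implements - Add, the
// indexer, Contains and enumeration. The handler/namespace strings are
// illustrative placeholders, not entries taken from the real
// I18N-handlers.def file.
// ---------------------------------------------------------------------------
using System;
using System.Collections;
using I18N.Common;

internal static class HandlerCollectionSketch
{
    internal static void Run()
    {
        HandlerCollection handlers = new HandlerCollection();
        handlers.Add("CP1252", "I18N.West");
        handlers.Add("CP932", "I18N.CJK");

        // The indexer returns the namespace for a known handler, or null.
        Console.WriteLine(handlers["CP1252"]);          // I18N.West
        Console.WriteLine(handlers.Contains("CP932"));  // True

        // Enumeration yields DictionaryEntry pairs of (name, namespace).
        foreach (DictionaryEntry entry in handlers)
        {
            Console.WriteLine(entry.Key + " -> " + entry.Value);
        }
    }
}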