// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Text.RegularExpressions;
using Gallio.Framework.Assertions;
namespace MbUnit.Framework
{
public abstract partial class Assert
{
#region Contains
/// <summary>
/// Verifies that a string contains some expected value.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedSubstring">The expected substring.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedSubstring"/> is null.</exception>
public static void Contains(string actualValue, string expectedSubstring)
{
Contains(actualValue, expectedSubstring, null);
}
/// <summary>
/// Verifies that a string contains some expected value.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedSubstring">The expected substring.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedSubstring"/> is null.</exception>
public static void Contains(string actualValue, string expectedSubstring, string messageFormat, params object[] messageArgs)
{
ContainsInternal(actualValue, expectedSubstring, null, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string contains some expected value.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedSubstring">The expected substring.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected substring is compared to the actual value.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedSubstring"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void Contains(string actualValue, string expectedSubstring, StringComparison comparisonType)
{
Contains(actualValue, expectedSubstring, comparisonType, null);
}
/// <summary>
/// Verifies that a string contains some expected value.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedSubstring">The expected substring.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected substring is compared to the actual value.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedSubstring"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void Contains(string actualValue, string expectedSubstring, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
ContainsInternal(actualValue, expectedSubstring, comparisonType, messageFormat, messageArgs);
}
private static void ContainsInternal(string actualValue, string expectedSubstring, StringComparison? comparisonType, string messageFormat, object[] messageArgs)
{
if (expectedSubstring == null)
throw new ArgumentNullException("expectedSubstring");
AssertionHelper.Verify(delegate
{
if (actualValue != null && actualValue.IndexOf(expectedSubstring, comparisonType ?? StringComparison.Ordinal) >= 0)
return null;
return new AssertionFailureBuilder("Expected string to contain a particular substring.")
.If(comparisonType.HasValue, builder => builder.AddRawLabeledValue("Comparison Type", comparisonType.Value))
.AddRawLabeledValue("Expected Substring", expectedSubstring)
.AddRawActualValue(actualValue)
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
#region DoesNotContain
/// <summary>
/// Verifies that a string does not contain some unexpected substring.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="unexpectedSubstring">The unexpected substring.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="unexpectedSubstring"/> is null.</exception>
public static void DoesNotContain(string actualValue, string unexpectedSubstring)
{
DoesNotContain(actualValue, unexpectedSubstring, null);
}
/// <summary>
/// Verifies that a string does not contain some unexpected substring.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="unexpectedSubstring">The unexpected substring.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="unexpectedSubstring"/> is null.</exception>
public static void DoesNotContain(string actualValue, string unexpectedSubstring, string messageFormat, params object[] messageArgs)
{
DoesNotContainInternal(actualValue, unexpectedSubstring, null, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string does not contain some unexpected substring.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="unexpectedSubstring">The unexpected substring.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how unexpected text is compared to the actual value.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="unexpectedSubstring"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void DoesNotContain(string actualValue, string unexpectedSubstring, StringComparison comparisonType)
{
DoesNotContain(actualValue, unexpectedSubstring, comparisonType, null);
}
/// <summary>
/// Verifies that a string does not contain some unexpected substring.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <param name="actualValue">The actual value.</param>
/// <param name="unexpectedSubstring">The unexpected substring.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how unexpected text is compared to the actual value.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="unexpectedSubstring"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void DoesNotContain(string actualValue, string unexpectedSubstring, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
DoesNotContainInternal(actualValue, unexpectedSubstring, comparisonType, messageFormat, messageArgs);
}
private static void DoesNotContainInternal(string actualValue, string unexpectedSubstring, StringComparison? comparisonType, string messageFormat, object[] messageArgs)
{
if (unexpectedSubstring == null)
throw new ArgumentNullException("unexpectedSubstring");
AssertionHelper.Verify(delegate
{
if (actualValue != null && actualValue.IndexOf(unexpectedSubstring, comparisonType ?? StringComparison.Ordinal) < 0)
return null;
return new AssertionFailureBuilder("Expected string to not contain a particular substring.")
.If(comparisonType.HasValue, builder => builder.AddRawLabeledValue("Comparison Type", comparisonType.Value))
.AddRawLabeledValue("Unexpected Substring", unexpectedSubstring)
.AddRawActualValue(actualValue)
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
#region AreEqual
/// <summary>
/// Asserts that two strings are equal according to a particular string comparison mode.
/// </summary>
/// <param name="expectedValue">The expected value.</param>
/// <param name="actualValue">The actual value.</param>
/// <param name="comparisonType">The string comparison type.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
public static void AreEqual(string expectedValue, string actualValue, StringComparison comparisonType)
{
AreEqual(expectedValue, actualValue, comparisonType, null, null);
}
/// <summary>
/// Asserts that two strings are equal according to a particular string comparison mode.
/// </summary>
/// <param name="expectedValue">The expected value.</param>
/// <param name="actualValue">The actual value.</param>
/// <param name="comparisonType">The string comparison type.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
public static void AreEqual(string expectedValue, string actualValue, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
AssertionHelper.Verify(delegate
{
if (String.Compare(expectedValue, actualValue, comparisonType) == 0)
return null;
bool diffing = comparisonType != StringComparison.CurrentCultureIgnoreCase &&
comparisonType != StringComparison.InvariantCultureIgnoreCase &&
comparisonType != StringComparison.OrdinalIgnoreCase;
return new AssertionFailureBuilder("Expected values to be equal according to string comparison type.")
.AddRawLabeledValue("Comparison Type", comparisonType)
.If(diffing, builder => builder
.AddRawLabeledValue("Expected Value", expectedValue)
.AddRawLabeledValue("Actual Value", actualValue))
.If(!diffing, builder => builder
.AddRawExpectedAndActualValuesWithDiffs(expectedValue, actualValue))
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
#region AreNotEqual
/// <summary>
/// Asserts that two strings are not equal according to a particular string comparison mode.
/// </summary>
/// <param name="unexpectedValue">The unexpected value.</param>
/// <param name="actualValue">The actual value.</param>
/// <param name="comparisonType">The string comparison type.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
public static void AreNotEqual(string unexpectedValue, string actualValue, StringComparison comparisonType)
{
AreNotEqual(unexpectedValue, actualValue, comparisonType, null, null);
}
/// <summary>
/// Asserts that two strings are not equal according to a particular string comparison mode.
/// </summary>
/// <param name="unexpectedValue">The unexpected value.</param>
/// <param name="actualValue">The actual value.</param>
/// <param name="comparisonType">The string comparison type.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
public static void AreNotEqual(string unexpectedValue, string actualValue, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
AssertionHelper.Verify(delegate
{
if (String.Compare(unexpectedValue, actualValue, comparisonType) != 0)
return null;
return new AssertionFailureBuilder("Expected values to be unequal according to string comparison type.")
.AddRawLabeledValue("Comparison Type", comparisonType)
.AddRawLabeledValue("Unexpected Value", unexpectedValue)
.AddRawLabeledValue("Actual Value", actualValue)
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
#region FullMatch
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void FullMatch(string actualValue, string regexPattern)
{
FullMatch(actualValue, regexPattern, RegexOptions.None, null, null);
}
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void FullMatch(string actualValue, string regexPattern, string messageFormat, params object[] messageArgs)
{
FullMatch(actualValue, regexPattern, RegexOptions.None, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void FullMatch(string actualValue, string regexPattern, RegexOptions regexOptions)
{
FullMatch(actualValue, regexPattern, regexOptions, null, null);
}
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void FullMatch(string actualValue, string regexPattern, RegexOptions regexOptions, string messageFormat, params object[] messageArgs)
{
if (regexPattern == null)
throw new ArgumentNullException("regexPattern");
FullMatch(actualValue, new Regex(regexPattern, regexOptions), messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void FullMatch(string actualValue, Regex regex)
{
FullMatch(actualValue, regex, null, null);
}
/// <summary>
/// Verifies that a string matches a regular expression pattern exactly.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.Match(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void FullMatch(string actualValue, Regex regex, string messageFormat, params object[] messageArgs)
{
if (regex == null)
throw new ArgumentNullException("regex");
AssertionHelper.Verify(delegate
{
if (actualValue != null)
{
Match match = regex.Match(actualValue);
if (match.Success && actualValue.Length == match.Length)
return null;
}
return new AssertionFailureBuilder("Expected a string to exactly match a regular expression pattern.")
.SetMessage(messageFormat, messageArgs)
.AddRawActualValue(actualValue)
.AddRawLabeledValue("Regex Pattern", regex.ToString())
.ToAssertionFailure();
});
}
#endregion
#region Like
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void Like(string actualValue, string regexPattern)
{
Like(actualValue, regexPattern, RegexOptions.None, null, null);
}
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void Like(string actualValue, string regexPattern, string messageFormat, params object[] messageArgs)
{
Like(actualValue, regexPattern, RegexOptions.None, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void Like(string actualValue, string regexPattern, RegexOptions regexOptions)
{
Like(actualValue, regexPattern, regexOptions, null, null);
}
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void Like(string actualValue, string regexPattern, RegexOptions regexOptions, string messageFormat, params object[] messageArgs)
{
if (regexPattern == null)
throw new ArgumentNullException("regexPattern");
Like(actualValue, new Regex(regexPattern, regexOptions), messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void Like(string actualValue, Regex regex)
{
Like(actualValue, regex, null, null);
}
/// <summary>
/// Verifies that a string contains a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void Like(string actualValue, Regex regex, string messageFormat, params object[] messageArgs)
{
if (regex == null)
throw new ArgumentNullException("regex");
AssertionHelper.Verify(delegate
{
if (actualValue != null && regex.IsMatch(actualValue))
return null;
return new AssertionFailureBuilder("Expected a string to contain a full or partial match of a regular expression pattern.")
.SetMessage(messageFormat, messageArgs)
.AddRawActualValue(actualValue)
.AddRawLabeledValue("Regex Pattern", regex.ToString())
.ToAssertionFailure();
});
}
#endregion
#region NotLike
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void NotLike(string actualValue, string regexPattern)
{
NotLike(actualValue, regexPattern, RegexOptions.None, null, null);
}
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void NotLike(string actualValue, string regexPattern, string messageFormat, params object[] messageArgs)
{
NotLike(actualValue, regexPattern, RegexOptions.None, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void NotLike(string actualValue, string regexPattern, RegexOptions regexOptions)
{
NotLike(actualValue, regexPattern, regexOptions, null, null);
}
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string, string, RegexOptions)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regexPattern">The regular expression pattern.</param>
/// <param name="regexOptions">The regular expression options.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regexPattern"/> is null.</exception>
public static void NotLike(string actualValue, string regexPattern, RegexOptions regexOptions, string messageFormat, params object[] messageArgs)
{
if (regexPattern == null)
throw new ArgumentNullException("regexPattern");
NotLike(actualValue, new Regex(regexPattern, regexOptions), messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void NotLike(string actualValue, Regex regex)
{
NotLike(actualValue, regex, null, null);
}
/// <summary>
/// Verifies that a string does not contain a full or partial match of a regular expression pattern.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="Regex.IsMatch(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="regex">The regular expression.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="regex"/> is null.</exception>
public static void NotLike(string actualValue, Regex regex, string messageFormat, params object[] messageArgs)
{
if (regex == null)
throw new ArgumentNullException("regex");
AssertionHelper.Verify(delegate
{
if (actualValue != null && ! regex.IsMatch(actualValue))
return null;
return new AssertionFailureBuilder("Expected a string to not contain a full or partial match of a regular expression pattern.")
.SetMessage(messageFormat, messageArgs)
.AddRawActualValue(actualValue)
.AddRawLabeledValue("Regex Pattern", regex.ToString())
.ToAssertionFailure();
});
}
#endregion
#region StartsWith
/// <summary>
/// Verifies that a string starts with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.StartsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
public static void StartsWith(string actualValue, string expectedText)
{
StartsWith(actualValue, expectedText, null, null);
}
/// <summary>
/// Verifies that a string starts with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.StartsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
public static void StartsWith(string actualValue, string expectedText, string messageFormat, params object[] messageArgs)
{
StartsWithInternal(actualValue, expectedText, null, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string starts with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.StartsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected text is compared to the actual value.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void StartsWith(string actualValue, string expectedText, StringComparison comparisonType)
{
StartsWith(actualValue, expectedText, comparisonType, null, null);
}
/// <summary>
/// Verifies that a string starts with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.StartsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected text is compared to the actual value.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void StartsWith(string actualValue, string expectedText, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
StartsWithInternal(actualValue, expectedText, comparisonType, messageFormat, messageArgs);
}
private static void StartsWithInternal(string actualValue, string expectedText, StringComparison? comparisonType, string messageFormat, object[] messageArgs)
{
if (expectedText == null)
throw new ArgumentNullException("expectedText");
AssertionHelper.Verify(delegate
{
if (actualValue != null && actualValue.StartsWith(expectedText, comparisonType ?? StringComparison.Ordinal))
return null;
return new AssertionFailureBuilder("Expected string to start with the specified text.")
.If(comparisonType.HasValue, builder => builder.AddRawLabeledValue("Comparison Type", comparisonType.Value))
.AddRawActualValue(actualValue)
.AddRawLabeledValue("Expected Text", expectedText)
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
#region EndsWith
/// <summary>
/// Verifies that a string ends with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.EndsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
public static void EndsWith(string actualValue, string expectedText)
{
EndsWith(actualValue, expectedText, null, null);
}
/// <summary>
/// Verifies that a string ends with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.EndsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
public static void EndsWith(string actualValue, string expectedText, string messageFormat, params object[] messageArgs)
{
EndsWithInternal(actualValue, expectedText, null, messageFormat, messageArgs);
}
/// <summary>
/// Verifies that a string ends with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.EndsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected text is compared to the actual value.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void EndsWith(string actualValue, string expectedText, StringComparison comparisonType)
{
EndsWith(actualValue, expectedText, comparisonType, null, null);
}
/// <summary>
/// Verifies that a string ends with the specified text.
/// </summary>
/// <remarks>
/// <para>
/// This assertion will fail if the string is null.
/// </para>
/// </remarks>
/// <seealso cref="String.EndsWith(string)"/>
/// <param name="actualValue">The actual value.</param>
/// <param name="expectedText">The expected pattern.</param>
/// <param name="comparisonType">One of the <see cref="StringComparison"/> values that determines how the expected text is compared to the actual value.</param>
/// <param name="messageFormat">The custom assertion message format, or null if none.</param>
/// <param name="messageArgs">The custom assertion message arguments, or null if none.</param>
/// <exception cref="AssertionException">Thrown if the verification failed unless the current <see cref="AssertionContext.AssertionFailureBehavior" /> indicates otherwise.</exception>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="expectedText"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="comparisonType"> has invalid value.</paramref></exception>
public static void EndsWith(string actualValue, string expectedText, StringComparison comparisonType, string messageFormat, params object[] messageArgs)
{
EndsWithInternal(actualValue, expectedText, comparisonType, messageFormat, messageArgs);
}
private static void EndsWithInternal(string actualValue, string expectedText, StringComparison? comparisonType, string messageFormat, object[] messageArgs)
{
if (expectedText == null)
throw new ArgumentNullException("expectedText");
AssertionHelper.Verify(delegate
{
if (actualValue != null && actualValue.EndsWith(expectedText, comparisonType ?? StringComparison.Ordinal))
return null;
return new AssertionFailureBuilder("Expected string to end with the specified text.")
.If(comparisonType.HasValue, builder => builder.AddRawLabeledValue("Comparison Type", comparisonType.Value))
.AddRawActualValue(actualValue)
.AddRawLabeledValue("Expected Text", expectedText)
.SetMessage(messageFormat, messageArgs)
.ToAssertionFailure();
});
}
#endregion
}
}
using System;
using System.Collections;
using System.Diagnostics;
using System.IO;
using Org.BouncyCastle.Asn1;
using Org.BouncyCastle.Asn1.Cms;
using Org.BouncyCastle.Asn1.X509;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Utilities;
using Org.BouncyCastle.X509;
namespace Org.BouncyCastle.Cms
{
/**
* an expanded SignerInfo block from a CMS Signed message
*/
public class SignerInformation
{
private static readonly CmsSignedHelper Helper = CmsSignedHelper.Instance;
private SignerID sid;
private SignerInfo info;
private AlgorithmIdentifier digestAlgorithm;
private AlgorithmIdentifier encryptionAlgorithm;
private Asn1Set signedAttributes;
private Asn1Set unsignedAttributes;
private CmsProcessable content;
private byte[] signature;
private DerObjectIdentifier contentType;
private IDigestCalculator digestCalculator;
private byte[] resultDigest;
internal SignerInformation(
SignerInfo info,
DerObjectIdentifier contentType,
CmsProcessable content,
IDigestCalculator digestCalculator)
{
this.info = info;
this.sid = new SignerID();
this.contentType = contentType;
try
{
SignerIdentifier s = info.SignerID;
if (s.IsTagged)
{
Asn1OctetString octs = Asn1OctetString.GetInstance(s.ID);
sid.SubjectKeyIdentifier = octs.GetEncoded();
}
else
{
Asn1.Cms.IssuerAndSerialNumber iAnds =
Asn1.Cms.IssuerAndSerialNumber.GetInstance(s.ID);
sid.Issuer = iAnds.Name;
sid.SerialNumber = iAnds.SerialNumber.Value;
}
}
catch (IOException)
{
throw new ArgumentException("invalid sid in SignerInfo");
}
this.digestAlgorithm = info.DigestAlgorithm;
this.signedAttributes = info.AuthenticatedAttributes;
this.unsignedAttributes = info.UnauthenticatedAttributes;
this.encryptionAlgorithm = info.DigestEncryptionAlgorithm;
this.signature = info.EncryptedDigest.GetOctets();
this.content = content;
this.digestCalculator = digestCalculator;
}
public SignerID SignerID
{
get { return sid; }
}
/**
* return the version number for this object's underlying SignerInfo structure.
*/
public int Version
{
get { return info.Version.Value.IntValue; }
}
public AlgorithmIdentifier DigestAlgorithmID
{
get { return digestAlgorithm; }
}
/**
* return the object identifier for the digest algorithm.
*/
public string DigestAlgOid
{
get { return digestAlgorithm.ObjectID.Id; }
}
/**
* return the digest algorithm parameters, or null if there aren't any.
*/
public Asn1Object DigestAlgParams
{
get
{
Asn1Encodable ae = digestAlgorithm.Parameters;
return ae == null ? null : ae.ToAsn1Object();
}
}
/**
* return the content digest that was calculated during verification.
*/
public byte[] GetContentDigest()
{
if (resultDigest == null)
{
throw new InvalidOperationException("method can only be called after verify.");
}
return (byte[])resultDigest.Clone();
}
public AlgorithmIdentifier EncryptionAlgorithmID
{
get { return encryptionAlgorithm; }
}
/**
* return the object identifier for the signature/encryption algorithm.
*/
public string EncryptionAlgOid
{
get { return encryptionAlgorithm.ObjectID.Id; }
}
/**
* return the signature/encryption algorithm parameters, or null if
* there aren't any.
*/
public Asn1Object EncryptionAlgParams
{
get
{
Asn1Encodable ae = encryptionAlgorithm.Parameters;
return ae == null ? null : ae.ToAsn1Object();
}
}
/**
* return a table of the signed attributes - indexed by
* the OID of the attribute.
*/
public Asn1.Cms.AttributeTable SignedAttributes
{
get
{
return signedAttributes == null
? null
: new Asn1.Cms.AttributeTable(signedAttributes);
}
}
/**
* return a table of the unsigned attributes indexed by
* the OID of the attribute.
*/
public Asn1.Cms.AttributeTable UnsignedAttributes
{
get
{
return unsignedAttributes == null
? null
: new Asn1.Cms.AttributeTable(unsignedAttributes);
}
}
/**
* return the encoded signature
*/
public byte[] GetSignature()
{
return (byte[]) signature.Clone();
}
/**
* Return a SignerInformationStore containing the counter signatures attached to this
* signer. If no counter signatures are present an empty store is returned.
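*
* A minimal usage sketch (assumes a CmsSignedData instance named signedData parsed elsewhere):
* <pre>
* foreach (SignerInformation signer in signedData.GetSignerInfos().GetSigners())
* {
*     SignerInformationStore counterSignatures = signer.GetCounterSignatures();
* }
* </pre>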
*/
public SignerInformationStore GetCounterSignatures()
{
// TODO There are several checks implied by the RFC3852 comments that are missing
/*
The countersignature attribute MUST be an unsigned attribute; it MUST
NOT be a signed attribute, an authenticated attribute, an
unauthenticated attribute, or an unprotected attribute.
*/
Asn1.Cms.AttributeTable unsignedAttributeTable = UnsignedAttributes;
if (unsignedAttributeTable == null)
{
return new SignerInformationStore(new ArrayList(0));
}
IList counterSignatures = new ArrayList();
/*
The UnsignedAttributes syntax is defined as a SET OF Attributes. The
UnsignedAttributes in a signerInfo may include multiple instances of
the countersignature attribute.
*/
Asn1EncodableVector allCSAttrs = unsignedAttributeTable.GetAll(CmsAttributes.CounterSignature);
foreach (Asn1.Cms.Attribute counterSignatureAttribute in allCSAttrs)
{
/*
A countersignature attribute can have multiple attribute values. The
syntax is defined as a SET OF AttributeValue, and there MUST be one
or more instances of AttributeValue present.
*/
Asn1Set values = counterSignatureAttribute.AttrValues;
if (values.Count < 1)
{
// TODO Throw an appropriate exception?
}
foreach (Asn1Encodable asn1Obj in values)
{
/*
Countersignature values have the same meaning as SignerInfo values
for ordinary signatures, except that:
1. The signedAttributes field MUST NOT contain a content-type
attribute; there is no content type for countersignatures.
2. The signedAttributes field MUST contain a message-digest
attribute if it contains any other attributes.
3. The input to the message-digesting process is the contents
octets of the DER encoding of the signatureValue field of the
SignerInfo value with which the attribute is associated.
*/
SignerInfo si = SignerInfo.GetInstance(asn1Obj.ToAsn1Object());
string digestName = CmsSignedHelper.Instance.GetDigestAlgName(si.DigestAlgorithm.ObjectID.Id);
counterSignatures.Add(new SignerInformation(si, CmsAttributes.CounterSignature, null, new CounterSignatureDigestCalculator(digestName, GetSignature())));
}
}
return new SignerInformationStore(counterSignatures);
}
/**
* return the DER encoding of the signed attributes.
* @throws IOException if an encoding error occurs.
*/
public byte[] GetEncodedSignedAttributes()
{
return signedAttributes == null
? null
: signedAttributes.GetEncoded(Asn1Encodable.Der);
}
private bool DoVerify(
AsymmetricKeyParameter key,
Asn1.Cms.AttributeTable signedAttrTable)
{
string digestName = Helper.GetDigestAlgName(this.DigestAlgOid);
IDigest digest = Helper.GetDigestInstance(digestName);
DerObjectIdentifier sigAlgOid = this.encryptionAlgorithm.ObjectID;
Asn1Encodable sigParams = this.encryptionAlgorithm.Parameters;
ISigner sig;
if (sigAlgOid.Equals(Asn1.Pkcs.PkcsObjectIdentifiers.IdRsassaPss))
{
// RFC 4056 2.2
// When the id-RSASSA-PSS algorithm identifier is used for a signature,
// the AlgorithmIdentifier parameters field MUST contain RSASSA-PSS-params.
if (sigParams == null)
throw new CmsException("RSASSA-PSS signature must specify algorithm parameters");
try
{
// TODO Provide abstract configuration mechanism
Asn1.Pkcs.RsassaPssParameters pss = Asn1.Pkcs.RsassaPssParameters.GetInstance(
sigParams.ToAsn1Object());
if (!pss.HashAlgorithm.ObjectID.Equals(this.digestAlgorithm.ObjectID))
throw new CmsException("RSASSA-PSS signature parameters specified incorrect hash algorithm");
if (!pss.MaskGenAlgorithm.ObjectID.Equals(Asn1.Pkcs.PkcsObjectIdentifiers.IdMgf1))
throw new CmsException("RSASSA-PSS signature parameters specified unknown MGF");
IDigest pssDigest = DigestUtilities.GetDigest(pss.HashAlgorithm.ObjectID);
int saltLength = pss.SaltLength.Value.IntValue;
byte trailerField = (byte) pss.TrailerField.Value.IntValue;
// RFC 4055 3.1
// The value MUST be 1, which represents the trailer field with hexadecimal value 0xBC
if (trailerField != 1)
throw new CmsException("RSASSA-PSS signature parameters must have trailerField of 1");
sig = new PssSigner(new RsaBlindedEngine(), pssDigest, saltLength);
}
catch (Exception e)
{
throw new CmsException("failed to set RSASSA-PSS signature parameters", e);
}
}
else
{
// TODO Probably too strong a check at the moment
// if (sigParams != null)
// throw new CmsException("unrecognised signature parameters provided");
string signatureName = digestName + "with" + Helper.GetEncryptionAlgName(this.EncryptionAlgOid);
sig = Helper.GetSignatureInstance(signatureName);
}
try
{
sig.Init(false, key);
if (signedAttributes == null)
{
if (content != null)
{
content.Write(new CmsSignedDataGenerator.SigOutputStream(sig));
content.Write(new CmsSignedDataGenerator.DigOutputStream(digest));
resultDigest = DigestUtilities.DoFinal(digest);
}
else
{
resultDigest = digestCalculator.GetDigest();
// need to decrypt signature and check message bytes
return VerifyDigest(resultDigest, key, this.GetSignature());
}
}
else
{
byte[] hash;
if (content != null)
{
content.Write(
new CmsSignedDataGenerator.DigOutputStream(digest));
hash = DigestUtilities.DoFinal(digest);
}
else if (digestCalculator != null)
{
hash = digestCalculator.GetDigest();
}
else
{
hash = null;
}
resultDigest = hash;
Asn1.Cms.Attribute dig = signedAttrTable[Asn1.Cms.CmsAttributes.MessageDigest];
Asn1.Cms.Attribute type = signedAttrTable[Asn1.Cms.CmsAttributes.ContentType];
if (dig == null)
{
throw new SignatureException("no hash for content found in signed attributes");
}
if (type == null && !contentType.Equals(CmsAttributes.CounterSignature))
{
throw new SignatureException("no content type id found in signed attributes");
}
Asn1Object hashObj = dig.AttrValues[0].ToAsn1Object();
if (hashObj is Asn1OctetString)
{
byte[] signedHash = ((Asn1OctetString)hashObj).GetOctets();
if (!Arrays.AreEqual(hash, signedHash))
{
throw new SignatureException("content hash found in signed attributes different");
}
}
else if (hashObj is DerNull)
{
if (hash != null)
{
throw new SignatureException("NULL hash found in signed attributes when one expected");
}
}
if (type != null)
{
DerObjectIdentifier typeOID = (DerObjectIdentifier)type.AttrValues[0];
if (!typeOID.Equals(contentType))
{
throw new SignatureException("contentType in signed attributes different");
}
}
byte[] tmp = this.GetEncodedSignedAttributes();
sig.BlockUpdate(tmp, 0, tmp.Length);
}
return sig.VerifySignature(this.GetSignature());
}
catch (InvalidKeyException e)
{
throw new CmsException(
"key not appropriate to signature in message.", e);
}
catch (IOException e)
{
throw new CmsException(
"can't process mime object to create signature.", e);
}
catch (SignatureException e)
{
throw new CmsException(
"invalid signature format in message: " + e.Message, e);
}
}
private bool IsNull(
Asn1Encodable o)
{
return (o is Asn1Null) || (o == null);
}
private DigestInfo DerDecode(
byte[] encoding)
{
if (encoding[0] != (int)(Asn1Tags.Constructed | Asn1Tags.Sequence))
{
throw new IOException("not a digest info object");
}
DigestInfo digInfo = DigestInfo.GetInstance(Asn1Object.FromByteArray(encoding));
// length check to avoid Bleichenbacher vulnerability
if (digInfo.GetEncoded().Length != encoding.Length)
{
throw new CmsException("malformed RSA signature");
}
return digInfo;
}
private bool VerifyDigest(
byte[] digest,
AsymmetricKeyParameter key,
byte[] signature)
{
string algorithm = Helper.GetEncryptionAlgName(this.EncryptionAlgOid);
try
{
if (algorithm.Equals("RSA"))
{
IBufferedCipher c = CipherUtilities.GetCipher("RSA//PKCS1Padding");
c.Init(false, key);
byte[] decrypt = c.DoFinal(signature);
DigestInfo digInfo = DerDecode(decrypt);
if (!digInfo.AlgorithmID.ObjectID.Equals(digestAlgorithm.ObjectID))
{
return false;
}
if (!IsNull(digInfo.AlgorithmID.Parameters))
{
return false;
}
byte[] sigHash = digInfo.GetDigest();
return Arrays.AreEqual(digest, sigHash);
}
else if (algorithm.Equals("DSA"))
{
ISigner sig = SignerUtilities.GetSigner("NONEwithDSA");
sig.Init(false, key);
sig.BlockUpdate(digest, 0, digest.Length);
return sig.VerifySignature(signature);
}
else
{
throw new CmsException("algorithm: " + algorithm + " not supported in base signatures.");
}
}
catch (SecurityUtilityException e)
{
throw e;
}
catch (GeneralSecurityException e)
{
throw new CmsException("Exception processing signature: " + e, e);
}
catch (IOException e)
{
throw new CmsException("Exception decoding signature: " + e, e);
}
}
/**
* verify that the given public key successfully handles and confirms the
* signature associated with this signer.
*/
public bool Verify(
AsymmetricKeyParameter pubKey)
{
if (pubKey.IsPrivate)
throw new ArgumentException("Expected public key", "pubKey");
return DoVerify(pubKey, this.SignedAttributes);
}
/**
* verify that the given certificate successfully handles and confirms
* the signature associated with this signer and, if a signingTime
* attribute is available, that the certificate was valid at the time the
* signature was generated.
*/
public bool Verify(
X509Certificate cert)
{
Asn1.Cms.AttributeTable attr = this.SignedAttributes;
if (attr != null)
{
Asn1EncodableVector v = attr.GetAll(CmsAttributes.SigningTime);
switch (v.Count)
{
case 0:
break;
case 1:
{
Asn1.Cms.Attribute t = (Asn1.Cms.Attribute) v[0];
Debug.Assert(t != null);
Asn1Set attrValues = t.AttrValues;
if (attrValues.Count != 1)
throw new CmsException("A signing-time attribute MUST have a single attribute value");
Asn1.Cms.Time time = Asn1.Cms.Time.GetInstance(attrValues[0].ToAsn1Object());
cert.CheckValidity(time.Date);
break;
}
default:
throw new CmsException("The SignedAttributes in a signerInfo MUST NOT include multiple instances of the signing-time attribute");
}
}
return DoVerify(cert.GetPublicKey(), attr);
}
/**
* Return the base ASN.1 CMS structure that this object contains.
*
* @return an object containing a CMS SignerInfo structure.
*/
public SignerInfo ToSignerInfo()
{
return info;
}
/**
* Return a signer information object with the passed in unsigned
* attributes replacing the ones that are currently associated with
* the object passed in.
*
* @param signerInformation the signerInfo to be used as the basis.
* @param unsignedAttributes the unsigned attributes to add.
* @return a copy of the original SignerInformationObject with the changed attributes.
*/
public static SignerInformation ReplaceUnsignedAttributes(
SignerInformation signerInformation,
Asn1.Cms.AttributeTable unsignedAttributes)
{
SignerInfo sInfo = signerInformation.info;
Asn1Set unsignedAttr = null;
if (unsignedAttributes != null)
{
unsignedAttr = new DerSet(unsignedAttributes.ToAsn1EncodableVector());
}
return new SignerInformation(
new SignerInfo(
sInfo.SignerID,
sInfo.DigestAlgorithm,
sInfo.AuthenticatedAttributes,
sInfo.DigestEncryptionAlgorithm,
sInfo.EncryptedDigest,
unsignedAttr),
signerInformation.contentType,
signerInformation.content,
null);
}
/**
* Return a signer information object with the passed in SignerInformationStore representing counter
* signatures attached as an unsigned attribute.
*
* @param signerInformation the signerInfo to be used as the basis.
* @param counterSigners signer info objects carrying the counter signatures.
* @return a copy of the original SignerInformationObject with the changed attributes.
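*
* Illustrative sketch (names are placeholders; counterSigners is assumed to have been
* built separately, e.g. by another signed-data generation pass):
*
*   SignerInformation extended =
*       SignerInformation.AddCounterSigners(original, counterSigners);
*   // 'extended' now carries the counter signatures as an unsigned attribute.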
*/
public static SignerInformation AddCounterSigners(
SignerInformation signerInformation,
SignerInformationStore counterSigners)
{
SignerInfo sInfo = signerInformation.info;
Asn1.Cms.AttributeTable unsignedAttr = signerInformation.UnsignedAttributes;
Asn1EncodableVector v;
if (unsignedAttr != null)
{
v = unsignedAttr.ToAsn1EncodableVector();
}
else
{
v = new Asn1EncodableVector();
}
Asn1EncodableVector sigs = new Asn1EncodableVector();
foreach (SignerInformation sigInf in counterSigners.GetSigners())
{
sigs.Add(sigInf.ToSignerInfo());
}
v.Add(new Asn1.Cms.Attribute(CmsAttributes.CounterSignature, new DerSet(sigs)));
return new SignerInformation(
new SignerInfo(
sInfo.SignerID,
sInfo.DigestAlgorithm,
sInfo.AuthenticatedAttributes,
sInfo.DigestEncryptionAlgorithm,
sInfo.EncryptedDigest,
new DerSet(v)),
signerInformation.contentType,
signerInformation.content,
null);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Runtime.ExceptionServices;
using System.Runtime.Remoting;
using System.Text;
using System.Threading.Tasks;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.TestingHost.Utils;
namespace Orleans.TestingHost
{
using Orleans.CodeGeneration;
/// <summary>
/// A host class for local testing with Orleans using in-process silos.
/// Runs a Primary silo and optionally secondary silos in separate app domains, and the client in the main app domain.
/// Additional silos can also be started in-process on demand if required for particular test cases.
/// </summary>
/// <remarks>
/// Make sure that your test project references your test grains and test grain interfaces
/// projects, and has CopyLocal=True set on those references [which should be the default].
/// </remarks>
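/// <example>
/// A minimal usage sketch (illustrative only; <c>IMyGrain</c> is a placeholder grain interface):
/// <code>
/// var cluster = new TestCluster();
/// cluster.Deploy();
/// var grain = cluster.GrainFactory.GetGrain&lt;IMyGrain&gt;(0);
/// // ... exercise the grain and assert on the results ...
/// cluster.StopAllSilos();
/// </code>
/// </example>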
public class TestCluster
{
/// <summary>
/// Primary silo handle
/// </summary>
public SiloHandle Primary { get; private set; }
/// <summary>
/// List of handles to the secondary silos
/// </summary>
public IReadOnlyList<SiloHandle> SecondarySilos => this.additionalSilos;
private readonly List<SiloHandle> additionalSilos = new List<SiloHandle>();
private readonly Dictionary<string, GeneratedAssembly> additionalAssemblies = new Dictionary<string, GeneratedAssembly>();
/// <summary>
/// Client configuration to use when initializing the client
/// </summary>
public ClientConfiguration ClientConfiguration { get; private set; }
/// <summary>
/// Cluster configuration
/// </summary>
public ClusterConfiguration ClusterConfiguration { get; private set; }
private readonly StringBuilder log = new StringBuilder();
/// <summary>
/// DeploymentId of the cluster
/// </summary>
public string DeploymentId => this.ClusterConfiguration.Globals.DeploymentId;
/// <summary>
/// GrainFactory to use in the tests
/// </summary>
public IGrainFactory GrainFactory { get; private set; }
private Logger logger => GrainClient.Logger;
/// <summary>
/// Configure the default Primary test silo, plus client in-process.
/// </summary>
public TestCluster()
: this(new TestClusterOptions())
{
}
/// <summary>
/// Configures the test cluster plus client in-process.
/// </summary>
public TestCluster(TestClusterOptions options)
: this(options.ClusterConfiguration, options.ClientConfiguration)
{
}
/// <summary>
/// Configures the test cluster plus default client in-process.
/// </summary>
public TestCluster(ClusterConfiguration clusterConfiguration)
: this(clusterConfiguration, TestClusterOptions.BuildClientConfiguration(clusterConfiguration))
{
}
/// <summary>
/// Configures the test cluster plus client in-process,
/// using the specified silo and client config configurations.
/// </summary>
public TestCluster(ClusterConfiguration clusterConfiguration, ClientConfiguration clientConfiguration)
{
this.ClusterConfiguration = clusterConfiguration;
this.ClientConfiguration = clientConfiguration;
}
/// <summary>
/// Deploys the cluster using the specified configuration and starts the client in-process.
/// It will start all the silos defined in the <see cref="Runtime.Configuration.ClusterConfiguration.Overrides"/> collection.
/// </summary>
public void Deploy()
{
this.Deploy(this.ClusterConfiguration.Overrides.Keys);
}
/// <summary>
/// Deploys the cluster using the specified configuration and starts the client in-process.
/// </summary>
/// <param name="siloNames">Only deploy the specified silos which must also be present in the <see cref="Runtime.Configuration.ClusterConfiguration.Overrides"/> collection.</param>
public void Deploy(IEnumerable<string> siloNames)
{
try
{
DeployAsync(siloNames).Wait();
}
catch (AggregateException ex)
{
if (ex.InnerExceptions.Count > 1) throw;
ExceptionDispatchInfo.Capture(ex.InnerException).Throw();
}
}
/// <summary>
/// Deploys the cluster using the specified configuration and starts the client in-process.
/// </summary>
/// <param name="siloNames">Only deploy the specified silos which must also be present in the <see cref="Runtime.Configuration.ClusterConfiguration.Overrides"/> collection.</param>
public async Task DeployAsync(IEnumerable<string> siloNames)
{
if (Primary != null) throw new InvalidOperationException("Cluster host already deployed.");
AppDomain.CurrentDomain.UnhandledException += ReportUnobservedException;
try
{
string startMsg = "----------------------------- STARTING NEW UNIT TEST SILO HOST: " + GetType().FullName + " -------------------------------------";
WriteLog(startMsg);
await InitializeAsync(siloNames);
}
catch (TimeoutException te)
{
FlushLogToConsole();
throw new TimeoutException("Timeout during test initialization", te);
}
catch (Exception ex)
{
StopAllSilos();
Exception baseExc = ex.GetBaseException();
FlushLogToConsole();
if (baseExc is TimeoutException)
{
throw new TimeoutException("Timeout during test initialization", ex);
}
// IMPORTANT:
// Do NOT re-throw the original exception here, also not as an internal exception inside AggregateException
// Due to the way MS tests works, if the original exception is an Orleans exception,
// its assembly might not be loaded yet in this phase of the test.
// As a result, we will get "MSTest: Unit Test Adapter threw exception: Type is not resolved for member XXX"
// and will lose the original exception. This makes debugging tests super hard!
// The root cause has to do with us initializing our tests from Test constructor and not from TestInitialize method.
// More details: http://dobrzanski.net/2010/09/20/mstest-unit-test-adapter-threw-exception-type-is-not-resolved-for-member/
//throw new Exception(
// string.Format("Exception during test initialization: {0}",
// LogFormatter.PrintException(baseExc)));
throw;
}
}
/// <summary>
/// Get the list of current active silos.
/// </summary>
/// <returns>List of current silos.</returns>
public IEnumerable<SiloHandle> GetActiveSilos()
{
WriteLog("GetActiveSilos: Primary={0} + {1} Additional={2}",
Primary, additionalSilos.Count, Runtime.Utils.EnumerableToString(additionalSilos));
if (Primary?.Silo != null) yield return Primary;
if (additionalSilos.Count > 0)
foreach (var s in additionalSilos)
if (s?.Silo != null)
yield return s;
}
/// <summary>
/// Find the silo handle for the specified silo address.
/// </summary>
/// <param name="siloAddress">Silo address to be found.</param>
/// <returns>SiloHandle of the appropriate silo, or <c>null</c> if not found.</returns>
public SiloHandle GetSiloForAddress(SiloAddress siloAddress)
{
List<SiloHandle> activeSilos = GetActiveSilos().ToList();
var ret = activeSilos.FirstOrDefault(s => s.Silo.SiloAddress.Equals(siloAddress));
return ret;
}
/// <summary>
/// Wait for the silo liveness sub-system to detect and act on any recent cluster membership changes.
/// </summary>
/// <param name="didKill">Whether recent membership changes we done by graceful Stop.</param>
public async Task WaitForLivenessToStabilizeAsync(bool didKill = false)
{
TimeSpan stabilizationTime = GetLivenessStabilizationTime(this.ClusterConfiguration.Globals, didKill);
WriteLog(Environment.NewLine + Environment.NewLine + "WaitForLivenessToStabilize is about to sleep for {0}", stabilizationTime);
await Task.Delay(stabilizationTime);
WriteLog("WaitForLivenessToStabilize is done sleeping");
}
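// Worked example (illustrative, with hypothetical configuration values): after a hard kill,
// with ProbeTimeout = 10s, NumMissedProbesLimit = 3 and liveness gossip enabled, the
// stabilization time is 10s * 3 + 5s = 35s; a graceful stop with gossip enabled only needs the 5s allowance.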
private static TimeSpan GetLivenessStabilizationTime(GlobalConfiguration global, bool didKill = false)
{
TimeSpan stabilizationTime = TimeSpan.Zero;
if (didKill)
{
// in case of hard kill (kill and not Stop), we should give silos time to detect failures first.
stabilizationTime = TestingUtils.Multiply(global.ProbeTimeout, global.NumMissedProbesLimit);
}
if (global.UseLivenessGossip)
{
stabilizationTime += TimeSpan.FromSeconds(5);
}
else
{
stabilizationTime += TestingUtils.Multiply(global.TableRefreshTimeout, 2);
}
return stabilizationTime;
}
/// <summary>
/// Start an additional silo, so that it joins the existing cluster.
/// </summary>
/// <returns>SiloHandle for the newly started silo.</returns>
public SiloHandle StartAdditionalSilo()
{
var clusterConfig = this.ClusterConfiguration;
short instanceNumber = (short)clusterConfig.Overrides.Count;
var defaultNode = clusterConfig.Defaults;
int baseSiloPort = defaultNode.Port;
int baseGatewayPort = defaultNode.ProxyGatewayEndpoint.Port;
var nodeConfig = TestClusterOptions.AddNodeConfiguration(
this.ClusterConfiguration,
Silo.SiloType.Secondary,
instanceNumber,
baseSiloPort,
baseGatewayPort);
SiloHandle instance = StartOrleansSilo(
Silo.SiloType.Secondary,
this.ClusterConfiguration,
nodeConfig);
additionalSilos.Add(instance);
return instance;
}
/// <summary>
/// Start a number of additional silos, so that they join the existing cluster.
/// </summary>
/// <param name="numExtraSilos">Number of additional silos to start.</param>
/// <returns>List of SiloHandles for the newly started silos.</returns>
public List<SiloHandle> StartAdditionalSilos(int numExtraSilos)
{
List<SiloHandle> instances = new List<SiloHandle>();
for (int i = 0; i < numExtraSilos; i++)
{
SiloHandle instance = StartAdditionalSilo();
instances.Add(instance);
}
return instances;
}
/// <summary>
/// Stop any additional silos, not including the default Primary silo.
/// </summary>
public void StopSecondarySilos()
{
foreach (SiloHandle instance in this.additionalSilos.ToList())
{
StopSilo(instance);
}
}
/// <summary>
/// Stops the default Primary silo.
/// </summary>
public void StopPrimarySilo()
{
try
{
GrainClient.Uninitialize();
}
catch (Exception exc) { WriteLog("Exception Uninitializing grain client: {0}", exc); }
StopSilo(Primary);
}
/// <summary>
/// Stop all current silos.
/// </summary>
public void StopAllSilos()
{
StopSecondarySilos();
StopPrimarySilo();
AppDomain.CurrentDomain.UnhandledException -= ReportUnobservedException;
}
/// <summary>
/// Do a semi-graceful Stop of the specified silo.
/// </summary>
/// <param name="instance">Silo to be stopped.</param>
public void StopSilo(SiloHandle instance)
{
if (instance != null)
{
StopOrleansSilo(instance, true);
if (Primary == instance)
{
Primary = null;
}
else
{
additionalSilos.Remove(instance);
}
}
}
/// <summary>
/// Do an immediate Kill of the specified silo.
/// </summary>
/// <param name="instance">Silo to be killed.</param>
public void KillSilo(SiloHandle instance)
{
if (instance != null)
{
// do NOT stop, just kill directly, to simulate crash.
StopOrleansSilo(instance, false);
}
}
/// <summary>
/// Performs a hard kill on client. Client will not cleanup resources.
/// </summary>
public void KillClient()
{
GrainClient.HardKill();
}
/// <summary>
/// Do a Stop or Kill of the specified silo, followed by a restart.
/// </summary>
/// <param name="instance">Silo to be restarted.</param>
public SiloHandle RestartSilo(SiloHandle instance)
{
if (instance != null)
{
var type = instance.Silo.Type;
var siloName = instance.Name;
StopSilo(instance);
var newInstance = StartOrleansSilo(type, this.ClusterConfiguration, this.ClusterConfiguration.Overrides[siloName]);
if (type == Silo.SiloType.Primary && siloName == Silo.PrimarySiloName)
{
Primary = newInstance;
}
else
{
additionalSilos.Add(newInstance);
}
return newInstance;
}
return null;
}
/// <summary>
/// Restart a previously stopped secondary silo.
/// </summary>
/// <param name="siloName">Silo to be restarted.</param>
public SiloHandle RestartStoppedSecondarySilo(string siloName)
{
if (siloName == null) throw new ArgumentNullException(nameof(siloName));
var newInstance = StartOrleansSilo(Silo.SiloType.Secondary, this.ClusterConfiguration, this.ClusterConfiguration.Overrides[siloName]);
additionalSilos.Add(newInstance);
return newInstance;
}
#region Private methods
/// <summary>
/// Imports assemblies generated by runtime code generation from the provided silo.
/// </summary>
/// <param name="siloHandle">The silo.</param>
private void ImportGeneratedAssemblies(SiloHandle siloHandle)
{
var generatedAssemblies = TryGetGeneratedAssemblies(siloHandle);
if (generatedAssemblies != null)
{
foreach (var assembly in generatedAssemblies)
{
// If we have never seen generated code for this assembly before, or generated code might be
// newer, store it for later silo creation.
GeneratedAssembly existing;
if (!this.additionalAssemblies.TryGetValue(assembly.Key, out existing) || assembly.Value != null)
{
this.additionalAssemblies[assembly.Key] = assembly.Value;
}
}
}
}
private Dictionary<string, GeneratedAssembly> TryGetGeneratedAssemblies(SiloHandle siloHandle)
{
var tryToRetrieveGeneratedAssemblies = Task.Run(() =>
{
try
{
var silo = siloHandle.Silo;
if (silo?.TestHook != null)
{
var generatedAssemblies = new Silo.TestHooks.GeneratedAssemblies();
silo.TestHook.UpdateGeneratedAssemblies(generatedAssemblies);
return generatedAssemblies.Assemblies;
}
}
catch (Exception exc)
{
WriteLog("UpdateGeneratedAssemblies threw an exception. Ignoring it. Exception: {0}", exc);
}
return null;
});
// best effort to try to import generated assemblies, otherwise move on.
if (tryToRetrieveGeneratedAssemblies.Wait(TimeSpan.FromSeconds(3)))
{
return tryToRetrieveGeneratedAssemblies.Result;
}
return null;
}
/// <summary>
/// Initialize the grain client. This should already be done by <see cref="Deploy()"/> or <see cref="DeployAsync"/>.
/// </summary>
public void InitializeClient()
{
WriteLog("Initializing Grain Client");
ClientConfiguration clientConfig = this.ClientConfiguration;
if (Debugger.IsAttached)
{
// Test is running inside debugger - Make timeout ~= infinite
clientConfig.ResponseTimeout = TimeSpan.FromMilliseconds(1000000);
}
GrainClient.Initialize(clientConfig);
GrainFactory = GrainClient.GrainFactory;
}
private async Task InitializeAsync(IEnumerable<string> siloNames)
{
var silos = siloNames.ToList();
foreach (var siloName in silos)
{
if (!this.ClusterConfiguration.Overrides.Keys.Contains(siloName))
{
throw new ArgumentOutOfRangeException(nameof(siloNames), $"Silo name {siloName} does not exist in the ClusterConfiguration.Overrides collection");
}
}
if (silos.Contains(Silo.PrimarySiloName))
{
Primary = StartOrleansSilo(Silo.SiloType.Primary, this.ClusterConfiguration, this.ClusterConfiguration.Overrides[Silo.PrimarySiloName]);
}
var secondarySiloNames = silos.Where(name => !string.Equals(Silo.PrimarySiloName, name)).ToList();
if (secondarySiloNames.Count > 0)
{
var siloStartTasks = secondarySiloNames.Select(siloName =>
{
return Task.Run(() => StartOrleansSilo(Silo.SiloType.Secondary, this.ClusterConfiguration, this.ClusterConfiguration.Overrides[siloName]));
}).ToList();
try
{
await Task.WhenAll(siloStartTasks);
}
catch (Exception)
{
// Register any silos that did start successfully so StopAllSilos can clean them up.
this.additionalSilos.AddRange(siloStartTasks.Where(t => t.Exception == null).Select(t => t.Result));
throw;
}
this.additionalSilos.AddRange(siloStartTasks.Select(t => t.Result));
}
WriteLog("Done initializing cluster");
if (this.ClientConfiguration != null)
{
InitializeClient();
}
}
private SiloHandle StartOrleansSilo(Silo.SiloType type, ClusterConfiguration clusterConfig, NodeConfiguration nodeConfig)
{
return StartOrleansSilo(this, type, clusterConfig, nodeConfig);
}
/// <summary>
/// Start a new silo in the target cluster
/// </summary>
/// <param name="cluster">The TestCluster in which the silo should be deployed</param>
/// <param name="type">The type of the silo to deploy</param>
/// <param name="clusterConfig">The cluster config to use</param>
/// <param name="nodeConfig">The configuration for the silo to deploy</param>
/// <returns>A handle to the silo deployed</returns>
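/// <example>
/// Illustrative sketch (assumes an already-deployed <c>cluster</c> instance; the silo name is a placeholder):
/// <code>
/// var nodeConfig = cluster.ClusterConfiguration.Overrides["Secondary_1"];
/// SiloHandle handle = TestCluster.StartOrleansSilo(
///     cluster, Silo.SiloType.Secondary, cluster.ClusterConfiguration, nodeConfig);
/// </code>
/// </example>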
public static SiloHandle StartOrleansSilo(TestCluster cluster, Silo.SiloType type, ClusterConfiguration clusterConfig, NodeConfiguration nodeConfig)
{
if (cluster == null) throw new ArgumentNullException(nameof(cluster));
var siloName = nodeConfig.SiloName;
cluster.WriteLog("Starting a new silo in app domain {0} with config {1}", siloName, clusterConfig.ToString(siloName));
AppDomain appDomain;
Silo silo = cluster.LoadSiloInNewAppDomain(siloName, type, clusterConfig, out appDomain);
silo.Start();
SiloHandle retValue = new SiloHandle
{
Name = siloName,
Silo = silo,
NodeConfiguration = nodeConfig,
Endpoint = silo.SiloAddress.Endpoint,
AppDomain = appDomain,
};
cluster.ImportGeneratedAssemblies(retValue);
return retValue;
}
private void StopOrleansSilo(SiloHandle instance, bool stopGracefully)
{
var silo = instance.Silo;
if (stopGracefully)
{
try
{
silo?.Shutdown();
}
catch (RemotingException re)
{
WriteLog(re); /* Ignore error */
}
catch (Exception exc)
{
WriteLog(exc);
throw;
}
}
ImportGeneratedAssemblies(instance);
try
{
UnloadSiloInAppDomain(instance.AppDomain);
}
catch (Exception exc)
{
WriteLog(exc);
throw;
}
instance.AppDomain = null;
instance.Silo = null;
instance.Process = null;
}
private Silo LoadSiloInNewAppDomain(string siloName, Silo.SiloType type, ClusterConfiguration config, out AppDomain appDomain)
{
AppDomainSetup setup = GetAppDomainSetupInfo();
appDomain = AppDomain.CreateDomain(siloName, null, setup);
// Load each of the additional assemblies.
Silo.TestHooks.CodeGeneratorOptimizer optimizer = null;
foreach (var assembly in this.additionalAssemblies.Where(asm => asm.Value != null))
{
if (optimizer == null)
{
optimizer =
(Silo.TestHooks.CodeGeneratorOptimizer)
appDomain.CreateInstanceFromAndUnwrap(
"OrleansRuntime.dll",
typeof(Silo.TestHooks.CodeGeneratorOptimizer).FullName,
false,
BindingFlags.Default,
null,
null,
CultureInfo.CurrentCulture,
new object[] { });
}
optimizer.AddCachedAssembly(assembly.Key, assembly.Value);
}
var args = new object[] { siloName, type, config };
var silo = (Silo)appDomain.CreateInstanceFromAndUnwrap(
"OrleansRuntime.dll", typeof(Silo).FullName, false,
BindingFlags.Default, null, args, CultureInfo.CurrentCulture,
new object[] { });
appDomain.UnhandledException += ReportUnobservedException;
appDomain.DoCallBack(RegisterPerfCountersTelemetryConsumer);
return silo;
}
private void UnloadSiloInAppDomain(AppDomain appDomain)
{
if (appDomain != null)
{
appDomain.UnhandledException -= ReportUnobservedException;
AppDomain.Unload(appDomain);
}
}
private static void RegisterPerfCountersTelemetryConsumer()
{
#if !NETSTANDARD_TODO
LogManager.TelemetryConsumers.Add(new OrleansTelemetryConsumers.Counters.OrleansPerfCounterTelemetryConsumer());
#endif
}
private static AppDomainSetup GetAppDomainSetupInfo()
{
AppDomain currentAppDomain = AppDomain.CurrentDomain;
return new AppDomainSetup
{
ApplicationBase = Environment.CurrentDirectory,
ConfigurationFile = currentAppDomain.SetupInformation.ConfigurationFile,
ShadowCopyFiles = currentAppDomain.SetupInformation.ShadowCopyFiles,
ShadowCopyDirectories = currentAppDomain.SetupInformation.ShadowCopyDirectories,
CachePath = currentAppDomain.SetupInformation.CachePath
};
}
#endregion
#region Tracing helper functions
private static void ReportUnobservedException(object sender, UnhandledExceptionEventArgs eventArgs)
{
Exception exception = (Exception)eventArgs.ExceptionObject;
// WriteLog("Unobserved exception: {0}", exception);
}
private void WriteLog(string format, params object[] args)
{
log.AppendFormat(format + Environment.NewLine, args);
}
private void FlushLogToConsole()
{
Console.WriteLine(log.ToString());
}
private void WriteLog(object value)
{
WriteLog(value.ToString());
}
#endregion
}
}
| |
//
// Copyright (c) 2004-2017 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.Layouts
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Text;
using NLog.Config;
/// <summary>
/// A specialized layout that renders CSV-formatted events.
/// </summary>
/// <remarks>If <see cref="LayoutWithHeaderAndFooter.Header"/> is set, then the header generation with columnnames will be disabled.</remarks>
[Layout("CsvLayout")]
[ThreadAgnostic]
[AppDomainFixedOutput]
public class CsvLayout : LayoutWithHeaderAndFooter
{
private string _actualColumnDelimiter;
private string _doubleQuoteChar;
private char[] _quotableCharacters;
/// <summary>
/// Initializes a new instance of the <see cref="CsvLayout"/> class.
/// </summary>
public CsvLayout()
{
this.Columns = new List<CsvColumn>();
this.WithHeader = true;
this.Delimiter = CsvColumnDelimiterMode.Auto;
this.Quoting = CsvQuotingMode.Auto;
this.QuoteChar = "\"";
this.Layout = this;
this.Header = new CsvHeaderLayout(this);
this.Footer = null;
}
/// <summary>
/// Gets the collection of columns to render.
/// </summary>
/// <docgen category='CSV Options' order='10' />
[ArrayParameter(typeof(CsvColumn), "column")]
public IList<CsvColumn> Columns { get; private set; }
/// <summary>
/// Gets or sets a value indicating whether CSV should include a header.
/// </summary>
/// <value>A value of <c>true</c> if CSV should include a header; otherwise, <c>false</c>.</value>
/// <docgen category='CSV Options' order='10' />
public bool WithHeader { get; set; }
/// <summary>
/// Gets or sets the column delimiter.
/// </summary>
/// <docgen category='CSV Options' order='10' />
[DefaultValue("Auto")]
public CsvColumnDelimiterMode Delimiter { get; set; }
/// <summary>
/// Gets or sets the quoting mode.
/// </summary>
/// <docgen category='CSV Options' order='10' />
[DefaultValue("Auto")]
public CsvQuotingMode Quoting { get; set; }
/// <summary>
/// Gets or sets the quote Character.
/// </summary>
/// <docgen category='CSV Options' order='10' />
[DefaultValue("\"")]
public string QuoteChar { get; set; }
/// <summary>
/// Gets or sets the custom column delimiter value (valid when <see cref="Delimiter"/> is set to 'Custom').
/// </summary>
/// <docgen category='CSV Options' order='10' />
public string CustomColumnDelimiter { get; set; }
/// <summary>
/// Initializes the layout.
/// </summary>
protected override void InitializeLayout()
{
base.InitializeLayout();
if (!this.WithHeader)
{
this.Header = null;
}
switch (this.Delimiter)
{
case CsvColumnDelimiterMode.Auto:
this._actualColumnDelimiter = CultureInfo.CurrentCulture.TextInfo.ListSeparator;
break;
case CsvColumnDelimiterMode.Comma:
this._actualColumnDelimiter = ",";
break;
case CsvColumnDelimiterMode.Semicolon:
this._actualColumnDelimiter = ";";
break;
case CsvColumnDelimiterMode.Pipe:
this._actualColumnDelimiter = "|";
break;
case CsvColumnDelimiterMode.Tab:
this._actualColumnDelimiter = "\t";
break;
case CsvColumnDelimiterMode.Space:
this._actualColumnDelimiter = " ";
break;
case CsvColumnDelimiterMode.Custom:
this._actualColumnDelimiter = this.CustomColumnDelimiter;
break;
}
this._quotableCharacters = (this.QuoteChar + "\r\n" + this._actualColumnDelimiter).ToCharArray();
this._doubleQuoteChar = this.QuoteChar + this.QuoteChar;
}
/// <summary>
/// Formats the log event for write.
/// </summary>
/// <param name="logEvent">The log event to be formatted.</param>
/// <returns>A string representation of the log event.</returns>
protected override string GetFormattedMessage(LogEventInfo logEvent)
{
return RenderAllocateBuilder(logEvent);
}
private void RenderAllColumns(LogEventInfo logEvent, StringBuilder sb)
{
//Memory profiling pointed out that using a foreach-loop was allocating
//an Enumerator. Switching to a for-loop avoids the memory allocation.
for (int i = 0; i < this.Columns.Count; i++)
{
CsvColumn col = this.Columns[i];
string text = col.Layout.Render(logEvent);
RenderCol(sb, i, text);
}
}
/// <summary>
/// Formats the log event for write.
/// </summary>
/// <param name="logEvent">The logging event.</param>
/// <param name="target"><see cref="StringBuilder"/> for the result</param>
protected override void RenderFormattedMessage(LogEventInfo logEvent, StringBuilder target)
{
RenderAllColumns(logEvent, target);
}
/// <summary>
/// Renders the header line with the column names into <paramref name="sb"/>.
/// </summary>
/// <param name="sb">The <see cref="StringBuilder"/> to write the header to.</param>
private void RenderHeader(StringBuilder sb)
{
//Memory profiling pointed out that using a foreach-loop was allocating
//an Enumerator. Switching to a for-loop avoids the memory allocation.
for (int i = 0; i < this.Columns.Count; i++)
{
CsvColumn col = this.Columns[i];
string text = col.Name;
RenderCol(sb, i, text);
}
}
/// <summary>
/// Render one column value (text or header) to <paramref name="sb"/>.
/// </summary>
/// <param name="sb">The <see cref="StringBuilder"/> to write to.</param>
/// <param name="columnIndex">The current column index.</param>
/// <param name="columnValue">The column text.</param>
private void RenderCol(StringBuilder sb, int columnIndex, string columnValue)
{
if (columnIndex != 0)
{
sb.Append(this._actualColumnDelimiter);
}
bool useQuoting;
switch (this.Quoting)
{
case CsvQuotingMode.Nothing:
useQuoting = false;
break;
case CsvQuotingMode.All:
useQuoting = true;
break;
default:
case CsvQuotingMode.Auto:
if (columnValue.IndexOfAny(this._quotableCharacters) >= 0)
{
useQuoting = true;
}
else
{
useQuoting = false;
}
break;
}
if (useQuoting)
{
sb.Append(this.QuoteChar);
}
if (useQuoting)
{
sb.Append(columnValue.Replace(this.QuoteChar, this._doubleQuoteChar));
}
else
{
sb.Append(columnValue);
}
if (useQuoting)
{
sb.Append(this.QuoteChar);
}
}
/// <summary>
/// Header with column names for CSV layout.
/// </summary>
[ThreadAgnostic]
private class CsvHeaderLayout : Layout
{
private CsvLayout _parent;
/// <summary>
/// Initializes a new instance of the <see cref="CsvHeaderLayout"/> class.
/// </summary>
/// <param name="parent">The parent.</param>
public CsvHeaderLayout(CsvLayout parent)
{
this._parent = parent;
}
/// <summary>
/// Renders the layout for the specified logging event by invoking layout renderers.
/// </summary>
/// <param name="logEvent">The logging event.</param>
/// <returns>The rendered layout.</returns>
protected override string GetFormattedMessage(LogEventInfo logEvent)
{
return RenderAllocateBuilder(logEvent);
}
/// <summary>
/// Renders the layout for the specified logging event by invoking layout renderers.
/// </summary>
/// <param name="logEvent">The logging event.</param>
/// <param name="target"><see cref="StringBuilder"/> for the result</param>
protected override void RenderFormattedMessage(LogEventInfo logEvent, StringBuilder target)
{
this._parent.RenderHeader(target);
}
}
}
}
| |
//
// TaglibReadWriteTests.cs
//
// Author:
// Gabriel Burt <[email protected]>
//
// Copyright (C) 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#if ENABLE_TESTS
using System;
using System.IO;
using System.Reflection;
using NUnit.Framework;
using Banshee.Collection;
using Banshee.Streaming;
using Banshee.Configuration.Schema;
using Hyena;
using Hyena.Tests;
namespace Banshee.Metadata
{
// FIXME: These tests don't really belong here
[TestFixture]
public class TaglibReadWriteTests : TestBase
{
private string [] files;
[TestFixtureSetUp]
public void Setup ()
{
files = new string [] {
Path.Combine (TestsDir, "data/test.mp3")
};
}
[Test]
public void TestChangeTrack ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
WriteMetadata (files, ChangeTrack, VerifyTrack);
}
}
[Test]
public void TestGenre ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
WriteMetadata (files, delegate (TrackInfo track) {
ChangeTrack (track);
track.Genre = "My Genre";
}, delegate (TrackInfo track) {
VerifyTrack (track);
Assert.AreEqual ("My Genre", track.Genre);
});
}
}
[Test]
public void TestNullGenreBug ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
// Bug in taglib-sharp-2.0.3.0: Crash if you send it a genre of "{ null }" on
// a song with both ID3v1 and ID3v2 metadata. It's happy with "{}", though.
// (see http://forum.taglib-sharp.com/viewtopic.php?f=5&t=239 )
// This tests our workaround.
WriteMetadata (files, delegate (TrackInfo track) {
ChangeTrack (track);
track.Genre = null;
}, delegate (TrackInfo track) {
VerifyTrack (track);
Assert.IsNull (track.Genre);
});
}
}
[Test]
public void TestIsCompilation ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
WriteMetadata (files, delegate (TrackInfo track) {
ChangeTrack (track);
// bgo#563283: IsCompilation was reset if AlbumArtist == Artist
track.AlbumArtist = track.ArtistName;
track.IsCompilation = true;
}, delegate (TrackInfo track) {
VerifyTrack (track);
Assert.AreEqual (track.ArtistName, track.AlbumArtist);
Assert.IsTrue (track.IsCompilation);
});
}
}
[Test]
public void TestIsNotCompilation ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
WriteMetadata (files, delegate (TrackInfo track) {
ChangeTrack (track);
track.AlbumArtist = track.ArtistName;
track.IsCompilation = false;
}, delegate (TrackInfo track) {
VerifyTrack (track);
Assert.AreEqual (track.ArtistName, track.AlbumArtist);
Assert.IsFalse (track.IsCompilation);
});
}
}
[Test]
public void TestIsCompilationAndAlbumArtist ()
{
foreach (var p in Banshee.IO.Tests.Providers) {
Banshee.IO.Provider.SetProvider (p);
WriteMetadata (files, delegate (TrackInfo track) {
ChangeTrack (track);
track.AlbumArtist = "My Album Artist";
track.IsCompilation = true;
}, delegate (TrackInfo track) {
VerifyTrack (track);
Assert.AreEqual ("My Album Artist", track.AlbumArtist);
Assert.IsTrue (track.IsCompilation);
});
}
}
private void WriteMetadata (string [] files, Action<TrackInfo> change, Action<TrackInfo> verify)
{
SafeUri newuri = null;
bool write_metadata = LibrarySchema.WriteMetadata.Get();
LibrarySchema.WriteMetadata.Set (true);
try {
AssertForEach<string> (files, delegate (string uri) {
string extension = System.IO.Path.GetExtension (uri);
newuri = new SafeUri (Path.Combine (TestsDir, "data/test_write." + extension));
Banshee.IO.File.Copy (new SafeUri (uri), newuri, true);
ChangeAndVerify (newuri, change, verify);
});
} finally {
LibrarySchema.WriteMetadata.Set (write_metadata);
if (newuri != null)
Banshee.IO.File.Delete (newuri);
}
}
private void ChangeAndVerify (SafeUri uri, Action<TrackInfo> change, Action<TrackInfo> verify)
{
TagLib.File file = StreamTagger.ProcessUri (uri);
TrackInfo track = new TrackInfo ();
StreamTagger.TrackInfoMerge (track, file);
file.Dispose ();
// Make changes
change (track);
// Save changes
bool saved = StreamTagger.SaveToFile (track, true, true, true);
Assert.IsTrue (saved);
// Read changes
file = StreamTagger.ProcessUri (uri);
track = new TrackInfo ();
StreamTagger.TrackInfoMerge (track, file, false, true, true);
file.Dispose ();
// Verify changes
verify (track);
}
private void ChangeTrack (TrackInfo track)
{
track.TrackTitle = "My Title";
track.ArtistName = "My Artist";
track.AlbumTitle = "My Album";
track.TrackNumber = 4;
track.DiscNumber = 4;
track.Year = 1999;
track.Rating = 2;
track.PlayCount = 3;
}
private void VerifyTrack (TrackInfo track)
{
Assert.AreEqual ("My Title", track.TrackTitle);
Assert.AreEqual ("My Artist", track.ArtistName);
Assert.AreEqual ("My Album", track.AlbumTitle);
Assert.AreEqual (4, track.TrackNumber);
Assert.AreEqual (4, track.DiscNumber);
Assert.AreEqual (1999, track.Year);
Assert.AreEqual (2, track.Rating);
Assert.AreEqual (3, track.PlayCount);
}
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
using System.Linq;
namespace System.IO.Tests
{
public partial class File_Move : FileSystemTest
{
#region Utilities
protected virtual void Move(string sourceFile, string destFile)
{
File.Move(sourceFile, destFile);
}
#endregion
#region UniversalTests
[Fact]
public void NullPath()
{
Assert.Throws<ArgumentNullException>(() => Move(null, "."));
Assert.Throws<ArgumentNullException>(() => Move(".", null));
}
[Fact]
public void EmptyPath()
{
Assert.Throws<ArgumentException>(() => Move(string.Empty, "."));
Assert.Throws<ArgumentException>(() => Move(".", string.Empty));
}
[Fact]
public virtual void NonExistentPath()
{
FileInfo testFile = new FileInfo(GetTestFilePath());
testFile.Create().Dispose();
Assert.Throws<FileNotFoundException>(() => Move(GetTestFilePath(), testFile.FullName));
Assert.Throws<DirectoryNotFoundException>(() => Move(testFile.FullName, Path.Combine(TestDirectory, GetTestFileName(), GetTestFileName())));
Assert.Throws<FileNotFoundException>(() => Move(Path.Combine(TestDirectory, GetTestFileName(), GetTestFileName()), testFile.FullName));
}
[Theory, MemberData(nameof(PathsWithInvalidCharacters))]
public void PathWithIllegalCharacters_Core(string invalidPath)
{
FileInfo testFile = new FileInfo(GetTestFilePath());
testFile.Create().Dispose();
if (invalidPath.Contains('\0'.ToString()))
{
Assert.Throws<ArgumentException>(() => Move(testFile.FullName, invalidPath));
}
else
{
if (PlatformDetection.IsInAppContainer)
{
AssertExtensions.ThrowsAny<IOException, UnauthorizedAccessException>(() => Move(testFile.FullName, invalidPath));
}
else
{
Assert.ThrowsAny<IOException>(() => Move(testFile.FullName, invalidPath));
}
}
}
[Fact]
public void BasicMove()
{
FileInfo testFileSource = new FileInfo(GetTestFilePath());
testFileSource.Create().Dispose();
string testFileDest = GetTestFilePath();
Move(testFileSource.FullName, testFileDest);
Assert.True(File.Exists(testFileDest));
Assert.False(File.Exists(testFileSource.FullName));
}
[Fact]
public void MoveNonEmptyFile()
{
FileInfo testFileSource = new FileInfo(GetTestFilePath());
using (var stream = testFileSource.Create())
{
var writer = new StreamWriter(stream);
writer.Write("testing\nwrite\n");
writer.Flush();
}
string testFileDest = GetTestFilePath();
Move(testFileSource.FullName, testFileDest);
Assert.True(File.Exists(testFileDest));
Assert.False(File.Exists(testFileSource.FullName));
Assert.Equal("testing\nwrite\n", File.ReadAllText(testFileDest));
}
[Fact]
public void MoveOntoDirectory()
{
FileInfo testFile = new FileInfo(GetTestFilePath());
testFile.Create().Dispose();
Assert.Throws<IOException>(() => Move(testFile.FullName, TestDirectory));
}
[Fact]
public void MoveOntoExistingFile()
{
FileInfo testFileSource = new FileInfo(GetTestFilePath());
testFileSource.Create().Dispose();
FileInfo testFileDest = new FileInfo(GetTestFilePath());
testFileDest.Create().Dispose();
Assert.Throws<IOException>(() => Move(testFileSource.FullName, testFileDest.FullName));
Assert.True(File.Exists(testFileSource.FullName));
Assert.True(File.Exists(testFileDest.FullName));
}
[Fact]
public void MoveIntoParentDirectory()
{
string testDir = GetTestFilePath();
Directory.CreateDirectory(testDir);
FileInfo testFileSource = new FileInfo(Path.Combine(testDir, GetTestFileName()));
testFileSource.Create().Dispose();
FileInfo testFileDest = new FileInfo(Path.Combine(testDir, "..", GetTestFileName()));
Move(testFileSource.FullName, testFileDest.FullName);
Assert.True(File.Exists(testFileDest.FullName));
}
[Fact]
public void MoveToSameName()
{
string testDir = GetTestFilePath();
Directory.CreateDirectory(testDir);
FileInfo testFileSource = new FileInfo(Path.Combine(testDir, GetTestFileName()));
testFileSource.Create().Dispose();
Move(testFileSource.FullName, testFileSource.FullName);
Assert.True(File.Exists(testFileSource.FullName));
}
[Fact]
public void MoveToSameNameDifferentCasing()
{
string testDir = GetTestFilePath();
Directory.CreateDirectory(testDir);
FileInfo testFileSource = new FileInfo(Path.Combine(testDir, Path.GetRandomFileName().ToLowerInvariant()));
testFileSource.Create().Dispose();
FileInfo testFileDest = new FileInfo(Path.Combine(testFileSource.DirectoryName, testFileSource.Name.ToUpperInvariant()));
Move(testFileSource.FullName, testFileDest.FullName);
Assert.True(File.Exists(testFileDest.FullName));
}
[Fact]
public void MultipleMoves()
{
FileInfo testFileSource = new FileInfo(GetTestFilePath());
testFileSource.Create().Dispose();
string testFileDest1 = GetTestFilePath();
string testFileDest2 = GetTestFilePath();
Move(testFileSource.FullName, testFileDest1);
Move(testFileDest1, testFileDest2);
Assert.True(File.Exists(testFileDest2));
Assert.False(File.Exists(testFileDest1));
Assert.False(File.Exists(testFileSource.FullName));
}
[Fact]
public void FileNameWithSignificantWhitespace()
{
string testFileSource = Path.Combine(TestDirectory, GetTestFileName());
string testFileDest = Path.Combine(TestDirectory, " e n d");
File.Create(testFileSource).Dispose();
Move(testFileSource, testFileDest);
Assert.True(File.Exists(testFileDest));
Assert.False(File.Exists(testFileSource));
}
[ConditionalFact(nameof(AreAllLongPathsAvailable))]
[PlatformSpecific(TestPlatforms.Windows)] // Path longer than max path limit
public void OverMaxPathWorks_Windows()
{
// Create a destination path longer than the traditional Windows limit of 256 characters,
// but under the long path limitation (32K).
string testFileSource = Path.Combine(TestDirectory, GetTestFileName());
File.Create(testFileSource).Dispose();
Assert.True(File.Exists(testFileSource), "test file should exist");
Assert.All(IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath()), (path) =>
{
string baseDestinationPath = Path.GetDirectoryName(path);
if (!Directory.Exists(baseDestinationPath))
{
Directory.CreateDirectory(baseDestinationPath);
}
Assert.True(Directory.Exists(baseDestinationPath), "base destination path should exist");
Move(testFileSource, path);
Assert.True(File.Exists(path), "moved test file should exist");
File.Delete(testFileSource);
Assert.False(File.Exists(testFileSource), "source test file should not exist");
Move(path, testFileSource);
Assert.True(File.Exists(testFileSource), "restored test file should exist");
});
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)]
public void LongPath()
{
string testFileSource = Path.Combine(TestDirectory, GetTestFileName());
File.Create(testFileSource).Dispose();
Assert.All(IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath()), (path) =>
{
AssertExtensions.ThrowsAny<PathTooLongException, FileNotFoundException, DirectoryNotFoundException>(() => Move(testFileSource, path));
File.Delete(testFileSource);
AssertExtensions.ThrowsAny<PathTooLongException, FileNotFoundException, DirectoryNotFoundException>(() => Move(path, testFileSource));
});
}
#endregion
#region PlatformSpecific
[Theory, MemberData(nameof(PathsWithInvalidColons))]
[PlatformSpecific(TestPlatforms.Windows)]
public void WindowsPathWithIllegalColons_Core(string invalidPath)
{
FileInfo testFile = new FileInfo(GetTestFilePath());
testFile.Create().Dispose();
Assert.ThrowsAny<IOException>(() => Move(testFile.FullName, testFile.DirectoryName + Path.DirectorySeparatorChar + invalidPath));
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)]
public void WindowsWildCharacterPath_Core()
{
Assert.Throws<FileNotFoundException>(() => Move(Path.Combine(TestDirectory, "*"), GetTestFilePath()));
Assert.Throws<FileNotFoundException>(() => Move(GetTestFilePath(), Path.Combine(TestDirectory, "*")));
Assert.Throws<FileNotFoundException>(() => Move(GetTestFilePath(), Path.Combine(TestDirectory, "Test*t")));
Assert.Throws<FileNotFoundException>(() => Move(GetTestFilePath(), Path.Combine(TestDirectory, "*Test")));
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Wild characters in path are allowed
public void UnixWildCharacterPath()
{
string testDir = GetTestFilePath();
string testFileSource = Path.Combine(testDir, "*");
string testFileShouldntMove = Path.Combine(testDir, "*t");
string testFileDest = Path.Combine(testDir, "*" + GetTestFileName());
Directory.CreateDirectory(testDir);
File.Create(testFileSource).Dispose();
File.Create(testFileShouldntMove).Dispose();
Move(testFileSource, testFileDest);
Assert.True(File.Exists(testFileDest));
Assert.False(File.Exists(testFileSource));
Assert.True(File.Exists(testFileShouldntMove));
Move(testFileDest, testFileSource);
Assert.False(File.Exists(testFileDest));
Assert.True(File.Exists(testFileSource));
Assert.True(File.Exists(testFileShouldntMove));
}
[Theory,
MemberData(nameof(ControlWhiteSpace))]
[PlatformSpecific(TestPlatforms.Windows)]
public void WindowsControlPath_Core(string whitespace)
{
FileInfo testFile = new FileInfo(GetTestFilePath());
Assert.ThrowsAny<IOException>(() => Move(testFile.FullName, Path.Combine(TestDirectory, whitespace)));
}
[Theory,
MemberData(nameof(SimpleWhiteSpace))]
[PlatformSpecific(TestPlatforms.Windows)]
public void WindowsSimpleWhitespacePath(string whitespace)
{
FileInfo testFile = new FileInfo(GetTestFilePath());
Assert.Throws<ArgumentException>(() => Move(testFile.FullName, whitespace));
}
[Theory,
MemberData(nameof(WhiteSpace))]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Whitespace in path allowed
public void UnixWhitespacePath(string whitespace)
{
FileInfo testFileSource = new FileInfo(GetTestFilePath());
testFileSource.Create().Dispose();
Move(testFileSource.FullName, Path.Combine(TestDirectory, whitespace));
Move(Path.Combine(TestDirectory, whitespace), testFileSource.FullName);
}
[Theory,
InlineData("", ":bar"),
InlineData("", ":bar:$DATA"),
InlineData("::$DATA", ":bar"),
InlineData("::$DATA", ":bar:$DATA")]
[PlatformSpecific(TestPlatforms.Windows)]
public void WindowsAlternateDataStreamMove(string defaultStream, string alternateStream)
{
DirectoryInfo testDirectory = Directory.CreateDirectory(GetTestFilePath());
string testFile = Path.Combine(testDirectory.FullName, GetTestFileName());
string testFileDefaultStream = testFile + defaultStream;
string testFileAlternateStream = testFile + alternateStream;
// Cannot move into an alternate stream
File.WriteAllText(testFileDefaultStream, "Foo");
Assert.Throws<IOException>(() => Move(testFileDefaultStream, testFileAlternateStream));
// Cannot move out of an alternate stream
File.WriteAllText(testFileAlternateStream, "Bar");
string testFile2 = Path.Combine(testDirectory.FullName, GetTestFileName());
Assert.Throws<IOException>(() => Move(testFileAlternateStream, testFile2));
}
#endregion
}
}
| |
// Part of fCraft | fCraft is copyright (c) 2009-2013 Matvei Stefarov <[email protected]> | BSD-3 | See LICENSE.txt
/*
THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
PARTICULAR PURPOSE.
This is sample code and is freely distributable.
*/
using System;
using System.Collections;
using System.Drawing;
using System.Drawing.Imaging;
namespace ImageManipulation {
// Part of ImageManipulation library by Morgan Skinner of Microsoft
// Used here under MSPL
sealed unsafe class OctreeQuantizer : Quantizer {
/// <summary> Construct the octree quantizer. </summary>
/// <remarks> The Octree quantizer is a two-pass algorithm. The initial pass sets up the octree;
/// the second pass quantizes a color based on the nodes in the tree. </remarks>
/// <param name="maxColors"> The maximum number of colors to return. </param>
/// <param name="maxColorBits"> The number of significant bits. </param>
public OctreeQuantizer( int maxColors, int maxColorBits )
: base( false ) {
if( maxColors > 255 )
throw new ArgumentOutOfRangeException( "maxColors", maxColors,
"The number of colors should be less than 256" );
if( ( maxColorBits < 1 ) | ( maxColorBits > 8 ) )
throw new ArgumentOutOfRangeException( "maxColorBits", maxColorBits, "This should be between 1 and 8" );
// Construct the octree
octree = new Octree( maxColorBits );
this.maxColors = maxColors;
}
/// <summary> Process the pixel in the first pass of the algorithm. </summary>
/// <param name="pixel"> The pixel to quantize. </param>
/// <remarks> This function need only be overridden if your quantize algorithm needs two passes,
/// such as an Octree quantizer. </remarks>
protected override void InitialQuantizePixel( Color32* pixel ) {
// Add the color to the octree
octree.AddColor( pixel );
}
/// <summary> Override this to process the pixel in the second pass of the algorithm. </summary>
/// <param name="pixel"> The pixel to quantize. </param>
/// <returns> The quantized value. </returns>
protected override byte QuantizePixel( Color32* pixel ) {
byte paletteIndex = (byte)maxColors; // The color at [_maxColors] is set to transparent
// Get the palette index if this pixel is non-transparent
if( pixel->Alpha > 0 )
paletteIndex = (byte)octree.GetPaletteIndex( pixel );
return paletteIndex;
}
/// <summary> Retrieve the palette for the quantized image. </summary>
/// <param name="original"> Any old palette, this is overrwritten. </param>
/// <returns> The new color palette. </returns>
protected override ColorPalette GetPalette( ColorPalette original ) {
// First off convert the octree to _maxColors colors
ArrayList palette = octree.Palletize( maxColors - 1 );
// Then convert the palette based on those colors
for( int index = 0; index < palette.Count; index++ )
original.Entries[index] = (Color)palette[index];
// Add the transparent color
original.Entries[maxColors] = Color.FromArgb( 0, 0, 0, 0 );
return original;
}
/// <summary> Stores the tree. </summary>
readonly Octree octree;
/// <summary> Maximum number of colors allowed in the palette. </summary>
readonly int maxColors;
/// <summary> Class which does the actual quantization. </summary>
sealed class Octree {
/// <summary> Construct the octree. </summary>
/// <param name="maxColorBits"> The maximum number of significant bits in the image. </param>
public Octree( int maxColorBits ) {
this.maxColorBits = maxColorBits;
leafCount = 0;
reducibleNodes = new OctreeNode[9];
root = new OctreeNode( 0, this.maxColorBits, this );
previousColor = 0;
previousNode = null;
}
/// <summary> Add a given color value to the octree. </summary>
public void AddColor( Color32* pixel ) {
// Check if this request is for the same color as the last
if( previousColor == pixel->ARGB ) {
// If so, check if I have a previous node setup. This will only occur if the first color in the image
// happens to be black, with an alpha component of zero.
if( null == previousNode ) {
previousColor = pixel->ARGB;
root.AddColor( pixel, maxColorBits, 0, this );
} else
// Just update the previous node
previousNode.Increment( pixel );
} else {
previousColor = pixel->ARGB;
root.AddColor( pixel, maxColorBits, 0, this );
}
}
/// <summary> Reduce the depth of the tree. </summary>
void Reduce() {
int index;
// Find the deepest level containing at least one reducible node
for( index = maxColorBits - 1; ( index > 0 ) && ( null == reducibleNodes[index] ); index-- ) {}
// Reduce the node most recently added to the list at level 'index'
OctreeNode node = reducibleNodes[index];
reducibleNodes[index] = node.NextReducible;
// Decrement the leaf count after reducing the node
leafCount -= node.Reduce();
// And just in case I've reduced the last color to be added, and the next color to
// be added is the same, invalidate the previousNode...
previousNode = null;
}
/// <summary> Get/Set the number of leaves in the tree. </summary>
int Leaves {
get { return leafCount; }
set { leafCount = value; }
}
/// <summary> Return the array of reducible nodes. </summary>
OctreeNode[] ReducibleNodes {
get { return reducibleNodes; }
}
/// <summary> Keep track of the previous node that was quantized. </summary>
/// <param name="node"> The node last quantized. </param>
void TrackPrevious( OctreeNode node ) {
previousNode = node;
}
/// <summary> Convert the nodes in the octree to a palette with a maximum of colorCount colors. </summary>
/// <param name="colorCount"> The maximum number of colors. </param>
/// <returns> An ArrayList with the palettized colors. </returns>
public ArrayList Palletize( int colorCount ) {
while( Leaves > colorCount )
Reduce();
// Now palettize the nodes
ArrayList palette = new ArrayList( Leaves );
int paletteIndex = 0;
root.ConstructPalette( palette, ref paletteIndex );
// And return the palette
return palette;
}
/// <summary> Get the palette index for the passed color. </summary>
public int GetPaletteIndex( Color32* pixel ) {
return root.GetPaletteIndex( pixel, 0 );
}
/// <summary> Mask used when getting the appropriate pixels for a given node. </summary>
static readonly int[] Mask = new[] { 0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01 };
/// <summary> The root of the octree. </summary>
readonly OctreeNode root;
/// <summary> Number of leaves in the tree. </summary>
int leafCount;
/// <summary> Array of reducible nodes. </summary>
readonly OctreeNode[] reducibleNodes;
/// <summary> Maximum number of significant bits in the image. </summary>
readonly int maxColorBits;
/// <summary> Store the last node quantized. </summary>
OctreeNode previousNode;
/// <summary> Cache the previous color quantized. </summary>
int previousColor;
/// <summary> Class which encapsulates each node in the tree. </summary>
sealed class OctreeNode {
/// <summary> Construct the node. </summary>
/// <param name="level"> The level in the tree = 0 - 7 </param>
/// <param name="colorBits"> The number of significant color bits in the image. </param>
/// <param name="octree"> The tree to which this node belongs. </param>
public OctreeNode( int level, int colorBits, Octree octree ) {
// Construct the new node
leaf = ( level == colorBits );
red = green = blue = 0;
pixelCount = 0;
// If a leaf, increment the leaf count
if( leaf ) {
octree.Leaves++;
NextReducible = null;
children = null;
} else {
// Otherwise add this to the reducible nodes
NextReducible = octree.ReducibleNodes[level];
octree.ReducibleNodes[level] = this;
children = new OctreeNode[8];
}
}
/// <summary> Add a color into the tree. </summary>
/// <param name="pixel"> The color. </param>
/// <param name="colorBits"> The number of significant color bits. </param>
/// <param name="level"> The level in the tree. </param>
/// <param name="octree"> The tree to which this node belongs. </param>
public void AddColor( Color32* pixel, int colorBits, int level, Octree octree ) {
// Update the color information if this is a leaf
if( leaf ) {
Increment( pixel );
// Setup the previous node
octree.TrackPrevious( this );
} else {
// Go to the next level down in the tree
int shift = 7 - level;
int index = ( ( pixel->Red & Mask[level] ) >> ( shift - 2 ) ) |
( ( pixel->Green & Mask[level] ) >> ( shift - 1 ) ) |
( ( pixel->Blue & Mask[level] ) >> ( shift ) );
OctreeNode child = children[index];
if( null == child ) {
// Create a new child node & store in the array
child = new OctreeNode( level + 1, colorBits, octree );
children[index] = child;
}
// Add the color to the child node
child.AddColor( pixel, colorBits, level + 1, octree );
}
}
/// <summary> Get/Set the next reducible node. </summary>
public OctreeNode NextReducible { get; private set; }
/// <summary> Reduce this node by removing all of its children. </summary>
/// <returns> The number of leaves removed. </returns>
public int Reduce() {
red = green = blue = 0;
int reducedChildren = 0;
// Loop through all children and add their information to this node
for( int index = 0; index < 8; index++ ) {
if( null != children[index] ) {
red += children[index].red;
green += children[index].green;
blue += children[index].blue;
pixelCount += children[index].pixelCount;
++reducedChildren;
children[index] = null;
}
}
// Now change this to a leaf node
leaf = true;
// Return the number of nodes to decrement the leaf count by
return ( reducedChildren - 1 );
}
/// <summary> Traverse the tree, building up the color palette. </summary>
/// <param name="palette"> The palette. </param>
/// <param name="currentPaletteIndex"> The current palette index. </param>
public void ConstructPalette( IList palette, ref int currentPaletteIndex ) {
if( leaf ) {
// Consume the next palette index
paletteIndex = currentPaletteIndex++;
// And set the color of the palette entry
palette.Add( Color.FromArgb( red / pixelCount, green / pixelCount, blue / pixelCount ) );
} else {
// Loop through children looking for leaves
for( int index = 0; index < 8; index++ ) {
if( null != children[index] )
children[index].ConstructPalette( palette, ref currentPaletteIndex );
}
}
}
/// <summary> Return the palette index for the passed color. </summary>
public int GetPaletteIndex( Color32* pixel, int level ) {
int result = paletteIndex;
if( !leaf ) {
int shift = 7 - level;
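// Same bit-packing as in AddColor: one bit from each of R, G and B selects which child to descend into.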
int index = ( ( pixel->Red & Mask[level] ) >> ( shift - 2 ) ) |
( ( pixel->Green & Mask[level] ) >> ( shift - 1 ) ) |
( ( pixel->Blue & Mask[level] ) >> ( shift ) );
if( null != children[index] )
result = children[index].GetPaletteIndex( pixel, level + 1 );
else
throw new Exception( "Didn't expect this!" );
}
return result;
}
/// <summary> Increment the pixel count and add to the color information. </summary>
public void Increment( Color32* pixel ) {
pixelCount++;
red += pixel->Red;
green += pixel->Green;
blue += pixel->Blue;
}
/// <summary> Flag indicating that this is a leaf node. </summary>
bool leaf;
/// <summary> Number of pixels in this node. </summary>
int pixelCount;
/// <summary> Red component. </summary>
int red;
/// <summary> Green Component. </summary>
int green;
/// <summary> Blue component. </summary>
int blue;
/// <summary> Pointers to any child nodes. </summary>
readonly OctreeNode[] children;
/// <summary> The index of this node in the palette. </summary>
int paletteIndex;
}
}
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace ParentLoad.Business.ERLevel
{
/// <summary>
/// A04_SubContinent (editable child object).<br/>
/// This is a generated base class of <see cref="A04_SubContinent"/> business object.
/// </summary>
/// <remarks>
/// This class contains one child collection:<br/>
/// - <see cref="A05_CountryObjects"/> of type <see cref="A05_CountryColl"/> (1:M relation to <see cref="A06_Country"/>)<br/>
/// This class is an item of <see cref="A03_SubContinentColl"/> collection.
/// </remarks>
[Serializable]
public partial class A04_SubContinent : BusinessBase<A04_SubContinent>
{
#region Static Fields
private static int _lastID;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="SubContinent_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> SubContinent_IDProperty = RegisterProperty<int>(p => p.SubContinent_ID, "Sub Continent ID");
/// <summary>
/// Gets the Sub Continent ID.
/// </summary>
/// <value>The Sub Continent ID.</value>
public int SubContinent_ID
{
get { return GetProperty(SubContinent_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="SubContinent_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> SubContinent_NameProperty = RegisterProperty<string>(p => p.SubContinent_Name, "Sub Continent Name");
/// <summary>
/// Gets or sets the Sub Continent Name.
/// </summary>
/// <value>The Sub Continent Name.</value>
public string SubContinent_Name
{
get { return GetProperty(SubContinent_NameProperty); }
set { SetProperty(SubContinent_NameProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="A05_SubContinent_SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<A05_SubContinent_Child> A05_SubContinent_SingleObjectProperty = RegisterProperty<A05_SubContinent_Child>(p => p.A05_SubContinent_SingleObject, "A05 SubContinent Single Object", RelationshipTypes.Child);
/// <summary>
/// Gets the A05 Sub Continent Single Object ("parent load" child property).
/// </summary>
/// <value>The A05 Sub Continent Single Object.</value>
public A05_SubContinent_Child A05_SubContinent_SingleObject
{
get { return GetProperty(A05_SubContinent_SingleObjectProperty); }
private set { LoadProperty(A05_SubContinent_SingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="A05_SubContinent_ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<A05_SubContinent_ReChild> A05_SubContinent_ASingleObjectProperty = RegisterProperty<A05_SubContinent_ReChild>(p => p.A05_SubContinent_ASingleObject, "A05 SubContinent ASingle Object", RelationshipTypes.Child);
/// <summary>
/// Gets the A05 Sub Continent ASingle Object ("parent load" child property).
/// </summary>
/// <value>The A05 Sub Continent ASingle Object.</value>
public A05_SubContinent_ReChild A05_SubContinent_ASingleObject
{
get { return GetProperty(A05_SubContinent_ASingleObjectProperty); }
private set { LoadProperty(A05_SubContinent_ASingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="A05_CountryObjects"/> property.
/// </summary>
public static readonly PropertyInfo<A05_CountryColl> A05_CountryObjectsProperty = RegisterProperty<A05_CountryColl>(p => p.A05_CountryObjects, "A05 Country Objects", RelationshipTypes.Child);
/// <summary>
/// Gets the A05 Country Objects ("parent load" child property).
/// </summary>
/// <value>The A05 Country Objects.</value>
public A05_CountryColl A05_CountryObjects
{
get { return GetProperty(A05_CountryObjectsProperty); }
private set { LoadProperty(A05_CountryObjectsProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="A04_SubContinent"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="A04_SubContinent"/> object.</returns>
internal static A04_SubContinent NewA04_SubContinent()
{
return DataPortal.CreateChild<A04_SubContinent>();
}
/// <summary>
/// Factory method. Loads a <see cref="A04_SubContinent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="A04_SubContinent"/> object.</returns>
internal static A04_SubContinent GetA04_SubContinent(SafeDataReader dr)
{
A04_SubContinent obj = new A04_SubContinent();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(dr);
obj.LoadProperty(A05_CountryObjectsProperty, A05_CountryColl.NewA05_CountryColl());
obj.MarkOld();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="A04_SubContinent"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public A04_SubContinent()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="A04_SubContinent"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
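// New objects receive a temporary negative ID (a static counter decremented via Interlocked);
// the real ID is assigned by the database in Child_Insert through the output parameter.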
LoadProperty(SubContinent_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
LoadProperty(A05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<A05_SubContinent_Child>());
LoadProperty(A05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<A05_SubContinent_ReChild>());
LoadProperty(A05_CountryObjectsProperty, DataPortal.CreateChild<A05_CountryColl>());
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="A04_SubContinent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(SubContinent_IDProperty, dr.GetInt32("SubContinent_ID"));
LoadProperty(SubContinent_NameProperty, dr.GetString("SubContinent_Name"));
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Loads child <see cref="A05_SubContinent_Child"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(A05_SubContinent_Child child)
{
LoadProperty(A05_SubContinent_SingleObjectProperty, child);
}
/// <summary>
/// Loads child <see cref="A05_SubContinent_ReChild"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(A05_SubContinent_ReChild child)
{
LoadProperty(A05_SubContinent_ASingleObjectProperty, child);
}
/// <summary>
/// Inserts a new <see cref="A04_SubContinent"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(A02_Continent parent)
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("AddA04_SubContinent", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Parent_Continent_ID", parent.Continent_ID).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).Direction = ParameterDirection.Output;
cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnInsertPre(args);
cmd.ExecuteNonQuery();
OnInsertPost(args);
LoadProperty(SubContinent_IDProperty, (int) cmd.Parameters["@SubContinent_ID"].Value);
}
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="A04_SubContinent"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("UpdateA04_SubContinent", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnUpdatePre(args);
cmd.ExecuteNonQuery();
OnUpdatePost(args);
}
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Self deletes the <see cref="A04_SubContinent"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
// flushes all pending data operations
FieldManager.UpdateChildren(this);
using (var cmd = new SqlCommand("DeleteA04_SubContinent", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32;
var args = new DataPortalHookArgs(cmd);
OnDeletePre(args);
cmd.ExecuteNonQuery();
OnDeletePost(args);
}
}
// removes all previous references to children
LoadProperty(A05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<A05_SubContinent_Child>());
LoadProperty(A05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<A05_SubContinent_ReChild>());
LoadProperty(A05_CountryObjectsProperty, DataPortal.CreateChild<A05_CountryColl>());
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Data;
using System.Reflection;
using System.Collections.Generic;
using log4net;
using MySql.Data.MySqlClient;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Data;
namespace OpenSim.Data.MySQL
{
/// <summary>
/// A MySQL Interface for the Asset Server
/// </summary>
public class MySQLAssetData : AssetDataBase
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private string m_connectionString;
private object m_dbLock = new object();
protected virtual Assembly Assembly
{
get { return GetType().Assembly; }
}
#region IPlugin Members
public override string Version { get { return "1.0.0.0"; } }
/// <summary>
/// <para>Initialises Asset interface</para>
/// <para>
/// <list type="bullet">
/// <item>Loads and initialises the MySQL storage plugin.</item>
/// <item>Warns and uses the obsolete mysql_connection.ini if connect string is empty.</item>
/// <item>Check for migration</item>
/// </list>
/// </para>
/// </summary>
/// <param name="connect">connect string</param>
public override void Initialise(string connect)
{
m_connectionString = connect;
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
Migration m = new Migration(dbcon, Assembly, "AssetStore");
m.Update();
}
}
public override void Initialise()
{
throw new NotImplementedException();
}
public override void Dispose() { }
/// <summary>
/// The name of this DB provider
/// </summary>
override public string Name
{
get { return "MySQL Asset storage engine"; }
}
#endregion
#region IAssetDataPlugin Members
/// <summary>
/// Fetch Asset <paramref name="assetID"/> from database
/// </summary>
/// <param name="assetID">Asset UUID to fetch</param>
/// <returns>Return the asset</returns>
/// <remarks>On failure the exception is logged and null is returned.</remarks>
override public AssetBase GetAsset(UUID assetID)
{
AssetBase asset = null;
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
using (MySqlCommand cmd = new MySqlCommand(
"SELECT name, description, assetType, local, temporary, asset_flags, CreatorID, data FROM assets WHERE id=?id",
dbcon))
{
cmd.Parameters.AddWithValue("?id", assetID.ToString());
try
{
using (MySqlDataReader dbReader = cmd.ExecuteReader(CommandBehavior.SingleRow))
{
if (dbReader.Read())
{
asset = new AssetBase(assetID, (string)dbReader["name"], (sbyte)dbReader["assetType"], dbReader["CreatorID"].ToString());
asset.Data = (byte[])dbReader["data"];
asset.Description = (string)dbReader["description"];
string local = dbReader["local"].ToString();
if (local.Equals("1") || local.Equals("true", StringComparison.InvariantCultureIgnoreCase))
asset.Local = true;
else
asset.Local = false;
asset.Temporary = Convert.ToBoolean(dbReader["temporary"]);
asset.Flags = (AssetFlags)Convert.ToInt32(dbReader["asset_flags"]);
}
}
}
catch (Exception e)
{
m_log.Error(
string.Format("[ASSETS DB]: MySql failure fetching asset {0}. Exception ", assetID), e);
}
}
}
}
return asset;
}
/// <summary>
/// Create an asset in database, or update it if existing.
/// </summary>
/// <param name="asset">Asset UUID to create</param>
/// <remarks>On failure : Throw an exception and attempt to reconnect to database</remarks>
override public void StoreAsset(AssetBase asset)
{
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
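// REPLACE INTO inserts a new row or overwrites an existing row with the same primary key,
// which is why this single statement covers both the create and the update case.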
using (MySqlCommand cmd =
new MySqlCommand(
"replace INTO assets(id, name, description, assetType, local, temporary, create_time, access_time, asset_flags, CreatorID, data)" +
"VALUES(?id, ?name, ?description, ?assetType, ?local, ?temporary, ?create_time, ?access_time, ?asset_flags, ?CreatorID, ?data)",
dbcon))
{
string assetName = asset.Name;
if (asset.Name.Length > 64)
{
assetName = asset.Name.Substring(0, 64);
m_log.WarnFormat(
"[ASSET DB]: Name '{0}' for asset {1} truncated from {2} to {3} characters on add",
asset.Name, asset.ID, asset.Name.Length, assetName.Length);
}
string assetDescription = asset.Description;
if (asset.Description.Length > 64)
{
assetDescription = asset.Description.Substring(0, 64);
m_log.WarnFormat(
"[ASSET DB]: Description '{0}' for asset {1} truncated from {2} to {3} characters on add",
asset.Description, asset.ID, asset.Description.Length, assetDescription.Length);
}
try
{
using (cmd)
{
// create unix epoch time
int now = (int)Utils.DateTimeToUnixTime(DateTime.UtcNow);
cmd.Parameters.AddWithValue("?id", asset.ID);
cmd.Parameters.AddWithValue("?name", assetName);
cmd.Parameters.AddWithValue("?description", assetDescription);
cmd.Parameters.AddWithValue("?assetType", asset.Type);
cmd.Parameters.AddWithValue("?local", asset.Local);
cmd.Parameters.AddWithValue("?temporary", asset.Temporary);
cmd.Parameters.AddWithValue("?create_time", now);
cmd.Parameters.AddWithValue("?access_time", now);
cmd.Parameters.AddWithValue("?CreatorID", asset.Metadata.CreatorID);
cmd.Parameters.AddWithValue("?asset_flags", (int)asset.Flags);
cmd.Parameters.AddWithValue("?data", asset.Data);
cmd.ExecuteNonQuery();
}
}
catch (Exception e)
{
m_log.Error(
string.Format(
"[ASSET DB]: MySQL failure creating asset {0} with name {1}. Exception ",
asset.FullID, asset.Name)
, e);
}
}
}
}
}
private void UpdateAccessTime(AssetBase asset)
{
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
using (MySqlCommand cmd
= new MySqlCommand("update assets set access_time=?access_time where id=?id", dbcon))
{
try
{
using (cmd)
{
// create unix epoch time
int now = (int)Utils.DateTimeToUnixTime(DateTime.UtcNow);
cmd.Parameters.AddWithValue("?id", asset.ID);
cmd.Parameters.AddWithValue("?access_time", now);
cmd.ExecuteNonQuery();
}
}
catch (Exception e)
{
m_log.Error(
string.Format(
"[ASSETS DB]: Failure updating access_time for asset {0} with name {1}. Exception ",
asset.FullID, asset.Name),
e);
}
}
}
}
}
/// <summary>
/// Check if the asset exists in the database
/// </summary>
/// <param name="uuid">The asset UUID</param>
/// <returns>true if it exists, false otherwise.</returns>
override public bool ExistsAsset(UUID uuid)
{
// m_log.DebugFormat("[ASSETS DB]: Checking for asset {0}", uuid);
bool assetExists = false;
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
using (MySqlCommand cmd = new MySqlCommand("SELECT id FROM assets WHERE id=?id", dbcon))
{
cmd.Parameters.AddWithValue("?id", uuid.ToString());
try
{
using (MySqlDataReader dbReader = cmd.ExecuteReader(CommandBehavior.SingleRow))
{
if (dbReader.Read())
{
// m_log.DebugFormat("[ASSETS DB]: Found asset {0}", uuid);
assetExists = true;
}
}
}
catch (Exception e)
{
m_log.Error(
string.Format("[ASSETS DB]: MySql failure fetching asset {0}. Exception ", uuid), e);
}
}
}
}
return assetExists;
}
/// <summary>
/// Returns a list of AssetMetadata objects. The list is a subset of
/// the entire data set offset by <paramref name="start" /> containing
/// <paramref name="count" /> elements.
/// </summary>
/// <param name="start">The number of results to discard from the total data set.</param>
/// <param name="count">The number of rows the returned list should contain.</param>
/// <returns>A list of AssetMetadata objects.</returns>
public override List<AssetMetadata> FetchAssetMetadataSet(int start, int count)
{
List<AssetMetadata> retList = new List<AssetMetadata>(count);
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
using (MySqlCommand cmd
= new MySqlCommand(
"SELECT name,description,assetType,temporary,id,asset_flags,CreatorID FROM assets LIMIT ?start, ?count",
dbcon))
{
cmd.Parameters.AddWithValue("?start", start);
cmd.Parameters.AddWithValue("?count", count);
try
{
using (MySqlDataReader dbReader = cmd.ExecuteReader())
{
while (dbReader.Read())
{
AssetMetadata metadata = new AssetMetadata();
metadata.Name = (string)dbReader["name"];
metadata.Description = (string)dbReader["description"];
metadata.Type = (sbyte)dbReader["assetType"];
metadata.Temporary = Convert.ToBoolean(dbReader["temporary"]); // Not sure if this is correct.
metadata.Flags = (AssetFlags)Convert.ToInt32(dbReader["asset_flags"]);
metadata.FullID = DBGuid.FromDB(dbReader["id"]);
metadata.CreatorID = dbReader["CreatorID"].ToString();
// Current SHA1s are not stored/computed.
metadata.SHA1 = new byte[] { };
retList.Add(metadata);
}
}
}
catch (Exception e)
{
m_log.Error(
string.Format(
"[ASSETS DB]: MySql failure fetching asset set from {0}, count {1}. Exception ",
start, count),
e);
}
}
}
}
return retList;
}
public override bool Delete(string id)
{
lock (m_dbLock)
{
using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
{
dbcon.Open();
using (MySqlCommand cmd = new MySqlCommand("delete from assets where id=?id", dbcon))
{
cmd.Parameters.AddWithValue("?id", id);
cmd.ExecuteNonQuery();
}
}
}
return true;
}
#endregion
}
}
| |
// HtmlAgilityPack V1.0 - Simon Mourier <simon underscore mourier at hotmail dot com>
using System;
using System.Collections;
namespace HtmlAgilityPack
{
/// <summary>
/// Represents a combined list and collection of HTML nodes.
/// </summary>
public class HtmlNodeCollection : IEnumerable
{
private ArrayList _items = new ArrayList();
private HtmlNode _parentnode;
internal HtmlNodeCollection(HtmlNode parentnode)
{
_parentnode = parentnode; // may be null
}
/// <summary>
/// Gets the number of elements actually contained in the list.
/// </summary>
public int Count
{
get
{
return _items.Count;
}
}
internal void Clear()
{
foreach (HtmlNode node in _items)
{
node._parentnode = null;
node._nextnode = null;
node._prevnode = null;
}
_items.Clear();
}
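// The mutating methods below keep each node's _prevnode/_nextnode sibling links
// consistent with the node's position in the backing ArrayList.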
internal void Remove(int index)
{
HtmlNode next = null;
HtmlNode prev = null;
HtmlNode oldnode = (HtmlNode)_items[index];
if (index > 0)
{
prev = (HtmlNode)_items[index - 1];
}
if (index < (_items.Count - 1))
{
next = (HtmlNode)_items[index + 1];
}
_items.RemoveAt(index);
if (prev != null)
{
if (next == prev)
{
throw new InvalidProgramException("Unexpected error.");
}
prev._nextnode = next;
}
if (next != null)
{
next._prevnode = prev;
}
oldnode._prevnode = null;
oldnode._nextnode = null;
oldnode._parentnode = null;
}
internal void Replace(int index, HtmlNode node)
{
HtmlNode next = null;
HtmlNode prev = null;
HtmlNode oldnode = (HtmlNode)_items[index];
if (index > 0)
{
prev = (HtmlNode)_items[index - 1];
}
if (index < (_items.Count - 1))
{
next = (HtmlNode)_items[index + 1];
}
_items[index] = node;
if (prev != null)
{
if (node == prev)
{
throw new InvalidProgramException("Unexpected error.");
}
prev._nextnode = node;
}
if (next != null)
{
next._prevnode = node;
}
node._prevnode = prev;
if (next == node)
{
throw new InvalidProgramException("Unexpected error.");
}
node._nextnode = next;
node._parentnode = _parentnode;
oldnode._prevnode = null;
oldnode._nextnode = null;
oldnode._parentnode = null;
}
internal void Insert(int index, HtmlNode node)
{
HtmlNode next = null;
HtmlNode prev = null;
if (index > 0)
{
prev = (HtmlNode)_items[index - 1];
}
if (index < _items.Count)
{
next = (HtmlNode)_items[index];
}
_items.Insert(index, node);
if (prev != null)
{
if (node == prev)
{
throw new InvalidProgramException("Unexpected error.");
}
prev._nextnode = node;
}
if (next != null)
{
next._prevnode = node;
}
node._prevnode = prev;
if (next == node)
{
throw new InvalidProgramException("Unexpected error.");
}
node._nextnode = next;
node._parentnode = _parentnode;
}
internal void Append(HtmlNode node)
{
HtmlNode last = null;
if (_items.Count > 0)
{
last = (HtmlNode)_items[_items.Count - 1];
}
_items.Add(node);
node._prevnode = last;
node._nextnode = null;
node._parentnode = _parentnode;
if (last != null)
{
if (last == node)
{
throw new InvalidProgramException("Unexpected error.");
}
last._nextnode = node;
}
}
internal void Prepend(HtmlNode node)
{
HtmlNode first = null;
if (_items.Count > 0)
{
first = (HtmlNode)_items[0];
}
_items.Insert(0, node);
if (node == first)
{
throw new InvalidProgramException("Unexpected error.");
}
node._nextnode = first;
node._prevnode = null;
node._parentnode = _parentnode;
if (first != null)
{
first._prevnode = node;
}
}
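// Note: unlike Append, Add only places the node in the backing list and does not
// update the sibling or parent links.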
internal void Add(HtmlNode node)
{
_items.Add(node);
}
/// <summary>
/// Gets the node at the specified index.
/// </summary>
public HtmlNode this[int index]
{
get
{
return _items[index] as HtmlNode;
}
}
internal int GetNodeIndex(HtmlNode node)
{
// TODO: should we rewrite this? what would be the key of a node?
for (int i = 0; i < _items.Count; i++)
{
if (node == ((HtmlNode)_items[i]))
{
return i;
}
}
return -1;
}
/// <summary>
/// Gets a given node from the list.
/// </summary>
public int this[HtmlNode node]
{
get
{
int index = GetNodeIndex(node);
if (index == -1)
{
throw new ArgumentOutOfRangeException("node", "Node \"" + node.CloneNode(false).OuterHtml + "\" was not found in the collection");
}
return index;
}
}
/// <summary>
/// Returns an enumerator that can iterate through the list.
/// </summary>
/// <returns>An IEnumerator for the entire list.</returns>
public HtmlNodeEnumerator GetEnumerator()
{
return new HtmlNodeEnumerator(_items);
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
/// <summary>
/// Represents an enumerator that can iterate through the list.
/// </summary>
public class HtmlNodeEnumerator : IEnumerator
{
int _index;
ArrayList _items;
internal HtmlNodeEnumerator(ArrayList items)
{
_items = items;
_index = -1;
}
/// <summary>
/// Sets the enumerator to its initial position, which is before the first element in the collection.
/// </summary>
public void Reset()
{
_index = -1;
}
/// <summary>
/// Advances the enumerator to the next element of the collection.
/// </summary>
/// <returns>true if the enumerator was successfully advanced to the next element, false if the enumerator has passed the end of the collection.</returns>
public bool MoveNext()
{
_index++;
return (_index < _items.Count);
}
/// <summary>
/// Gets the current element in the collection.
/// </summary>
public HtmlNode Current
{
get
{
return (HtmlNode)(_items[_index]);
}
}
/// <summary>
/// Gets the current element in the collection.
/// </summary>
object IEnumerator.Current
{
get
{
return (Current);
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Signum.Engine.Basics;
using Signum.Engine.Operations.Internal;
using Signum.Entities;
using Signum.Entities.Basics;
using Signum.Utilities;
namespace Signum.Engine.Operations
{
public delegate F Overrider<F>(F baseFunc);
public class Graph<T>
where T : class, IEntity
{
public class Construct : _Construct<T>, IConstructOperation
{
protected readonly OperationSymbol operationSymbol;
OperationSymbol IOperation.OperationSymbol { get { return operationSymbol; } }
Type IOperation.OverridenType { get { return typeof(T); } }
OperationType IOperation.OperationType { get { return OperationType.Constructor; } }
bool IOperation.Returns { get { return true; } }
Type? IOperation.ReturnType { get { return typeof(T); } }
IEnumerable<Enum>? IOperation.UntypedFromStates { get { return null; } }
IEnumerable<Enum>? IOperation.UntypedToStates { get { return Enumerable.Empty<Enum>(); } }
Type? IOperation.StateType { get { return null; } }
public bool LogAlsoIfNotSaved { get; set; }
//public Func<object[]?, T> Construct { get; set; } (inherited)
public bool Lite { get { return false; } }
public Construct(ConstructSymbol<T>.Simple symbol)
{
if (symbol == null)
throw AutoInitAttribute.ArgumentNullException(typeof(ConstructSymbol<T>.Simple), nameof(symbol));
this.operationSymbol = symbol.Symbol;
}
protected Construct(OperationSymbol operationSymbol)
{
this.operationSymbol = operationSymbol ?? throw new ArgumentNullException(nameof(operationSymbol));
}
public static Construct Untyped<B>(ConstructSymbol<B>.Simple symbol)
where B: class, IEntity
{
return new Construct(symbol.Symbol);
}
public void OverrideConstruct(Overrider<Func<object?[]?, T>> overrider)
{
this.Construct = overrider(this.Construct);
}
IEntity IConstructOperation.Construct(params object?[]? args)
{
using (HeavyProfiler.Log("Construct", () => operationSymbol.Key))
{
OperationLogic.AssertOperationAllowed(operationSymbol, typeof(T), inUserInterface: false);
OperationLogEntity? log = new OperationLogEntity
{
Operation = operationSymbol,
Start = TimeZoneManager.Now,
User = UserHolder.Current?.ToLite()!,
};
try
{
using (Transaction tr = new Transaction())
{
T? result = null;
using (OperationLogic.AllowSave<T>())
OperationLogic.OnSuroundOperation(this, log, null, args).EndUsing(_ =>
{
result = Construct(args);
AssertEntity(result);
if ((result != null && !result.IsNew) || LogAlsoIfNotSaved)
{
log.SetTarget(result);
log.End = TimeZoneManager.Now;
}
else
log = null;
});
if (log != null)
log.SaveLog();
return tr.Commit(result!);
}
}
catch (Exception ex)
{
OperationLogic.SetExceptionData(ex, operationSymbol, null, args);
if (LogAlsoIfNotSaved)
{
if (Transaction.InTestTransaction)
throw;
var exLog = ex.LogException();
using (Transaction tr2 = Transaction.ForceNew())
{
log!.Exception = exLog.ToLite();
log.SaveLog();
tr2.Commit();
}
}
throw;
}
}
}
protected virtual void AssertEntity(T entity)
{
}
public virtual void AssertIsValid()
{
if (Construct == null)
throw new InvalidOperationException("Operation {0} does not have Constructor initialized".FormatWith(operationSymbol));
}
public override string ToString()
{
return "{0} Construct {1}".FormatWith(operationSymbol, typeof(T));
}
}
public class ConstructFrom<F> : IConstructorFromOperation
where F : class, IEntity
{
protected readonly OperationSymbol operationSymbol;
OperationSymbol IOperation.OperationSymbol { get { return operationSymbol; } }
Type IOperation.OverridenType { get { return typeof(F); } }
OperationType IOperation.OperationType { get { return OperationType.ConstructorFrom; } }
IEnumerable<Enum>? IOperation.UntypedFromStates { get { return null; } }
IEnumerable<Enum>? IOperation.UntypedToStates { get { return Enumerable.Empty<Enum>(); } }
Type? IOperation.StateType { get { return null; } }
public bool CanBeModified { get; set; }
public bool LogAlsoIfNotSaved { get; set; }
bool IOperation.Returns { get { return true; } }
Type? IOperation.ReturnType { get { return typeof(T); } }
protected readonly Type baseType;
Type IEntityOperation.BaseType { get { return baseType; } }
bool IEntityOperation.HasCanExecute { get { return CanConstruct != null; } }
public bool CanBeNew { get; set; }
public Func<F, string?>? CanConstruct { get; set; }
public ConstructFrom<F> OverrideCanConstruct(Overrider<Func<F, string?>> overrider)
{
this.CanConstruct = overrider(this.CanConstruct ?? (f => null));
return this;
}
public Func<F, object?[]?, T> Construct { get; set; } = null!;
public void OverrideConstruct(Overrider<Func<F, object?[]?, T>> overrider)
{
this.Construct = overrider(this.Construct);
}
public ConstructFrom(ConstructSymbol<T>.From<F> symbol)
{
if (symbol == null)
throw AutoInitAttribute.ArgumentNullException(typeof(ConstructSymbol<T>.From<F>), nameof(symbol));
this.operationSymbol = symbol.Symbol;
this.baseType = symbol.BaseType;
}
protected ConstructFrom(OperationSymbol operationSymbol, Type baseType)
{
this.operationSymbol = operationSymbol ?? throw new ArgumentNullException(nameof(operationSymbol));
this.baseType = baseType ?? throw new ArgumentNullException(nameof(baseType));
}
public static ConstructFrom<F> Untyped<B>(ConstructSymbol<B>.From<F> symbol)
where B : class, IEntity
{
return new ConstructFrom<F>(symbol.Symbol, symbol.BaseType);
}
string? IEntityOperation.CanExecute(IEntity entity)
{
return OnCanConstruct(entity);
}
string? OnCanConstruct(IEntity entity)
{
if (entity.IsNew && !CanBeNew)
return EngineMessage.TheEntity0IsNew.NiceToString().FormatWith(entity);
if (CanConstruct != null)
return CanConstruct((F)entity);
return null;
}
IEntity IConstructorFromOperation.Construct(IEntity origin, params object?[]? args)
{
using (HeavyProfiler.Log("ConstructFrom", () => operationSymbol.Key))
{
OperationLogic.AssertOperationAllowed(operationSymbol, origin.GetType(), inUserInterface: false);
string? error = OnCanConstruct(origin);
if (error != null)
throw new ApplicationException(error);
OperationLogEntity? log = new OperationLogEntity
{
Operation = operationSymbol,
Start = TimeZoneManager.Now,
User = UserHolder.Current?.ToLite()!,
Origin = origin.ToLite(origin.IsNew),
};
try
{
using (Transaction tr = new Transaction())
{
T? result = null;
using (OperationLogic.AllowSave(origin.GetType()))
using (OperationLogic.AllowSave<T>())
OperationLogic.OnSuroundOperation(this, log, origin, args).EndUsing(_ =>
{
result = Construct((F)origin, args);
AssertEntity(result);
if ((result != null && !result.IsNew) || LogAlsoIfNotSaved)
{
log.End = TimeZoneManager.Now;
log.SetTarget(result);
}
else
{
log = null;
}
});
if (log != null)
log.SaveLog();
return tr.Commit(result!);
}
}
catch (Exception ex)
{
OperationLogic.SetExceptionData(ex, operationSymbol, (Entity)origin, args);
if (LogAlsoIfNotSaved)
{
if (Transaction.InTestTransaction)
throw;
var exLog = ex.LogException();
using (Transaction tr2 = Transaction.ForceNew())
{
log!.Exception = exLog.ToLite();
log.SaveLog();
tr2.Commit();
}
}
throw;
}
}
}
protected virtual void AssertEntity(T entity)
{
}
public virtual void AssertIsValid()
{
if (Construct == null)
throw new InvalidOperationException("Operation {0} does not hace Construct initialized".FormatWith(operationSymbol));
}
public override string ToString()
{
return "{0} ConstructFrom {1} -> {2}".FormatWith(operationSymbol, typeof(F), typeof(T));
}
}
public class ConstructFromMany<F> : IConstructorFromManyOperation
where F : class, IEntity
{
protected readonly OperationSymbol operationSymbol;
OperationSymbol IOperation.OperationSymbol { get { return operationSymbol; } }
Type IOperation.OverridenType { get { return typeof(F); } }
OperationType IOperation.OperationType { get { return OperationType.ConstructorFromMany; } }
bool IOperation.Returns { get { return true; } }
Type? IOperation.ReturnType { get { return typeof(T); } }
protected readonly Type baseType;
Type IConstructorFromManyOperation.BaseType { get { return baseType; } }
IEnumerable<Enum>? IOperation.UntypedFromStates { get { return null; } }
IEnumerable<Enum>? IOperation.UntypedToStates { get { return Enumerable.Empty<Enum>(); } }
Type? IOperation.StateType { get { return null; } }
public bool LogAlsoIfNotSaved { get; set; }
public Func<List<Lite<F>>, object?[]?, T> Construct { get; set; } = null!;
public void OverrideConstruct(Overrider<Func<List<Lite<F>>, object?[]?, T>> overrider)
{
this.Construct = overrider(this.Construct);
}
public ConstructFromMany(ConstructSymbol<T>.FromMany<F> symbol)
{
if (symbol == null)
throw AutoInitAttribute.ArgumentNullException(typeof(ConstructSymbol<T>.FromMany<F>), nameof(symbol));
this.operationSymbol = symbol.Symbol;
this.baseType = symbol.BaseType;
}
protected ConstructFromMany(OperationSymbol operationSymbol, Type baseType)
{
this.operationSymbol = operationSymbol ?? throw new ArgumentNullException(nameof(operationSymbol));
this.baseType = baseType ?? throw new ArgumentNullException(nameof(baseType));
}
public static ConstructFromMany<F> Untyped<B>(ConstructSymbol<B>.FromMany<F> symbol)
where B : class, IEntity
{
return new ConstructFromMany<F>(symbol.Symbol, symbol.BaseType);
}
IEntity IConstructorFromManyOperation.Construct(IEnumerable<Lite<IEntity>> lites, params object?[]? args)
{
using (HeavyProfiler.Log("ConstructFromMany", () => operationSymbol.Key))
{
foreach (var type in lites.Select(a => a.EntityType).Distinct())
{
OperationLogic.AssertOperationAllowed(operationSymbol, type, inUserInterface: false);
}
OperationLogEntity? log = new OperationLogEntity
{
Operation = operationSymbol,
Start = TimeZoneManager.Now,
User = UserHolder.Current?.ToLite()!
};
try
{
using (Transaction tr = new Transaction())
{
T? result = null;
using (OperationLogic.AllowSave<F>())
using (OperationLogic.AllowSave<T>())
OperationLogic.OnSuroundOperation(this, log, null, args).EndUsing(_ =>
{
result = OnConstruct(lites.Cast<Lite<F>>().ToList(), args);
AssertEntity(result);
if ((result != null && !result.IsNew) || LogAlsoIfNotSaved)
{
log.End = TimeZoneManager.Now;
log.SetTarget(result);
}
else
{
log = null;
}
});
if (log != null)
log.SaveLog();
return tr.Commit(result!);
}
}
catch (Exception ex)
{
OperationLogic.SetExceptionData(ex, operationSymbol, null, args);
if (LogAlsoIfNotSaved)
{
if (Transaction.InTestTransaction)
throw;
var exLog = ex.LogException();
using (Transaction tr2 = Transaction.ForceNew())
{
log!.Exception = exLog.ToLite();
log.SaveLog();
tr2.Commit();
}
}
throw;
}
}
}
protected virtual T OnConstruct(List<Lite<F>> lites, object?[]? args)
{
return Construct(lites, args);
}
protected virtual void AssertEntity(T entity)
{
}
public virtual void AssertIsValid()
{
if (Construct == null)
throw new InvalidOperationException("Operation {0} Constructor initialized".FormatWith(operationSymbol));
}
public override string ToString()
{
return "{0} ConstructFromMany {1} -> {2}".FormatWith(operationSymbol, typeof(F), typeof(T));
}
}
public class Execute : _Execute<T>, IExecuteOperation
{
protected readonly ExecuteSymbol<T> Symbol;
OperationSymbol IOperation.OperationSymbol { get { return Symbol.Symbol; } }
Type IOperation.OverridenType { get { return typeof(T); } }
OperationType IOperation.OperationType { get { return OperationType.Execute; } }
public bool CanBeModified { get; set; }
bool IOperation.Returns { get { return true; } }
Type? IOperation.ReturnType { get { return null; } }
Type? IOperation.StateType { get { return null; } }
public bool AvoidImplicitSave { get; set; }
Type IEntityOperation.BaseType { get { return Symbol.BaseType; } }
bool IEntityOperation.HasCanExecute { get { return CanExecute != null; } }
IEnumerable<Enum>? IOperation.UntypedFromStates { get { return Enumerable.Empty<Enum>(); } }
IEnumerable<Enum>? IOperation.UntypedToStates { get { return Enumerable.Empty<Enum>(); } }
public bool CanBeNew { get; set; }
//public Action<T, object[]?> Execute { get; set; } (inherited)
public Func<T, string?>? CanExecute { get; set; }
public Execute OverrideCanExecute(Overrider<Func<T, string?>> overrider)
{
this.CanExecute = overrider(this.CanExecute ?? (t => null));
return this;
}
public void OverrideExecute(Overrider<Action<T, object?[]?>> overrider)
{
this.Execute = overrider(this.Execute);
}
public Execute(ExecuteSymbol<T> symbol)
{
this.Symbol = symbol ?? throw AutoInitAttribute.ArgumentNullException(typeof(ExecuteSymbol<T>), nameof(symbol));
}
string? IEntityOperation.CanExecute(IEntity entity)
{
return OnCanExecute((T)entity);
}
protected virtual string? OnCanExecute(T entity)
{
if (entity.IsNew && !CanBeNew)
return EngineMessage.TheEntity0IsNew.NiceToString().FormatWith(entity);
if (CanExecute != null)
return CanExecute(entity);
return null;
}
void IExecuteOperation.Execute(IEntity entity, params object?[]? args)
{
using (HeavyProfiler.Log("Execute", () => Symbol.Symbol.Key))
{
OperationLogic.AssertOperationAllowed(Symbol.Symbol, entity.GetType(), inUserInterface: false);
string? error = OnCanExecute((T)entity);
if (error != null)
throw new ApplicationException(error);
OperationLogEntity log = new OperationLogEntity
{
Operation = Symbol.Symbol,
Start = TimeZoneManager.Now,
User = UserHolder.Current?.ToLite()!
};
try
{
using (Transaction tr = new Transaction())
{
using (OperationLogic.AllowSave(entity.GetType()))
OperationLogic.OnSuroundOperation(this, log, entity, args).EndUsing(_ =>
{
Execute((T)entity, args);
AssertEntity((T)entity);
if (!AvoidImplicitSave)
entity.Save(); //Nothing happens if already saved
log.SetTarget(entity);
log.End = TimeZoneManager.Now;
});
log.SaveLog();
tr.Commit();
}
}
catch (Exception ex)
{
OperationLogic.SetExceptionData(ex, Symbol.Symbol, (Entity)entity, args);
if (Transaction.InTestTransaction)
throw;
var exLog = ex.LogException();
using (Transaction tr2 = Transaction.ForceNew())
{
OperationLogEntity newLog = new OperationLogEntity //The original transaction could have been rolled back just before committing
{
Operation = log.Operation,
Start = log.Start,
User = log.User,
Target = entity.IsNew ? null : entity.ToLite(),
Exception = exLog.ToLite(),
};
newLog.SaveLog();
tr2.Commit();
}
throw;
}
}
}
protected virtual void AssertEntity(T entity)
{
}
public virtual void AssertIsValid()
{
if (Execute == null)
throw new InvalidOperationException("Operation {0} does not have Execute initialized".FormatWith(Symbol));
}
public override string ToString()
{
return "{0} Execute on {1}".FormatWith(Symbol, typeof(T));
}
}
public class Delete : _Delete<T>, IDeleteOperation
{
protected readonly DeleteSymbol<T> Symbol;
OperationSymbol IOperation.OperationSymbol { get { return Symbol.Symbol; } }
Type IOperation.OverridenType { get { return typeof(T); } }
OperationType IOperation.OperationType { get { return OperationType.Delete; } }
public bool CanBeModified { get; set; }
bool IOperation.Returns { get { return false; } }
Type? IOperation.ReturnType { get { return null; } }
IEnumerable<Enum>? IOperation.UntypedFromStates { get { return Enumerable.Empty<Enum>(); } }
IEnumerable<Enum>? IOperation.UntypedToStates { get { return null; } }
Type? IOperation.StateType { get { return null; } }
public bool CanBeNew { get { return false; } }
Type IEntityOperation.BaseType { get { return Symbol.BaseType; } }
bool IEntityOperation.HasCanExecute { get { return CanDelete != null; } }
//public Action<T, object[]?> Delete { get; set; } (inherited)
public Func<T, string?>? CanDelete { get; set; }
public Delete OverrideCanDelete(Overrider<Func<T, string?>> overrider)
{
this.CanDelete = overrider(this.CanDelete ?? (t => null));
return this;
}
public void OverrideDelete(Overrider<Action<T, object?[]?>> overrider)
{
this.Delete = overrider(this.Delete);
}
public Delete(DeleteSymbol<T> symbol)
{
this.Symbol = symbol ?? throw AutoInitAttribute.ArgumentNullException(typeof(DeleteSymbol<T>), nameof(symbol));
}
string? IEntityOperation.CanExecute(IEntity entity)
{
return OnCanDelete((T)entity);
}
protected virtual string? OnCanDelete(T entity)
{
if (entity.IsNew)
return EngineMessage.TheEntity0IsNew.NiceToString().FormatWith(entity);
if (CanDelete != null)
return CanDelete(entity);
return null;
}
void IDeleteOperation.Delete(IEntity entity, params object?[]? args)
{
using (HeavyProfiler.Log("Delete", () => Symbol.Symbol.Key))
{
OperationLogic.AssertOperationAllowed(Symbol.Symbol, entity.GetType(), inUserInterface: false);
string? error = OnCanDelete((T)entity);
if (error != null)
throw new ApplicationException(error);
OperationLogEntity log = new OperationLogEntity
{
Operation = Symbol.Symbol,
Start = TimeZoneManager.Now,
User = UserHolder.Current?.ToLite()!,
};
using (OperationLogic.AllowSave(entity.GetType()))
OperationLogic.OnSuroundOperation(this, log, entity, args).EndUsing(_ =>
{
try
{
using (Transaction tr = new Transaction())
{
OnDelete((T)entity, args);
log.SetTarget(entity);
log.End = TimeZoneManager.Now;
log.SaveLog();
tr.Commit();
}
}
catch (Exception ex)
{
OperationLogic.SetExceptionData(ex, Symbol.Symbol, (Entity)entity, args);
if (Transaction.InTestTransaction)
throw;
var exLog = ex.LogException();
using (Transaction tr2 = Transaction.ForceNew())
{
log.Target = entity.ToLite();
log.Exception = exLog.ToLite();
log.SaveLog();
tr2.Commit();
}
throw;
}
});
}
}
protected virtual void OnDelete(T entity, object?[]? args)
{
Delete(entity, args);
}
public virtual void AssertIsValid()
{
if (Delete == null)
throw new InvalidOperationException("Operation {0} does not have Delete initialized".FormatWith(Symbol.Symbol));
}
public override string ToString()
{
return "{0} Delete {1}".FormatWith(Symbol.Symbol, typeof(T));
}
}
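// Rough registration sketch (illustrative only; OrderEntity and OrderOperation.Save are hypothetical
// symbols, and Register() is assumed to be the usual operation registration extension):
//   new Graph<OrderEntity>.Execute(OrderOperation.Save)
//   {
//       CanBeNew = true,
//       CanBeModified = true,
//       Execute = (order, _) => { /* mutate the entity; it is saved unless AvoidImplicitSave is set */ }
//   }.Register();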
}
}
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
using System;
using System.Collections.Generic;
using System.Linq;
using Twilio.Base;
using Twilio.Converters;
namespace Twilio.Rest.IpMessaging.V2.Service.Channel
{
/// <summary>
/// FetchInviteOptions
/// </summary>
public class FetchInviteOptions : IOptions<InviteResource>
{
/// <summary>
/// The service_sid
/// </summary>
public string PathServiceSid { get; }
/// <summary>
/// The channel_sid
/// </summary>
public string PathChannelSid { get; }
/// <summary>
/// The sid
/// </summary>
public string PathSid { get; }
/// <summary>
/// Construct a new FetchInviteOptions
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathChannelSid"> The channel_sid </param>
/// <param name="pathSid"> The sid </param>
public FetchInviteOptions(string pathServiceSid, string pathChannelSid, string pathSid)
{
PathServiceSid = pathServiceSid;
PathChannelSid = pathChannelSid;
PathSid = pathSid;
}
/// <summary>
/// Generate the necessary parameters
/// </summary>
public List<KeyValuePair<string, string>> GetParams()
{
var p = new List<KeyValuePair<string, string>>();
return p;
}
}
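// Illustrative usage sketch (the SID values below are placeholders; InviteResource.Fetch is the
// generated resource method that consumes these options, defined elsewhere in the SDK):
//   var options = new FetchInviteOptions("serviceSid", "channelSid", "inviteSid");
//   var invite = InviteResource.Fetch(options);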
/// <summary>
/// CreateInviteOptions
/// </summary>
public class CreateInviteOptions : IOptions<InviteResource>
{
/// <summary>
/// The service_sid
/// </summary>
public string PathServiceSid { get; }
/// <summary>
/// The channel_sid
/// </summary>
public string PathChannelSid { get; }
/// <summary>
/// The identity
/// </summary>
public string Identity { get; }
/// <summary>
/// The role_sid
/// </summary>
public string RoleSid { get; set; }
/// <summary>
/// Construct a new CreateInviteOptions
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathChannelSid"> The channel_sid </param>
/// <param name="identity"> The identity </param>
public CreateInviteOptions(string pathServiceSid, string pathChannelSid, string identity)
{
PathServiceSid = pathServiceSid;
PathChannelSid = pathChannelSid;
Identity = identity;
}
/// <summary>
/// Generate the necessary parameters
/// </summary>
public List<KeyValuePair<string, string>> GetParams()
{
var p = new List<KeyValuePair<string, string>>();
if (Identity != null)
{
p.Add(new KeyValuePair<string, string>("Identity", Identity));
}
if (RoleSid != null)
{
p.Add(new KeyValuePair<string, string>("RoleSid", RoleSid.ToString()));
}
return p;
}
}
/// <summary>
/// ReadInviteOptions
/// </summary>
public class ReadInviteOptions : ReadOptions<InviteResource>
{
/// <summary>
/// The service_sid
/// </summary>
public string PathServiceSid { get; }
/// <summary>
/// The channel_sid
/// </summary>
public string PathChannelSid { get; }
/// <summary>
/// The identity
/// </summary>
public List<string> Identity { get; set; }
/// <summary>
/// Construct a new ReadInviteOptions
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathChannelSid"> The channel_sid </param>
public ReadInviteOptions(string pathServiceSid, string pathChannelSid)
{
PathServiceSid = pathServiceSid;
PathChannelSid = pathChannelSid;
Identity = new List<string>();
}
/// <summary>
/// Generate the necessary parameters
/// </summary>
public override List<KeyValuePair<string, string>> GetParams()
{
var p = new List<KeyValuePair<string, string>>();
if (Identity != null)
{
p.AddRange(Identity.Select(prop => new KeyValuePair<string, string>("Identity", prop)));
}
if (PageSize != null)
{
p.Add(new KeyValuePair<string, string>("PageSize", PageSize.ToString()));
}
return p;
}
}
/// <summary>
/// DeleteInviteOptions
/// </summary>
public class DeleteInviteOptions : IOptions<InviteResource>
{
/// <summary>
/// The service_sid
/// </summary>
public string PathServiceSid { get; }
/// <summary>
/// The channel_sid
/// </summary>
public string PathChannelSid { get; }
/// <summary>
/// The sid
/// </summary>
public string PathSid { get; }
/// <summary>
/// Construct a new DeleteInviteOptions
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathChannelSid"> The channel_sid </param>
/// <param name="pathSid"> The sid </param>
public DeleteInviteOptions(string pathServiceSid, string pathChannelSid, string pathSid)
{
PathServiceSid = pathServiceSid;
PathChannelSid = pathChannelSid;
PathSid = pathSid;
}
/// <summary>
/// Generate the necessary parameters
/// </summary>
public List<KeyValuePair<string, string>> GetParams()
{
var p = new List<KeyValuePair<string, string>>();
return p;
}
}
}
| |
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
#region
using System;
using System.Collections.Generic;
using System.IO;
using ConsoleExamples.Examples;
using Encog.MathUtil.RBF;
using Encog.ML.Data;
using Encog.ML.Data.Basic;
using Encog.Neural.Data.Basic;
using Encog.Neural.Pattern;
using Encog.Neural.RBF;
using Encog.Neural.Rbf.Training;
#endregion
namespace Encog.Examples.MultiRadial
{
public class MultiRadial : IExample
{
/// <summary>
/// Input for the XOR function.
/// </summary>
public static double[][] INPUT;
public static double[][] IDEAL;
public static ExampleInfo Info
{
get
{
var info = new ExampleInfo(
typeof (MultiRadial),
"radial-multi",
"A RBF network example.",
"Use a RBF network to learn the XOR operator.");
return info;
}
}
#region IExample Members
public void Execute(IExampleInterface app)
{
//Specify the number of dimensions and the number of neurons per dimension
const int dimensions = 2;
const int numNeuronsPerDimension = 7;
//Set the standard RBF neuron width.
//Literature seems to suggest this is a good default value.
const double volumeNeuronWidth = 2.0/numNeuronsPerDimension;
//RBF can struggle when it comes to flats at the edge of the sample space.
//We have added the ability to include wider neurons on the sample space boundary which greatly
//improves fitting to flats
const bool includeEdgeRBFs = true;
#region Setup
//General setup is the same as before
var pattern = new RadialBasisPattern();
pattern.InputNeurons = dimensions;
pattern.OutputNeurons = 1;
//Total number of neurons required.
//Total number of Edges is calculated possibly for future use but not used any further here
int numNeurons = (int) Math.Pow(numNeuronsPerDimension, dimensions);
// int numEdges = (int) (dimensions*Math.Pow(2, dimensions - 1));
pattern.AddHiddenLayer(numNeurons);
var network = (RBFNetwork) pattern.Generate();
//RadialBasisFunctionLayer rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);
//Position the multidimensional RBF neurons, with equal spacing, within the provided sample space from 0 to 1.
//rbfLayer.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, dimensions, volumeNeuronWidth, includeEdgeRBFs);
network.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, volumeNeuronWidth, includeEdgeRBFs);
#endregion
//Create some training data that cannot easily be represented by Gaussians
//There are other training examples for both 1D and 2D
//Degenerate training data only provides outputs as 1 or 0 (averaging over all outputs for a given set of inputs would produce something approaching the smooth training data).
//Smooth training data provides true values for the provided input dimensions.
Create2DSmoothTainingDataGit();
//Create the training set and train.
var trainingSet = new BasicMLDataSet(INPUT, IDEAL);
var train = new SVDTraining(network, trainingSet);
//SVD is a single step solve
int epoch = 1;
do
{
train.Iteration();
Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
epoch++;
} while ((epoch < 1) && (train.Error > 0.001));
// test the neural network
Console.WriteLine(@"Neural Network Results:");
//Create a testing array which may be to a higher resolution than the original training data
Set2DTestingArrays(100);
trainingSet = new BasicNeuralDataSet(INPUT, IDEAL);
//Write out the results data
using (var sw = new StreamWriter("results.csv", false))
{
foreach (IMLDataPair pair in trainingSet)
{
var output = network.Compute(pair.Input);
//1D//sw.WriteLine(InverseScale(pair.Input[0]) + ", " + Chop(InverseScale(output[0])));// + ", " + pair.Ideal[0]);
sw.WriteLine(InverseScale(pair.Input[0]) + ", " + InverseScale(pair.Input[1]) + ", " +
Chop(InverseScale(output[0])));
// + ", " + pair.Ideal[0]);// + ",ideal=" + pair.Ideal[0]);
//3D//sw.WriteLine(InverseScale(pair.Input[0]) + ", " + InverseScale(pair.Input[1]) + ", " + InverseScale(pair.Input[2]) + ", " + Chop(InverseScale(output[0])));// + ", " + pair.Ideal[0]);// + ",ideal=" + pair.Ideal[0]);
//Console.WriteLine(pair.Input[0] + ", actual=" + output[0] + ",ideal=" + pair.Ideal[0]);
}
}
Console.WriteLine(@"\nFit output saved to results.csv");
Console.WriteLine(@"\nComplete - Please press the 'any' key to close.");
Console.ReadKey();
}
#endregion
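// Scale maps the raw [0, 1] sample range into [0.15, 0.85] so that training points stay away from
// the edges of the RBF sample space; InverseScale undoes that mapping when writing results out.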
private static double Scale(double x)
{
return (x*0.7) + 0.15;
}
private static double InverseScale(double x)
{
return (x - 0.15)/0.7;
}
private static double Chop(double x)
{
if (x > 0.99)
return 0.99;
return x < 0 ? 0 : x;
}
// ReSharper disable UnusedMember.Local
private static void SaveOutNeuronCentersAndWeights(double[][] centers, double[][] widths)
// ReSharper restore UnusedMember.Local
{
using (var sw = new StreamWriter("neuronCentersWeights.csv", false))
{
for (int i = 0; i < centers.Length; i++)
{
foreach (double value in centers[i])
sw.Write(value + ",");
foreach (double value in widths[i])
sw.Write(value + ",");
sw.WriteLine();
}
}
}
// ReSharper disable UnusedMember.Local
private static double[][] LoadMatrix(string fileName)
// ReSharper restore UnusedMember.Local
{
var allLines = File.ReadAllLines(fileName);
var matrix = new double[allLines.Length][];
for (int i = 0; i < allLines.Length; i++)
{
var values = allLines[i].Split(',');
matrix[i] = new double[values.Length];
for (int j = 0; j < values.Length; j++)
{
if (values[j] != "")
matrix[i][j] = Convert.ToDouble(values[j]);
else
matrix[i][j] = Double.NaN;
}
}
return matrix;
}
// ReSharper disable UnusedMember.Local
private static void SaveMatrix(IEnumerable<double[]> surface, string fileName)
// ReSharper restore UnusedMember.Local
{
using (var sw = new StreamWriter(fileName, false))
{
foreach (double[] t in surface)
{
foreach (double t1 in t)
{
if (double.IsNaN(t1))
sw.Write(",");
else
sw.Write(t1 + ",");
}
sw.WriteLine();
}
}
}
// ReSharper disable UnusedMember.Local
private static double[][] ConvertColumnsTo2DSurface(double[][] cols, int valueCol)
// ReSharper restore UnusedMember.Local
{
//if (cols[0].Length != 3)
// throw new Exception("Incorrect number of cols detected.");
double sideLength = Math.Sqrt(cols.Length);
var surface = new double[(int) sideLength + 1][];
for (int i = 0; i < surface.Length; i++)
{
surface[i] = new double[surface.Length];
}
foreach (double[] t in cols)
{
//[0] is x
//[1] is y
//Boundary bottom
//int rowIndex = (int)Math.Round(((cols[i][0]) * (sideLength-1)), 6);
//int columnIndex = (int)Math.Round(((cols[i][1]) * (sideLength-1)), 6);
//Boundary middle
int rowIndex = (int) Math.Round(((t[0] - 0.05)*(sideLength)), 6);
int columnIndex = (int) Math.Round(((t[1] - 0.05)*(sideLength)), 6);
surface[0][rowIndex + 1] = t[0];
surface[columnIndex + 1][0] = t[1];
surface[columnIndex + 1][rowIndex + 1] = t[valueCol];
}
//fix the 0,0 value
surface[0][0] = double.NaN;
return surface;
}
// ReSharper disable UnusedMember.Local
private static double[][] Convert2DSurfaceToColumns(IList<double[]> surface)
// ReSharper restore UnusedMember.Local
{
int totalRows = (surface.Count - 1)*(surface.Count - 1);
var cols = new double[totalRows][];
for (int i = 1; i < surface.Count; i++)
{
for (int j = 1; j < surface[i].Length; j++)
{
double cellWidth = (1.0/(2.0*(surface.Count - 1)));
cols[(i - 1)*(surface.Count - 1) + (j - 1)] = new double[3];
//For midpoints
cols[(i - 1)*(surface.Count - 1) + (j - 1)][0] = ((i - 1)/(double) (surface.Count - 1)) +
cellWidth;
cols[(i - 1)*(surface.Count - 1) + (j - 1)][1] = ((j - 1)/(double) (surface.Count - 1)) +
cellWidth;
//For actual value
//cols[(i - 1) * (surface.Length - 1) + (j - 1)][0] = ((double)(i - 1) / (double)(surface.Length - 1));
//cols[(i - 1) * (surface.Length - 1) + (j - 1)][1] = ((double)(j - 1) / (double)(surface.Length - 1));
cols[(i - 1)*(surface.Count - 1) + (j - 1)][2] = surface[j][i];
}
}
return cols;
}
#region LoadRealData
// ReSharper disable UnusedMember.Local
private static void LoadReal1DTrainingData(string fileName)
// ReSharper restore UnusedMember.Local
{
var allLines = File.ReadAllLines(fileName);
INPUT = new double[allLines.Length][];
IDEAL = new double[allLines.Length][];
for (int i = 0; i < allLines.Length; i++)
{
INPUT[i] = new double[1];
IDEAL[i] = new double[1];
var values = allLines[i].Split(',');
INPUT[i][0] = Scale((Convert.ToDouble(values[0]) - 0.05)*(1.0/0.9));
IDEAL[i][0] = Scale(Convert.ToDouble(values[1]));
}
}
// ReSharper disable UnusedMember.Local
private static void LoadReal2DTrainingData(string fileName)
// ReSharper restore UnusedMember.Local
{
var allLines = File.ReadAllLines(fileName);
INPUT = new double[allLines.Length][];
IDEAL = new double[allLines.Length][];
for (int i = 0; i < allLines.Length; i++)
{
INPUT[i] = new double[2];
IDEAL[i] = new double[1];
var values = allLines[i].Split(',');
INPUT[i][0] = Scale((Convert.ToDouble(values[0]) - 0.05)*(1.0/0.9));
INPUT[i][1] = Scale((Convert.ToDouble(values[1]) - 0.05)*(1.0/0.9));
IDEAL[i][0] = Scale(Convert.ToDouble(values[2]));
}
}
// ReSharper disable UnusedMember.Local
private static void LoadReal3DTrainingData(string fileName)
// ReSharper restore UnusedMember.Local
{
var allLines = File.ReadAllLines(fileName);
INPUT = new double[allLines.Length][];
IDEAL = new double[allLines.Length][];
for (int i = 0; i < allLines.Length; i++)
{
INPUT[i] = new double[3];
IDEAL[i] = new double[1];
var values = allLines[i].Split(',');
INPUT[i][0] = Scale(Convert.ToDouble(values[0]));
INPUT[i][1] = Scale(Convert.ToDouble(values[1]));
INPUT[i][2] = Scale(Convert.ToDouble(values[2]));
IDEAL[i][0] = Scale(Convert.ToDouble(values[3]));
}
}
#endregion
#region CreateTestingInputs
// ReSharper disable UnusedMember.Local
private static void Set1DTestingArrays(int sideLength)
// ReSharper restore UnusedMember.Local
{
int iLimit = sideLength;
INPUT = new double[(iLimit + 1)][];
IDEAL = new double[(iLimit + 1)][];
for (int i = 0; i <= iLimit; i++)
{
INPUT[i] = new double[1];
IDEAL[i] = new double[1];
//double x = i/(double) iLimit;
INPUT[i][0] = Scale((i/((double) iLimit)));
IDEAL[i][0] = 0;
}
}
private static void Set2DTestingArrays(int sideLength)
{
int iLimit = sideLength;
int kLimit = sideLength;
INPUT = new double[(iLimit + 1)*(kLimit + 1)][];
IDEAL = new double[(iLimit + 1)*(kLimit + 1)][];
for (int i = 0; i <= iLimit; i++)
{
for (int k = 0; k <= kLimit; k++)
{
INPUT[i*(kLimit + 1) + k] = new double[2];
IDEAL[i*(kLimit + 1) + k] = new double[1];
//double x = i/(double) iLimit;
//double y = k/(double) kLimit;
INPUT[i*(kLimit + 1) + k][0] = Scale((i/((double) iLimit)));
INPUT[i*(kLimit + 1) + k][1] = Scale((k/((double) kLimit)));
IDEAL[i*(kLimit + 1) + k][0] = 0;
}
}
}
// ReSharper disable UnusedMember.Local
private static void Set3DTestingArrays(int sideLength)
// ReSharper restore UnusedMember.Local
{
int iLimit = sideLength;
int kLimit = sideLength;
int jLimit = sideLength;
INPUT = new double[(iLimit + 1)*(kLimit + 1)*(jLimit + 1)][];
IDEAL = new double[(iLimit + 1)*(kLimit + 1)*(jLimit + 1)][];
for (int i = 0; i <= iLimit; i++)
{
for (int k = 0; k <= kLimit; k++)
{
for (int j = 0; j <= jLimit; j++)
{
int index = (i*(kLimit + 1)*(jLimit + 1)) + (j*(kLimit + 1)) + k;
INPUT[index] = new double[3];
IDEAL[index] = new double[1];
//double x = (double)i / (double)iLimit;
//double y = (double)k / (double)kLimit;
//double z = (double)j / (double)jLimit;
INPUT[index][0] = Scale((i/((double) iLimit)));
INPUT[index][1] = Scale((k/((double) kLimit)));
INPUT[index][2] = Scale((j/((double) jLimit)));
IDEAL[index][0] = 0;
}
}
}
}
#endregion
#region CreateTrainingData
// ReSharper disable UnusedMember.Local
private static void Create2DDegenerateTrainingDataHill()
// ReSharper restore UnusedMember.Local
{
var r = new Random();
const int iLimit = 30;
const int kLimit = 30;
const int jLimit = 1;
INPUT = new double[jLimit*iLimit*kLimit][];
IDEAL = new double[jLimit*iLimit*kLimit][];
for (int i = 0; i < iLimit; i++)
{
for (int k = 0; k < kLimit; k++)
{
for (int j = 0; j < jLimit; j++)
{
INPUT[i*jLimit*kLimit + k*jLimit + j] = new double[2];
IDEAL[i*jLimit*kLimit + k*jLimit + j] = new double[1];
double x = i/(double) iLimit;
double y = k/(double) kLimit;
INPUT[i*jLimit*kLimit + k*jLimit + j][0] = (i/((double) iLimit));
INPUT[i*jLimit*kLimit + k*jLimit + j][1] = (k/((double) kLimit));
IDEAL[i*jLimit*kLimit + k*jLimit + j][0] = (r.NextDouble() <
(Math.Exp(
-((x - 0.6)*(x - 0.6) + (y - 0.5)*(y - 0.5))*3) -
0.1))
? 1
: 0;
}
}
}
}
// ReSharper disable UnusedMember.Local
private static void Create2DSmoothTrainingDataHill()
// ReSharper restore UnusedMember.Local
{
var r = new Random();
const int iLimit = 100;
const int kLimit = 100;
const int jLimit = 10000;
INPUT = new double[(iLimit + 1)*(kLimit + 1)][];
IDEAL = new double[(iLimit + 1)*(kLimit + 1)][];
for (int i = 0; i <= iLimit; i++)
{
for (int k = 0; k <= kLimit; k++)
{
INPUT[i*(kLimit + 1) + k] = new double[2];
IDEAL[i*(kLimit + 1) + k] = new double[1];
double average = 0;
double x = i/(double) iLimit;
double y = k/(double) kLimit;
double expression = (Math.Exp(-((x - 0.5)*(x - 0.5) + (y - 0.6)*(y - 0.6))*3) - 0.1);
//if (r.NextDouble() < 0.4) jLimit = 5; else jLimit = 10;
for (int j = 0; j < jLimit; j++)
{
average += (r.NextDouble() < expression) ? 1 : 0;
}
INPUT[i*(kLimit + 1) + k][0] = Scale((i/((double) iLimit)));
INPUT[i*(kLimit + 1) + k][1] = Scale((k/((double) kLimit)));
IDEAL[i*(kLimit + 1) + k][0] = Scale((average/jLimit));
}
}
}
private static void Create2DSmoothTrainingDataGit()
{
const int iLimit = 10;
const int kLimit = 10;
//int jLimit = 100;
INPUT = new double[(iLimit + 1)*(kLimit + 1)][];
IDEAL = new double[(iLimit + 1)*(kLimit + 1)][];
for (int i = 0; i <= iLimit; i++)
{
for (int k = 0; k <= kLimit; k++)
{
INPUT[i*(kLimit + 1) + k] = new double[2];
IDEAL[i*(kLimit + 1) + k] = new double[1];
double x = i/(double) iLimit;
double y = k/(double) kLimit;
double expression = ((x + 1.0/3.0)*(2 + Math.Log10((y/(x + 0.1)) + 0.1)))/3;
INPUT[i*(kLimit + 1) + k][0] = Scale((i/((double) iLimit)));
INPUT[i*(kLimit + 1) + k][1] = Scale((k/((double) kLimit)));
IDEAL[i*(kLimit + 1) + k][0] = Scale(expression);
}
}
}
// ReSharper disable UnusedMember.Local
private static void Create2DDegenerateTrainingDataGit()
// ReSharper restore UnusedMember.Local
{
var r = new Random();
const int iLimit = 10;
const int kLimit = 10;
const int jLimit = 10;
INPUT = new double[jLimit*iLimit*kLimit][];
IDEAL = new double[jLimit*iLimit*kLimit][];
for (int i = 0; i < iLimit; i++)
{
for (int k = 0; k < kLimit; k++)
{
double x = i/(double) iLimit;
double y = k/(double) kLimit;
for (int j = 0; j < jLimit; j++)
{
INPUT[i*jLimit*kLimit + k*jLimit + j] = new double[2];
IDEAL[i*jLimit*kLimit + k*jLimit + j] = new double[1];
double expression = ((x + 1.0/3.0)*(2 + Math.Log10((y/(x + 0.1)) + 0.1)))/3;
INPUT[i*jLimit*kLimit + k*jLimit + j][0] = (i/((double) iLimit));
INPUT[i*jLimit*kLimit + k*jLimit + j][1] = (k/((double) kLimit));
IDEAL[i*jLimit*kLimit + k*jLimit + j][0] = (r.NextDouble() < expression) ? 1 : 0;
}
}
}
}
// ReSharper disable UnusedMember.Local
private static void Create1DDegenerateTrainingDataLine()
// ReSharper restore UnusedMember.Local
{
var r = new Random(14768);
const int iLimit = 10;
const int jLimit = 100;
INPUT = new double[iLimit*jLimit][];
IDEAL = new double[iLimit*jLimit][];
for (int i = 0; i < iLimit; i++)
{
for (int j = 0; j < jLimit; j++)
{
INPUT[i*jLimit + j] = new double[1];
IDEAL[i*jLimit + j] = new double[1];
double x = i/(double) iLimit;
INPUT[i*jLimit + j][0] = Scale(x);
IDEAL[i*jLimit + j][0] = Scale((r.NextDouble() < x) ? 1 : 0);
}
}
}
// ReSharper disable UnusedMember.Local
private static void Create1DSmoothTrainingDataLine()
// ReSharper restore UnusedMember.Local
{
var r = new Random(14768);
const int iLimit = 1000;
const int jLimit = 1;
INPUT = new double[iLimit][];
IDEAL = new double[iLimit][];
for (int i = 0; i < iLimit; i++)
{
INPUT[i] = new double[1];
IDEAL[i] = new double[1];
double average = 0;
double x = i/(double) iLimit;
for (int j = 0; j < jLimit; j++)
average += (r.NextDouble() < x) ? 1 : 0;
INPUT[i][0] = Scale(x);
IDEAL[i][0] = Scale(average/jLimit);
}
}
// ReSharper disable UnusedMember.Local
private static void Create1DSmoothTrainingDataCurveSimple()
// ReSharper restore UnusedMember.Local
{
var r = new Random(14768);
const int iLimit = 20;
const int jLimit = 10;
INPUT = new double[iLimit][];
IDEAL = new double[iLimit][];
for (int i = 0; i < iLimit; i++)
{
INPUT[i] = new double[1];
IDEAL[i] = new double[1];
double average = 0;
double x = i/(double) iLimit;
for (int j = 0; j < jLimit; j++)
average += (r.NextDouble() < (-4*Math.Pow(x, 2) + 4*x)) ? 1 : 0;
INPUT[i][0] = Scale(x);
IDEAL[i][0] = Scale(average/jLimit);
}
}
// ReSharper disable UnusedMember.Local
private static void Create1DSmoothTrainingDataCurveAdv()
// ReSharper restore UnusedMember.Local
{
var r = new Random(14768);
const int iLimit = 100;
const int jLimit = 100;
INPUT = new double[iLimit][];
IDEAL = new double[iLimit][];
for (int i = 0; i < iLimit; i++)
{
INPUT[i] = new double[1];
IDEAL[i] = new double[1];
double average = 0;
double x = i/(double) iLimit;
//double y = (-7.5 * Math.Pow(x, 4)) + (21.3 * Math.Pow(x, 3)) + (-22.3 * Math.Pow(x, 2)) + (10.4 * x) - 0.8;
double y = ((Math.Exp(2.0*(x*4.0 - 1)) - 1.0)/(Math.Exp(2.0*(x*4.0 - 1)) + 1.0))/2 + 0.5;
for (int j = 0; j < jLimit; j++)
{
average += (r.NextDouble() < y) ? 1 : 0;
}
INPUT[i][0] = Scale(x);
IDEAL[i][0] = Scale(average/jLimit);
}
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Globalization;
using Org.BouncyCastle.Math;
using Org.BouncyCastle.Math.EC;
using Org.BouncyCastle.Utilities;
using Org.BouncyCastle.Utilities.Collections;
using Org.BouncyCastle.Utilities.Encoders;
namespace Org.BouncyCastle.Asn1.X9
{
/**
* table of the current named curves defined in X.962 EC-DSA.
*/
public sealed class X962NamedCurves
{
private X962NamedCurves()
{
}
internal class Prime192v1Holder
: X9ECParametersHolder
{
private Prime192v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime192v1Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp192v1 = new FpCurve(
new BigInteger("6277101735386680763835789423207666416083908700390324961279"),
new BigInteger("fffffffffffffffffffffffffffffffefffffffffffffffc", 16),
new BigInteger("64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1", 16));
return new X9ECParameters(
cFp192v1,
cFp192v1.DecodePoint(
Hex.Decode("03188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012")),
new BigInteger("ffffffffffffffffffffffff99def836146bc9b1b4d22831", 16),
BigInteger.One,
Hex.Decode("3045AE6FC8422f64ED579528D38120EAE12196D5"));
}
}
internal class Prime192v2Holder
: X9ECParametersHolder
{
private Prime192v2Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime192v2Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp192v2 = new FpCurve(
new BigInteger("6277101735386680763835789423207666416083908700390324961279"),
new BigInteger("fffffffffffffffffffffffffffffffefffffffffffffffc", 16),
new BigInteger("cc22d6dfb95c6b25e49c0d6364a4e5980c393aa21668d953", 16));
return new X9ECParameters(
cFp192v2,
cFp192v2.DecodePoint(
Hex.Decode("03eea2bae7e1497842f2de7769cfe9c989c072ad696f48034a")),
new BigInteger("fffffffffffffffffffffffe5fb1a724dc80418648d8dd31", 16),
BigInteger.One,
Hex.Decode("31a92ee2029fd10d901b113e990710f0d21ac6b6"));
}
}
internal class Prime192v3Holder
: X9ECParametersHolder
{
private Prime192v3Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime192v3Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp192v3 = new FpCurve(
new BigInteger("6277101735386680763835789423207666416083908700390324961279"),
new BigInteger("fffffffffffffffffffffffffffffffefffffffffffffffc", 16),
new BigInteger("22123dc2395a05caa7423daeccc94760a7d462256bd56916", 16));
return new X9ECParameters(
cFp192v3,
cFp192v3.DecodePoint(
Hex.Decode("027d29778100c65a1da1783716588dce2b8b4aee8e228f1896")),
new BigInteger("ffffffffffffffffffffffff7a62d031c83f4294f640ec13", 16),
BigInteger.One,
Hex.Decode("c469684435deb378c4b65ca9591e2a5763059a2e"));
}
}
internal class Prime239v1Holder
: X9ECParametersHolder
{
private Prime239v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime239v1Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp239v1 = new FpCurve(
new BigInteger("883423532389192164791648750360308885314476597252960362792450860609699839"),
new BigInteger("7fffffffffffffffffffffff7fffffffffff8000000000007ffffffffffc", 16),
new BigInteger("6b016c3bdcf18941d0d654921475ca71a9db2fb27d1d37796185c2942c0a", 16));
return new X9ECParameters(
cFp239v1,
cFp239v1.DecodePoint(
Hex.Decode("020ffa963cdca8816ccc33b8642bedf905c3d358573d3f27fbbd3b3cb9aaaf")),
new BigInteger("7fffffffffffffffffffffff7fffff9e5e9a9f5d9071fbd1522688909d0b", 16),
BigInteger.One,
Hex.Decode("e43bb460f0b80cc0c0b075798e948060f8321b7d"));
}
}
internal class Prime239v2Holder
: X9ECParametersHolder
{
private Prime239v2Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime239v2Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp239v2 = new FpCurve(
new BigInteger("883423532389192164791648750360308885314476597252960362792450860609699839"),
new BigInteger("7fffffffffffffffffffffff7fffffffffff8000000000007ffffffffffc", 16),
new BigInteger("617fab6832576cbbfed50d99f0249c3fee58b94ba0038c7ae84c8c832f2c", 16));
return new X9ECParameters(
cFp239v2,
cFp239v2.DecodePoint(
Hex.Decode("0238af09d98727705120c921bb5e9e26296a3cdcf2f35757a0eafd87b830e7")),
new BigInteger("7fffffffffffffffffffffff800000cfa7e8594377d414c03821bc582063", 16),
BigInteger.One,
Hex.Decode("e8b4011604095303ca3b8099982be09fcb9ae616"));
}
}
internal class Prime239v3Holder
: X9ECParametersHolder
{
private Prime239v3Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime239v3Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp239v3 = new FpCurve(
new BigInteger("883423532389192164791648750360308885314476597252960362792450860609699839"),
new BigInteger("7fffffffffffffffffffffff7fffffffffff8000000000007ffffffffffc", 16),
new BigInteger("255705fa2a306654b1f4cb03d6a750a30c250102d4988717d9ba15ab6d3e", 16));
return new X9ECParameters(
cFp239v3,
cFp239v3.DecodePoint(
Hex.Decode("036768ae8e18bb92cfcf005c949aa2c6d94853d0e660bbf854b1c9505fe95a")),
new BigInteger("7fffffffffffffffffffffff7fffff975deb41b3a6057c3c432146526551", 16),
BigInteger.One,
Hex.Decode("7d7374168ffe3471b60a857686a19475d3bfa2ff"));
}
}
internal class Prime256v1Holder
: X9ECParametersHolder
{
private Prime256v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new Prime256v1Holder();
protected override X9ECParameters CreateParameters()
{
ECCurve cFp256v1 = new FpCurve(
new BigInteger("115792089210356248762697446949407573530086143415290314195533631308867097853951"),
new BigInteger("ffffffff00000001000000000000000000000000fffffffffffffffffffffffc", 16),
new BigInteger("5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b", 16));
return new X9ECParameters(
cFp256v1,
cFp256v1.DecodePoint(
Hex.Decode("036b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296")),
new BigInteger("ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551", 16),
BigInteger.One,
Hex.Decode("c49d360886e704936a6678e1139d26b7819f7e90"));
}
}
/*
* F2m Curves
*/
internal class C2pnb163v1Holder
: X9ECParametersHolder
{
private C2pnb163v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb163v1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0400000000000000000001E60FC8821CC74DAEAFC1", 16);
BigInteger h = BigInteger.ValueOf(2);
ECCurve c2m163v1 = new F2mCurve(
163,
1, 2, 8,
new BigInteger("072546B5435234A422E0789675F432C89435DE5242", 16),
new BigInteger("00C9517D06D5240D3CFF38C74B20B6CD4D6F9DD4D9", 16),
n, h);
return new X9ECParameters(
c2m163v1,
c2m163v1.DecodePoint(
Hex.Decode("0307AF69989546103D79329FCC3D74880F33BBE803CB")),
n, h,
Hex.Decode("D2COFB15760860DEF1EEF4D696E6768756151754"));
}
}
internal class C2pnb163v2Holder
: X9ECParametersHolder
{
private C2pnb163v2Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb163v2Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("03FFFFFFFFFFFFFFFFFFFDF64DE1151ADBB78F10A7", 16);
BigInteger h = BigInteger.ValueOf(2);
ECCurve c2m163v2 = new F2mCurve(
163,
1, 2, 8,
new BigInteger("0108B39E77C4B108BED981ED0E890E117C511CF072", 16),
new BigInteger("0667ACEB38AF4E488C407433FFAE4F1C811638DF20", 16),
n, h);
return new X9ECParameters(
c2m163v2,
c2m163v2.DecodePoint(
Hex.Decode("030024266E4EB5106D0A964D92C4860E2671DB9B6CC5")),
n, h,
null);
}
}
internal class C2pnb163v3Holder
: X9ECParametersHolder
{
private C2pnb163v3Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb163v3Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("03FFFFFFFFFFFFFFFFFFFE1AEE140F110AFF961309", 16);
BigInteger h = BigInteger.ValueOf(2);
ECCurve c2m163v3 = new F2mCurve(
163,
1, 2, 8,
new BigInteger("07A526C63D3E25A256A007699F5447E32AE456B50E", 16),
new BigInteger("03F7061798EB99E238FD6F1BF95B48FEEB4854252B", 16),
n, h);
return new X9ECParameters(
c2m163v3,
c2m163v3.DecodePoint(Hex.Decode("0202F9F87B7C574D0BDECF8A22E6524775F98CDEBDCB")),
n, h,
null);
}
}
internal class C2pnb176w1Holder
: X9ECParametersHolder
{
private C2pnb176w1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb176w1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("010092537397ECA4F6145799D62B0A19CE06FE26AD", 16);
BigInteger h = BigInteger.ValueOf(0xFF6E);
ECCurve c2m176w1 = new F2mCurve(
176,
1, 2, 43,
new BigInteger("00E4E6DB2995065C407D9D39B8D0967B96704BA8E9C90B", 16),
new BigInteger("005DDA470ABE6414DE8EC133AE28E9BBD7FCEC0AE0FFF2", 16),
n, h);
return new X9ECParameters(
c2m176w1,
c2m176w1.DecodePoint(
Hex.Decode("038D16C2866798B600F9F08BB4A8E860F3298CE04A5798")),
n, h,
null);
}
}
internal class C2tnb191v1Holder
: X9ECParametersHolder
{
private C2tnb191v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb191v1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("40000000000000000000000004A20E90C39067C893BBB9A5", 16);
BigInteger h = BigInteger.ValueOf(2);
ECCurve c2m191v1 = new F2mCurve(
191,
9,
new BigInteger("2866537B676752636A68F56554E12640276B649EF7526267", 16),
new BigInteger("2E45EF571F00786F67B0081B9495A3D95462F5DE0AA185EC", 16),
n, h);
return new X9ECParameters(
c2m191v1,
c2m191v1.DecodePoint(
Hex.Decode("0236B3DAF8A23206F9C4F299D7B21A9C369137F2C84AE1AA0D")),
n, h,
Hex.Decode("4E13CA542744D696E67687561517552F279A8C84"));
}
}
internal class C2tnb191v2Holder
: X9ECParametersHolder
{
private C2tnb191v2Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb191v2Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("20000000000000000000000050508CB89F652824E06B8173", 16);
BigInteger h = BigInteger.ValueOf(4);
ECCurve c2m191v2 = new F2mCurve(
191,
9,
new BigInteger("401028774D7777C7B7666D1366EA432071274F89FF01E718", 16),
new BigInteger("0620048D28BCBD03B6249C99182B7C8CD19700C362C46A01", 16),
n, h);
return new X9ECParameters(
c2m191v2,
c2m191v2.DecodePoint(
Hex.Decode("023809B2B7CC1B28CC5A87926AAD83FD28789E81E2C9E3BF10")),
n, h,
null);
}
}
internal class C2tnb191v3Holder
: X9ECParametersHolder
{
private C2tnb191v3Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb191v3Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("155555555555555555555555610C0B196812BFB6288A3EA3", 16);
BigInteger h = BigInteger.ValueOf(6);
ECCurve c2m191v3 = new F2mCurve(
191,
9,
new BigInteger("6C01074756099122221056911C77D77E77A777E7E7E77FCB", 16),
new BigInteger("71FE1AF926CF847989EFEF8DB459F66394D90F32AD3F15E8", 16),
n, h);
return new X9ECParameters(
c2m191v3,
c2m191v3.DecodePoint(
Hex.Decode("03375D4CE24FDE434489DE8746E71786015009E66E38A926DD")),
n, h,
null);
}
}
internal class C2pnb208w1Holder
: X9ECParametersHolder
{
private C2pnb208w1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb208w1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0101BAF95C9723C57B6C21DA2EFF2D5ED588BDD5717E212F9D", 16);
BigInteger h = BigInteger.ValueOf(0xFE48);
ECCurve c2m208w1 = new F2mCurve(
208,
1, 2, 83,
new BigInteger("0", 16),
new BigInteger("00C8619ED45A62E6212E1160349E2BFA844439FAFC2A3FD1638F9E", 16),
n, h);
return new X9ECParameters(
c2m208w1,
c2m208w1.DecodePoint(
Hex.Decode("0289FDFBE4ABE193DF9559ECF07AC0CE78554E2784EB8C1ED1A57A")),
n, h,
null);
}
}
internal class C2tnb239v1Holder
: X9ECParametersHolder
{
private C2tnb239v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb239v1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("2000000000000000000000000000000F4D42FFE1492A4993F1CAD666E447", 16);
BigInteger h = BigInteger.ValueOf(4);
ECCurve c2m239v1 = new F2mCurve(
239,
36,
new BigInteger("32010857077C5431123A46B808906756F543423E8D27877578125778AC76", 16),
new BigInteger("790408F2EEDAF392B012EDEFB3392F30F4327C0CA3F31FC383C422AA8C16", 16),
n, h);
return new X9ECParameters(
c2m239v1,
c2m239v1.DecodePoint(
Hex.Decode("0257927098FA932E7C0A96D3FD5B706EF7E5F5C156E16B7E7C86038552E91D")),
n, h,
null);
}
}
internal class C2tnb239v2Holder
: X9ECParametersHolder
{
private C2tnb239v2Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb239v2Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("1555555555555555555555555555553C6F2885259C31E3FCDF154624522D", 16);
BigInteger h = BigInteger.ValueOf(6);
ECCurve c2m239v2 = new F2mCurve(
239,
36,
new BigInteger("4230017757A767FAE42398569B746325D45313AF0766266479B75654E65F", 16),
new BigInteger("5037EA654196CFF0CD82B2C14A2FCF2E3FF8775285B545722F03EACDB74B", 16),
n, h);
return new X9ECParameters(
c2m239v2,
c2m239v2.DecodePoint(
Hex.Decode("0228F9D04E900069C8DC47A08534FE76D2B900B7D7EF31F5709F200C4CA205")),
n, h,
null);
}
}
internal class C2tnb239v3Holder
: X9ECParametersHolder
{
private C2tnb239v3Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb239v3Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0CCCCCCCCCCCCCCCCCCCCCCCCCCCCCAC4912D2D9DF903EF9888B8A0E4CFF", 16);
BigInteger h = BigInteger.ValueOf(10);
ECCurve c2m239v3 = new F2mCurve(
239,
36,
new BigInteger("01238774666A67766D6676F778E676B66999176666E687666D8766C66A9F", 16),
new BigInteger("6A941977BA9F6A435199ACFC51067ED587F519C5ECB541B8E44111DE1D40", 16),
n, h);
return new X9ECParameters(
c2m239v3,
c2m239v3.DecodePoint(
Hex.Decode("0370F6E9D04D289C4E89913CE3530BFDE903977D42B146D539BF1BDE4E9C92")),
n, h,
null);
}
}
internal class C2pnb272w1Holder
: X9ECParametersHolder
{
private C2pnb272w1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb272w1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0100FAF51354E0E39E4892DF6E319C72C8161603FA45AA7B998A167B8F1E629521", 16);
BigInteger h = BigInteger.ValueOf(0xFF06);
ECCurve c2m272w1 = new F2mCurve(
272,
1, 3, 56,
new BigInteger("0091A091F03B5FBA4AB2CCF49C4EDD220FB028712D42BE752B2C40094DBACDB586FB20", 16),
new BigInteger("7167EFC92BB2E3CE7C8AAAFF34E12A9C557003D7C73A6FAF003F99F6CC8482E540F7", 16),
n, h);
return new X9ECParameters(
c2m272w1,
c2m272w1.DecodePoint(
Hex.Decode("026108BABB2CEEBCF787058A056CBE0CFE622D7723A289E08A07AE13EF0D10D171DD8D")),
n, h,
null);
}
}
internal class C2pnb304w1Holder
: X9ECParametersHolder
{
private C2pnb304w1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb304w1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0101D556572AABAC800101D556572AABAC8001022D5C91DD173F8FB561DA6899164443051D", 16);
BigInteger h = BigInteger.ValueOf(0xFE2E);
ECCurve c2m304w1 = new F2mCurve(
304,
1, 2, 11,
new BigInteger("00FD0D693149A118F651E6DCE6802085377E5F882D1B510B44160074C1288078365A0396C8E681", 16),
new BigInteger("00BDDB97E555A50A908E43B01C798EA5DAA6788F1EA2794EFCF57166B8C14039601E55827340BE", 16),
n, h);
return new X9ECParameters(
c2m304w1,
c2m304w1.DecodePoint(
Hex.Decode("02197B07845E9BE2D96ADB0F5F3C7F2CFFBD7A3EB8B6FEC35C7FD67F26DDF6285A644F740A2614")),
n, h,
null);
}
}
internal class C2tnb359v1Holder
: X9ECParametersHolder
{
private C2tnb359v1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb359v1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("01AF286BCA1AF286BCA1AF286BCA1AF286BCA1AF286BC9FB8F6B85C556892C20A7EB964FE7719E74F490758D3B", 16);
BigInteger h = BigInteger.ValueOf(0x4C);
ECCurve c2m359v1 = new F2mCurve(
359,
68,
new BigInteger("5667676A654B20754F356EA92017D946567C46675556F19556A04616B567D223A5E05656FB549016A96656A557", 16),
new BigInteger("2472E2D0197C49363F1FE7F5B6DB075D52B6947D135D8CA445805D39BC345626089687742B6329E70680231988", 16),
n, h);
return new X9ECParameters(
c2m359v1,
c2m359v1.DecodePoint(
Hex.Decode("033C258EF3047767E7EDE0F1FDAA79DAEE3841366A132E163ACED4ED2401DF9C6BDCDE98E8E707C07A2239B1B097")),
n, h,
null);
}
}
internal class C2pnb368w1Holder
: X9ECParametersHolder
{
private C2pnb368w1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2pnb368w1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("010090512DA9AF72B08349D98A5DD4C7B0532ECA51CE03E2D10F3B7AC579BD87E909AE40A6F131E9CFCE5BD967", 16);
BigInteger h = BigInteger.ValueOf(0xFF70);
ECCurve c2m368w1 = new F2mCurve(
368,
1, 2, 85,
new BigInteger("00E0D2EE25095206F5E2A4F9ED229F1F256E79A0E2B455970D8D0D865BD94778C576D62F0AB7519CCD2A1A906AE30D", 16),
new BigInteger("00FC1217D4320A90452C760A58EDCD30C8DD069B3C34453837A34ED50CB54917E1C2112D84D164F444F8F74786046A", 16),
n, h);
return new X9ECParameters(
c2m368w1,
c2m368w1.DecodePoint(
Hex.Decode("021085E2755381DCCCE3C1557AFA10C2F0C0C2825646C5B34A394CBCFA8BC16B22E7E789E927BE216F02E1FB136A5F")),
n, h,
null);
}
}
internal class C2tnb431r1Holder
: X9ECParametersHolder
{
private C2tnb431r1Holder() {}
internal static readonly X9ECParametersHolder Instance = new C2tnb431r1Holder();
protected override X9ECParameters CreateParameters()
{
BigInteger n = new BigInteger("0340340340340340340340340340340340340340340340340340340323C313FAB50589703B5EC68D3587FEC60D161CC149C1AD4A91", 16);
BigInteger h = BigInteger.ValueOf(0x2760);
ECCurve c2m431r1 = new F2mCurve(
431,
120,
new BigInteger("1A827EF00DD6FC0E234CAF046C6A5D8A85395B236CC4AD2CF32A0CADBDC9DDF620B0EB9906D0957F6C6FEACD615468DF104DE296CD8F", 16),
new BigInteger("10D9B4A3D9047D8B154359ABFB1B7F5485B04CEB868237DDC9DEDA982A679A5A919B626D4E50A8DD731B107A9962381FB5D807BF2618", 16),
n, h);
return new X9ECParameters(
c2m431r1,
c2m431r1.DecodePoint(
Hex.Decode("02120FC05D3C67A99DE161D2F4092622FECA701BE4F50F4758714E8A87BBF2A658EF8C21E7C5EFE965361F6C2999C0C247B0DBD70CE6B7")),
n, h,
null);
}
}
private static readonly IDictionary objIds = Platform.CreateHashtable();
private static readonly IDictionary curves = Platform.CreateHashtable();
private static readonly IDictionary names = Platform.CreateHashtable();
private static void DefineCurve(
string name,
DerObjectIdentifier oid,
X9ECParametersHolder holder)
{
objIds.Add(name, oid);
names.Add(oid, name);
curves.Add(oid, holder);
}
static X962NamedCurves()
{
DefineCurve("prime192v1", X9ObjectIdentifiers.Prime192v1, Prime192v1Holder.Instance);
DefineCurve("prime192v2", X9ObjectIdentifiers.Prime192v2, Prime192v2Holder.Instance);
DefineCurve("prime192v3", X9ObjectIdentifiers.Prime192v3, Prime192v3Holder.Instance);
DefineCurve("prime239v1", X9ObjectIdentifiers.Prime239v1, Prime239v1Holder.Instance);
DefineCurve("prime239v2", X9ObjectIdentifiers.Prime239v2, Prime239v2Holder.Instance);
DefineCurve("prime239v3", X9ObjectIdentifiers.Prime239v3, Prime239v3Holder.Instance);
DefineCurve("prime256v1", X9ObjectIdentifiers.Prime256v1, Prime256v1Holder.Instance);
DefineCurve("c2pnb163v1", X9ObjectIdentifiers.C2Pnb163v1, C2pnb163v1Holder.Instance);
DefineCurve("c2pnb163v2", X9ObjectIdentifiers.C2Pnb163v2, C2pnb163v2Holder.Instance);
DefineCurve("c2pnb163v3", X9ObjectIdentifiers.C2Pnb163v3, C2pnb163v3Holder.Instance);
DefineCurve("c2pnb176w1", X9ObjectIdentifiers.C2Pnb176w1, C2pnb176w1Holder.Instance);
DefineCurve("c2tnb191v1", X9ObjectIdentifiers.C2Tnb191v1, C2tnb191v1Holder.Instance);
DefineCurve("c2tnb191v2", X9ObjectIdentifiers.C2Tnb191v2, C2tnb191v2Holder.Instance);
DefineCurve("c2tnb191v3", X9ObjectIdentifiers.C2Tnb191v3, C2tnb191v3Holder.Instance);
DefineCurve("c2pnb208w1", X9ObjectIdentifiers.C2Pnb208w1, C2pnb208w1Holder.Instance);
DefineCurve("c2tnb239v1", X9ObjectIdentifiers.C2Tnb239v1, C2tnb239v1Holder.Instance);
DefineCurve("c2tnb239v2", X9ObjectIdentifiers.C2Tnb239v2, C2tnb239v2Holder.Instance);
DefineCurve("c2tnb239v3", X9ObjectIdentifiers.C2Tnb239v3, C2tnb239v3Holder.Instance);
DefineCurve("c2pnb272w1", X9ObjectIdentifiers.C2Pnb272w1, C2pnb272w1Holder.Instance);
DefineCurve("c2pnb304w1", X9ObjectIdentifiers.C2Pnb304w1, C2pnb304w1Holder.Instance);
DefineCurve("c2tnb359v1", X9ObjectIdentifiers.C2Tnb359v1, C2tnb359v1Holder.Instance);
DefineCurve("c2pnb368w1", X9ObjectIdentifiers.C2Pnb368w1, C2pnb368w1Holder.Instance);
DefineCurve("c2tnb431r1", X9ObjectIdentifiers.C2Tnb431r1, C2tnb431r1Holder.Instance);
}
public static X9ECParameters GetByName(
string name)
{
DerObjectIdentifier oid = (DerObjectIdentifier) objIds[name.ToLower(CultureInfo.InvariantCulture)];
return oid == null ? null : GetByOid(oid);
}
/**
* return the X9ECParameters object for the named curve represented by
* the passed in object identifier. Null if the curve isn't present.
*
* @param oid an object identifier representing a named curve, if present.
*/
public static X9ECParameters GetByOid(
DerObjectIdentifier oid)
{
X9ECParametersHolder holder = (X9ECParametersHolder) curves[oid];
return holder == null ? null : holder.Parameters;
}
/**
* return the object identifier signified by the passed in name. Null
* if there is no object identifier associated with name.
*
* @return the object identifier associated with name, if present.
*/
public static DerObjectIdentifier GetOid(
string name)
{
return (DerObjectIdentifier) objIds[name.ToLower(CultureInfo.InvariantCulture)];
}
/**
* return the named curve name represented by the given object identifier.
*/
public static string GetName(
DerObjectIdentifier oid)
{
return (string) names[oid];
}
/**
* returns an enumeration containing the name strings for curves
* contained in this structure.
*/
public static IEnumerable Names
{
get { return new EnumerableProxy(objIds.Keys); }
}
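// Usage sketch (added for illustration, not part of the original class):
//   X9ECParameters ecP = X962NamedCurves.GetByName("prime239v1");
//   if (ecP != null)
//   {
//       ECCurve curve = ecP.Curve;   // the named curve itself
//       BigInteger n = ecP.N;        // order of the base point G
//   }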
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Text;
using OpenLiveWriter.Interop.Com;
namespace OpenLiveWriter.Interop.Windows
{
/// <summary>
/// Summary description for Shlwapi.
/// </summary>
public class Shlwapi
{
/// <summary>
/// Combines two paths (relative and base) to form a new path
/// </summary>
/// <param name="pszBase">The base path</param>
/// <param name="pszRelative">The relative path</param>
/// <returns>The combined path</returns>
public static string UrlCombine(string pszBase, string pszRelative)
{
StringBuilder builder = new StringBuilder(DEFAULT_URL_BUFFER_SIZE);
IntPtr bufferSize = new IntPtr(builder.Capacity);
int hResult = Shlwapi.UrlCombine(
pszBase,
pszRelative,
builder,
ref bufferSize,
0);
// The buffer wasn't large enough - grow it and try again
while (hResult == HRESULT.E_POINTER)
{
builder = new StringBuilder(bufferSize.ToInt32());
hResult = Shlwapi.UrlCombine(
pszBase,
pszRelative,
builder,
ref bufferSize,
0);
}
// Some other exception has occurred, bail
if (hResult != HRESULT.S_OK)
throw new COMException("Unabled to combine Urls", hResult);
// We've got the new URL
return builder.ToString();
}
private static readonly int DEFAULT_URL_BUFFER_SIZE = 32;
[DllImport("Shlwapi.dll", CharSet = CharSet.Auto)]
public static extern int UrlCombine(
[In, MarshalAs(UnmanagedType.LPTStr)] string pszBase,
[In, MarshalAs(UnmanagedType.LPTStr)] string pszRelative,
[Out, MarshalAs(UnmanagedType.LPTStr)] StringBuilder pszCombined,
[In, Out] ref IntPtr pcchCombined,
uint dwFlags
);
public struct URL
{
public const UInt32 ESCAPE_SPACES_ONLY = 0x04000000;
public const UInt32 DONT_SIMPLIFY = 0x08000000;
public const UInt32 ESCAPE_PERCENT = 0x00001000;
public const UInt32 UNESCAPE = 0x10000000;
public const UInt32 ESCAPE_UNSAFE = 0x20000000;
public const UInt32 PLUGGABLE_PROTOCOL = 0x40000000;
}
[DllImport("Shlwapi.dll")]
public static extern int SHAutoComplete(IntPtr hwndEdit, uint dwFlags);
// interop declaration for converting a path to a URL
[DllImport("shlwapi.dll", CharSet = CharSet.Auto)]
public static extern int UrlCreateFromPath(
[In, MarshalAs(UnmanagedType.LPTStr)] string pszPath,
[Out, MarshalAs(UnmanagedType.LPTStr)] StringBuilder pszUrl,
[In, Out] ref uint pcchUrl,
[In] uint dwReserved);
/// <summary>
/// Searches for and retrieves a file association-related string from the registry.
/// </summary>
/// <param name="flags">Flags that can be used to control the search. It can be any combination of ASSOCF values, except that only one INIT value can be included.</param>
/// <param name="str">ASSOCSTR value that specifies the type of string that is to be returned.</param>
/// <param name="pszAssoc">
/// Pointer to a null-terminated string that is used to determine the root key. Four types of strings can be used.
/// File name extension
/// A file name extension, such as .txt.
/// CLSID
/// A class identifier (CLSID) globally unique identifier (GUID) in the standard "{GUID}" format.
/// ProgID
/// An application's ProgID, such as Word.Document.8.
/// Executable name
/// The name of an application's .exe file. The ASSOCF_OPEN_BYEXENAME flag must be set in flags.
/// </param>
/// <param name="pszExtra">Optional null-terminated string with additional information about the location of the string. It is normally set to a Shell verb such as open. Set this parameter to NULL if it is not used.</param>
/// <param name="pszOut">Null-terminated string used to return the requested string. Set this parameter to NULL to retrieve the required buffer size.</param>
/// <param name="pcchOut">Pointer to a value that is set to the number of characters in the pszOut buffer. When the function returns, it will be set to the number of characters actually placed in the buffer.
/// If the ASSOCF.NOTRUNCATE flag is set in flags and the buffer specified in pszOut is too small, the function returns E_POINTER and the value is set to the required size of the buffer.
/// If pszOut is NULL, the function returns S_FALSE and pcchOut points to the required size of the buffer.</param>
/// <returns>
/// Returns a standard error value or one of the following: Error Meaning
/// S_OK Success.
/// E_POINTER The pszOut buffer is too small to hold the entire string.
/// S_FALSE pszOut is NULL. pcchOut contains the required buffer size.
/// </returns>
[DllImport("Shlwapi.dll", CharSet = CharSet.Auto)]
public static extern int AssocQueryString(
[In] ASSOCF flags,
[In] ASSOCSTR str,
[In] string pszAssoc,
[In] string pszExtra,
[Out, MarshalAs(UnmanagedType.LPTStr)] StringBuilder pszOut,
[In, Out] ref int pcchOut
);
[DllImport("Shlwapi.dll", CharSet = CharSet.Unicode)]
public static extern int SHCreateStreamOnFileEx(string pszFile,
int grfMode,
int dwAttributes,
bool fCreate,
IntPtr pstmTemplate,
out IStream ppstm);
}
public enum ASSOCF
{
/// <summary>
/// do not remap clsids to progids
/// </summary>
INIT_NOREMAPCLSID = 0x00000001,
/// <summary>
/// executable is being passed in
/// </summary>
INIT_BYEXENAME = 0x00000002,
/// <summary>
/// executable is being passed in
/// </summary>
OPEN_BYEXENAME = 0x00000002,
/// <summary>
/// treat "*" as the BaseClass
/// </summary>
INIT_DEFAULTTOSTAR = 0x00000004,
/// <summary>
/// treat "Folder" as the BaseClass
/// </summary>
INIT_DEFAULTTOFOLDER = 0x00000008,
/// <summary>
/// dont use HKCU
/// </summary>
NOUSERSETTINGS = 0x00000010,
/// <summary>
/// dont truncate the return string
/// </summary>
NOTRUNCATE = 0x00000020,
/// <summary>
/// verify data is accurate (DISK HITS)
/// </summary>
VERIFY = 0x00000040,
/// <summary>
/// actually gets info about rundlls target if applicable
/// </summary>
REMAPRUNDLL = 0x00000080,
/// <summary>
/// don't attempt to fix errors if found
/// </summary>
NOFIXUPS = 0x00000100,
/// <summary>
/// dont recurse into the baseclass
/// </summary>
IGNOREBASECLASS = 0x00000200,
}
public enum ASSOCSTR
{
COMMAND = 1, // shell\verb\command string
EXECUTABLE, // the executable part of command string
FRIENDLYDOCNAME, // friendly name of the document type
FRIENDLYAPPNAME, // friendly name of executable
NOOPEN, // noopen value
SHELLNEWVALUE, // query values under the shellnew key
DDECOMMAND, // template for DDE commands
DDEIFEXEC, // DDECOMMAND to use if just create a process
DDEAPPLICATION, // Application name in DDE broadcast
DDETOPIC, // Topic Name in DDE broadcast
INFOTIP, // info tip for an item, or list of properties to create info tip from
QUICKTIP, // same as INFOTIP, except, this list contains only quickly retrievable properties
TILEINFO, // similar to INFOTIP - lists important properties for tileview
CONTENTTYPE, // MIME Content type
DEFAULTICON, // Default icon source
SHELLEXTENSION, // Guid string pointing to the Shellex\Shellextensionhandler value.
MAX // last item in enum...
}
public struct SHACF
{
public const uint DEFAULT = 0x00000000; // Currently (SHACF_FILESYSTEM | SHACF_URLALL)
public const uint FILESYSTEM = 0x00000001; // This includes the File System as well as the rest of the shell (Desktop\My Computer\Control Panel\)
public const uint URLALL = (SHACF.URLHISTORY | SHACF.URLMRU);
public const uint URLHISTORY = 0x00000002; // URLs in the User's History
public const uint URLMRU = 0x00000004; // URLs in the User's Recently Used list.
public const uint USETAB = 0x00000008; // Use the tab to move thru the autocomplete possibilities instead of to the next dialog/window control.
public const uint FILESYS_ONLY = 0x00000010; // This includes the File System
public const uint AUTOSUGGEST_FORCE_ON = 0x10000000; // Ignore the registry default and force the feature on.
public const uint AUTOSUGGEST_FORCE_OFF = 0x20000000; // Ignore the registry default and force the feature off.
public const uint AUTOAPPEND_FORCE_ON = 0x40000000; // Ignore the registry default and force the feature on. (Also known as AutoComplete)
public const uint AUTOAPPEND_FORCE_OFF = 0x80000000; // Ignore the registry default and force the feature off. (Also known as AutoComplete)
}
}
| |
using Apache.NMS.Util;
using System;
using System.Collections.Generic;
using Lucene.Net.Documents;
namespace Lucene.Net.Index
{
using Lucene.Net.Randomized.Generators;
using Lucene.Net.Support;
using NUnit.Framework;
using System.IO;
using BinaryDocValuesField = BinaryDocValuesField;
using Bits = Lucene.Net.Util.Bits;
using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using IOUtils = Lucene.Net.Util.IOUtils;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using NumericDocValuesField = NumericDocValuesField;
using Store = Field.Store;
using StringField = StringField;
using TestUtil = Lucene.Net.Util.TestUtil;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
[TestFixture]
public class TestMixedDocValuesUpdates : LuceneTestCase
{
[Test]
public virtual void TestManyReopensAndFields()
{
Directory dir = NewDirectory();
Random random = Random();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
LogMergePolicy lmp = NewLogMergePolicy();
lmp.MergeFactor = 3; // merge often
conf.SetMergePolicy(lmp);
IndexWriter writer = new IndexWriter(dir, conf);
bool isNRT = random.NextBoolean();
DirectoryReader reader;
if (isNRT)
{
reader = DirectoryReader.Open(writer, true);
}
else
{
writer.Commit();
reader = DirectoryReader.Open(dir);
}
int numFields = random.Next(4) + 3; // 3-7
int numNDVFields = random.Next(numFields / 2) + 1; // 1-3
long[] fieldValues = new long[numFields];
bool[] fieldHasValue = new bool[numFields];
Arrays.Fill(fieldHasValue, true);
for (int i = 0; i < fieldValues.Length; i++)
{
fieldValues[i] = 1;
}
int numRounds = AtLeast(15);
int docID = 0;
for (int i = 0; i < numRounds; i++)
{
int numDocs = AtLeast(5);
// System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
for (int j = 0; j < numDocs; j++)
{
Document doc = new Document();
doc.Add(new StringField("id", "doc-" + docID, Store.NO));
doc.Add(new StringField("key", "all", Store.NO)); // update key
// add all fields with their current value
for (int f = 0; f < fieldValues.Length; f++)
{
if (f < numNDVFields)
{
doc.Add(new NumericDocValuesField("f" + f, fieldValues[f]));
}
else
{
doc.Add(new BinaryDocValuesField("f" + f, TestBinaryDocValuesUpdates.ToBytes(fieldValues[f])));
}
}
writer.AddDocument(doc);
++docID;
}
// if field's value was unset before, unset it from all new added documents too
for (int field = 0; field < fieldHasValue.Length; field++)
{
if (!fieldHasValue[field])
{
if (field < numNDVFields)
{
writer.UpdateNumericDocValue(new Term("key", "all"), "f" + field, null);
}
else
{
writer.UpdateBinaryDocValue(new Term("key", "all"), "f" + field, null);
}
}
}
int fieldIdx = random.Next(fieldValues.Length);
string updateField = "f" + fieldIdx;
if (random.NextBoolean())
{
// System.out.println("[" + Thread.currentThread().getName() + "]: unset field '" + updateField + "'");
fieldHasValue[fieldIdx] = false;
if (fieldIdx < numNDVFields)
{
writer.UpdateNumericDocValue(new Term("key", "all"), updateField, null);
}
else
{
writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, null);
}
}
else
{
fieldHasValue[fieldIdx] = true;
if (fieldIdx < numNDVFields)
{
writer.UpdateNumericDocValue(new Term("key", "all"), updateField, ++fieldValues[fieldIdx]);
}
else
{
writer.UpdateBinaryDocValue(new Term("key", "all"), updateField, TestBinaryDocValuesUpdates.ToBytes(++fieldValues[fieldIdx]));
}
// System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
}
if (random.NextDouble() < 0.2)
{
int deleteDoc = random.Next(docID); // might also delete an already deleted document, ok!
writer.DeleteDocuments(new Term("id", "doc-" + deleteDoc));
// System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
}
// verify reader
if (!isNRT)
{
writer.Commit();
}
// System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
DirectoryReader newReader = DirectoryReader.OpenIfChanged(reader);
Assert.IsNotNull(newReader);
reader.Dispose();
reader = newReader;
// System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
Assert.IsTrue(reader.NumDocs > 0); // we delete at most one document per round
BytesRef scratch = new BytesRef();
foreach (AtomicReaderContext context in reader.Leaves)
{
AtomicReader r = context.AtomicReader;
// System.out.println(((SegmentReader) r).getSegmentName());
Bits liveDocs = r.LiveDocs;
for (int field = 0; field < fieldValues.Length; field++)
{
string f = "f" + field;
BinaryDocValues bdv = r.GetBinaryDocValues(f);
NumericDocValues ndv = r.GetNumericDocValues(f);
Bits docsWithField = r.GetDocsWithField(f);
if (field < numNDVFields)
{
Assert.IsNotNull(ndv);
Assert.IsNull(bdv);
}
else
{
Assert.IsNull(ndv);
Assert.IsNotNull(bdv);
}
int maxDoc = r.MaxDoc;
for (int doc = 0; doc < maxDoc; doc++)
{
if (liveDocs == null || liveDocs.Get(doc))
{
// System.out.println("doc=" + (doc + context.DocBase) + " f='" + f + "' vslue=" + getValue(bdv, doc, scratch));
if (fieldHasValue[field])
{
Assert.IsTrue(docsWithField.Get(doc));
if (field < numNDVFields)
{
Assert.AreEqual(fieldValues[field], ndv.Get(doc), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
}
else
{
Assert.AreEqual(fieldValues[field], TestBinaryDocValuesUpdates.GetValue(bdv, doc, scratch), "invalid value for doc=" + doc + ", field=" + f + ", reader=" + r);
}
}
else
{
Assert.IsFalse(docsWithField.Get(doc));
}
}
}
}
}
// System.out.println();
}
IOUtils.Close(writer, reader, dir);
}
[Test]
public virtual void TestStressMultiThreading()
{
Directory dir = NewDirectory();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter writer = new IndexWriter(dir, conf);
// create index
int numThreads = TestUtil.NextInt(Random(), 3, 6);
int numDocs = AtLeast(2000);
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
doc.Add(new StringField("id", "doc" + i, Store.NO));
double group = Random().NextDouble();
string g;
if (group < 0.1)
{
g = "g0";
}
else if (group < 0.5)
{
g = "g1";
}
else if (group < 0.8)
{
g = "g2";
}
else
{
g = "g3";
}
doc.Add(new StringField("updKey", g, Store.NO));
for (int j = 0; j < numThreads; j++)
{
long value = Random().Next();
doc.Add(new BinaryDocValuesField("f" + j, TestBinaryDocValuesUpdates.ToBytes(value)));
doc.Add(new NumericDocValuesField("cf" + j, value * 2)); // control, always updated to f * 2
}
writer.AddDocument(doc);
}
CountDownLatch done = new CountDownLatch(numThreads);
AtomicInteger numUpdates = new AtomicInteger(AtLeast(100));
// same thread updates a field as well as reopens
ThreadClass[] threads = new ThreadClass[numThreads];
for (int i = 0; i < threads.Length; i++)
{
string f = "f" + i;
string cf = "cf" + i;
threads[i] = new ThreadAnonymousInnerClassHelper(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf);
}
foreach (ThreadClass t in threads)
{
t.Start();
}
done.@await();
writer.Dispose();
DirectoryReader reader = DirectoryReader.Open(dir);
BytesRef scratch = new BytesRef();
foreach (AtomicReaderContext context in reader.Leaves)
{
AtomicReader r = context.AtomicReader;
for (int i = 0; i < numThreads; i++)
{
BinaryDocValues bdv = r.GetBinaryDocValues("f" + i);
NumericDocValues control = r.GetNumericDocValues("cf" + i);
Bits docsWithBdv = r.GetDocsWithField("f" + i);
Bits docsWithControl = r.GetDocsWithField("cf" + i);
Bits liveDocs = r.LiveDocs;
for (int j = 0; j < r.MaxDoc; j++)
{
if (liveDocs == null || liveDocs.Get(j))
{
Assert.AreEqual(docsWithBdv.Get(j), docsWithControl.Get(j));
if (docsWithBdv.Get(j))
{
long ctrlValue = control.Get(j);
long bdvValue = TestBinaryDocValuesUpdates.GetValue(bdv, j, scratch) * 2;
// if (ctrlValue != bdvValue) {
// System.out.println("seg=" + r + ", f=f" + i + ", doc=" + j + ", group=" + r.Document(j).Get("updKey") + ", ctrlValue=" + ctrlValue + ", bdvBytes=" + scratch);
// }
Assert.AreEqual(ctrlValue, bdvValue);
}
}
}
}
}
reader.Dispose();
dir.Dispose();
}
private class ThreadAnonymousInnerClassHelper : ThreadClass
{
private readonly TestMixedDocValuesUpdates OuterInstance;
private IndexWriter Writer;
private int NumDocs;
private CountDownLatch Done;
private AtomicInteger NumUpdates;
private string f;
private string Cf;
public ThreadAnonymousInnerClassHelper(TestMixedDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountDownLatch done, AtomicInteger numUpdates, string f, string cf)
: base(str)
{
this.OuterInstance = outerInstance;
this.Writer = writer;
this.NumDocs = numDocs;
this.Done = done;
this.NumUpdates = numUpdates;
this.f = f;
this.Cf = cf;
}
public override void Run()
{
DirectoryReader reader = null;
bool success = false;
try
{
Random random = Random();
while (NumUpdates.GetAndDecrement() > 0)
{
double group = random.NextDouble();
Term t;
if (group < 0.1)
{
t = new Term("updKey", "g0");
}
else if (group < 0.5)
{
t = new Term("updKey", "g1");
}
else if (group < 0.8)
{
t = new Term("updKey", "g2");
}
else
{
t = new Term("updKey", "g3");
}
// System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t);
if (random.NextBoolean()) // sometimes unset a value
{
// System.err.println("[" + Thread.currentThread().getName() + "] t=" + t + ", f=" + f + ", updValue=UNSET");
Writer.UpdateBinaryDocValue(t, f, null);
Writer.UpdateNumericDocValue(t, Cf, null);
}
else
{
long updValue = random.Next();
// System.err.println("[" + Thread.currentThread().getName() + "] t=" + t + ", f=" + f + ", updValue=" + updValue);
Writer.UpdateBinaryDocValue(t, f, TestBinaryDocValuesUpdates.ToBytes(updValue));
Writer.UpdateNumericDocValue(t, Cf, updValue * 2);
}
if (random.NextDouble() < 0.2)
{
// delete a random document
int doc = random.Next(NumDocs);
// System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
Writer.DeleteDocuments(new Term("id", "doc" + doc));
}
if (random.NextDouble() < 0.05) // commit every 20 updates on average
{
// System.out.println("[" + Thread.currentThread().getName() + "] commit");
Writer.Commit();
}
if (random.NextDouble() < 0.1) // reopen NRT reader (apply updates), on average once every 10 updates
{
if (reader == null)
{
// System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
reader = DirectoryReader.Open(Writer, true);
}
else
{
// System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
DirectoryReader r2 = DirectoryReader.OpenIfChanged(reader, Writer, true);
if (r2 != null)
{
reader.Dispose();
reader = r2;
}
}
}
}
// System.out.println("[" + Thread.currentThread().getName() + "] DONE");
success = true;
}
catch (IOException e)
{
throw new Exception(e.Message, e);
}
finally
{
if (reader != null)
{
try
{
reader.Dispose();
}
catch (IOException e)
{
if (success) // suppress this exception only if there was another exception
{
throw new Exception(e.Message, e);
}
}
}
Done.countDown();
}
}
}
[Test]
public virtual void TestUpdateDifferentDocsInDifferentGens()
{
// update same document multiple times across generations
Directory dir = NewDirectory();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
conf.SetMaxBufferedDocs(4);
IndexWriter writer = new IndexWriter(dir, conf);
int numDocs = AtLeast(10);
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
doc.Add(new StringField("id", "doc" + i, Store.NO));
long value = Random().Next();
doc.Add(new BinaryDocValuesField("f", TestBinaryDocValuesUpdates.ToBytes(value)));
doc.Add(new NumericDocValuesField("cf", value * 2));
writer.AddDocument(doc);
}
int numGens = AtLeast(5);
BytesRef scratch = new BytesRef();
for (int i = 0; i < numGens; i++)
{
int doc = Random().Next(numDocs);
Term t = new Term("id", "doc" + doc);
long value = Random().NextLong();
writer.UpdateBinaryDocValue(t, "f", TestBinaryDocValuesUpdates.ToBytes(value));
writer.UpdateNumericDocValue(t, "cf", value * 2);
DirectoryReader reader = DirectoryReader.Open(writer, true);
foreach (AtomicReaderContext context in reader.Leaves)
{
AtomicReader r = context.AtomicReader;
BinaryDocValues fbdv = r.GetBinaryDocValues("f");
NumericDocValues cfndv = r.GetNumericDocValues("cf");
for (int j = 0; j < r.MaxDoc; j++)
{
Assert.AreEqual(cfndv.Get(j), TestBinaryDocValuesUpdates.GetValue(fbdv, j, scratch) * 2);
}
}
reader.Dispose();
}
writer.Dispose();
dir.Dispose();
}
[Test]
public virtual void TestTonsOfUpdates()
{
// LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
Directory dir = NewDirectory();
Random random = Random();
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
conf.SetRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
IndexWriter writer = new IndexWriter(dir, conf);
// test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
int numDocs = AtLeast(20000);
int numBinaryFields = AtLeast(5);
int numTerms = TestUtil.NextInt(random, 10, 100); // terms should affect many docs
HashSet<string> updateTerms = new HashSet<string>();
while (updateTerms.Count < numTerms)
{
updateTerms.Add(TestUtil.RandomSimpleString(random));
}
// System.out.println("numDocs=" + numDocs + " numBinaryFields=" + numBinaryFields + " numTerms=" + numTerms);
// build a large index with many BDV fields and update terms
for (int i = 0; i < numDocs; i++)
{
Document doc = new Document();
int numUpdateTerms = TestUtil.NextInt(random, 1, numTerms / 10);
for (int j = 0; j < numUpdateTerms; j++)
{
doc.Add(new StringField("upd", RandomInts.RandomFrom(random, updateTerms), Store.NO));
}
for (int j = 0; j < numBinaryFields; j++)
{
long val = random.Next();
doc.Add(new BinaryDocValuesField("f" + j, TestBinaryDocValuesUpdates.ToBytes(val)));
doc.Add(new NumericDocValuesField("cf" + j, val * 2));
}
writer.AddDocument(doc);
}
writer.Commit(); // commit so there's something to apply to
// set to flush every 2048 bytes (approximately every 12 updates), so we get
// many flushes during binary updates
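// (2048.0 / 1024 / 1024 MB is exactly 2048 bytes)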
writer.Config.SetRAMBufferSizeMB(2048.0 / 1024 / 1024);
int numUpdates = AtLeast(100);
// System.out.println("numUpdates=" + numUpdates);
for (int i = 0; i < numUpdates; i++)
{
int field = random.Next(numBinaryFields);
Term updateTerm = new Term("upd", RandomInts.RandomFrom(random, updateTerms));
long value = random.Next();
writer.UpdateBinaryDocValue(updateTerm, "f" + field, TestBinaryDocValuesUpdates.ToBytes(value));
writer.UpdateNumericDocValue(updateTerm, "cf" + field, value * 2);
}
writer.Dispose();
DirectoryReader reader = DirectoryReader.Open(dir);
BytesRef scratch = new BytesRef();
foreach (AtomicReaderContext context in reader.Leaves)
{
for (int i = 0; i < numBinaryFields; i++)
{
AtomicReader r = context.AtomicReader;
BinaryDocValues f = r.GetBinaryDocValues("f" + i);
NumericDocValues cf = r.GetNumericDocValues("cf" + i);
for (int j = 0; j < r.MaxDoc; j++)
{
Assert.AreEqual(cf.Get(j), TestBinaryDocValuesUpdates.GetValue(f, j, scratch) * 2, "reader=" + r + ", field=f" + i + ", doc=" + j);
}
}
}
reader.Dispose();
dir.Dispose();
}
}
}
| |
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Caching.Memory;
using NBitcoin;
using NBitcoin.RPC;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
using WalletWasabi.Backend.Models;
using WalletWasabi.Backend.Models.Responses;
using WalletWasabi.BitcoinCore.Rpc;
using WalletWasabi.Blockchain.Analysis.FeesEstimation;
using WalletWasabi.Helpers;
using WalletWasabi.Logging;
using WalletWasabi.Models;
namespace WalletWasabi.Backend.Controllers
{
/// <summary>
/// To interact with the Bitcoin Blockchain.
/// </summary>
[Produces("application/json")]
[Route("api/v" + Constants.BackendMajorVersion + "/btc/[controller]")]
public class BlockchainController : ControllerBase
{
public static readonly TimeSpan FilterTimeout = TimeSpan.FromMinutes(20);
public BlockchainController(IMemoryCache memoryCache, Global global)
{
Cache = memoryCache;
Global = global;
}
private IRPCClient RpcClient => Global.RpcClient;
private Network Network => Global.Config.Network;
public static Dictionary<uint256, string> TransactionHexCache { get; } = new Dictionary<uint256, string>();
public static object TransactionHexCacheLock { get; } = new object();
public IMemoryCache Cache { get; }
public Global Global { get; }
/// <summary>
/// Get all fees.
/// </summary>
/// <param name="estimateSmartFeeMode">Bitcoin Core's estimatesmartfee mode: ECONOMICAL/CONSERVATIVE.</param>
/// <returns>A dictionary of fee targets and estimations.</returns>
/// <response code="200">A dictionary of fee targets and estimations.</response>
/// <response code="400">Invalid estimation mode is provided, possible values: ECONOMICAL/CONSERVATIVE.</response>
[HttpGet("all-fees")]
[ProducesResponseType(200)]
[ProducesResponseType(400)]
[ResponseCache(Duration = 300, Location = ResponseCacheLocation.Client)]
public async Task<IActionResult> GetAllFeesAsync([FromQuery, Required] string estimateSmartFeeMode)
{
if (!Enum.TryParse(estimateSmartFeeMode, ignoreCase: true, out EstimateSmartFeeMode mode))
{
return BadRequest("Invalid estimation mode is provided, possible values: ECONOMICAL/CONSERVATIVE.");
}
AllFeeEstimate estimation = await GetAllFeeEstimateAsync(mode);
return Ok(estimation.Estimations);
}
internal async Task<AllFeeEstimate> GetAllFeeEstimateAsync(EstimateSmartFeeMode mode)
{
var cacheKey = $"{nameof(GetAllFeeEstimateAsync)}_{mode}";
var cacheOptions = new MemoryCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromSeconds(60) };
return await Cache.AtomicGetOrCreateAsync(
cacheKey,
cacheOptions,
() => RpcClient.EstimateAllFeeAsync(mode, simulateIfRegTest: true));
}
/// <summary>
/// Gets mempool hashes.
/// </summary>
/// <param name="compactness">Can strip the last x characters from the hashes.</param>
/// <returns>A collection of transaction hashes.</returns>
/// <response code="200">A collection of transaction hashes.</response>
/// <response code="400">Invalid model state.</response>
[HttpGet("mempool-hashes")]
[ProducesResponseType(200)]
[ProducesResponseType(400)]
[ResponseCache(Duration = 3, Location = ResponseCacheLocation.Client)]
public async Task<IActionResult> GetMempoolHashesAsync([FromQuery] int compactness = 64)
{
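// A transaction hash is 64 hexadecimal characters; a compactness of n keeps only the first n characters of each hash.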
if (compactness is < 1 or > 64)
{
return BadRequest("Invalid compactness parameter is provided.");
}
IEnumerable<string> fulls = await GetRawMempoolStringsWithCacheAsync();
if (compactness == 64)
{
return Ok(fulls);
}
else
{
IEnumerable<string> compacts = fulls.Select(x => x.Substring(0, compactness));
return Ok(compacts);
}
}
internal async Task<IEnumerable<string>> GetRawMempoolStringsWithCacheAsync()
{
var cacheKey = $"{nameof(GetRawMempoolStringsWithCacheAsync)}";
var cacheOptions = new MemoryCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromSeconds(3) };
return await Cache.AtomicGetOrCreateAsync(
cacheKey,
cacheOptions,
() => GetRawMempoolStringsNoCacheAsync());
}
private async Task<IEnumerable<string>> GetRawMempoolStringsNoCacheAsync()
{
uint256[] transactionHashes = await Global.RpcClient.GetRawMempoolAsync();
return transactionHashes.Select(x => x.ToString());
}
/// <summary>
/// Attempts to get transactions.
/// </summary>
/// <param name="transactionIds">The transactions the client is interested in.</param>
/// <returns>200 Ok with the list of found transactions. This list can be empty if none of the transactions are found.</returns>
/// <response code="200">Returns the list of transactions hexes. The list can be empty.</response>
/// <response code="400">Something went wrong.</response>
[HttpGet("transaction-hexes")]
[ProducesResponseType(200)]
[ProducesResponseType(400)]
public async Task<IActionResult> GetTransactionsAsync([FromQuery, Required] IEnumerable<string> transactionIds)
{
var maxTxToRequest = 10;
if (transactionIds.Count() > maxTxToRequest)
{
return BadRequest($"Maximum {maxTxToRequest} transactions can be requested.");
}
var parsedIds = new List<uint256>();
try
{
// Remove duplicates without using Distinct(), because Distinct() does not guarantee ordering.
foreach (var txid in transactionIds.Select(x => new uint256(x)))
{
if (!parsedIds.Contains(txid))
{
parsedIds.Add(txid);
}
}
}
catch
{
return BadRequest("Invalid transaction Ids.");
}
try
{
var hexes = new Dictionary<uint256, string>();
var queryRpc = false;
IRPCClient batchingRpc = null;
List<Task<Transaction>> tasks = null;
lock (TransactionHexCacheLock)
{
foreach (var txid in parsedIds)
{
if (TransactionHexCache.TryGetValue(txid, out string hex))
{
hexes.Add(txid, hex);
}
else
{
if (!queryRpc)
{
queryRpc = true;
batchingRpc = RpcClient.PrepareBatch();
tasks = new List<Task<Transaction>>();
}
tasks.Add(batchingRpc.GetRawTransactionAsync(txid));
}
}
}
if (queryRpc)
{
await batchingRpc.SendBatchAsync();
foreach (var tx in await Task.WhenAll(tasks))
{
string hex = tx.ToHex();
hexes.Add(tx.GetHash(), hex);
lock (TransactionHexCacheLock)
{
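// Cache the hex; once the cache reaches about 1000 entries, drop an existing entry to keep it bounded.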
if (TransactionHexCache.TryAdd(tx.GetHash(), hex) && TransactionHexCache.Count >= 1000)
{
TransactionHexCache.Remove(TransactionHexCache.Keys.First());
}
}
}
}
// Order hexes according to the order of the query.
var orderedResult = parsedIds.Where(x => hexes.ContainsKey(x)).Select(x => hexes[x]);
return Ok(orderedResult);
}
catch (Exception ex)
{
Logger.LogDebug(ex);
return BadRequest(ex.Message);
}
}
/// <summary>
/// Attempts to broadcast a transaction.
/// </summary>
/// <remarks>
/// Sample request:
///
/// POST /broadcast
/// "01000000014b6b6fced23fa0d772f83fd849ce2f4e8fa51ea49cc12710ebcdc722d74c87f5000000006a47304402206bf1118e381342d0387e47807c83d2c1e919e2e3792f2673579a9ce87a380db002207e471504f96d7830dc9cbb7442332d747a25dcfd5d1530feea92b8a302aa57f4012102a40230b345856cc18ca1d745e7ea52319a012753b050e24d7be64ca0b978fb3effffffff0235662803000000001976a9146adfacaab3dc7c51b3300c4256b184f95cc48f4288acd0dd0600000000001976a91411ff558b1790b8d57cb25b9c07094591cfd2051c88ac00000000"
///
/// </remarks>
/// <param name="hex">The hex string of the raw transaction.</param>
/// <returns>200 Ok on successful broadcast or 400 BadRequest on failure.</returns>
/// <response code="200">If broadcast is successful.</response>
/// <response code="400">If broadcast fails.</response>
[HttpPost("broadcast")]
[ProducesResponseType(200)]
[ProducesResponseType(400)]
public async Task<IActionResult> BroadcastAsync([FromBody, Required] string hex)
{
Transaction transaction;
try
{
transaction = Transaction.Parse(hex, Network);
}
catch (Exception ex)
{
Logger.LogDebug(ex);
return BadRequest("Invalid hex.");
}
try
{
await RpcClient.SendRawTransactionAsync(transaction);
}
catch (RPCException ex) when (ex.Message.Contains("already in block chain", StringComparison.InvariantCultureIgnoreCase))
{
return Ok("Transaction is already in the blockchain.");
}
catch (RPCException ex)
{
Logger.LogDebug(ex);
return BadRequest(ex.Message);
}
return Ok("Transaction is successfully broadcasted.");
}
/// <summary>
/// Gets block filters from the provided block hash.
/// </summary>
/// <remarks>
/// Filter examples:
///
/// Main: 0000000000000000001c8018d9cb3b742ef25114f27563e3fc4a1902167f9893
/// TestNet: 00000000000f0d5edcaeba823db17f366be49a80d91d15b77747c2e017b8c20a
/// RegTest: 0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206
///
/// </remarks>
/// <param name="bestKnownBlockHash">The best block hash the client knows its filter.</param>
/// <param name="count">The number of filters to return.</param>
/// <returns>The best height and an array of block hash : element count : filter pairs.</returns>
/// <response code="200">The best height and an array of block hash : element count : filter pairs.</response>
/// <response code="204">When the provided hash is the tip.</response>
/// <response code="400">The provided hash was malformed or the count value is out of range</response>
/// <response code="404">If the hash is not found. This happens at blockhain reorg.</response>
[HttpGet("filters")]
[ProducesResponseType(200)] // Note: If you add typeof(IList<string>) then swagger UI visualization will be ugly.
[ProducesResponseType(204)]
[ProducesResponseType(400)]
[ProducesResponseType(404)]
public IActionResult GetFilters([FromQuery, Required] string bestKnownBlockHash, [FromQuery, Required] int count)
{
if (count <= 0)
{
return BadRequest("Invalid block hash or count is provided.");
}
var knownHash = new uint256(bestKnownBlockHash);
(Height bestHeight, IEnumerable<FilterModel> filters) = Global.IndexBuilderService.GetFilterLinesExcluding(knownHash, count, out bool found);
if (!found)
{
return NotFound($"Provided {nameof(bestKnownBlockHash)} is not found: {bestKnownBlockHash}.");
}
if (!filters.Any())
{
return NoContent();
}
var response = new FiltersResponse
{
BestHeight = bestHeight,
Filters = filters
};
return Ok(response);
}
[HttpGet("status")]
[ProducesResponseType(typeof(StatusResponse), 200)]
public async Task<StatusResponse> GetStatusAsync()
{
try
{
var cacheKey = $"{nameof(GetStatusAsync)}";
var cacheOptions = new MemoryCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromSeconds(7) };
return await Cache.AtomicGetOrCreateAsync(
cacheKey,
cacheOptions,
() => FetchStatusAsync());
}
catch (Exception ex)
{
Logger.LogDebug(ex);
throw;
}
}
private async Task<StatusResponse> FetchStatusAsync()
{
StatusResponse status = new StatusResponse();
// Updating the status of the filters.
if (DateTimeOffset.UtcNow - Global.IndexBuilderService.LastFilterBuildTime > FilterTimeout)
{
// Check whether the last generated filter was created for one of the last two blocks on the blockchain.
var lastFilter = Global.IndexBuilderService.GetLastFilter();
var lastFilterHash = lastFilter.Header.BlockHash;
var bestHash = await RpcClient.GetBestBlockHashAsync();
var lastBlockHeader = await RpcClient.GetBlockHeaderAsync(bestHash);
var prevHash = lastBlockHeader.HashPrevBlock;
if (bestHash == lastFilterHash || prevHash == lastFilterHash)
{
status.FilterCreationActive = true;
}
}
else
{
status.FilterCreationActive = true;
}
// Updating the status of CoinJoin
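// CoinJoin is reported active if the last successful round happened within twice the input registration timeout, with a minimum window of one hour.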
var validInterval = TimeSpan.FromSeconds(Global.Coordinator.RoundConfig.InputRegistrationTimeout * 2);
if (validInterval < TimeSpan.FromHours(1))
{
validInterval = TimeSpan.FromHours(1);
}
if (DateTimeOffset.UtcNow - Global.Coordinator.LastSuccessfulCoinJoinTime < validInterval)
{
status.CoinJoinCreationActive = true;
}
return status;
}
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Mail.Net.IMAP.Server
{
#region usings
using System;
using System.Collections;
using Mime;
#endregion
/// <summary>
/// IMAP search command grouped(parenthesized) search-key collection.
/// </summary>
internal class SearchGroup
{
#region Members
private readonly ArrayList m_pSearchKeys;
#endregion
#region Constructor
/// <summary>
/// Default constructor.
/// </summary>
public SearchGroup()
{
m_pSearchKeys = new ArrayList();
}
#endregion
#region Methods
/// <summary>
/// Parses search key from current position.
/// </summary>
/// <param name="reader"></param>
public void Parse(StringReader reader)
{
//Remove spaces from string start
reader.ReadToFirstChar();
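// A leading '(' means the search keys are grouped; re-scope the reader to the parenthesized content only.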
if (reader.StartsWith("("))
{
reader = new StringReader(reader.ReadParenthesized().Trim());
}
//--- Start parsing search keys --------------//
while (reader.Available > 0)
{
object searchKey = ParseSearchKey(reader);
if (searchKey != null)
{
m_pSearchKeys.Add(searchKey);
}
}
//--------------------------------------------//
}
/// <summary>
/// Gets if message Header is needed for matching.
/// </summary>
/// <returns></returns>
public bool IsHeaderNeeded()
{
foreach (object searchKey in m_pSearchKeys)
{
if (IsHeaderNeededForKey(searchKey))
{
return true;
}
}
return false;
}
/// <summary>
/// Gets if message body text is needed for matching.
/// </summary>
/// <returns></returns>
public bool IsBodyTextNeeded()
{
foreach (object searchKey in m_pSearchKeys)
{
if (IsBodyTextNeededForKey(searchKey))
{
return true;
}
}
return false;
}
/// <summary>
/// Gets if specified message matches with this class search-key.
/// </summary>
/// <param name="no">IMAP message sequence number.</param>
/// <param name="uid">IMAP message UID.</param>
/// <param name="size">IMAP message size in bytes.</param>
/// <param name="internalDate">IMAP message INTERNALDATE (dateTime when server stored message).</param>
/// <param name="flags">IMAP message flags.</param>
/// <param name="mime">Mime message main header only.</param>
/// <param name="bodyText">Message body text.</param>
/// <returns></returns>
public bool Match(long no,
long uid,
long size,
DateTime internalDate,
IMAP_MessageFlags flags,
Mime mime,
string bodyText)
{
// We must match all keys, if one fails, no need to check others
foreach (object searchKey in m_pSearchKeys)
{
if (!Match_Key_Value(searchKey, no, uid, size, internalDate, flags, mime, bodyText))
{
return false;
}
}
return true;
}
#endregion
#region Internal methods
/// <summary>
/// Parses SearchGroup or SearchItem from reader. If reader starts with (, then parses searchGroup, otherwise SearchItem.
/// </summary>
/// <param name="reader"></param>
/// <returns></returns>
internal static object ParseSearchKey(StringReader reader)
{
//Remove spaces from string start
reader.ReadToFirstChar();
// SearchGroup
if (reader.StartsWith("("))
{
SearchGroup searchGroup = new SearchGroup();
searchGroup.Parse(reader);
return searchGroup;
}
// SearchItem
else
{
return SearchKey.Parse(reader);
}
}
/// <summary>
/// Gets if specified message matches to specified search key.
/// </summary>
/// <param name="searchKey">SearchKey or SearchGroup.</param>
/// <param name="no">IMAP message sequence number.</param>
/// <param name="uid">IMAP message UID.</param>
/// <param name="size">IMAP message size in bytes.</param>
/// <param name="internalDate">IMAP message INTERNALDATE (dateTime when server stored message).</param>
/// <param name="flags">IMAP message flags.</param>
/// <param name="mime">Mime message main header only.</param>
/// <param name="bodyText">Message body text.</param>
/// <returns></returns>
internal static bool Match_Key_Value(object searchKey,
long no,
long uid,
long size,
DateTime internalDate,
IMAP_MessageFlags flags,
Mime mime,
string bodyText)
{
if (searchKey.GetType() == typeof (SearchKey))
{
if (!((SearchKey) searchKey).Match(no, uid, size, internalDate, flags, mime, bodyText))
{
return false;
}
}
else if (searchKey.GetType() == typeof (SearchGroup))
{
if (!((SearchGroup) searchKey).Match(no, uid, size, internalDate, flags, mime, bodyText))
{
return false;
}
}
return true;
}
/// <summary>
/// Gets if message header is needed for matching.
/// </summary>
/// <param name="searchKey"></param>
/// <returns></returns>
internal static bool IsHeaderNeededForKey(object searchKey)
{
if (searchKey.GetType() == typeof (SearchKey))
{
if (((SearchKey) searchKey).IsHeaderNeeded())
{
return true;
}
}
else if (searchKey.GetType() == typeof (SearchGroup))
{
if (((SearchGroup) searchKey).IsHeaderNeeded())
{
return true;
}
}
return false;
}
/// <summary>
/// Gets if message body text is needed for matching.
/// </summary>
/// <param name="searchKey"></param>
/// <returns></returns>
internal static bool IsBodyTextNeededForKey(object searchKey)
{
if (searchKey.GetType() == typeof (SearchKey))
{
if (((SearchKey) searchKey).IsBodyTextNeeded())
{
return true;
}
}
else if (searchKey.GetType() == typeof (SearchGroup))
{
if (((SearchGroup) searchKey).IsBodyTextNeeded())
{
return true;
}
}
return false;
}
#endregion
}
}
| |
//-----------------------------------------------------------------------------
//
// <copyright file="NativeRightsManagementAPIsConstants.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// Description:
// These are the Constant declarations for interop services required to call into unmanaged
// Promethium Rights Management SDK APIs
//
// History:
// 06/13/2005: IgorBel: Initial implementation.
//
//-----------------------------------------------------------------------------
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.Security;
namespace MS.Internal.Security.RightsManagement
{
internal enum SecurityProviderType: uint
{
SoftwareSecRep = 0
}
internal enum SpecType : uint
{
Unknown = 0,
FileName = 1
}
internal enum StatusMessage: uint
{
ActivateMachine = 0,
ActivateGroupIdentity = 1,
AcquireLicense = 2,
AcquireAdvisory = 3,
SignIssuanceLicense = 4,
AcquireClientLicensor = 5
}
[Flags]
internal enum EnumerateLicenseFlags: uint
{
Machine = 0x0001,
GroupIdentity = 0x0002,
GroupIdentityName = 0x0004,
GroupIdentityLid = 0x0008,
SpecifiedGroupIdentity = 0x0010,
Eul = 0x0020,
EulLid = 0x0040,
ClientLicensor = 0x0080,
ClientLicensorLid = 0x0100,
SpecifiedClientLicensor = 0x0200,
RevocationList = 0x0400,
RevocationListLid = 0x0800,
Expired = 0x1000,
}
[Flags]
internal enum ActivationFlags: uint
{
Machine = 0x01, // Activate machine
GroupIdentity = 0x02, // Activate Group Identity
Temporary = 0x04, // Temporary certificate
Cancel = 0x08, // Cancel previous request
Silent = 0x10, // Silent Activation
SharedGroupIdentity = 0x20, // Shared Group Identity certificate
Delayed = 0x40, // Delayed activation
}
[Flags]
internal enum ServiceType: uint
{
Activation = 0x01,
Certification = 0x02,
Publishing = 0x04,
ClientLicensor = 0x08,
}
internal enum ServiceLocation : uint
{
Internet = 0x01,
Enterprise = 0x02,
}
[Flags]
internal enum AcquireLicenseFlags: uint
{
NonSilent = 0x01, // Acquire non-silently
NoPersist = 0x02, // Don't persist the license
Cancel = 0x04, // Cancel previous request
FetchAdvisory = 0x08, // Don't acquire advisories
NoUI = 0x10, // Don't display any Authentication UI
}
[Flags]
internal enum SignIssuanceLicenseFlags: uint
{
Online = 0x01,
Offline = 0x02,
Cancel = 0x04,
ServerIssuanceLicense = 0x08,
AutoGenerateKey = 0x10,
OwnerLicenseNoPersist = 0x20,
}
internal enum DistributionPointInfo
{
LicenseAcquisition = 0,
Publishing = 1,
ReferralInfo = 2,
}
internal enum LicenseAttributeEncoding
{
Base64 = 0,
String = 1,
Long = 2,
Time = 3,
UInt = 4,
Raw = 5
};
internal static class NativeConstants
{
public const uint DrmCallbackVersion = 1;
/////////////////////////////////////////////////////////////////////////////////
//
//The following codes are used to indicate where the various query strings may be used:
//
//for example, GI(*) means that all DRMHANDLES may be asked the indicated question using DRMGetInfo &
// GI(hEnv) means on environment handle only
//
//GI: DRMGetInfo
//GULA: DRMGetUnboundLicenseAttribute
//GULO: DRMGetUnboundLicenseObject
//GBLA: DRMGetBoundLicenseAttribute
//GBLO: DRMGetBoundLicenseObject
//
/////////////////////////////////////////////////////////////////////////////////
internal const string TAG_ASCII = "ASCII Tag";
internal const string TAG_XRML = "XrML Tag";
internal const string TAG_FILENAME = "filename";
internal const string TAG_MSGUID = "MS-GUID";
internal const string PLUG_STANDARDENABLINGPRINCIPAL = "UDStdPlg Enabling Principal";
internal const string PLUG_STANDARDRIGHTSINTERPRETER = "XrMLv2a";
internal const string PLUG_STANDARDEBDECRYPTOR = "UDStdPlg Enabling Bits Decryptor";
internal const string PLUG_STANDARDEBENCRYPTOR = "UDStdPlg Enabling Bits Encryptor";
internal const string PLUG_STANDARDEBCRYPTOPROVIDER = "UDStdPlg Enabling Bits Crypto Provider";
internal const string PLUG_STANDARDLIBRARY = "UDStdPlg";
internal const string ALGORITHMID_DES = "DES";
internal const string ALGORITHMID_COCKTAIL = "COCKTAIL";
internal const string ALGORITHMID_AES = "AES";
internal const string ALGORITHMID_RC4 = "RC4";
// QUERY CONSTANTS BELOW HERE ////////////////////////////////////////////////
// GI(*)
internal const string QUERY_OBJECTIDTYPE = "object-id-type";
internal const string QUERY_OBJECTID = "object-id";
// GBLA(on a bound right object), GULA(on a principal object, rights group, right, & work)
internal const string QUERY_NAME = "name";
// GBLA(on a bound license)
internal const string QUERY_CONTENTIDTYPE = "content-id-type";
internal const string QUERY_CONTENTIDVALUE = "content-id-value";
internal const string QUERY_CONTENTSKUTYPE = "content-sku-type";
internal const string QUERY_CONTENTSKUVALUE = "content-sku-value";
// GI(hEnv)
internal const string QUERY_MANIFESTSOURCE = "manifest-xrml";
internal const string QUERY_MACHINECERTSOURCE = "machine-certificate-xrml";
// GI(hEnv)
internal const string QUERY_APIVERSION = "api-version";
internal const string QUERY_SECREPVERSION = "secrep-version";
// GI(hCrypto)
internal const string QUERY_BLOCKSIZE = "block-size";
// GULO(on a condition list), GBLO(on a bound right)
internal const string QUERY_ACCESSCONDITION = "access-condition";
// GULA(on a principal)
internal const string QUERY_ADDRESSTYPE = "address-type";
internal const string QUERY_ADDRESSVALUE = "address-value";
internal const string QUERY_APPDATANAME = "appdata-name";
internal const string QUERY_APPDATAVALUE = "appdata-value";
// GULA(on a license, a work, and rights group, or a right)
internal const string QUERY_CONDITIONLIST = "condition-list";
// GULO(on a license or revocation condition)
internal const string QUERY_DISTRIBUTIONPOINT = "distribution-point";
internal const string QUERY_OBJECTTYPE = "object-type";
// GBLA(on a bound license)
internal const string QUERY_ENABLINGPRINCIPALIDTYPE = "enabling-principal-id-type";
internal const string QUERY_ENABLINGPRINCIPALIDVALUE = "enabling-principal-id-value";
// GULO(on a license)
internal const string QUERY_GROUPIDENTITYPRINCIPAL = "group-identity-principal";
// GULO(on an interval time condition)
internal const string QUERY_FIRSTUSETAG = "first-use-tag";
// GULA(on a range time condition)
internal const string QUERY_FROMTIME = "from-time";
// GULA(on a license, principal, or work)
internal const string QUERY_IDTYPE = "id-type";
internal const string QUERY_IDVALUE = "id-value";
// GULO(on a license)
internal const string QUERY_ISSUEDPRINCIPAL = "issued-principal";
// GULA(on a license)
internal const string QUERY_ISSUEDTIME = "issued-time";
// GULO(on a license)
internal const string QUERY_ISSUER = "issuer";
// GULO(on a work)
internal const string QUERY_OWNER = "owner";
// GULO(on an access condition)
internal const string QUERY_PRINCIPAL = "principal";
// GI(hEnablingPrincipal)
internal const string QUERY_PRINCIPALIDVALUE = "principal-id-value";
internal const string QUERY_PRINCIPALIDTYPE = "principal-id-type";
// GULO GBLO (on a condition list)
internal const string QUERY_RANGETIMECONDITION = "rangetime-condition";
internal const string QUERY_OSEXCLUSIONCONDITION = "os-exclusion-condition";
// GULA
internal const string QUERY_INTERVALTIMECONDITION = "intervaltime-condition";
internal const string QUERY_INTERVALTIMEINTERVAL = "intervaltime-interval";
internal const string QUERY_MAXVERSION = "max-version";
internal const string QUERY_MINVERSION = "min-version";
// GULA(on a revocation condition)
internal const string QUERY_REFRESHPERIOD = "refresh-period";
// GULO(on a condition list)
internal const string QUERY_REVOCATIONCONDITION = "revocation-condition";
// GULO(on a rights group), GBLO(on a bound license)
internal const string QUERY_RIGHT = "right";
// GULO(on a work)
internal const string QUERY_RIGHTSGROUP = "rights-group";
// GULA(on a right), GBLA(on a bound right)
internal const string QUERY_RIGHTSPARAMETERNAME = "rights-parameter-name";
internal const string QUERY_RIGHTSPARAMETERVALUE = "rights-parameter-value";
// GULA(on a work)
internal const string QUERY_SKUTYPE = "sku-type";
internal const string QUERY_SKUVALUE = "sku-value";
// GULA(on an interval time or metered time condition)
internal const string QUERY_TIMEINTERVAL = "time-interval";
// GULA(on a range time condition)
internal const string QUERY_UNTILTIME = "until-time";
// GULA(on a license)
internal const string QUERY_VALIDITYFROMTIME = "valid-from";
internal const string QUERY_VALIDITYUNTILTIME = "valid-until";
// GULO(on a license)
internal const string QUERY_WORK = "work";
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Management.Automation;
using DiscUtils.Common;
using DiscUtils.PowerShell.VirtualDiskProvider;
namespace DiscUtils.PowerShell
{
[Cmdlet(VerbsCommon.New, "VirtualDisk")]
public class NewVirtualDiskCommand : PSCmdlet
{
[Parameter(Mandatory = true, Position = 0)]
public string LiteralPath { get; set; }
[Parameter(Mandatory = true, ParameterSetName="New")]
[ValidateLength(1,int.MaxValue)]
public string Type { get; set; }
[Parameter(Mandatory = true, ParameterSetName="New")]
public string Size { get; set; }
[Parameter(Mandatory = true, ParameterSetName = "Diff")]
public SwitchParameter Differencing { get; set; }
[Parameter(Mandatory = true, ParameterSetName = "Diff")]
public string BaseDisk { get; set; }
protected override void ProcessRecord()
{
if (ParameterSetName == "New")
{
CreateNewDisk();
}
else
{
CreateDiffDisk();
}
}
private void CreateNewDisk()
{
string[] typeAndVariant = Type.Split('-');
if (typeAndVariant.Length < 1 || typeAndVariant.Length > 2)
{
WriteError(new ErrorRecord(
new ArgumentException("Invalid Type of disk"),
"BadDiskType",
ErrorCategory.InvalidArgument,
null));
return;
}
long size;
if (!DiscUtils.Common.Utilities.TryParseDiskSize(Size, out size))
{
WriteError(new ErrorRecord(
new ArgumentException("Unable to parse the disk size"),
"BadDiskSize",
ErrorCategory.InvalidArgument,
null));
return;
}
string type = typeAndVariant[0];
string variant = typeAndVariant.Length > 1 ? typeAndVariant[1] : null;
string child;
PSObject parentObj = ResolveNewDiskPath(out child);
VirtualDisk disk = null;
if (parentObj.BaseObject is DirectoryInfo)
{
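// Create the disk file, then let the using block dispose of it right away; the OnDemandVirtualDisk wrapper returned below opens it again on demand.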
string path = Path.Combine(((DirectoryInfo)parentObj.BaseObject).FullName, child);
using (VirtualDisk realDisk = VirtualDisk.CreateDisk(type, variant, path, size, null, null)) { }
disk = new OnDemandVirtualDisk(path, FileAccess.ReadWrite);
}
else if (parentObj.BaseObject is DiscDirectoryInfo)
{
DiscDirectoryInfo ddi = (DiscDirectoryInfo)parentObj.BaseObject;
string path = Path.Combine(ddi.FullName, child);
using (VirtualDisk realDisk = VirtualDisk.CreateDisk(ddi.FileSystem, type, variant, path, size, null, null)) { }
disk = new OnDemandVirtualDisk(ddi.FileSystem, path, FileAccess.ReadWrite);
}
else
{
WriteError(new ErrorRecord(
new DirectoryNotFoundException("Cannot create a virtual disk in that location"),
"BadDiskLocation",
ErrorCategory.InvalidArgument,
null));
return;
}
WriteObject(disk, false);
}
private void CreateDiffDisk()
{
string child;
PSObject parentObj = ResolveNewDiskPath(out child);
PSObject baseDiskObj = SessionState.InvokeProvider.Item.Get(new string[] { BaseDisk }, false, true)[0];
VirtualDisk baseDisk = null;
try
{
if (baseDiskObj.BaseObject is FileInfo)
{
baseDisk = VirtualDisk.OpenDisk(((FileInfo)baseDiskObj.BaseObject).FullName, FileAccess.Read);
}
else if (baseDiskObj.BaseObject is DiscFileInfo)
{
DiscFileInfo dfi = (DiscFileInfo)baseDiskObj.BaseObject;
baseDisk = VirtualDisk.OpenDisk(dfi.FileSystem, dfi.FullName, FileAccess.Read);
}
else
{
WriteError(new ErrorRecord(
new FileNotFoundException("The file specified by the BaseDisk parameter doesn't exist"),
"BadBaseDiskLocation",
ErrorCategory.InvalidArgument,
null));
return;
}
VirtualDisk newDisk = null;
if (parentObj.BaseObject is DirectoryInfo)
{
string path = Path.Combine(((DirectoryInfo)parentObj.BaseObject).FullName, child);
using (baseDisk.CreateDifferencingDisk(path)) { }
newDisk = new OnDemandVirtualDisk(path, FileAccess.ReadWrite);
}
else if (parentObj.BaseObject is DiscDirectoryInfo)
{
DiscDirectoryInfo ddi = (DiscDirectoryInfo)parentObj.BaseObject;
string path = Path.Combine(ddi.FullName, child);
using (baseDisk.CreateDifferencingDisk(ddi.FileSystem, path)) { }
newDisk = new OnDemandVirtualDisk(ddi.FileSystem, path, FileAccess.ReadWrite);
}
else
{
WriteError(new ErrorRecord(
new DirectoryNotFoundException("Cannot create a virtual disk in that location"),
"BadDiskLocation",
ErrorCategory.InvalidArgument,
null));
return;
}
WriteObject(newDisk, false);
}
finally
{
if (baseDisk != null)
{
baseDisk.Dispose();
}
}
}
private PSObject ResolveNewDiskPath(out string child)
{
PSObject parentObj;
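// Split LiteralPath into the parent container and the new item's name, then resolve the parent through the session's provider.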
child = SessionState.Path.ParseChildName(LiteralPath);
string parent = SessionState.Path.ParseParent(LiteralPath, null);
PathInfo parentPath = this.SessionState.Path.GetResolvedPSPathFromPSPath(parent)[0];
parentObj = SessionState.InvokeProvider.Item.Get(new string[] { parentPath.Path }, false, true)[0];
// If we got a Volume, then we need to send a special marker to the provider indicating that we
// actually wanted the root directory on the volume, not the volume itself.
if (parentObj.BaseObject is VolumeInfo)
{
parentObj = SessionState.InvokeProvider.Item.Get(new string[] { Path.Combine(parentPath.Path, @"$Root") }, false, true)[0];
}
return parentObj;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Runtime.InteropServices;
using System.Threading;
using Microsoft.Win32.SafeHandles;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine.Shared;
namespace Microsoft.Build.BuildEngine
{
internal enum SharedMemoryType
{
ReadOnly,
WriteOnly
}
/// <summary>
/// The shared memory is used to transmit serialized LocalCallDescriptors.
/// These local call descriptors encapsulate commands and data that needs
/// to be communicated between the parent and child objects. This enumeration
/// is used by the shared memory to mark what kind of LocalCallDescriptor
/// object is in the shared memory so it can be correctly deserialized.
/// This marker is placed at the front of the object in the shared memory.
/// Enumeration of LocalCallDescriptor Types
/// </summary>
internal enum ObjectType
{
// Has the object been serialized using .net serialization (binary formatter)
NetSerialization = 1,
// Used to mark that the next int read represents how many bytes are in the
// large object which is about to be sent
FrameMarker = 2,
// Mark the end of the batch in sharedMemory.
EndMarker = 3,
// Below are the enumeration values for messages / commands which are
// passed between the child and the parent processes
PostBuildRequests = 4,
PostBuildResult = 5,
PostLoggingMessagesToHost = 6,
UpdateNodeSettings = 7,
RequestStatus = 8,
PostStatus = 9,
InitializeNode = 10,
InitializationComplete = 11,
ShutdownNode = 12,
ShutdownComplete = 13,
PostIntrospectorCommand = 14,
GenericSingleObjectReply = 15,
PostCacheEntriesToHost = 16,
GetCacheEntriesFromHost = 17
}
/// <summary>
/// This class is responsible for providing a communication channel between
/// a child process and a parent process. Each process (child or parent) will
/// have two SharedMemory class instances, one for reading and one for writing.
/// For example, a parent will have one shared memory class to "read" data
/// sent from the child and one to "write" data to the child. The shared memory communicates
/// through named shared memory regions.
/// </summary>
internal class SharedMemory : IDisposable
{
#region Construction
private SharedMemory()
{
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="name">
/// The name the shared memory will be given, this is combination of node,
/// username, admin status, and some other ones,
/// see LocalNodeProviderGlobalNames.NodeInputMemoryName for greater detail.
/// </param>
/// <param name="type">
/// This type determines which lock and stream needs to be instantiated
/// within the shared memory class. For example,
/// read only means, only create a memory stream,
/// a read lock and a backing byte array and a binary reader. A write
/// only type means, create a memory stream, write lock and a binary writer.
/// This type however does not set the type of the memory mapped section,
/// the memory mapped section itself is created
/// with READWRITE access.
///</param>
/// <param name="allowExistingMapping">
/// The shared memory is given a parameter to determine whether or not to
/// reuse an existing mapped memory section. When the node is first created
/// this is false, however when the shared memory threads are created this
/// is true. We do this because we create the shared memory when the node
/// is created; at this point there should be no shared memory with the
/// same name. However when we create the reader and writer threads
/// (which happens on node reuse) we want to reuse the memory.
///</param>
internal SharedMemory(string name, SharedMemoryType type, bool allowExistingMapping)
{
this.type = type;
InitializeMemoryMapping(name, allowExistingMapping);
writeBytesRemaining = 0;
readBytesRemaining = 0;
readBytesTotal = 0;
largeObjectsQueue = null;
// Has the shared memory been properly created and is ready to use
if (IsUsable)
{
// Setup the structures for either a read only or write only stream
InitializeStreams(type);
try
{
// This could fail if two different administrator accounts try and
// access each others nodes as events and semaphores are protected
// against cross account access
InitializeSynchronization();
}
catch (System.UnauthorizedAccessException)
{
if (writeStream != null)
{
// Closes binary writer and the underlying stream
binaryWriter.Close();
}
if (readStream != null)
{
// Closes binary reader and the underlying stream
binaryReader.Close();
}
NativeMethods.UnmapViewOfFile(pageFileView);
pageFileMapping.Dispose();
}
}
}
/// <summary>
/// Creates the shared memory region and map a view to it.
/// </summary>
private void InitializeMemoryMapping(string memoryMapName, bool allowExistingMapping)
{
// Null means use the default security permissions
IntPtr pointerToSecurityAttributes = NativeMethods.NullPtr;
IntPtr pSDNative = IntPtr.Zero;
try
{
// Check to see if the user is an administrator, this is done to prevent non
// administrator processes from accessing the shared memory. On a vista machine
// the check does not differentiate between the application being elevated to have
// administrator rights or the application being started with administrator rights.
// If the user is an administrator, create a new set of securityAttributes which make
// the shared memory only accessible to administrators.
if (NativeMethods.IsUserAdministrator())
{
NativeMethods.SECURITY_ATTRIBUTES saAttr = new NativeMethods.SECURITY_ATTRIBUTES();
uint pSDLength = 0;
if (!NativeMethods.ConvertStringSecurityDescriptorToSecurityDescriptor(NativeMethods.ADMINONLYSDDL, NativeMethods.SECURITY_DESCRIPTOR_REVISION, ref pSDNative, ref pSDLength))
{
throw new System.ComponentModel.Win32Exception();
}
saAttr.bInheritHandle = 0;
saAttr.nLength = Marshal.SizeOf(typeof(NativeMethods.SECURITY_ATTRIBUTES));
saAttr.lpSecurityDescriptor = pSDNative;
pointerToSecurityAttributes = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(NativeMethods.SECURITY_ATTRIBUTES)));
Marshal.StructureToPtr(saAttr, pointerToSecurityAttributes, true);
}
// The file mapping has either the default (current user) security permissions or
// permissions restricted to only administrator users depending on the check above.
// If pointerToSecurityAttributes is null the default permissions are used.
this.pageFileMapping =
NativeMethods.CreateFileMapping
(
NativeMethods.InvalidHandle,
pointerToSecurityAttributes,
NativeMethods.PAGE_READWRITE,
0,
size + 4,
memoryMapName
);
// If only new mappings are allowed and the current one has been created by somebody else
// delete the mapping. Note that we would like to compare the GetLastError value against
// ERROR_ALREADY_EXISTS but CLR sometimes overwrites the last error so to be safe we'll
// not reuse the node for any unsuccessful value.
if (!allowExistingMapping && Marshal.GetLastWin32Error() != NativeMethods.ERROR_SUCCESS)
{
if (!pageFileMapping.IsInvalid && !pageFileMapping.IsClosed)
{
NativeMethods.UnmapViewOfFile(pageFileView);
pageFileMapping.Close();
}
}
}
finally
{
NativeMethods.LocalFree(pointerToSecurityAttributes);
NativeMethods.LocalFree(pSDNative);
}
if (!this.pageFileMapping.IsInvalid && !pageFileMapping.IsClosed)
{
// Maps a view of a file mapping into the address space of the calling process so that we can use the
// view to read and write to the shared memory region.
this.pageFileView =
NativeMethods.MapViewOfFile
(
this.pageFileMapping,
NativeMethods.FILE_MAP_ALL_ACCESS, // Give the map read, write, and copy access
0, // Start mapped view at high order offset 0
0, // Start mapped view at low order offset 0
// The size of the shared memory plus some extra space for an int
// to write the number of bytes written
(IntPtr)(size + 4)
);
// Check to see if the file view has been created on the fileMapping.
if (this.pageFileView == NativeMethods.NullPtr)
{
// Make the shared memory not usable.
this.pageFileMapping.Close();
}
else
{
this.name = memoryMapName;
}
}
}
/// <summary>
/// Initialize the MemoryStreams which will be used to contain the serialized data from the LocalCallDescriptors.
/// </summary>
private void InitializeStreams(SharedMemoryType streamType)
{
// Initialize the .net binary formatter in case we need to use .net serialization.
this.binaryFormatter = new BinaryFormatter();
this.loggingTypeCache = new Hashtable();
if (streamType == SharedMemoryType.ReadOnly)
{
this.readBuffer = new byte[size];
this.readStream = new MemoryStream(this.readBuffer);
this.binaryReader = new BinaryReader(this.readStream);
readLock = new object();
}
else if (streamType == SharedMemoryType.WriteOnly)
{
this.writeStream = new MemoryStream();
writeLock = new object();
this.binaryWriter = new BinaryWriter(this.writeStream);
}
else
{
ErrorUtilities.VerifyThrow(false, "Unknown shared memory type.");
}
}
/// <summary>
/// Initialize the synchronization variables which will be used to communicate the status of the shared memory between processes.
/// </summary>
private void InitializeSynchronization()
{
this.unreadBatchCounter = new Semaphore(0, size, this.name + "UnreadBatchCounter");
this.fullFlag = new EventWaitHandle(false, EventResetMode.ManualReset, this.name + "FullFlag");
this.notFullFlag = new EventWaitHandle(true, EventResetMode.ManualReset, this.name + "NotFullFlag");
this.readActionCounter = new Semaphore(0, size + /* for full-flag */ 1, this.name + "ReadActionCounter");
// Make sure the count of unread batches is 0
while (NumberOfUnreadBatches > 0)
{
DecrementUnreadBatchCounter();
}
}
#endregion
#region Disposal
protected virtual void Dispose(bool disposing)
{
if (!disposed)
{
if (IsUsable)
{
NativeMethods.UnmapViewOfFile(pageFileView);
pageFileMapping.Dispose();
unreadBatchCounter.Close();
fullFlag.Close();
notFullFlag.Close();
readActionCounter.Close();
}
if (writeStream != null)
{
// Closes binary writer and the underlying stream
binaryWriter.Close();
}
if (readStream != null)
{
// Closes binary reader and the underlying stream
binaryReader.Close();
}
// Set the sentinel.
disposed = true;
// Suppress finalization of this disposed instance.
if (disposing)
{
GC.SuppressFinalize(this);
}
}
}
public void Dispose()
{
Dispose(true);
}
~SharedMemory()
{
Dispose();
}
#endregion
#region Properties
/// <summary>
/// Indicates whether the shared memory region has been created and initialized properly.
/// </summary>
internal bool IsUsable
{
get
{
return !pageFileMapping.IsInvalid &&
!pageFileMapping.IsClosed &&
(pageFileView != NativeMethods.NullPtr);
}
}
/// <summary>
/// Returns the readActionCounter as a WaitHandle. This WaitHandle is used
/// to notify the SharedMemory reader threads that there is something ready
/// in the shared memory to be read. The ReadFlag will remain set as long as
/// the number of times the shared memory has been read is less than the
/// number of times writer thread has written to the shared memory.
/// </summary>
internal WaitHandle ReadFlag
{
get
{
return readActionCounter;
}
}
/// <summary>
/// Indicates when the SharedMemory is full
/// </summary>
private bool IsFull
{
get
{
// If the flag is set true is returned
// A timeout of 0 means the WaitOne will time out
// instantly and return false if the flag is not set.
return fullFlag.WaitOne(0, false);
}
}
/// <summary>
/// The NumberOfUnreadBatches is the number of "batches" written to shared
/// memory which have not been read yet by the ReaderThread. A batch
/// contains one or more serialized objects.
/// </summary>
private int NumberOfUnreadBatches
{
get
{
// Release the semaphore; this returns the number of times the
// semaphore was entered into. This value reflects the count before
// the release is taken into account.
int numberOfUnreadBatches = unreadBatchCounter.Release();
// Decrement the semaphore to offset the increment used to get the count.
unreadBatchCounter.WaitOne();
return numberOfUnreadBatches;
}
}
#endregion
#region Methods
/// <summary>
/// The shared memory is now full, set the correct synchronization variables to
/// inform the reader thread of this situation.
/// </summary>
private void MarkAsFull()
{
fullFlag.Set();
notFullFlag.Reset();
readActionCounter.Release();
}
/// <summary>
/// The shared memory is no longer full. Set the correct synchronization variables
/// to inform the writer thread of this situation.
/// </summary>
private void MarkAsNotFull()
{
fullFlag.Reset();
notFullFlag.Set();
}
/// <summary>
/// A batch is now in the shared memory and is ready to be read by the reader thread.
/// </summary>
private void IncrementUnreadBatchCounter()
{
// Release increments a semaphore
// http://msdn2.microsoft.com/en-us/library/system.threading.semaphore.aspx
unreadBatchCounter.Release();
readActionCounter.Release();
}
/// <summary>
/// A batch has just been read out of shared memory.
/// </summary>
private void DecrementUnreadBatchCounter()
{
// WaitOne decrements the semaphore
unreadBatchCounter.WaitOne();
}
/// <summary>
/// This function writes out a set of objects into the shared buffer.
/// In normal operation all the objects in the queue are serialized into
/// the buffer followed by an end marker class. If the buffer is not big
/// enough to contain a single object, the object is broken into
/// multiple buffers as follows - first a frame marker is sent containing
/// the size of the serialized object + size of end marker. The reader makes
/// sure upon receiving the frame marker that its buffer is large enough
/// to contain the object about to be sent. After the frame marker the object
/// is sent as a series of buffers until all of it is written out.
/// </summary>
/// <param name="objectsToWrite"> Queue of objects to be sent (mostly logging messages)</param>
/// <param name="objectsToWriteHiPriority">Queue of high priority objects (these are commands and statuses) </param>
/// <param name="blockUntilDone"> If true the function will block until both queues are empty</param>
internal void Write(DualQueue<LocalCallDescriptor> objectsToWrite, DualQueue<LocalCallDescriptor> objectsToWriteHiPriority, bool blockUntilDone)
{
Debug.Assert(type == SharedMemoryType.WriteOnly, "Should only be calling Write from a writeonly shared memory object");
lock (writeLock)
{
// Loop as long as there are objects available and room in the shared memory.
// If blockUntilDone is set continue to loop until all of the objects in both queues are sent.
while ((objectsToWrite.Count > 0 || objectsToWriteHiPriority.Count > 0) &&
((blockUntilDone && notFullFlag.WaitOne()) || !IsFull))
{
bool isFull = false;
long writeStartPosition = writeStream.Position;
bool writeEndMarker = false;
// Put as many LocalCallDescriptor objects as possible into the shared memory
while (!isFull && (objectsToWrite.Count > 0 || objectsToWriteHiPriority.Count > 0))
{
long writeResetPosition = writeStream.Position;
DualQueue<LocalCallDescriptor> currentQueue = objectsToWriteHiPriority.Count > 0 ? objectsToWriteHiPriority : objectsToWrite;
// writeBytesRemaining == 0 is when we are currently not sending a multi part object through
// the shared memory
if (writeBytesRemaining == 0)
{
// Serialize the object to the memory stream
SerializeCallDescriptorToStream(currentQueue);
// If the size of the serialized object plus the end marker fits within the shared memory
// dequeue the object as it is going to be sent.
if ((writeStream.Position + sizeof(byte)) <= size)
{
currentQueue.Dequeue();
writeEndMarker = true;
}
else
{
// The serialized object plus the end marker is larger than the shared memory buffer
// Check if it is necessary to break the object down into multiple buffers
// If the memoryStream was empty before trying to serialize the object
// create a frame marker with the size of the object and send it through the shared memory
if (writeResetPosition == 0)
{
// We don't want to switch from low priority to high priority queue in the middle of sending a large object
// so we make a record of which queue contains the large object
largeObjectsQueue = currentQueue;
// Calculate the total number of bytes that needs to be sent
writeBytesRemaining = (int)(writeStream.Position + sizeof(byte));
// Send a frame marker out to the reader containing the size of the object
writeStream.Position = 0;
// Write the frameMarkerId byte and then the amount of bytes for the large object
writeStream.WriteByte((byte)ObjectType.FrameMarker);
binaryWriter.Write((Int32)writeBytesRemaining);
writeEndMarker = true;
}
else
{
// Some items were placed in the shared Memory buffer, erase the last one which was too large
// and say the buffer is full so it can be sent
writeStream.Position = writeResetPosition;
}
isFull = true;
}
}
else
{
if (writeStream.Position == 0)
{
// Serialize the object which will be split across multiple buffers
SerializeCallDescriptorToStream(largeObjectsQueue);
writeStream.WriteByte((byte)ObjectType.EndMarker);
}
break;
}
}
// If a multi-buffer object is being sent and the large object is still larger than or equal to the shared memory buffer, send the next chunk of the object
if (writeBytesRemaining != 0 && writeStream.Position >= size)
{
// Set write Length to an entire buffer length or just the remaining portion
int writeLength = writeBytesRemaining > size ? size : writeBytesRemaining;
//Write the length of the buffer to the memory file
Marshal.WriteInt32((IntPtr)pageFileView, (int)writeLength);
Marshal.Copy
(
writeStream.GetBuffer(), // Source Buffer
(int)(writeStream.Position - writeBytesRemaining), // Start index
(IntPtr)((int)pageFileView + 4), //Destination (+4 because of the int written to the memory file with the write length)
(int)writeLength // Length of bytes to write
);
writeBytesRemaining = writeBytesRemaining - writeLength;
IncrementUnreadBatchCounter();
// Once the object is fully sent - remove it from the queue
if (writeBytesRemaining == 0)
{
largeObjectsQueue.Dequeue();
}
isFull = true;
}
if (writeEndMarker)
{
writeStream.WriteByte((byte)ObjectType.EndMarker);
// Need to verify the WriteInt32 and ReadInt32 are always atomic operations
//writeSizeMutex.WaitOne();
// Write the size of the buffer to send to the memory stream
Marshal.WriteInt32((IntPtr)pageFileView, (int)writeStream.Position);
//writeSizeMutex.ReleaseMutex();
Marshal.Copy
(
writeStream.GetBuffer(), // Buffer
(int)writeStartPosition, // Start Position
(IntPtr)((int)pageFileView + writeStartPosition + 4), // Destination + 4 for the int indicating the size of the data to be copied to the memory file
(int)(writeStream.Position - writeStartPosition) // Length of data to copy to memory file
);
IncrementUnreadBatchCounter();
}
if (isFull)
{
MarkAsFull();
writeStream.SetLength(0);
}
}
}
}
/// <summary>
/// Serialize the first object in the queue to a memory stream which will be copied into shared memory.
/// The write stream which is being written to is not the shared memory itself, it is a memory stream from which
/// bytes will be copied and placed in the shared memory in the write method.
/// </summary>
private void SerializeCallDescriptorToStream(DualQueue<LocalCallDescriptor> objectsToWrite)
{
// Get the object by peeking at the queue rather than dequeueing the object. This is done
// because we only want to dequeue the object when it has completely been put in shared memory.
// This may be done right away if the object is small enough to fit in the shared memory or
// may happen after the object is sent as a number of smaller chunks.
object objectToWrite = objectsToWrite.Peek();
Debug.Assert(objectToWrite != null, "Expect to get a non-null object from the queue");
if (objectToWrite is LocalCallDescriptorForPostBuildResult)
{
writeStream.WriteByte((byte)ObjectType.PostBuildResult);
((LocalCallDescriptorForPostBuildResult)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForPostBuildRequests)
{
writeStream.WriteByte((byte)ObjectType.PostBuildRequests);
((LocalCallDescriptorForPostBuildRequests)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForPostLoggingMessagesToHost)
{
writeStream.WriteByte((byte)ObjectType.PostLoggingMessagesToHost);
((LocalCallDescriptorForPostLoggingMessagesToHost)objectToWrite).WriteToStream(binaryWriter, loggingTypeCache);
}
else if (objectToWrite is LocalCallDescriptorForInitializeNode)
{
writeStream.WriteByte((byte)ObjectType.InitializeNode);
((LocalCallDescriptorForInitializeNode)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForInitializationComplete)
{
writeStream.WriteByte((byte)ObjectType.InitializationComplete);
((LocalCallDescriptorForInitializationComplete)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForUpdateNodeSettings)
{
writeStream.WriteByte((byte)ObjectType.UpdateNodeSettings);
((LocalCallDescriptorForUpdateNodeSettings)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForRequestStatus)
{
writeStream.WriteByte((byte)ObjectType.RequestStatus);
((LocalCallDescriptorForRequestStatus)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForPostingCacheEntriesToHost)
{
writeStream.WriteByte((byte)ObjectType.PostCacheEntriesToHost);
((LocalCallDescriptorForPostingCacheEntriesToHost)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForGettingCacheEntriesFromHost)
{
writeStream.WriteByte((byte)ObjectType.GetCacheEntriesFromHost);
((LocalCallDescriptorForGettingCacheEntriesFromHost)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForShutdownComplete)
{
writeStream.WriteByte((byte)ObjectType.ShutdownComplete);
((LocalCallDescriptorForShutdownComplete)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForShutdownNode)
{
writeStream.WriteByte((byte)ObjectType.ShutdownNode);
((LocalCallDescriptorForShutdownNode)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForPostIntrospectorCommand)
{
writeStream.WriteByte((byte)ObjectType.PostIntrospectorCommand);
((LocalCallDescriptorForPostIntrospectorCommand)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalReplyCallDescriptor)
{
writeStream.WriteByte((byte)ObjectType.GenericSingleObjectReply);
((LocalReplyCallDescriptor)objectToWrite).WriteToStream(binaryWriter);
}
else if (objectToWrite is LocalCallDescriptorForPostStatus)
{
writeStream.WriteByte((byte)ObjectType.PostStatus);
((LocalCallDescriptorForPostStatus)objectToWrite).WriteToStream(binaryWriter);
}
else
{
// If the object is not one of the well known local descriptors, use .net Serialization to serialize the object
writeStream.WriteByte((byte)ObjectType.NetSerialization);
binaryFormatter.Serialize(writeStream, objectToWrite);
}
}
/// <summary>
/// This function reads data from the shared memory buffer and returns a list
/// of deserialized LocalCallDescriptors or null. The method will return null
/// if the object being sent across is a multi-buffer object. Read needs to
/// be called multiple times until the entire large object has been received.
/// Once this has happened the large object is deserialized and returned in
/// the IList. Read is used by the shared memory reader threads in the LocalNode
/// (child end) and the LocalNodeProvider(ParentEnd) to read LocalCallDescriptors
/// from the shared memory. Read is called from loops in the SharedMemoryReaderThread
/// </summary>
internal IList Read()
{
ErrorUtilities.VerifyThrow(type == SharedMemoryType.ReadOnly, "Should only be calling Read from a readonly shared memory object");
ArrayList objectsRead = null;
int objectId = -1;
lock (readLock)
{
if (NumberOfUnreadBatches > 0)
{
// The read stream is a memory stream where data read from the shared memory section
// will be copied to. From this memory stream LocalCallDescriptors are deserialized.
// Stream position may not be 0 if we are reading a multipart object
int readStartPosition = (int)readStream.Position;
// Read the first int from the memory file. This indicates the number of bytes written to
// shared memory by the write method.
int endWritePosition = Marshal.ReadInt32((IntPtr)((int)pageFileView));
// Copy the bytes written into the shared memory section into the readStream memory stream.
Marshal.Copy
(
(IntPtr)((int)pageFileView + 4 + readStream.Position), // Source
readBuffer, //Destination
(int)(readStream.Position + (readBytesTotal - readBytesRemaining)), // Start Index
(int)(endWritePosition - readStream.Position) //Length of data
);
// If a multi-buffer object is being read - decrement the bytes remaining
if (readBytesRemaining != 0)
{
readBytesRemaining -= endWritePosition;
}
// If a multi-buffer object is not being read (or it has just completed) - try
// deserializing the data from the buffer into a set of objects
if (readBytesRemaining == 0)
{
objectsRead = new ArrayList();
// Deserialize the object in the read stream to a LocalCallDescriptor. The objectId
// is the "ObjectType" which was written to the head of the object when written to the memory stream.
// It describes which kind of object was serialized
object objectRead = DeserializeFromStream(out objectId);
// Check if the writer wants to send a multi-buffer object, by checking
// if the top object is a frame marker.
if (readStartPosition == 0)
{
if ((int)ObjectType.FrameMarker == objectId)
{
int frameSizeInPages = (int)((((int)objectRead) + NativeMethods.PAGE_SIZE)
/ NativeMethods.PAGE_SIZE);
// Read the end marker off the stream
objectId = binaryReader.ReadByte();
// Allocate a bigger readStream buffer to contain the large object which will be sent if necessary
if (readBuffer.Length < frameSizeInPages * NativeMethods.PAGE_SIZE)
{
// Close the binary reader and the underlying stream before recreating a larger buffer
binaryReader.Close();
this.readBuffer = new byte[frameSizeInPages * NativeMethods.PAGE_SIZE];
this.readStream = new MemoryStream(this.readBuffer);
this.readStream.Position = 0;
// ReCreate the reader on the new stream
binaryReader = new BinaryReader(readStream);
}
readBytesRemaining = (int)objectRead;
readBytesTotal = (int)objectRead;
}
else
{
readBytesTotal = 0;
}
}
// Deserialize the objects in the read stream and add them to the ArrayList as long as
// we do not encounter a FrameMarker (which says a large object is next) or the EndMarker
// (which marks the end of the batch).
while (((int)ObjectType.EndMarker != objectId) && ((int)ObjectType.FrameMarker != objectId))
{
objectsRead.Add(objectRead);
objectRead = DeserializeFromStream(out objectId);
}
}
DecrementUnreadBatchCounter();
}
else
{
MarkAsNotFull();
readStream.Position = 0;
}
}
return objectsRead;
}
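// Illustrative reader-thread loop (hypothetical names), sketching how Read is intended to be used;
// the real SharedMemoryReaderThread also waits on readActionCounter before each call:
//
//   IList descriptors = sharedMemory.Read();
//   if (descriptors != null)
//   {
//       foreach (LocalCallDescriptor descriptor in descriptors)
//       {
//           DispatchCall(descriptor); // hypothetical dispatch helper
//       }
//   }
//   // null means nothing was ready or a multi-buffer object is still being accumulated;
//   // call Read again the next time the semaphore is signaled.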
/// <summary>
/// This method first reads the objectId as an int from the stream,
/// this int should be found in the "ObjectType" enumeration. This
/// objectId informs the method what kind of object should be
/// deserialized and returned from the method. The objectId is an
/// output parameter. This parameter is also returned so it can be
/// used in the read and write methods to determine if
/// a frame or end marker was found.
/// </summary>
private object DeserializeFromStream(out int objectId)
{
object objectRead = null;
objectId = readStream.ReadByte();
switch ((ObjectType)objectId)
{
case ObjectType.NetSerialization:
objectRead = binaryFormatter.Deserialize(readStream);
break;
case ObjectType.FrameMarker:
objectRead = binaryReader.ReadInt32();
break;
case ObjectType.PostBuildResult:
objectRead = new LocalCallDescriptorForPostBuildResult();
((LocalCallDescriptorForPostBuildResult)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.PostBuildRequests:
objectRead = new LocalCallDescriptorForPostBuildRequests();
((LocalCallDescriptorForPostBuildRequests)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.PostLoggingMessagesToHost:
objectRead = new LocalCallDescriptorForPostLoggingMessagesToHost();
((LocalCallDescriptorForPostLoggingMessagesToHost)objectRead).CreateFromStream(binaryReader, loggingTypeCache);
break;
case ObjectType.InitializeNode:
objectRead = new LocalCallDescriptorForInitializeNode();
((LocalCallDescriptorForInitializeNode)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.InitializationComplete:
objectRead = new LocalCallDescriptorForInitializationComplete();
((LocalCallDescriptorForInitializationComplete)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.UpdateNodeSettings:
objectRead = new LocalCallDescriptorForUpdateNodeSettings();
((LocalCallDescriptorForUpdateNodeSettings)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.RequestStatus:
objectRead = new LocalCallDescriptorForRequestStatus();
((LocalCallDescriptorForRequestStatus)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.PostCacheEntriesToHost:
objectRead = new LocalCallDescriptorForPostingCacheEntriesToHost();
((LocalCallDescriptorForPostingCacheEntriesToHost)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.GetCacheEntriesFromHost:
objectRead = new LocalCallDescriptorForGettingCacheEntriesFromHost();
((LocalCallDescriptorForGettingCacheEntriesFromHost)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.ShutdownComplete:
objectRead = new LocalCallDescriptorForShutdownComplete();
((LocalCallDescriptorForShutdownComplete)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.ShutdownNode:
objectRead = new LocalCallDescriptorForShutdownNode();
((LocalCallDescriptorForShutdownNode)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.PostIntrospectorCommand:
objectRead = new LocalCallDescriptorForPostIntrospectorCommand(null, null);
((LocalCallDescriptorForPostIntrospectorCommand)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.GenericSingleObjectReply:
objectRead = new LocalReplyCallDescriptor();
((LocalReplyCallDescriptor)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.PostStatus:
objectRead = new LocalCallDescriptorForPostStatus();
((LocalCallDescriptorForPostStatus)objectRead).CreateFromStream(binaryReader);
break;
case ObjectType.EndMarker:
return null;
default:
ErrorUtilities.VerifyThrow(false, "Should not be here, ObjectId:" + objectId + " Next:" + readStream.ReadByte());
break;
}
return objectRead;
}
/// <summary>
/// Reset the state of the shared memory, this is called when the node is
/// initialized for the first time or when the node is activated due to node reuse.
/// </summary>
internal void Reset()
{
if (readStream != null)
{
readStream.Position = 0;
}
if (writeStream != null)
{
writeStream.SetLength(0);
Marshal.WriteInt32((IntPtr)pageFileView, (int)writeStream.Position);
}
largeObjectsQueue = null;
}
#endregion
#region Member data
private const int size = 100 * 1024;
private string name;
private SafeFileHandle pageFileMapping;
private IntPtr pageFileView;
private BinaryFormatter binaryFormatter;
// Binary reader and writer used to read from and write to the memory streams that hold the serialized LocalCallDescriptors before and after they are copied
// to and from the shared memory region.
private BinaryWriter binaryWriter;
private BinaryReader binaryReader;
/// <summary>
/// Memory stream containing the serialized objects before they are sent across the shared memory region
/// </summary>
private MemoryStream writeStream;
// Backing byte array of the readStream
private byte[] readBuffer;
private MemoryStream readStream;
// The count on a semaphore is decremented each time a thread enters the semaphore,
// and incremented when a thread releases the semaphore.
// When the count is zero, subsequent requests block until other threads release the semaphore.
// A semaphore is considered signaled when the count is greater than zero and not signaled when the count is zero.
// unreadBatchCounter is used to track how many batches remain to be read from shared memory.
private Semaphore unreadBatchCounter;
//Used to inform the shared memory reader threads that the writer thread has written something to shared memory for them to read.
//The semaphore is incremented when there is an unread batch available to be read or when the shared memory is full.
//The semaphore is decremented when the shared memory reader thread is about to read from the shared memory.
private Semaphore readActionCounter;
// Whether or not the shared memory is full
private EventWaitHandle fullFlag;
private EventWaitHandle notFullFlag;
private object writeLock;
private object readLock;
// How many bytes remain to be written for the large object to be fully written to shared memory
private int writeBytesRemaining;
// How many bytes remain to be read for the large object to be fully read from shared memory
private int readBytesRemaining;
// Size of the large object in bytes
private int readBytesTotal;
// Have we disposed this object yet;
private bool disposed;
// Is the memory read only or write only
private SharedMemoryType type;
// Because we are using reflection to get the writeToStream and readFromStream methods from the classes in the framework assembly, we found
// we were spending a lot of time reflecting for these methods. The loggingTypeCache caches the MethodInfo for the classes so they can be looked
// up when serializing or deserializing the objects.
private Hashtable loggingTypeCache;
// Keep a pointer to the queue which contains the large object that is currently being sent. We do this because we want to make sure
// that after the object is properly sent we dequeue from the correct queue.
private DualQueue<LocalCallDescriptor> largeObjectsQueue;
#endregion
}
}
| |
/*
* TokenPattern.cs
*/
using System;
using System.Text;
namespace Core.Library {
/**
* A token pattern. This class contains the definition of a token
* (i.e. it's pattern), and allows testing a string against this
* pattern. A token pattern is uniquely identified by an integer id,
* that must be provided upon creation.
*
*
*/
public class TokenPattern {
/**
* The pattern type enumeration.
*/
public enum PatternType {
/**
* The string pattern type is used for tokens that only
* match an exact string.
*/
STRING,
/**
* The regular expression pattern type is used for tokens
* that match a regular expression.
*/
REGEXP
}
/**
* The token pattern identity.
*/
private int id;
/**
* The token pattern name.
*/
private string name;
/**
* The token pattern type.
*/
private PatternType type;
/**
* The token pattern.
*/
private string pattern;
/**
* The token error flag. If this flag is set, it means that an
* error should be reported if the token is found. The error
* message is present in the errorMessage variable.
*
* @see #errorMessage
*/
private bool error = false;
/**
* The token error message. This message will only be set if the
* token error flag is set.
*
* @see #error
*/
private string errorMessage = null;
/**
* The token ignore flag. If this flag is set, it means that the
* token should be ignored if found. If an ignore message is
* present in the ignoreMessage variable, it will also be reported
* as a warning.
*
* @see #ignoreMessage
*/
private bool ignore = false;
/**
* The token ignore message. If this message is set when the token
* ignore flag is also set, a warning message will be printed if
* the token is found.
*
* @see #ignore
*/
private string ignoreMessage = null;
/**
* The optional debug information message. This is normally set
* when the token pattern is analyzed by the tokenizer.
*/
private string debugInfo = null;
/**
* Creates a new token pattern.
*
* @param id the token pattern id
* @param name the token pattern name
* @param type the token pattern type
* @param pattern the token pattern
*/
public TokenPattern(int id,
string name,
PatternType type,
string pattern) {
this.id = id;
this.name = name;
this.type = type;
this.pattern = pattern;
}
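/*
* Usage example (hypothetical token definitions, not part of the library itself):
*
*   TokenPattern number = new TokenPattern(1, "NUMBER", PatternType.REGEXP, "[0-9]+");
*   TokenPattern space = new TokenPattern(2, "WHITESPACE", PatternType.REGEXP, "[ \\t\\n]+");
*   space.Ignore = true;   // matched whitespace will be skipped by the tokenizer
*/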
/**
* The token pattern identity property (read-only). This
* property contains the unique token pattern identity value.
*
*
*/
public int Id {
get {
return id;
}
}
/**
* Returns the unique token pattern identity value.
*
* @return the token pattern id
*
* @see #Id
*
* @deprecated Use the Id property instead.
*/
public int GetId() {
return id;
}
/**
* The token pattern name property (read-only).
*
*
*/
public string Name {
get {
return name;
}
}
/**
* Returns the token pattern name.
*
* @return the token pattern name
*
* @see #Name
*
* @deprecated Use the Name property instead.
*/
public string GetName() {
return name;
}
/**
* The token pattern type property (read-only).
*
*
*/
public PatternType Type {
get {
return type;
}
}
/**
* Returns the token pattern type.
*
* @return the token pattern type
*
* @see #Type
*
* @deprecated Use the Type property instead.
*/
public PatternType GetPatternType() {
return type;
}
/**
* The token pattern property (read-only). This property
* contains the actual pattern (string or regexp) which has
* to be matched.
*
*
*/
public string Pattern {
get {
return pattern;
}
}
/**
* Returns the token pattern.
*
* @return the token pattern
*
* @see #Pattern
*
* @deprecated Use the Pattern property instead.
*/
public string GetPattern() {
return pattern;
}
/**
* The error flag property. If this property is true, the
* token pattern corresponds to an error token and an error
* should be reported if a match is found. When setting this
* property to true, a default error message is created if
* none was previously set.
*
*
*/
public bool Error {
get {
return error;
}
set {
error = value;
if (error && errorMessage == null) {
errorMessage = "unrecognized token found";
}
}
}
/**
* The token error message property. The error message is
* printed whenever the token is matched. Setting the error
* message property also sets the error flag to true.
*
* @see #Error
*
*
*/
public string ErrorMessage {
get {
return errorMessage;
}
set {
error = true;
errorMessage = value;
}
}
/**
* Checks if the pattern corresponds to an error token. If this
* is true, it means that an error should be reported if a
* matching token is found.
*
* @return true if the pattern maps to an error token, or
* false otherwise
*
* @see #Error
*
* @deprecated Use the Error property instead.
*/
public bool IsError() {
return Error;
}
/**
* Returns the token error message if the pattern corresponds to
* an error token.
*
* @return the token error message
*
* @see #ErrorMessage
*
* @deprecated Use the ErrorMessage property instead.
*/
public string GetErrorMessage() {
return ErrorMessage;
}
/**
* Sets the token error flag and assigns a default error message.
*
* @see #Error
*
* @deprecated Use the Error property instead.
*/
public void SetError() {
Error = true;
}
/**
* Sets the token error flag and assigns the specified error
* message.
*
* @param message the error message to display
*
* @see #ErrorMessage
*
* @deprecated Use the ErrorMessage property instead.
*/
public void SetError(string message) {
ErrorMessage = message;
}
/**
* The ignore flag property. If this property is true, the
* token pattern corresponds to an ignore token and should be
* skipped if a match is found.
*
*
*/
public bool Ignore {
get {
return ignore;
}
set {
ignore = value;
}
}
/**
* The token ignore message property. The ignore message is
* printed whenever the token is matched. Setting the ignore
* message property also sets the ignore flag to true.
*
* @see #Ignore
*
*
*/
public string IgnoreMessage {
get {
return ignoreMessage;
}
set {
ignore = true;
ignoreMessage = value;
}
}
/**
* Checks if the pattern corresponds to an ignored token. If this
* is true, it means that the token should be ignored if found.
*
* @return true if the pattern maps to an ignored token, or
* false otherwise
*
* @see #Ignore
*
* @deprecated Use the Ignore property instead.
*/
public bool IsIgnore() {
return Ignore;
}
/**
* Returns the token ignore message if the pattern corresponds to
* an ignored token.
*
* @return the token ignore message
*
* @see #IgnoreMessage
*
* @deprecated Use the IgnoreMessage property instead.
*/
public string GetIgnoreMessage() {
return IgnoreMessage;
}
/**
* Sets the token ignore flag.
*
* @see #Ignore
*
* @deprecated Use the Ignore property instead.
*/
public void SetIgnore() {
Ignore = true;
}
/**
* Sets the token ignore flag and assigns the specified ignore
* message.
*
* @param message the ignore message to display
*
* @see #IgnoreMessage
*
* @deprecated Use the IgnoreMessage property instead.
*/
public void SetIgnore(string message) {
IgnoreMessage = message;
}
/**
* The token debug info message property. This is normally set
* when the token pattern is analyzed by the tokenizer.
*
*
*/
public string DebugInfo {
get {
return debugInfo;
}
set {
debugInfo = value;
}
}
/**
* Returns a string representation of this object.
*
* @return a token pattern string representation
*/
public override string ToString() {
StringBuilder buffer = new StringBuilder();
buffer.Append(name);
buffer.Append(" (");
buffer.Append(id);
buffer.Append("): ");
switch (type) {
case PatternType.STRING:
buffer.Append("\"");
buffer.Append(pattern);
buffer.Append("\"");
break;
case PatternType.REGEXP:
buffer.Append("<<");
buffer.Append(pattern);
buffer.Append(">>");
break;
}
if (error) {
buffer.Append(" ERROR: \"");
buffer.Append(errorMessage);
buffer.Append("\"");
}
if (ignore) {
buffer.Append(" IGNORE");
if (ignoreMessage != null) {
buffer.Append(": \"");
buffer.Append(ignoreMessage);
buffer.Append("\"");
}
}
if (debugInfo != null) {
buffer.Append("\n ");
buffer.Append(debugInfo);
}
return buffer.ToString();
}
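/*
* For the hypothetical WHITESPACE pattern sketched above, ToString() would produce:
*
*   WHITESPACE (2): <<[ \t\n]+>> IGNORE
*/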
/**
* Returns a short string representation of this object.
*
* @return a short string representation of this object
*/
public string ToShortString() {
StringBuilder buffer = new StringBuilder();
int newline = pattern.IndexOf('\n');
if (type == PatternType.STRING) {
buffer.Append("\"");
if (newline >= 0) {
if (newline > 0 && pattern[newline - 1] == '\r') {
newline--;
}
buffer.Append(pattern.Substring(0, newline));
buffer.Append("(...)");
} else {
buffer.Append(pattern);
}
buffer.Append("\"");
} else {
buffer.Append("<");
buffer.Append(name);
buffer.Append(">");
}
return buffer.ToString();
}
}
}
| |
//---------------------------------------------------------------------
// <copyright file="CqlLexerHelper.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------
namespace System.Data.Common.EntitySql
{
using System;
using System.Globalization;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Diagnostics;
using System.Text;
using System.Data.Entity;
/// <summary>
/// Represents eSQL error context.
/// </summary>
internal class ErrorContext
{
/// <summary>
/// Represents the position of the error in the input stream.
/// </summary>
internal int InputPosition = -1;
/// <summary>
/// Represents the additional/contextual information related to the error position/cause.
/// </summary>
internal string ErrorContextInfo;
/// <summary>
/// Defines how ErrorContextInfo should be interpreted.
/// </summary>
internal bool UseContextInfoAsResourceIdentifier = true;
/// <summary>
/// Represents a reference to the original command text.
/// </summary>
internal string CommandText;
}
/// <summary>
/// Represents Cql scanner and helper functions.
/// </summary>
internal sealed partial class CqlLexer
{
static readonly StringComparer _stringComparer = StringComparer.OrdinalIgnoreCase;
static Dictionary<string, short> _keywords;
static HashSet<string> _invalidAliasNames;
static HashSet<string> _invalidInlineFunctionNames;
static Dictionary<string, short> _operators;
static Dictionary<string, short> _punctuators;
static HashSet<string> _canonicalFunctionNames;
static Regex _reDateTimeValue;
static Regex _reTimeValue;
static Regex _reDateTimeOffsetValue;
private const string _datetimeValueRegularExpression = @"^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}([ ])+[0-9]{1,2}:[0-9]{1,2}(:[0-9]{1,2}(\.[0-9]{1,7})?)?$";
private const string _timeValueRegularExpression = @"^[0-9]{1,2}:[0-9]{1,2}(:[0-9]{1,2}(\.[0-9]{1,7})?)?$";
private const string _datetimeOffsetValueRegularExpression = @"^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}([ ])+[0-9]{1,2}:[0-9]{1,2}(:[0-9]{1,2}(\.[0-9]{1,7})?)?([ ])*[\+-][0-9]{1,2}:[0-9]{1,2}$";
private int _iPos;
private int _lineNumber;
ParserOptions _parserOptions;
private string _query;
/// <summary>
/// set for DOT expressions
/// </summary>
private bool _symbolAsIdentifierState = false;
/// <summary>
/// set for AS expressions
/// </summary>
private bool _symbolAsAliasIdentifierState = false;
/// <summary>
/// set for function definitions
/// </summary>
private bool _symbolAsInlineFunctionNameState = false;
/// <summary>
/// Defines the set of characters to be interpreted as mandatory line breaks
/// according to UNICODE 5.0, section 5.8 Newline Guidelines. These are 'mandatory'
/// line breaks. We do not handle other 'line breaking opportunities' as defined by
/// UNICODE 5.0 since they are intended for presentation. The mandatory line break
/// defines breaking opportunities that must not be ignored. For all practical purposes
/// the interpretation of mandatory breaks determines the end of one line and consequently
/// the start of the next line of query text.
/// NOTE that CR and CRLF are treated as a composite 'character' and were obviously and intentionally
/// omitted from the character set below.
/// </summary>
static readonly Char[] _newLineCharacters = { '\u000A' , // LF - line feed
'\u0085' , // NEL - next line
'\u000B' , // VT - vertical tab
'\u2028' , // LS - line separator
'\u2029' // PS - paragraph separator
};
/// <summary>
/// Initializes the scanner
/// </summary>
/// <param name="query">input query</param>
/// <param name="parserOptions">parser options</param>
internal CqlLexer(string query, ParserOptions parserOptions)
: this()
{
Debug.Assert(query != null, "query must not be null");
Debug.Assert(parserOptions != null, "parserOptions must not be null");
_query = query;
_parserOptions = parserOptions;
yy_reader = new System.IO.StringReader(_query);
}
/// <summary>
/// Creates a new token.
/// </summary>
/// <param name="tokenId">tokenid</param>
/// <param name="tokenvalue">ast node</param>
/// <returns></returns>
static internal Token NewToken(short tokenId, AST.Node tokenvalue)
{
return new Token(tokenId, tokenvalue);
}
/// <summary>
/// Creates a new token representing a terminal.
/// </summary>
/// <param name="tokenId">tokenid</param>
/// <param name="termToken">lexical value</param>
/// <returns></returns>
static internal Token NewToken(short tokenId, TerminalToken termToken)
{
return new Token(tokenId, termToken);
}
/// <summary>
/// Represents a token to be used in parser stack.
/// </summary>
internal class Token
{
private short _tokenId;
private object _tokenValue;
internal Token(short tokenId, AST.Node tokenValue)
{
_tokenId = tokenId;
_tokenValue = tokenValue;
}
internal Token(short tokenId, TerminalToken terminal)
{
_tokenId = tokenId;
_tokenValue = terminal;
}
internal short TokenId
{
get { return _tokenId; }
}
internal object Value
{
get { return _tokenValue; }
}
}
/// <summary>
/// Represents a terminal token
/// </summary>
internal class TerminalToken
{
string _token;
int _iPos;
internal TerminalToken(string token, int iPos)
{
_token = token;
_iPos = iPos;
}
internal int IPos
{
get { return _iPos; }
}
internal string Token
{
get { return _token; }
}
}
internal static class yy_translate
{
internal static char translate(char c)
#region TRANSLATE
{
if (Char.IsWhiteSpace(c) || Char.IsControl(c))
{
if (IsNewLine(c))
{
return '\n';
}
return ' ';
}
if (c < 0x007F)
{
return c;
}
if (Char.IsLetter(c) || Char.IsSymbol(c) || Char.IsNumber(c))
{
return 'a';
}
//
// otherwise pass a dummy 'marker' char so that we can continue 'extracting' tokens.
//
return '`';
}
#endregion
}
/// <summary>
/// Returns current lexeme
/// </summary>
internal string YYText
{
get { return yytext(); }
}
/// <summary>
/// Returns current input position
/// </summary>
internal int IPos
{
get { return _iPos; }
}
/// <summary>
/// Advances input position.
/// </summary>
/// <returns>updated input position</returns>
internal int AdvanceIPos()
{
_iPos += YYText.Length;
return _iPos;
}
/// <summary>
/// Returns true if the given term is an eSQL keyword
/// </summary>
/// <param name="term"></param>
/// <returns></returns>
internal static bool IsReservedKeyword(string term)
{
return CqlLexer.InternalKeywordDictionary.ContainsKey(term);
}
/// <summary>
/// Map lexical symbol to a keyword or an identifier.
/// </summary>
/// <param name="symbol">lexeme</param>
/// <returns>Token</returns>
internal Token MapIdentifierOrKeyword(string symbol)
{
/*
The purpose of this method is to separate symbols into keywords and identifiers.
This separation then leads parser into applying different productions
to the same eSQL expression. For example if 'key' symbol is mapped to a keyword then
the expression 'KEY(x)' will satisfy 'keyExpr ::= KEY parenExpr', else if 'key' is mapped
to an identifier then the expression satisfies
'methodExpr :: = identifier L_PAREN optAllOrDistinct exprList R_PAREN optWithRelationship'
Escaped symbols are always assumed to be identifiers.
For unescaped symbols the naive implementation would check the symbol against
the collection of keywords and map the symbol to a keyword in case of match,
otherwise map to an identifier.
This would result in a strong restriction on unescaped identifiers - they must not
match keywords.
In the long run this strategy has a potential of invalidating user queries with addition
of new keywords to the language. This is an undesired effect and the current implementation
tries to mitigate it.
The general mitigation pattern is to separate the collection of keywords and the collection of
invalid aliases (identifiers), making invalid identifiers a subset of keywords.
This allows certain language constructs to use, as unescaped references, 'common' identifiers
that may be defined in the query or in the model (such as Key in Customer.Key).
Although it adds usability for common cases, it does not solve the general problem:
select c.id as Key from Customers as c -- works
select Key from (select c.id from Customers as c) as Key -- does not work for the first occurrence of Key
-- it is mapped to a keyword which results in
-- invalid syntax
select [Key] from (select c.id from Customers as c) as Key -- works again
The first two major places in syntax where restrictions are relaxed:
1. DOT expressions where a symbol before DOT or after DOT is expected to be an identifier.
2. AS expressions where a symbol after AS is expected to be an identifier.
In both places identifiers are checked against the invalid aliases collection instead of
the keywords collection. If an unescaped identifier appears outside of these two places
(like the Key in the second query above) it must be escaped or it must not match a keyword.
The third special case is related to method expressions (function calls). Normally method identifier
in a method expression must not match a keyword or must be escaped, except the two cases: LEFT and RIGHT.
LEFT and RIGHT are canonical functions and their usage in a method expression is not ambiguous with
LEFT OUTER JOIN and RIGHT OUTER JOIN constructs.
Note that if method identifier is a DOT expression (multipart identifier) such as 'MyNameSpace.Key.Ref(x)'
then every part of the identifier follows the relaxed check described for DOT expressions (see above).
This would help with the LEFT and RIGHT functions: 'Edm.Left(x)' would work without the third special case,
but the most common use of these functions is likely to be without 'Edm.'
The fourth special case is function names in query inline definition section. These names are checked
against both
- the invalid aliases collection and
- the collection of invalid inline function names.
The second collection contains certain keywords that are not in the first collection and that may be followed
by the L_PAREN, which makes them look like a method expression. The reason for this stronger restriction is to
disallow the following kind of ambiguous queries:
Function Key(c Customer) AS (Key(c))
select Key(cust) from Customers as cust
*/
Token token;
// Handle the escaped identifiers coming from HandleEscapedIdentifiers()
if (IsEscapedIdentifier(symbol, out token))
{
Debug.Assert(token != null, "IsEscapedIdentifier must not return null token");
return token;
}
// Handle keywords
if (IsKeyword(symbol, out token))
{
Debug.Assert(token != null, "IsKeyword must not return null token");
return token;
}
// Handle unescaped identifiers
return MapUnescapedIdentifier(symbol);
}
#region MapIdentifierOrKeyword implementation details
private bool IsEscapedIdentifier(string symbol, out Token identifierToken)
{
if (symbol.Length > 1 && symbol[0] == '[')
{
if (symbol[symbol.Length - 1] == ']')
{
string name = symbol.Substring(1, symbol.Length - 2);
AST.Identifier id = new AST.Identifier(name, true, _query, _iPos);
id.ErrCtx.ErrorContextInfo = EntityRes.CtxEscapedIdentifier;
identifierToken = NewToken(CqlParser.ESCAPED_IDENTIFIER, id);
return true;
}
else
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidEscapedIdentifier(symbol), _iPos);
}
}
else
{
identifierToken = null;
return false;
}
}
private bool IsKeyword(string symbol, out Token terminalToken)
{
Char lookAheadChar = GetLookAheadChar();
if (!IsInSymbolAsIdentifierState(lookAheadChar) &&
!IsCanonicalFunctionCall(symbol, lookAheadChar) &&
CqlLexer.InternalKeywordDictionary.ContainsKey(symbol))
{
ResetSymbolAsIdentifierState(true);
short keywordID = CqlLexer.InternalKeywordDictionary[symbol];
if (keywordID == CqlParser.AS)
{
// Treat the symbol following AS keyword as an identifier.
// Note that this state will be turned off by a punctuator, so in case of function definitions:
// FUNCTION identifier(...) AS (generalExpr)
// the generalExpr will not be affected by the state.
_symbolAsAliasIdentifierState = true;
}
else if (keywordID == CqlParser.FUNCTION)
{
// Treat the symbol following FUNCTION keyword as an identifier.
// Inline function names in definition section have stronger restrictions than normal identifiers
_symbolAsInlineFunctionNameState = true;
}
terminalToken = NewToken(keywordID, new TerminalToken(symbol, _iPos));
return true;
}
else
{
terminalToken = null;
return false;
}
}
/// <summary>
/// Returns true when the current symbol looks like a canonical function name in a function call.
/// The method only treats canonical functions with names overlapping eSQL keywords.
/// This check allows calling these canonical functions without escaping their names.
/// Checks lookAheadChar for a left paren to see if the symbol looks like a function call, and checks the symbol against the list of
/// canonical functions with names overlapping keywords.
/// </summary>
private bool IsCanonicalFunctionCall(string symbol, Char lookAheadChar)
{
return lookAheadChar == '(' && CqlLexer.InternalCanonicalFunctionNames.Contains(symbol);
}
private Token MapUnescapedIdentifier(string symbol)
{
// Validate before calling ResetSymbolAsIdentifierState(...) because it will reset _symbolAsInlineFunctionNameState
bool invalidIdentifier = CqlLexer.InternalInvalidAliasNames.Contains(symbol);
if (_symbolAsInlineFunctionNameState)
{
invalidIdentifier |= CqlLexer.InternalInvalidInlineFunctionNames.Contains(symbol);
}
ResetSymbolAsIdentifierState(true);
if (invalidIdentifier)
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidAliasName(symbol), _iPos);
}
else
{
AST.Identifier id = new AST.Identifier(symbol, false, _query, _iPos);
id.ErrCtx.ErrorContextInfo = EntityRes.CtxIdentifier;
return NewToken(CqlParser.IDENTIFIER, id);
}
}
/// <summary>
/// Skip insignificant whitespace to reach the first potentially significant char.
/// </summary>
private Char GetLookAheadChar()
{
yy_mark_end();
Char lookAheadChar = yy_advance();
while (lookAheadChar != YY_EOF && (Char.IsWhiteSpace(lookAheadChar) || IsNewLine(lookAheadChar)))
{
lookAheadChar = yy_advance();
}
yy_to_mark();
return lookAheadChar;
}
private bool IsInSymbolAsIdentifierState(char lookAheadChar)
{
return _symbolAsIdentifierState ||
_symbolAsAliasIdentifierState ||
_symbolAsInlineFunctionNameState ||
lookAheadChar == '.' /*treat symbols followed by DOT as identifiers*/;
}
/// <summary>
/// Resets "symbol as identifier" state.
/// </summary>
/// <param name="significant">see function callers for more info</param>
private void ResetSymbolAsIdentifierState(bool significant)
{
_symbolAsIdentifierState = false;
// Do not reset the following states if going over {NONNEWLINE_SPACE} or {NEWLINE} or {LINE_COMMENT}
if (significant)
{
_symbolAsAliasIdentifierState = false;
_symbolAsInlineFunctionNameState = false;
}
}
#endregion
/// <summary>
/// Maps operator to respective token
/// </summary>
/// <param name="oper">operator lexeme</param>
/// <returns>Token</returns>
internal Token MapOperator(string oper)
{
if (InternalOperatorDictionary.ContainsKey(oper))
{
return NewToken(InternalOperatorDictionary[oper], new TerminalToken(oper, _iPos));
}
else
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidOperatorSymbol, _iPos);
}
}
/// <summary>
/// Maps punctuator to respective token
/// </summary>
/// <param name="punct">punctuator</param>
/// <returns>Token</returns>
internal Token MapPunctuator(string punct)
{
if (InternalPunctuatorDictionary.ContainsKey(punct))
{
ResetSymbolAsIdentifierState(true);
if (punct.Equals(".", StringComparison.OrdinalIgnoreCase))
{
_symbolAsIdentifierState = true;
}
return NewToken(InternalPunctuatorDictionary[punct], new TerminalToken(punct, _iPos));
}
else
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidPunctuatorSymbol, _iPos);
}
}
/// <summary>
/// Maps a double-quoted string to a literal or an identifier
/// </summary>
/// <param name="symbol"></param>
/// <returns>Token</returns>
internal Token MapDoubleQuotedString(string symbol)
{
// If there is a mode that makes the eSQL parser follow the SQL-92 rules regarding quotation mark
// delimiting identifiers then this method may decide to map to identifiers.
// In this case identifiers delimited by double quotation marks can be either eSQL reserved keywords
// or can contain characters not usually allowed by the eSQL syntax rules for identifiers,
// so identifiers mapped here should be treated as escaped identifiers.
return NewLiteralToken(symbol, AST.LiteralKind.String);
}
/// <summary>
/// Creates literal token
/// </summary>
/// <param name="literal">literal</param>
/// <param name="literalKind">literal kind</param>
/// <returns>Literal Token</returns>
internal Token NewLiteralToken(string literal, AST.LiteralKind literalKind)
{
Debug.Assert(!String.IsNullOrEmpty(literal), "literal must not be null or empty");
Debug.Assert(literalKind != AST.LiteralKind.Null, "literalKind must not be LiteralKind.Null");
string literalValue = literal;
switch (literalKind)
{
case AST.LiteralKind.Binary:
literalValue = GetLiteralSingleQuotePayload(literal);
if (!IsValidBinaryValue(literalValue))
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidLiteralFormat("binary", literalValue), _iPos);
}
break;
case AST.LiteralKind.String:
if ('N' == literal[0])
{
literalKind = AST.LiteralKind.UnicodeString;
}
break;
case AST.LiteralKind.DateTime:
literalValue = GetLiteralSingleQuotePayload(literal);
if (!IsValidDateTimeValue(literalValue))
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidLiteralFormat("datetime", literalValue), _iPos);
}
break;
case AST.LiteralKind.Time:
literalValue = GetLiteralSingleQuotePayload(literal);
if (!IsValidTimeValue(literalValue))
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidLiteralFormat("time", literalValue), _iPos);
}
break;
case AST.LiteralKind.DateTimeOffset:
literalValue = GetLiteralSingleQuotePayload(literal);
if (!IsValidDateTimeOffsetValue(literalValue))
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidLiteralFormat("datetimeoffset", literalValue), _iPos);
}
break;
case AST.LiteralKind.Guid:
literalValue = GetLiteralSingleQuotePayload(literal);
if (!IsValidGuidValue(literalValue))
{
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidLiteralFormat("guid", literalValue), _iPos);
}
break;
}
return NewToken(CqlParser.LITERAL, new AST.Literal(literalValue, literalKind, _query, _iPos));
}
/// <summary>
/// Creates parameter token
/// </summary>
/// <param name="param">param</param>
/// <returns>Parameter Token</returns>
internal Token NewParameterToken(string param)
{
return NewToken(CqlParser.PARAMETER, new AST.QueryParameter(param, _query, _iPos));
}
/// <summary>
/// Handles escaped identifiers.
/// ch will always be translated, i.e. normalized.
/// </summary>
internal Token HandleEscapedIdentifiers()
{
char ch = YYText[0];
while (ch != YY_EOF)
{
if (ch == ']')
{
yy_mark_end();
ch = yy_advance();
if (ch != ']')
{
yy_to_mark();
ResetSymbolAsIdentifierState(true);
return MapIdentifierOrKeyword(YYText.Replace("]]", "]"));
}
}
ch = yy_advance();
}
Debug.Assert(ch == YY_EOF, "ch == YY_EOF");
throw EntityUtil.EntitySqlError(_query, System.Data.Entity.Strings.InvalidEscapedIdentifierUnbalanced(YYText), _iPos);
}
internal static bool IsLetterOrDigitOrUnderscore(string symbol, out bool isIdentifierASCII)
{
isIdentifierASCII = true;
for (int i = 0; i < symbol.Length; i++)
{
isIdentifierASCII = isIdentifierASCII && symbol[i] < 0x80;
if (!isIdentifierASCII && !IsLetter(symbol[i]) && !IsDigit(symbol[i]) && (symbol[i] != '_'))
{
return false;
}
}
return true;
}
private static bool IsLetter(char c)
{
return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z');
}
private static bool IsDigit(char c)
{
return (c >= '0' && c <= '9');
}
private static bool isHexDigit(char c)
{
return (IsDigit(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'));
}
/// <summary>
/// Returns true if given char is a new line character defined by
/// UNICODE 5.0, section 5.8 Newline Guidelines.
/// These are 'mandatory' line breaks. NOTE that CRLF is treated as a
/// composite 'character' and was intentionally omitted from the character set below.
/// </summary>
/// <param name="c"></param>
/// <returns></returns>
internal static bool IsNewLine(Char c)
{
for (int i = 0; i < _newLineCharacters.Length; i++)
{
if (c == _newLineCharacters[i])
{
return true;
}
}
return false;
}
/// <summary>
/// Extracts the single-quoted literal 'payload'. The literal MUST BE normalized.
/// </summary>
/// <param name="literal"></param>
/// <returns></returns>
private static string GetLiteralSingleQuotePayload(string literal)
{
Debug.Assert(-1 != literal.IndexOf('\''), "quoted literal value must have single quotes");
Debug.Assert(-1 != literal.LastIndexOf('\''), "quoted literal value must have single quotes");
Debug.Assert(literal.IndexOf('\'') != literal.LastIndexOf('\''), "quoted literal value must have 2 single quotes");
Debug.Assert(literal.Split(new char[] { '\'' }).Length == 3, "quoted literal value must have 2 single quotes");
// NOTE: this is not a precondition validation. This validation is for security purposes based on the
// paranoid assumption that all input is evil. We should not see this exception under normal
// conditions.
if ((literal.Split(new char[] { '\'' }).Length != 3) || (-1 == literal.IndexOf('\'')) || (-1 == literal.LastIndexOf('\'')))
{
throw EntityUtil.EntitySqlError(System.Data.Entity.Strings.MalformedSingleQuotePayload);
}
int startIndex = literal.IndexOf('\'');
string literalPayload = literal.Substring(startIndex + 1, literal.Length - (startIndex + 2));
Debug.Assert(literalPayload.IndexOf('\'') == -1, "quoted literal payload must not have single quotes");
Debug.Assert(literalPayload.LastIndexOf('\'') == -1, "quoted literal payload must not have single quotes");
// NOTE: this is not a precondition validation. This validation is for security purposes based on the
// paranoid assumption that all input is evil. We should not see this exception under normal
// conditions.
if (literalPayload.Split(new char[] { '\'' }).Length != 1)
{
throw EntityUtil.EntitySqlError(System.Data.Entity.Strings.MalformedSingleQuotePayload);
}
return literalPayload;
}
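// e.g., for a scanned literal such as DATETIME'2010-02-03 12:30' the payload returned is "2010-02-03 12:30";
// the prefix before the first quote and the two quotes themselves are stripped.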
/// <summary>
/// Returns true if the guid literal value format is valid
/// </summary>
/// <param name="guidValue"></param>
/// <returns></returns>
private static bool IsValidGuidValue(string guidValue)
{
int startIndex = 0;
int endIndex = guidValue.Length - 1;
if ((endIndex - startIndex) + 1 != 36)
{
return false;
}
int i = 0;
bool bValid = true;
while (bValid && i < 36)
{
if ((i == 8) || (i == 13) || (i == 18) || (i == 23))
{
bValid = (guidValue[startIndex + i] == '-');
}
else
{
bValid = isHexDigit(guidValue[startIndex + i]);
}
i++;
}
return bValid;
}
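// e.g., "0C733A8C-2A1C-11CE-ADE5-00AA0044773D" is accepted (36 characters, dashes at positions 8, 13, 18 and 23);
// a value without the dashes or with fewer than 36 characters is rejected.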
/// <summary>
/// Returns true if the binary literal value format is valid
/// </summary>
/// <param name="binaryValue"></param>
/// <returns></returns>
private static bool IsValidBinaryValue(string binaryValue)
{
Debug.Assert(null != binaryValue, "binaryValue must not be null");
if (String.IsNullOrEmpty(binaryValue))
{
return true;
}
int i = 0;
bool bValid = binaryValue.Length > 0;
while (bValid && i < binaryValue.Length)
{
bValid = isHexDigit(binaryValue[i++]);
}
return bValid;
}
/// <summary>
/// Returns true if the datetime literal value format is valid.
/// The allowed format is: dddd-d?d-d?d{space}+d?d:d?d(:d?d(.d{1,7})?)?
/// where d is any decimal digit.
/// </summary>
/// <param name="datetimeValue"></param>
/// <returns></returns>
private static bool IsValidDateTimeValue(string datetimeValue)
{
if (null == _reDateTimeValue)
{
_reDateTimeValue = new Regex(_datetimeValueRegularExpression, RegexOptions.Singleline | RegexOptions.CultureInvariant);
}
return _reDateTimeValue.IsMatch(datetimeValue);
}
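// e.g., "2010-02-03 12:30", "2010-02-03 12:30:45" and "2010-02-03 12:30:45.1234567" are all accepted.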
/// <summary>
/// Returns true if the time literal value format is valid.
/// The allowed format is: d?d:d?d(:d?d(.d{1,7})?)?
/// where d is any decimal digit.
/// </summary>
/// <param name="timeValue"></param>
/// <returns></returns>
private static bool IsValidTimeValue(string timeValue)
{
if (null == _reTimeValue)
{
_reTimeValue = new Regex(_timeValueRegularExpression, RegexOptions.Singleline | RegexOptions.CultureInvariant);
}
return _reTimeValue.IsMatch(timeValue);
}
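// e.g., "7:05", "23:59:59" and "23:59:59.9999999" are all accepted.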
/// <summary>
/// Returns true if the datetimeoffset literal value format is valid.
/// The allowed format is: dddd-d?d-d?d{space}+d?d:d?d(:d?d(.d{1,7})?)?{space}*[+-]d?d:d?d
/// where d is any decimal digit.
/// </summary>
/// <param name="datetimeOffsetValue"></param>
/// <returns></returns>
private static bool IsValidDateTimeOffsetValue(string datetimeOffsetValue)
{
if (null == _reDateTimeOffsetValue)
{
_reDateTimeOffsetValue = new Regex(_datetimeOffsetValueRegularExpression, RegexOptions.Singleline | RegexOptions.CultureInvariant);
}
return _reDateTimeOffsetValue.IsMatch(datetimeOffsetValue);
}
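// e.g., "2010-02-03 12:30:45 +05:30" and "2010-02-03 12:30-08:00" are accepted; the offset part is mandatory.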
private static Dictionary<string, short> InternalKeywordDictionary
{
get
{
if (null == _keywords)
{
#region Initializes eSQL keywords
Dictionary<string, short> keywords = new Dictionary<string, short>(60, _stringComparer);
keywords.Add("all", CqlParser.ALL);
keywords.Add("and", CqlParser.AND);
keywords.Add("anyelement", CqlParser.ANYELEMENT);
keywords.Add("apply", CqlParser.APPLY);
keywords.Add("as", CqlParser.AS);
keywords.Add("asc", CqlParser.ASC);
keywords.Add("between", CqlParser.BETWEEN);
keywords.Add("by", CqlParser.BY);
keywords.Add("case", CqlParser.CASE);
keywords.Add("cast", CqlParser.CAST);
keywords.Add("collate", CqlParser.COLLATE);
keywords.Add("collection", CqlParser.COLLECTION);
keywords.Add("createref", CqlParser.CREATEREF);
keywords.Add("cross", CqlParser.CROSS);
keywords.Add("deref", CqlParser.DEREF);
keywords.Add("desc", CqlParser.DESC);
keywords.Add("distinct", CqlParser.DISTINCT);
keywords.Add("element", CqlParser.ELEMENT);
keywords.Add("else", CqlParser.ELSE);
keywords.Add("end", CqlParser.END);
keywords.Add("escape", CqlParser.ESCAPE);
keywords.Add("except", CqlParser.EXCEPT);
keywords.Add("exists", CqlParser.EXISTS);
keywords.Add("false", CqlParser.LITERAL);
keywords.Add("flatten", CqlParser.FLATTEN);
keywords.Add("from", CqlParser.FROM);
keywords.Add("full", CqlParser.FULL);
keywords.Add("function", CqlParser.FUNCTION);
keywords.Add("group", CqlParser.GROUP);
keywords.Add("grouppartition", CqlParser.GROUPPARTITION);
keywords.Add("having", CqlParser.HAVING);
keywords.Add("in", CqlParser.IN);
keywords.Add("inner", CqlParser.INNER);
keywords.Add("intersect", CqlParser.INTERSECT);
keywords.Add("is", CqlParser.IS);
keywords.Add("join", CqlParser.JOIN);
keywords.Add("key", CqlParser.KEY);
keywords.Add("left", CqlParser.LEFT);
keywords.Add("like", CqlParser.LIKE);
keywords.Add("limit", CqlParser.LIMIT);
keywords.Add("multiset", CqlParser.MULTISET);
keywords.Add("navigate", CqlParser.NAVIGATE);
keywords.Add("not", CqlParser.NOT);
keywords.Add("null", CqlParser.NULL);
keywords.Add("of", CqlParser.OF);
keywords.Add("oftype", CqlParser.OFTYPE);
keywords.Add("on", CqlParser.ON);
keywords.Add("only", CqlParser.ONLY);
keywords.Add("or", CqlParser.OR);
keywords.Add("order", CqlParser.ORDER);
keywords.Add("outer", CqlParser.OUTER);
keywords.Add("overlaps", CqlParser.OVERLAPS);
keywords.Add("ref", CqlParser.REF);
keywords.Add("relationship", CqlParser.RELATIONSHIP);
keywords.Add("right", CqlParser.RIGHT);
keywords.Add("row", CqlParser.ROW);
keywords.Add("select", CqlParser.SELECT);
keywords.Add("set", CqlParser.SET);
keywords.Add("skip", CqlParser.SKIP);
keywords.Add("then", CqlParser.THEN);
keywords.Add("top", CqlParser.TOP);
keywords.Add("treat", CqlParser.TREAT);
keywords.Add("true", CqlParser.LITERAL);
keywords.Add("union", CqlParser.UNION);
keywords.Add("using", CqlParser.USING);
keywords.Add("value", CqlParser.VALUE);
keywords.Add("when", CqlParser.WHEN);
keywords.Add("where", CqlParser.WHERE);
keywords.Add("with", CqlParser.WITH);
_keywords = keywords;
#endregion
}
return _keywords;
}
}
private static HashSet<string> InternalInvalidAliasNames
{
get
{
if (null == _invalidAliasNames)
{
#region Initializes invalid aliases
HashSet<string> invalidAliasName = new HashSet<string>(_stringComparer);
invalidAliasName.Add("all");
invalidAliasName.Add("and");
invalidAliasName.Add("apply");
invalidAliasName.Add("as");
invalidAliasName.Add("asc");
invalidAliasName.Add("between");
invalidAliasName.Add("by");
invalidAliasName.Add("case");
invalidAliasName.Add("cast");
invalidAliasName.Add("collate");
invalidAliasName.Add("createref");
invalidAliasName.Add("deref");
invalidAliasName.Add("desc");
invalidAliasName.Add("distinct");
invalidAliasName.Add("element");
invalidAliasName.Add("else");
invalidAliasName.Add("end");
invalidAliasName.Add("escape");
invalidAliasName.Add("except");
invalidAliasName.Add("exists");
invalidAliasName.Add("flatten");
invalidAliasName.Add("from");
invalidAliasName.Add("group");
invalidAliasName.Add("having");
invalidAliasName.Add("in");
invalidAliasName.Add("inner");
invalidAliasName.Add("intersect");
invalidAliasName.Add("is");
invalidAliasName.Add("join");
invalidAliasName.Add("like");
invalidAliasName.Add("multiset");
invalidAliasName.Add("navigate");
invalidAliasName.Add("not");
invalidAliasName.Add("null");
invalidAliasName.Add("of");
invalidAliasName.Add("oftype");
invalidAliasName.Add("on");
invalidAliasName.Add("only");
invalidAliasName.Add("or");
invalidAliasName.Add("overlaps");
invalidAliasName.Add("ref");
invalidAliasName.Add("relationship");
invalidAliasName.Add("select");
invalidAliasName.Add("set");
invalidAliasName.Add("then");
invalidAliasName.Add("treat");
invalidAliasName.Add("union");
invalidAliasName.Add("using");
invalidAliasName.Add("when");
invalidAliasName.Add("where");
invalidAliasName.Add("with");
_invalidAliasNames = invalidAliasName;
#endregion
}
return _invalidAliasNames;
}
}
private static HashSet<string> InternalInvalidInlineFunctionNames
{
get
{
if (null == _invalidInlineFunctionNames)
{
#region Initializes invalid inline function names
HashSet<string> invalidInlineFunctionNames = new HashSet<string>(_stringComparer);
invalidInlineFunctionNames.Add("anyelement");
invalidInlineFunctionNames.Add("element");
invalidInlineFunctionNames.Add("function");
invalidInlineFunctionNames.Add("grouppartition");
invalidInlineFunctionNames.Add("key");
invalidInlineFunctionNames.Add("ref");
invalidInlineFunctionNames.Add("row");
invalidInlineFunctionNames.Add("skip");
invalidInlineFunctionNames.Add("top");
invalidInlineFunctionNames.Add("value");
_invalidInlineFunctionNames = invalidInlineFunctionNames;
#endregion
}
return _invalidInlineFunctionNames;
}
}
private static Dictionary<string, short> InternalOperatorDictionary
{
get
{
if (null == _operators)
{
#region Initializes operator dictionary
Dictionary<string, short> operators = new Dictionary<string, short>(16, _stringComparer);
operators.Add("==", CqlParser.OP_EQ);
operators.Add("!=", CqlParser.OP_NEQ);
operators.Add("<>", CqlParser.OP_NEQ);
operators.Add("<", CqlParser.OP_LT);
operators.Add("<=", CqlParser.OP_LE);
operators.Add(">", CqlParser.OP_GT);
operators.Add(">=", CqlParser.OP_GE);
operators.Add("&&", CqlParser.AND);
operators.Add("||", CqlParser.OR);
operators.Add("!", CqlParser.NOT);
operators.Add("+", CqlParser.PLUS);
operators.Add("-", CqlParser.MINUS);
operators.Add("*", CqlParser.STAR);
operators.Add("/", CqlParser.FSLASH);
operators.Add("%", CqlParser.PERCENT);
_operators = operators;
#endregion
}
return _operators;
}
}
private static Dictionary<string, short> InternalPunctuatorDictionary
{
get
{
if (null == _punctuators)
{
#region Initializes punctuators dictionary
Dictionary<string, short> punctuators = new Dictionary<string, short>(16, _stringComparer);
punctuators.Add(",", CqlParser.COMMA);
punctuators.Add(":", CqlParser.COLON);
punctuators.Add(".", CqlParser.DOT);
punctuators.Add("?", CqlParser.QMARK);
punctuators.Add("(", CqlParser.L_PAREN);
punctuators.Add(")", CqlParser.R_PAREN);
punctuators.Add("[", CqlParser.L_BRACE);
punctuators.Add("]", CqlParser.R_BRACE);
punctuators.Add("{", CqlParser.L_CURLY);
punctuators.Add("}", CqlParser.R_CURLY);
punctuators.Add(";", CqlParser.SCOLON);
punctuators.Add("=", CqlParser.EQUAL);
_punctuators = punctuators;
#endregion
}
return _punctuators;
}
}
private static HashSet<string> InternalCanonicalFunctionNames
{
get
{
if (null == _canonicalFunctionNames)
{
HashSet<string> canonicalFunctionNames = new HashSet<string>(_stringComparer);
canonicalFunctionNames.Add("left");
canonicalFunctionNames.Add("right");
_canonicalFunctionNames = canonicalFunctionNames;
}
return _canonicalFunctionNames;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using Mono.Cecil.Cil;
using System.Linq;
using Mono.Cecil.CodeDom.Parser.Branching;
using Mono.Cecil.CodeDom.Parser.Tcf;
using System.Runtime.InteropServices;
using System.Security.AccessControl;
using System.Runtime.CompilerServices;
namespace Mono.Cecil.CodeDom.Parser
{
public class CodeDomBranchesParser : CodeDomParserBase
{
private readonly Dictionary<Instruction, Instruction> _incoming = new Dictionary<Instruction, Instruction>();
private readonly List<Instruction> _processed = new List<Instruction>();
#region Step 0: Method root
/// <summary>
/// Lightweight helper that creates the base nodes for the method root.
/// </summary>
public CodeDomGroupExpression ParseMethodRoot(Context context)
{
var group = new CodeDomGroupExpression(context);
var node = new CodeDomUnparsedExpression(context , context.Method.Body.Instructions.First() , context.Method.Body.Instructions.Last());
group.Add(node);
return group;
}
#endregion
#region Step 1: Parsing all try-catch-finally blocks
/// <summary>
/// Parses all try-catch-finally (TCF) blocks. Should be called after ParseBranches.
/// </summary>
/// <param name="context"></param>
public void ParseTcfBlocks(Context context)
{
var cecilHandlers = context.Method.Body.ExceptionHandlers;
// Groups of handlers - when one Try has many Catches and/or a Finally block
var groups = cecilHandlers.GroupBy(item => new {item.TryStart, item.TryEnd}).OrderByDescending(gr => gr.Key.TryEnd.Offset);
var root = this.MethodBodyRoot;
foreach (var grouping in groups)
{
var tryinfo = grouping.First();
// split if needed, and return the Expression that covers the TCF block
var tcfblock = SplitGroupAndReplace(context, tryinfo.TryStart, grouping.Last().HandlerEnd.Previous, groupit: true);
var tryblock = SplitGroupAndReplace(context, tryinfo.TryStart, tryinfo.TryEnd.Previous, groupit: true, exp_root: tcfblock);
CodeDomExpression faultBlock = null;
CodeDomExpression finallyBlock = null;
CodeDomExpression filterBlock = null;
List<CatchBlockExpression> catches = new List<CatchBlockExpression>();
foreach (var handler in grouping)
{
var group = SplitGroupAndReplace(context, handler.HandlerStart, handler.HandlerEnd.Previous,
exp_root: tcfblock);
switch (handler.HandlerType)
{
case ExceptionHandlerType.Catch:
var catchExpression = new CatchBlockExpression(context, handler.CatchType, group);
catches.Add(catchExpression);
// looking up first instructions list
var expression = group.FindFirstPostorder<CodeDomUnparsedExpression>();
// looking up instruction, which reads exception from stack
var instruction = ResolveStackBlockEnd(expression.Instructions.First, -1);
if (instruction != null)
{
catchExpression.VariableReference = (instruction.Operand as VariableReference);
context.UserLocals[catchExpression] = instruction;
}
break;
case ExceptionHandlerType.Filter:
filterBlock = group;
break;
case ExceptionHandlerType.Finally:
finallyBlock = group;
break;
case ExceptionHandlerType.Fault:
faultBlock = group;
break;
}
}
// create empty expressions for any handler blocks that were not present
if (filterBlock == null) filterBlock = new CodeDomExpression(context);
if (finallyBlock == null) finallyBlock = new CodeDomExpression(context);
if (faultBlock == null) faultBlock = new CodeDomExpression(context);
// replace with our result
var tcf = new TryExpression(context, grouping.ToList(), tryblock, faultBlock, filterBlock, finallyBlock, catches.ToArray());
tcfblock.ReplaceWith(tcf);
}
}
/// <summary>
/// Should be used to split an abstract instructions range.
/// Returns the Expression that covers the given instructions range.
/// </summary>
private CodeDomExpression SplitGroupAndReplace(Context context, Instruction start, Instruction end, bool groupit = false, CodeDomExpression exp_root = null)
{
var gr_left = context.GetExpression(start);
var gr_right = context.GetExpression(end);
var root = exp_root ?? GetRootForNodes(gr_left, gr_right);
if (root == null)
{
throw new ArgumentException("Given instructions have different roots");
}
gr_left = LookupNearestSubnode(root, gr_left);
gr_right = LookupNearestSubnode(root, gr_right);
// if both sides are in one node
if (gr_left == gr_right)
{
// if this node isn't parsed
if (gr_left is CodeDomUnparsedExpression)
{
// replace the node with a group of (up to) three subnodes, where
// one subnode contains our instructions range.
var node = gr_left as CodeDomUnparsedExpression;
CodeDomExpression coverto;
var last = node.Instructions.Last;
var group = node.ParentNode as CodeDomGroupExpression;
var index = group.Nodes.IndexOf(node);
if (start != node.Instructions.First)
{
node.Instructions.Last = start.Previous;
index++;
}
else
group.RemoveAt(index);
var item = new CodeDomUnparsedExpression(context, start, end);
if (groupit)
group.Insert(index, coverto = new CodeDomGroupExpression(context) { item });
else
group.Insert(index, coverto = item);
index++;
if (end != node.Instructions.Last)
group.Insert(index, new CodeDomUnparsedExpression(context, end.Next, last));
return coverto;
}
// node is parsed (may be if-else): nothing to split
return gr_left;
}
else
{
if (!(gr_left.ParentNode is CodeDomGroupExpression))
throw new ArgumentException("nodes root should be group");
var rootgroup = gr_left.ParentNode as CodeDomGroupExpression;
var group = new CodeDomGroupExpression(context);
var i_left = rootgroup.IndexOf(gr_left);
var i_right = rootgroup.IndexOf(gr_right);
// parse left side if unparsed or include it whole
if (gr_left is CodeDomUnparsedExpression && (gr_left as CodeDomUnparsedExpression).Instructions.First != start)
{
var node = gr_left as CodeDomUnparsedExpression;
node.Instructions.Last = start.Previous;
group.Add(new CodeDomUnparsedExpression(context, start, node.Instructions.Last));
node.ResetInstructionsInMap();
}
else
{
rootgroup.Remove(gr_left);
group.Add(gr_left);
}
// move all expressions between the two sides into the group
while (i_right != i_left + 1)
{
group.Add(rootgroup[i_left + 1]);
rootgroup.RemoveAt(i_left + 1);
i_right--;
}
if (gr_right is CodeDomUnparsedExpression && (gr_right as CodeDomUnparsedExpression).Instructions.Last != end)
{
var node = gr_right as CodeDomUnparsedExpression;
group.Add(new CodeDomUnparsedExpression(context, node.Instructions.First, end));
node.Instructions.First = end.Next;
node.ResetInstructionsInMap();
}
else
{
rootgroup.Remove(gr_right);
group.Add(gr_right);
}
// insert the grouped expression back into the root
rootgroup.Insert(i_left, group);
return group;
}
}
/// <summary>
/// Walks up the parent chain until it reaches the direct child of <paramref name="root"/> that contains <paramref name="node"/>.
/// </summary>
private CodeDomExpression LookupNearestSubnode(CodeDomExpression root, CodeDomExpression node)
{
while (node.ParentNode != root)
node = node.ParentNode;
return node;
}
/// <summary>
/// Finds the nearest common parent node (closest to the leaves) of the two expressions.
/// </summary>
private CodeDomExpression GetRootForNodes(CodeDomExpression left, CodeDomExpression right)
{
var parents = new List<CodeDomExpression>();
bool first = true;
while (right != null)
{
if (!first) parents.Add(right);
first = false;
right = right.ParentNode;
}
first = true;
while (left != null)
{
if (!first && parents.Contains(left))
{
return left;
}
first = false;
left = left.ParentNode;
}
return null;
}
#endregion
#region Step 2a: For if-else-while: calculate which instructions are the targets of jump instructions.
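/// <summary>
/// Calculates the incoming-jump map for the method body and then parses every unparsed block for branches.
/// </summary>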
public void ParseBranches(Context context, List<CodeDomUnparsedExpression> items)
{
CalculateIncomingJumps(context.Method.Body);
foreach (var item in items)
{
ParseInstructionsBlock(context, item);
}
}
#endregion
#region Step 2b: For if-else-while: walking from the first to the last instruction, find all branches
private void ParseInstructionsBlock(Context context, CodeDomUnparsedExpression expression)
{
var from = expression.Instructions.First;
var to = expression.Instructions.Last;
var current = from;
while (current != to)
{
Instruction found = null;
Instruction target = current.Operand as Instruction;
var isWhileViaDoWhile = (HasOutgoingJump(current) && (current.OpCode.Code == Code.Br) &&
FindFlowControl(target, to, from, target.Previous, out found, true, FlowControl.Cond_Branch));
if (current.OpCode.Code == Code.Switch)
{
ParseSwitch(context, from, to, expression, current);
}
if ((HasIncomingJump(current) && !_processed.Contains(_incoming[current])) || (found != null) ) // Has incoming branch. This means we have "while"
{
ParseLoop(context, from, to, expression, current, isWhileViaDoWhile);
return;
}
if (HasOutgoingJump(current) && !_processed.Contains(current)) // Has outgoing branch. This means we have "if-else" statement
{
ParseIfElse(context, from, to, expression, current);
return;
}
current = current.Next;
}
}
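/// <summary>
/// Parses a switch instruction into a CodeDomSwitchExpression with its condition, case blocks,
/// default block and any code preceding or following the switch.
/// </summary>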
private void ParseSwitch(Context context, Instruction from, Instruction to, CodeDomExpression expression,
Instruction current)
{
var cases = new List<CodeDomCaseExpression>();
var instructions = current.Operand as Instruction[];
if (instructions == null || instructions.Length < 2)
{
throw new ArgumentException("strange array: < 2");
}
// find first non-equal branch
var index = 0;
while (instructions[index].Offset == instructions[0].Offset)
index++;
// lookup jump from case to the end of switch
Instruction found = null;
if (FindFlowControl(current, instructions[index].Previous, instructions[index], to, out found, false))
{
CodeDomUnparsedExpression postfix = null;
var condStart = ResolveStackBlockStart(current);
// in case we're at end of if-else true block and switch end points out of if-else
var switchEnd = found.Offset > to.Offset ? to : found.Operand as Instruction;
// default branch
Instruction defaultStart = null;
Instruction defaultEnd;
CodeDomExpression defaultNode = null;
if (FindFlowControl(current, instructions[0].Previous, instructions[0], to, out defaultEnd, true))
{
_processed.Add(defaultEnd);
if (defaultEnd.Previous != current)
{
defaultStart = current.Next;
defaultNode = new CodeDomUnparsedExpression(context, defaultStart, defaultEnd);
}
else
{
if (defaultEnd.Operand != switchEnd)
{
defaultStart = defaultEnd.Operand as Instruction;
defaultEnd = switchEnd.Previous;
defaultNode = new CodeDomUnparsedExpression(context, defaultStart, defaultEnd);
}
else
{
defaultNode = new CodeDomExpression(context);
}
}
}
// lookup cases
var groups = instructions.Where(ins => ins != defaultStart).Select((ins, ind) => new {Instruction = ins, Index = ind})
.GroupBy(gr => gr.Instruction.Offset);
foreach (var @group in groups)
{
var blockStart = @group.First().Instruction;
if (blockStart != switchEnd)
{
// lookup case block end
var blockEnd = blockStart;
while ((blockEnd.OpCode.Code != Code.Br && (blockEnd.Operand != switchEnd)) &&
blockEnd.Next != switchEnd)
{
blockEnd = blockEnd.Next;
}
_processed.Add(blockEnd);
var block = new CodeDomCaseExpression(context, group.Select(g => g.Index).ToArray(),
new CodeDomUnparsedExpression(context, blockStart, blockEnd));
cases.Add(block);
}
}
var switchGroup = new CodeDomGroupExpression(context);
if (from != condStart)
{
switchGroup.Add(new CodeDomUnparsedExpression(context, from, condStart.Previous));
}
var conditionNode = new CodeDomUnparsedExpression(context, condStart, current);
switchGroup.Add(new CodeDomSwitchExpression(context, current, conditionNode, defaultNode, cases.ToArray()));
if (to != switchEnd)
{
switchGroup.Add(postfix = new CodeDomUnparsedExpression(context, switchEnd, to));
}
expression.ReplaceWith(switchGroup);
foreach (var @case in cases)
{
ParseInstructionsBlock(context, @case.Body as CodeDomUnparsedExpression);
}
if (postfix != null) ParseInstructionsBlock(context, postfix);
if (defaultNode is CodeDomUnparsedExpression) ParseInstructionsBlock(context, defaultNode as CodeDomUnparsedExpression);
return;
}
// else
throw new ArgumentException("Bad switch block");
}
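/// <summary>
/// Parses a loop at <paramref name="current"/> into a CodeDomLoopExpression, distinguishing a while
/// compiled via the do-while template, a plain while, and a do-while.
/// </summary>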
private void ParseLoop(Context context, Instruction from, Instruction to, CodeDomUnparsedExpression expression, Instruction current, bool doWhileJump)
{
var group = new CodeDomGroupExpression(context);
CodeDomUnparsedExpression prefix = null, conditionResult, body, postfix = null;
CodeDomConditionExpression condition;
LoopType looptype;
// we have while(){ ... } which is made via do{ ... } while(); template
if (doWhileJump)
{
// @current points to unconditional jump to condition block.
// condition block is placed after body, like in do .. while.
if(from.Offset < current.Offset)
prefix = new CodeDomUnparsedExpression(context, from, current.Previous);
body = new CodeDomUnparsedExpression(context , current.Next , (current.Operand as Instruction).Previous);
// condition
var conditionEnd = _incoming[body.Instructions.First];
var conditionStart = ResolveStackBlockStart(conditionEnd);
conditionResult = new CodeDomUnparsedExpression(context , conditionStart , conditionEnd);
var conditionEvalStart = body.Instructions.Last.Next;
var conditionEvalEnd = conditionStart.Previous;
if (conditionEvalEnd.Offset >= conditionEvalStart.Offset)
{
var conditionEvaluation = new CodeDomUnparsedExpression(context, body.Instructions.Last.Next, conditionStart.Previous);
condition = new CodeDomConditionExpression(context, conditionResult, conditionEvaluation);
}
else
{
condition = new CodeDomConditionExpression(context, conditionResult);
}
if(conditionEnd.Offset < to.Offset)
postfix = new CodeDomUnparsedExpression(context, conditionEnd.Next, to);
_processed.Add(conditionEnd);
_processed.Add(current);
looptype = LoopType.While;
}
// if loop ends with unconditional branch, we have "while(<condition>) { <body> }"
else
{
var incoming = _incoming[current];
_processed.Add(incoming);
if (incoming.OpCode.FlowControl == FlowControl.Branch)
{
Instruction conditionEnd;
if (!FindFlowControl(current, incoming, incoming.Next, to, out conditionEnd, true, FlowControl.Cond_Branch))
{
throw new ArgumentException("looks like loop with no exit");
}
var conditionStart = ResolveStackBlockStart(conditionEnd);
if(from.Offset < conditionStart.Offset)
prefix = new CodeDomUnparsedExpression(context, from, conditionStart.Previous);
conditionResult = new CodeDomUnparsedExpression(context, conditionStart, conditionEnd);
var conditionEvalStart = current;
var conditionEvalEnd = conditionStart.Previous;
if (conditionEvalEnd.Offset > conditionEvalStart.Offset)
{
condition = new CodeDomConditionExpression(context, conditionResult, new CodeDomUnparsedExpression(context, conditionEvalStart, conditionEvalEnd));
}
else
{
condition = new CodeDomConditionExpression(context, conditionResult);
}
body = new CodeDomUnparsedExpression(context, conditionEnd.Next, incoming);
if(incoming.Offset < to.Offset)
postfix = new CodeDomUnparsedExpression(context, incoming.Next, to);
looptype = LoopType.While;
}
// otherwise we have " do { <body> } while(<condition>); "
else
{
var conditionStart = ResolveStackBlockStart(incoming);
if(from.Offset < current.Offset)
prefix = new CodeDomUnparsedExpression(context, from, current.Previous);
conditionResult = new CodeDomUnparsedExpression(context, conditionStart, incoming);
condition = new CodeDomConditionExpression(context, conditionResult);
body = new CodeDomUnparsedExpression(context, current, conditionStart.Previous);
if(incoming.Offset < to.Offset)
postfix = new CodeDomUnparsedExpression(context, incoming.Next, to);
looptype = LoopType.DoWhile;
}
}
var condinstruction = conditionResult.Instructions.Last;
if(prefix != null) group.Add(prefix);
group.Add(new CodeDomLoopExpression(context, doWhileJump ? current : null, condinstruction, looptype, condition, body));
if(postfix != null) group.Add(postfix);
expression.ReplaceWith(group);
// run for each subblock
ParseInstructionsBlock(context, body);
if(postfix != null) {
ParseInstructionsBlock(context, postfix);
}
return;
}
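/// <summary>
/// Parses a conditional branch at <paramref name="current"/> into a CodeDomIfElseExpression with
/// its true and false branches and the code following the if-else block.
/// </summary>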
private void ParseIfElse(Context context, Instruction from, Instruction to, CodeDomUnparsedExpression expression, Instruction current)
{
_processed.Add(current);
var group = new CodeDomGroupExpression(context);
var target = (current.Operand as Instruction);
var condStarts = ResolveStackBlockStart(current);
// Prefix before condition
if (condStarts != from)
{
group.Add(new CodeDomUnparsedExpression(context, from, condStarts.Previous));
}
// condition block = condition except branching instruction
var conditionNode = new CodeDomUnparsedExpression(context, condStarts, current);
var falseStart = current.Next;
var falseEnd = target.Previous;
var trueStart = target;
var trueEnd = to;
Instruction uncondJump;
CodeDomExpression trueNode, falseNode, postfixNode;
if (FindFlowControl(falseStart, falseEnd, trueStart, trueEnd, out uncondJump, isforward: false))
{
trueEnd = (uncondJump.Operand as Instruction).Previous;
falseNode = new CodeDomUnparsedExpression(context, falseStart, falseEnd); // "false" branch
trueNode = new CodeDomUnparsedExpression(context, trueStart, trueEnd); // "true" branch
postfixNode = new CodeDomUnparsedExpression(context, trueEnd.Next, to); // out of "if-else" block
_processed.Add(uncondJump);
}
else
{
falseNode = new CodeDomUnparsedExpression(context, falseStart, falseEnd); // "false" branch
trueNode = new CodeDomExpression(context); // "true" branch
postfixNode = new CodeDomUnparsedExpression(context, trueStart, trueEnd); // out of "if-else" block
}
var ifelse =
new CodeDomIfElseExpression(context, current,
new CodeDomConditionExpression(context, conditionNode),
new CodeDomGroupExpression(context) { trueNode },
new CodeDomGroupExpression(context) { falseNode }
);
group.Add(ifelse);
group.Add(new CodeDomGroupExpression(context){ postfixNode });
expression.ReplaceWith(group);
// run for each subblock
var unparsedTrueNode = trueNode as CodeDomUnparsedExpression;
if(unparsedTrueNode != null) ParseInstructionsBlock(context, unparsedTrueNode);
var unparsedFalseNode = falseNode as CodeDomUnparsedExpression;
if(unparsedFalseNode != null) ParseInstructionsBlock(context, unparsedFalseNode);
ParseInstructionsBlock(context, postfixNode as CodeDomUnparsedExpression);
return;
}
#endregion
#region tools
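/// <summary>
/// Builds the map from branch target instructions to the branch instructions that jump to them.
/// </summary>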
private void CalculateIncomingJumps(MethodBody body)
{
_incoming.Clear();
foreach (var instruction in body.Instructions.Where(HasOutgoingJump))
{
_incoming[(instruction.Operand as Instruction)] = instruction;
}
}
private bool HasIncomingJump(Instruction position)
{
return _incoming.ContainsKey(position);
}
private bool HasOutgoingJump(Instruction position)
{
switch (position.OpCode.Code)
{
case Code.Br: // Unconditional
case Code.Brfalse: // if false, 0, null
case Code.Brtrue: // if true, <>0, <>null
case Code.Beq: // if 2 values equal
case Code.Bge: // if first >= second
case Code.Bgt: // if first > second
case Code.Ble: // if first <= second
case Code.Blt: // if first < second
case Code.Bne_Un: // if unsigned1 != unsigned2
case Code.Bge_Un: // if unsigned1 >= unsigned2
case Code.Bgt_Un: // if unsigned1 > unsigned2
case Code.Ble_Un: // if unsigned1 <= unsigned2
case Code.Blt_Un: // if unsigned1 < unsigned2
return true;
default:
return false;
}
}
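/// <summary>
/// Walks backwards from <paramref name="to"/> until the accumulated stack delta reaches zero and
/// returns the first instruction of that stack-balanced block.
/// </summary>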
private Instruction ResolveStackBlockStart(Instruction to)
{
Instruction current = to;
var stack_delta = 0;
do
{
stack_delta += current.StackDelta();
current = current.Previous;
} while(stack_delta != 0);
return current.Next;
}
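/// <summary>
/// Walks forward from <paramref name="from"/> and returns the instruction at which the accumulated
/// stack delta equals <paramref name="stackSizeWeNeed"/>, or null if it is never reached.
/// </summary>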
private Instruction ResolveStackBlockEnd(Instruction from, int stackSizeWeNeed)
{
var current = from;
var stack_delta = 0;
do
{
stack_delta += current.StackDelta(forward: true);
if(stack_delta == stackSizeWeNeed)
return current;
current = current.Next;
} while (current != null);
return null;
}
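/// <summary>
/// Scans the instructions between <paramref name="fromStart"/> and <paramref name="fromEnd"/> (forward or
/// backward) for an instruction with the given flow control whose target lies within
/// [<paramref name="toStart"/>, <paramref name="toEnd"/>]; returns true and the instruction via <paramref name="found"/>.
/// </summary>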
private bool FindFlowControl(Instruction fromStart, Instruction fromEnd,
Instruction toStart, Instruction toEnd,
out Instruction found,
bool isforward,
FlowControl flowcontrol = FlowControl.Branch)
{
bool starts = false;
var cur = isforward ? fromStart : fromEnd;
var end = isforward ? fromEnd : fromStart;
found = null;
while (cur != end)
{
if (starts)
{
cur = isforward ? cur.Next : cur.Previous;
}
starts = true;
if(cur.OpCode.FlowControl == flowcontrol)
{
var offset = (cur.Operand as Instruction).Offset;
if(toStart.Offset <= offset && offset <= toEnd.Offset)
{
found = cur;
return true;
}
}
}
return false;
}
#endregion
}
}
| |
/*
* MindTouch Dream - a distributed REST framework
* Copyright (C) 2006-2014 MindTouch, Inc.
* www.mindtouch.com [email protected]
*
* For community documentation and downloads visit mindtouch.com;
* please review the licensing section.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Security.Cryptography;
using System.Security.Principal;
using System.Text;
using Autofac;
using MindTouch.Security.Cryptography;
using MindTouch.Tasking;
using MindTouch.Web;
namespace MindTouch.Dream {
/// <summary>
/// Provides request context information for <see cref="DreamFeature"/> request processing.
/// </summary>
public class DreamContext : ITaskLifespan {
//--- Class Fields ---
private static log4net.ILog _log = LogUtils.CreateLog();
private static int _contextIdCounter = 0;
//--- Class Properties ---
/// <summary>
/// Singleton accessor for the current request context.
/// </summary>
/// <remarks> Will throw an <see cref="DreamContextAccessException"/> if there is no context defined.</remarks>
/// <exception cref="DreamContextAccessException">Thrown if no context is defined in the current environment or the context has been disposed.</exception>
public static DreamContext Current {
get {
TaskEnv current = TaskEnv.CurrentOrNull;
if(current == null) {
throw new DreamContextAccessException("DreamContext.Current is not set because there is no task environment.");
}
DreamContext context = current.GetState<DreamContext>();
if(context == null) {
throw new DreamContextAccessException("DreamContext.Current is not set because the current task environment does not contain a reference.");
}
if(context._isTaskDisposed) {
throw new DreamContextAccessException("DreamContext.Current is not set because the current context is already disposed.");
}
return context;
}
}
/// <summary>
/// Singleton accessor to current request context or <see langword="null"/>, if none is defined.
/// </summary>
public static DreamContext CurrentOrNull {
get {
TaskEnv current = TaskEnv.CurrentOrNull;
if(current == null) {
return null;
}
DreamContext context = current.GetState<DreamContext>();
if(context == null) {
return null;
}
if(context._isTaskDisposed) {
_log.Warn("requested already disposed context via CurrentOrNull, returning null");
return null;
}
return context;
}
}
//--- Fields ---
/// <summary>
/// Dream environment.
/// </summary>
public readonly IDreamEnvironment Env;
/// <summary>
/// Unique Identifier of the request.
/// </summary>
public readonly int ID;
/// <summary>
/// Request Http Verb.
/// </summary>
public readonly string Verb;
/// <summary>
/// Request Uri.
/// </summary>
public readonly XUri Uri;
/// <summary>
/// Dream feature responsible for handling the request.
/// </summary>
public readonly DreamFeature Feature;
/// <summary>
/// Incoming request message.
/// </summary>
public readonly DreamMessage Request;
// TODO (arnec): StartTime should eventually be mirrored by an EndTime and a Duration attribute on features;
// for now it is just the time the request started and should be used instead of GlobalClock.UtcNow
/// <summary>
/// Time the request started.
/// </summary>
public readonly DateTime StartTime;
private readonly XUri _publicUri;
private readonly string[] _suffixes;
private readonly Dictionary<string, string[]> _pathParams;
private readonly Dictionary<string, string> _license;
private readonly Func<Action<ContainerBuilder>, ILifetimeScope> _requestContainerFactory;
private XUri _publicUriOverride;
private XUri _serverUri;
private Hashtable _state;
private System.Diagnostics.StackTrace _stackTrace = DebugUtil.GetStackTrace();
private CultureInfo _culture;
private bool _isTaskDisposed;
private ILifetimeScope _lifetimeScope;
private TaskEnv _ownerEnv;
//--- Constructors ---
/// <summary>
/// Create instance.
/// </summary>
/// <param name="env">Dream Environment.</param>
/// <param name="verb">Http request verb.</param>
/// <param name="uri">Request Uri.</param>
/// <param name="feature">Request handling feature.</param>
/// <param name="publicUri">Public Uri for incoming request.</param>
/// <param name="serverUri">Server Uri for Dream Host.</param>
/// <param name="request">Request message.</param>
/// <param name="culture">Request Culture.</param>
/// <param name="requestContainerFactory">Factory delegate to create a request container on demand.</param>
public DreamContext(IDreamEnvironment env, string verb, XUri uri, DreamFeature feature, XUri publicUri, XUri serverUri, DreamMessage request, CultureInfo culture, Func<Action<ContainerBuilder>, ILifetimeScope> requestContainerFactory) {
if(env == null) {
throw new ArgumentNullException("env");
}
if(verb == null) {
throw new ArgumentNullException("verb");
}
if(uri == null) {
throw new ArgumentNullException("uri");
}
if(feature == null) {
throw new ArgumentNullException("feature");
}
if(publicUri == null) {
throw new ArgumentNullException("publicUri");
}
if(request == null) {
throw new ArgumentNullException("request");
}
if(culture == null) {
throw new ArgumentNullException("culture");
}
if(requestContainerFactory == null) {
throw new ArgumentNullException("requestContainerFactory");
}
this.ID = System.Threading.Interlocked.Increment(ref _contextIdCounter);
this.Env = env;
this.Verb = verb;
this.Uri = uri;
this.Feature = feature;
this.Feature.ExtractArguments(this.Uri, out _suffixes, out _pathParams);
this.ServerUri = serverUri;
this.Request = request;
this.StartTime = GlobalClock.UtcNow;
_publicUri = publicUri;
_culture = culture;
_requestContainerFactory = requestContainerFactory;
// get service license
_license = CheckServiceLicense();
}
private DreamContext(IDreamEnvironment env, string verb, XUri uri, DreamFeature feature, XUri publicUri, XUri serverUri, DreamMessage request, CultureInfo culture, Func<Action<ContainerBuilder>, ILifetimeScope> requestContainerFactory, Dictionary<string, string> license) {
if(env == null) {
throw new ArgumentNullException("env");
}
if(verb == null) {
throw new ArgumentNullException("verb");
}
if(uri == null) {
throw new ArgumentNullException("uri");
}
if(feature == null) {
throw new ArgumentNullException("feature");
}
if(publicUri == null) {
throw new ArgumentNullException("publicUri");
}
if(request == null) {
throw new ArgumentNullException("request");
}
if(culture == null) {
throw new ArgumentNullException("culture");
}
if(requestContainerFactory == null) {
throw new ArgumentNullException("requestContainerFactory");
}
this.ID = System.Threading.Interlocked.Increment(ref _contextIdCounter);
this.Env = env;
this.Verb = verb;
this.Uri = uri;
this.Feature = feature;
this.Feature.ExtractArguments(this.Uri, out _suffixes, out _pathParams);
this.ServerUri = serverUri;
this.Request = request;
this.StartTime = GlobalClock.UtcNow;
_publicUri = publicUri;
_culture = culture;
_requestContainerFactory = requestContainerFactory;
_license = license;
}
//--- Properties ---
/// <summary>
/// Dream Service handling the request.
/// </summary>
public IDreamService Service { get { return Feature.Service; } }
/// <summary>
/// <see langword="True"/> if the context has state attached to it.
/// </summary>
public bool HasState { get { return _state != null; } }
/// <summary>
/// Service license for this request.
/// </summary>
public Dictionary<string, string> ServiceLicense { get { return _license; } }
/// <summary>
/// <see langword="True"/> if the underlying Task environment has disposed this context.
/// </summary>
public bool IsTaskEnvDisposed { get { return _isTaskDisposed; } }
/// <summary>
/// Uri by which the host is known publicly in the context of this request.
/// </summary>
public XUri PublicUri {
get {
return _publicUriOverride ?? _publicUri;
}
}
/// <summary>
/// Culture of the request.
/// </summary>
public CultureInfo Culture {
get {
return _culture;
}
set {
if(value == null) {
throw new ArgumentNullException("value");
}
_culture = value;
}
}
/// <summary>
/// Uri the Dream Host is registered for.
/// </summary>
public XUri ServerUri {
get {
return _serverUri;
}
set {
if(_serverUri != null) {
throw new Exception("server uri already set");
}
if(value == null) {
throw new ArgumentNullException("value");
}
_serverUri = value;
}
}
/// <summary>
/// User, if any, authenticated for this request.
/// </summary>
public IPrincipal User {
get {
return GetState<IPrincipal>();
}
set {
SetState(value);
}
}
/// <summary>
/// Request Inversion of Control container.
/// </summary>
public ILifetimeScope Container {
get {
if(_lifetimeScope == null ) {
_lifetimeScope = _requestContainerFactory(builder => builder.RegisterInstance(this).ExternallyOwned());
}
return _lifetimeScope;
}
}
/// <summary>
/// Request State.
/// </summary>
private Hashtable State {
get {
if(_state == null) {
_state = new Hashtable();
}
return _state;
}
}
//--- Methods ---
/// <summary>
/// Attach the context to the current context.
/// </summary>
/// <remarks>
/// Throws <see cref="DreamContextAccessException"/> if the context is already attached to
/// a task environemnt of the task environment already has a context.
/// </remarks>
/// <exception cref="DreamContextAccessException">Context is either attached to a <see cref="TaskEnv"/> or the current <see cref="TaskEnv"/>
/// already has a context attached.</exception>
public void AttachToCurrentTaskEnv() {
lock(this) {
var env = TaskEnv.Current;
if(env.GetState<DreamContext>() != null) {
throw new DreamContextAccessException("tried to attach dreamcontext to env that already has a dreamcontext");
}
if(_ownerEnv != null && _ownerEnv == env) {
throw new DreamContextAccessException("tried to re-attach dreamcontext to env it is already attached to");
}
if(_ownerEnv != null) {
throw new DreamContextAccessException("tried to attach dreamcontext to an env, when it already is attached to another");
}
_ownerEnv = env;
env.SetState(this);
}
}
/// <summary>
/// Detach the context from its task environment.
/// </summary>
/// <remarks>
/// Must be done in the context's task environment.
/// </remarks>
public void DetachFromTaskEnv() {
lock(this) {
if(TaskEnv.CurrentOrNull != _ownerEnv) {
_log.Warn("detaching context in env other than owning end");
}
_ownerEnv.RemoveState(this);
_ownerEnv = null;
}
}
/// <summary>
/// Override the <see cref="PublicUri"/> for this request.
/// </summary>
/// <param name="publicUri">Publicly accessible Uri.</param>
public void SetPublicUriOverride(XUri publicUri) {
_publicUriOverride = publicUri;
}
/// <summary>
/// Remove any <see cref="PublicUri"/> override.
/// </summary>
public void ClearPublicUriOverride() {
_publicUriOverride = null;
}
/// <summary>
/// Number of suffixes for this feature path.
/// </summary>
/// <returns></returns>
public int GetSuffixCount() {
EnsureFeatureIsSet();
return _suffixes.Length;
}
/// <summary>
/// Get a suffix.
/// </summary>
/// <param name="index">Index of path suffix.</param>
/// <param name="format">Uri path format.</param>
/// <returns>Suffix.</returns>
public string GetSuffix(int index, UriPathFormat format) {
EnsureFeatureIsSet();
string suffix = _suffixes[index];
switch(format) {
case UriPathFormat.Original:
return suffix;
case UriPathFormat.Decoded:
return XUri.Decode(suffix);
case UriPathFormat.Normalized:
return XUri.Decode(suffix).ToLowerInvariant();
default:
throw new ArgumentException("format");
}
}
/// <summary>
/// Get all suffixes.
/// </summary>
/// <param name="format">Uri path format for suffixes.</param>
/// <returns>Array of suffixes.</returns>
public string[] GetSuffixes(UriPathFormat format) {
EnsureFeatureIsSet();
string[] result = new string[_suffixes.Length];
switch(format) {
case UriPathFormat.Original:
for(int i = 0; i < result.Length; ++i) {
result[i] = _suffixes[i];
}
break;
case UriPathFormat.Decoded:
for(int i = 0; i < result.Length; ++i) {
result[i] = XUri.Decode(_suffixes[i]);
}
break;
case UriPathFormat.Normalized:
for(int i = 0; i < result.Length; ++i) {
result[i] = XUri.Decode(_suffixes[i]).ToLowerInvariant();
}
break;
default:
throw new ArgumentException("format");
}
return result;
}
/// <summary>
/// Request parameters.
/// </summary>
/// <remarks>
/// The term "parameters" refers to both query and path parameters.
/// </remarks>
/// <returns>Array of parameter key/value pairs.</returns>
public KeyValuePair<string, string>[] GetParams() {
EnsureFeatureIsSet();
if(Uri.Params != null) {
int count = _pathParams.Count + Uri.Params.Length;
List<KeyValuePair<string, string>> result = new List<KeyValuePair<string, string>>(count);
foreach(KeyValuePair<string, string[]> pair in _pathParams) {
foreach(string value in pair.Value) {
result.Add(new KeyValuePair<string, string>(pair.Key, value));
}
}
foreach(KeyValuePair<string, string> pair in Uri.Params) {
result.Add(pair);
}
return result.ToArray();
} else {
return new KeyValuePair<string, string>[0];
}
}
/// <summary>
/// Get all values for a named parameter.
/// </summary>
/// <param name="key"><see cref="DreamFeatureParamAttribute"/> name.</param>
/// <returns>Text values of parameter.</returns>
/// <exception cref="DreamAbortException">Throws if parameter does not exist.</exception>
public string[] GetParams(string key) {
EnsureFeatureIsSet();
if(key == null) {
throw new ArgumentNullException("key");
}
string[] values;
if(!_pathParams.TryGetValue(key, out values) || (values == null)) {
values = Uri.GetParams(key);
}
return values ?? new string[0];
}
/// <summary>
/// Get a named parameter.
/// </summary>
/// <remarks>
/// Will throw <see cref="DreamAbortException"/> if the named parameter does not exist.
/// </remarks>
/// <param name="key"><see cref="DreamFeatureParamAttribute"/> name.</param>
/// <returns>Text value of parameter.</returns>
/// <exception cref="DreamAbortException">Throws if parameter does not exist.</exception>
public string GetParam(string key) {
EnsureFeatureIsSet();
if(key == null) {
throw new ArgumentNullException("key");
}
string result;
string[] values;
_pathParams.TryGetValue(key, out values);
if((values != null) && (values.Length > 0)) {
result = values[0];
} else {
result = Uri.GetParam(key, null);
}
if(result == null) {
throw new DreamAbortException(DreamMessage.BadRequest(string.Format("missing feature parameter '{0}'", key)));
}
return result;
}
/// <summary>
/// Get a named parameter.
/// </summary>
/// <typeparam name="T">Type to convert parameter to.</typeparam>
/// <param name="key"><see cref="DreamFeatureParamAttribute"/> name.</param>
/// <returns>Parameter value converted to requested type.</returns>
public T GetParam<T>(string key) {
string result = GetParam(key);
try {
return (T)SysUtil.ChangeType(result, typeof(T));
} catch {
throw new DreamAbortException(DreamMessage.BadRequest(string.Format("invalid value for feature parameter '{0}'", key)));
}
}
/// <summary>
/// Get a named parameter.
/// </summary>
/// <param name="key"><see cref="DreamFeatureParamAttribute"/> name.</param>
/// <param name="def">Default value to return in case parameter is not defined.</param>
/// <returns>Text value of parameter.</returns>
public string GetParam(string key, string def) {
EnsureFeatureIsSet();
if(key == null) {
throw new ArgumentNullException("key");
}
string result;
string[] values;
_pathParams.TryGetValue(key, out values);
if((values != null) && (values.Length > 0)) {
result = values[0];
} else {
result = Uri.GetParam(key, null);
}
return result ?? def;
}
/// <summary>
/// Get a named parameter.
/// </summary>
/// <typeparam name="T">Type to convert parameter to.</typeparam>
/// <param name="key"><see cref="DreamFeatureParamAttribute"/> name.</param>
/// <param name="def">Default value to return in case parameter is not defined.</param>
/// <returns>Parameter value converted to requested type.</returns>
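/// <example>
/// For example, an optional paging parameter (hypothetical name) can be read as
/// <c>int limit = context.GetParam&lt;int&gt;("limit", 100);</c>.
/// </example>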
public T GetParam<T>(string key, T def) {
string result = GetParam(key, null);
if(result != null) {
try {
return (T)SysUtil.ChangeType<T>(result);
} catch {
throw new DreamAbortException(DreamMessage.BadRequest(string.Format("invalid value for feature parameter '{0}'", key)));
}
}
return def;
}
/// <summary>
/// Relay a request to another service using the current query parameters, service cookies and verb.
/// </summary>
/// <remarks>
/// Must be yielded by a coroutine or invoked with <see cref="Coroutine.Invoke"/>.
/// </remarks>
/// <param name="plug">Location of relay recipient.</param>
/// <param name="request">Request message to relay.</param>
/// <param name="response">The <see cref="Result{DreamMessage}"/> instance this coroutine will use as a synchronization handle.</param>
/// <returns>Iterator used by <see cref="Coroutine"/> execution environment.</returns>
public IYield Relay(Plug plug, DreamMessage request, Result<DreamMessage> response) {
return Relay(plug, Verb, request, response);
}
/// <summary>
/// Relay a request to another service using the current query parameters and service cookies.
/// </summary>
/// <remarks>
/// Must be yielded by a coroutine or invoked with <see cref="Coroutine.Invoke"/>.
/// </remarks>
/// <param name="plug">Location of relay recipient.</param>
/// <param name="verb">Http verb to use for relay.</param>
/// <param name="request">Request message to relay.</param>
/// <param name="response">The <see cref="Result{DreamMessage}"/> instance this coroutine will use as a synchronization handle.</param>
/// <returns>Iterator used by <see cref="Coroutine"/> execution environment.</returns>
public IYield Relay(Plug plug, string verb, DreamMessage request, Result<DreamMessage> response) {
// combine query parameters of current request with new target URI, then append suffix segments
Result<DreamMessage> inner = new Result<DreamMessage>(response.Timeout);
Result result = new Result(TimeSpan.MaxValue);
Plug.New(plug.Uri.WithParamsFrom(Uri)).InvokeEx(verb, request, inner).WhenDone(_unused => {
response.Return(inner);
result.Return();
});
return result;
}
/// <summary>
/// Get a typed state variable
/// </summary>
/// <remarks>Since the type is used as the state key, can only contain one instance for this type. This call is thread-safe.</remarks>
/// <typeparam name="T">Type of state variable.</typeparam>
/// <returns>Instance or default for type.</returns>
public T GetState<T>() {
lock(State) {
return (T)(State.ContainsKey(typeof(T)) ? State[typeof(T)] : default(T));
}
}
/// <summary>
/// Store a typed state variable.
/// </summary>
/// <remarks>Since the type is used as the state key, can only contain one instance for this type. This call is thread-safe.</remarks>
/// <typeparam name="T">Type of state variable.</typeparam>
/// <param name="value">Instance to store.</param>
public void SetState<T>(T value) {
lock(State) {
State[typeof(T)] = value;
}
}
/// <summary>
/// Get a typed state variable by key.
/// </summary>
/// <remarks>This call is thread-safe.</remarks>
/// <typeparam name="T">Type of state variable.</typeparam>
/// <param name="key">State variable key.</param>
/// <returns>Instance or default for type.</returns>
public T GetState<T>(string key) {
lock(State) {
return (T)(State.ContainsKey(key) ? State[key] : default(T));
}
}
/// <summary>
/// Store a typed state variable by key.
/// </summary>
/// <remarks>This call is thread-safe.</remarks>
/// <typeparam name="T">Type of state variable.</typeparam>
/// <param name="key">State variable key.</param>
/// <param name="value">Instance to store.</param>
public void SetState<T>(string key, T value) {
lock(State) {
State[key] = value;
}
}
/// <summary>
/// Convert a Uri to a host local Uri, if possible.
/// </summary>
/// <remarks>
/// Will return the original Uri if there is no local equivalent.
/// </remarks>
/// <param name="uri">Uri to convert.</param>
/// <returns>Local Uri.</returns>
public XUri AsLocalUri(XUri uri) {
XUri result = uri;
if(uri.Similarity(PublicUri) == PublicUri.MaxSimilarity) {
result = uri.ChangePrefix(PublicUri, Env.LocalMachineUri);
} else if((ServerUri != null) && (uri.Similarity(ServerUri) == ServerUri.MaxSimilarity)) {
result = uri.ChangePrefix(ServerUri, Env.LocalMachineUri);
}
return result;
}
/// <summary>
/// Convert a Uri to a uri relative to the request's public uri, if possible.
/// </summary>
/// <remarks>
/// Will return the original Uri if there is no public equivalent.
/// </remarks>
/// <param name="uri">Uri to convert.</param>
/// <returns>Public Uri.</returns>
public XUri AsPublicUri(XUri uri) {
XUri result = uri;
if(uri.Similarity(Env.LocalMachineUri) == Env.LocalMachineUri.MaxSimilarity) {
result = uri.ChangePrefix(Env.LocalMachineUri, PublicUri);
}
return result;
}
/// <summary>
/// Convert a Uri to a uri relative to the server's public uri, if possible.
/// </summary>
/// <remarks>
/// Will return the original Uri if there is no public equivalent.
/// </remarks>
/// <param name="uri">Uri to convert.</param>
/// <returns>Public Uri.</returns>
public XUri AsServerUri(XUri uri) {
XUri result = uri;
if((ServerUri != null) && (uri.Similarity(Env.LocalMachineUri) == Env.LocalMachineUri.MaxSimilarity)) {
result = uri.ChangePrefix(Env.LocalMachineUri, ServerUri);
}
return result;
}
/// <summary>
/// Replace the context's own state with a clone of the state of another context.
/// </summary>
/// <param name="context"></param>
public void CloneStateFromContext(DreamContext context) {
if(context.HasState) {
lock(context._state) {
var state = State;
foreach(DictionaryEntry entry in context._state) {
var cloneable = entry.Value as ITaskLifespan;
state[entry.Key] = (cloneable == null) ? entry.Value : cloneable.Clone();
}
}
}
}
internal DreamContext CreateContext(string verb, XUri uri, DreamFeature feature, DreamMessage message) {
return new DreamContext(Env, verb, uri, feature, PublicUri, ServerUri, message, Culture, _requestContainerFactory, null);
}
private void EnsureFeatureIsSet() {
if(Feature == null) {
throw new InvalidOperationException("feature not set");
}
}
private Dictionary<string, string> CheckServiceLicense() {
// check request validity (unless it's for the @config uri, which is a special case)
Dictionary<string, string> result = null;
if((Feature.Service.Self != null) && (Feature.Service is IDreamServiceLicense) && !(Uri.LastSegment ?? string.Empty).EqualsInvariant("@config")) {
string service_license = ((IDreamServiceLicense)Feature.Service).ServiceLicense;
if(string.IsNullOrEmpty(service_license)) {
throw new DreamAbortException(DreamMessage.LicenseRequired("service-license missing"));
}
// extract public RSA key for validation
RSACryptoServiceProvider public_key = RSAUtil.ProviderFrom(Feature.Service.GetType().Assembly);
if(public_key == null) {
throw new DreamAbortException(DreamMessage.InternalError("service assembly invalid"));
}
// validate the service-license
try {
// parse service-license
result = HttpUtil.ParseNameValuePairs(service_license);
if(!Encoding.UTF8.GetBytes(service_license.Substring(0, service_license.LastIndexOf(','))).VerifySignature(result["dsig"], public_key)) {
throw new DreamAbortException(DreamMessage.InternalError("invalid service-license"));
}
} catch(Exception e) {
// unexpected error, blame it on the license
if(e is DreamAbortException) {
throw;
} else {
throw new DreamAbortException(DreamMessage.InternalError("corrupt service-license"));
}
}
// check license
string text;
if((!result.TryGetValue("licensee", out text) || string.IsNullOrEmpty(text)) && !result.ContainsKey("expire")) {
// unexpected error, blame it on the license
throw new DreamAbortException(DreamMessage.InternalError("corrupt service-license"));
}
// determine 'now' date-time
DateTime now = GlobalClock.UtcNow;
DateTime? request_date = Request.Headers.Date;
if(request_date.HasValue) {
now = (request_date.Value > now) ? request_date.Value : now;
}
// check expiration
DateTime expire;
if(result.TryGetValue("expire", out text) && (!DateTimeUtil.TryParseInvariant(text, out expire) || (expire.ToUniversalTime() < now))) {
throw new DreamAbortException(DreamMessage.LicenseRequired("service-license has expired"));
}
}
return result;
}
#region ITaskLifespan Members
object ITaskLifespan.Clone() {
var context = new DreamContext(Env, Verb, Uri, Feature, _publicUri, _serverUri, Request, _culture, _requestContainerFactory, _license);
context.CloneStateFromContext(this);
return context;
}
void ITaskLifespan.Dispose() {
if(_isTaskDisposed) {
_log.Warn("disposing already disposed context");
}
_isTaskDisposed = true;
if(_lifetimeScope != null) {
_lifetimeScope.Dispose();
_lifetimeScope = null;
}
if(_state == null) {
return;
}
lock(_state) {
foreach(var item in _state.Values) {
var disposable = item as ITaskLifespan;
if(disposable == null) {
continue;
}
disposable.Dispose();
}
_state.Clear();
}
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Reflection;
using UnuGames.MVVM;
namespace UnuGames
{
static public class ReflectUtils
{
#if !UNITY_EDITOR
static Dictionary<object, string[]> cachedMembersName = new Dictionary<object, string[]> ();
static Dictionary<object, Type> cachedTypes = new Dictionary<object, Type> ();
#endif
static List<string> cachedAssembly = new List<string> ();
static List<Type> allTypes = new List<Type> ();
static Dictionary<Type, object> cachedInstance = new Dictionary<Type, object> ();
static Dictionary<string, Type> cachedTypeName = new Dictionary<string, Type> ();
/// <summary>
/// Get the names of all members of the current object that match the given member types.
/// </summary>
/// <param name="type">Object whose members are inspected.</param>
/// <param name="memberTypes">Member types to include; empty means all members.</param>
/// <returns>Array of member names.</returns>
static public string[] GetAllMembers (this IObservable type, params MemberTypes[] memberTypes)
{
if (type == null)
return null;
MemberInfo[] members = null;
members = type.GetCachedType ().GetMembers ();
bool all = false;
if (memberTypes == null || (memberTypes != null && memberTypes.Length == 0))
all = true;
List<string> results = new List<string> ();
for (int i = 0; i < members.Length; i++) {
if (all) {
results.Add (members [i].Name);
} else {
for (int j = 0; j < memberTypes.Length; j++) {
if (all || members [i].MemberType == memberTypes [j]) {
results.Add (members [i].Name);
break;
}
}
}
}
return results.ToArray ();
}
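/// <summary>
/// Get all members of the current observable that match the given member types.
/// </summary>
/// <param name="type">Observable whose members are inspected.</param>
/// <param name="memberTypes">Member types to include; empty means all members.</param>
/// <returns>Array of matching MemberInfo entries.</returns>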
static public MemberInfo[] GetAllMembersInfo (this IObservable type, params MemberTypes[] memberTypes)
{
if (type == null)
return null;
MemberInfo[] members = null;
members = type.GetCachedType ().GetMembers ();
bool all = false;
if (memberTypes == null || (memberTypes != null && memberTypes.Length == 0))
all = true;
List<MemberInfo> results = new List<MemberInfo> ();
for (int i = 0; i < members.Length; i++) {
if (all) {
results.Add (members [i]);
} else {
for (int j = 0; j < memberTypes.Length; j++) {
if (members [i].MemberType == memberTypes [j]) {
results.Add (members [i]);
break;
}
}
}
}
return results.ToArray ();
}
static public string[] GetAllMembers (this PropertyInfo proInfo, params MemberTypes[] memberTypes)
{
if (proInfo == null)
return null;
MemberInfo[] members = null;
members = proInfo.PropertyType.GetMembers ();
bool all = false;
if (memberTypes == null || (memberTypes != null && memberTypes.Length == 0))
all = true;
List<string> results = new List<string> ();
for (int i = 0; i < members.Length; i++) {
if (all) {
results.Add (members [i].Name);
} else {
for (int j = 0; j < memberTypes.Length; j++) {
if (members [i].MemberType == memberTypes [j]) {
results.Add (members [i].Name);
break;
}
}
}
}
return results.ToArray ();
}
static public MemberInfo[] GetAllMembersInfo (this PropertyInfo proInfo, params MemberTypes[] memberTypes)
{
if (proInfo == null)
return null;
MemberInfo[] members = null;
members = proInfo.PropertyType.GetMembers ();
bool all = false;
if (memberTypes == null || (memberTypes != null && memberTypes.Length == 0))
all = true;
List<MemberInfo> results = new List<MemberInfo> ();
for (int i = 0; i < members.Length; i++) {
if (all) {
results.Add (members [i]);
} else {
for (int j = 0; j < memberTypes.Length; j++) {
if (members [i].MemberType == memberTypes [j]) {
results.Add (members [i]);
break;
}
}
}
}
return results.ToArray ();
}
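/// <summary>
/// Get the member with the given name from the current observable, restricted to the given member types.
/// </summary>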
static public MemberInfo GetMemberInfo (this IObservable type, string memberName, params MemberTypes[] memberTypes)
{
MemberInfo[] infos = type.GetAllMembersInfo (memberTypes);
MemberInfo result = null;
for (int i = 0; i < infos.Length; i++) {
if (infos [i].Name == memberName) {
result = infos [i];
break;
}
}
return result;
}
static public FieldInfo ToField (this MemberInfo member)
{
return (member as FieldInfo);
}
static public PropertyInfo ToProperty (this MemberInfo member)
{
return (member as PropertyInfo);
}
static public MethodInfo ToMethod (this MemberInfo member)
{
return (member as MethodInfo);
}
static public Type GetCachedType (this object obj)
{
Type type = null;
#if UNITY_EDITOR
if (obj != null)
type = obj.GetType ();
else
return null;
#else
if (!cachedTypes.TryGetValue (obj, out type)) {
type = obj.GetType ();
cachedTypes.Add (obj, type);
}
#endif
return type;
}
static public List<string> GetAllAssembly ()
{
return cachedAssembly;
}
static public List<Type> GetAllTypes ()
{
return allTypes;
}
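/// <summary>
/// Rebuilds the cached assembly and public type lists from the current AppDomain; skipped when already
/// cached unless <paramref name="force"/> is true.
/// </summary>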
static public void RefreshAssembly (bool force)
{
if (!force && cachedAssembly.Count > 0)
return;
cachedAssembly.Clear ();
allTypes.Clear ();
Assembly[] assemblies = AppDomain.CurrentDomain.GetAssemblies ();
for (int i = 0; i < assemblies.Length; i++) {
Assembly asem = assemblies [i];
if (!asem.Location.Contains ("Editor"))
cachedAssembly.Add (asem.FullName);
}
for (int i = 0; i < assemblies.Length; i++) {
Type[] types = assemblies [i].GetTypes ();
for (int j = 0; j < types.Length; j++) {
if (types [j].IsPublic) {
allTypes.Add (types [j]);
}
}
}
}
static public string[] GetAllUIManType ()
{
List<string> uiManTypes = new List<string> ();
List<Type> types = GetAllTypes ();
for (int i = 0; i < types.Count; i++) {
string typeName = types [i].Name;
Type type = Type.GetType (typeName);
if (type != null) {
if (type.BaseType == typeof(UIManScreen) || type.BaseType == typeof(UIManDialog) || type.BaseType == typeof(ObservableModel)) {
if (!uiManTypes.Contains (typeName)) {
uiManTypes.Add (typeName);
}
}
}
}
return uiManTypes.ToArray ();
}
static public string[] GetAllRefType (Type baseType)
{
List<string> refTypes = new List<string> ();
List<Type> types = GetAllTypes ();
for (int i = 0; i < types.Count; i++) {
string typeName = types [i].Name;
Type type = Type.GetType (typeName);
if (type != null) {
if (type.BaseType == baseType) {
if (!refTypes.Contains (typeName)) {
refTypes.Add (typeName);
}
}
}
}
return refTypes.ToArray ();
}
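/// <summary>
/// Looks up a type by its simple name or alias in the cached type list, caching the result by name.
/// </summary>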
static public Type GetTypeByName (string name)
{
if (cachedTypeName.ContainsKey (name))
return cachedTypeName [name];
List<Type> types = GetAllTypes ();
for (int i = 0; i < types.Count; i++) {
if (types [i].Name == name || types [i].GetAllias () == name) {
cachedTypeName.Add (name, types [i]);
return types [i];
}
}
return null;
}
static public Type GetUIManTypeByName (string typeName)
{
Type uiManType = null;
Type type = Type.GetType (typeName);
if (type != null) {
if (type.BaseType == typeof(UIManScreen) || type.BaseType == typeof(UIManDialog) || type.BaseType == typeof(ObservableModel)) {
if (type.Name == typeName) {
uiManType = type;
}
}
}
return uiManType;
}
#if UNITY_EDITOR
static public CustomPropertyInfo[] GetUIManProperties (this Type uiManType)
{
PropertyInfo[] properties = uiManType.GetProperties ();
List<CustomPropertyInfo> customProperties = new List<CustomPropertyInfo> ();
foreach (PropertyInfo property in properties) {
if (property.IsDefined (typeof(UIManProperty), true)) {
object instance = GetCachedTypeInstance (uiManType);
customProperties.Add (new CustomPropertyInfo (property.Name, property.PropertyType, property.GetValue (instance, null)));
}
}
return customProperties.ToArray ();
}
#endif
static public string GetAllias (this Type type)
{
if (type == null)
return null;
Dictionary<string, string> dict = new Dictionary<string, string> ();
dict.Add ("String", "string");
dict.Add ("Boolean", "bool");
dict.Add ("Int32", "int");
dict.Add ("Int64", "long");
dict.Add ("Single", "float");
dict.Add ("Double", "double");
if (dict.ContainsKey (type.Name))
return dict [type.Name];
else
return type.Name;
/*object : System.Object
string : System.String
bool : System.Boolean
byte : System.Byte
char : System.Char
decimal : System.Decimal
double : System.Double
short : System.Int16
int : System.Int32
long : System.Int64
sbyte : System.SByte
float : System.Single
ushort : System.UInt16
uint : System.UInt32
ulong : System.UInt64
void : System.Void*/
}
static public string[] GetAllObservableType (Type excludeType = null)
{
List<Type> types = GetAllTypes ();
List<string> observableTypes = new List<string> ();
for (int i = 0; i < types.Count; i++) {
if ((types [i].BaseType == typeof(ObservableModel) || types [i].IsAllias () || types [i].IsSupportType ()) && types [i] != excludeType) {
observableTypes.Add (types [i].GetAllias ());
}
}
return observableTypes.ToArray ();
}
static public bool IsAllias (this System.Type type)
{
if (type.GetAllias () == type.Name)
return false;
else
return true;
}
static public bool IsSupportType (this System.Type type)
{
if (type == null)
return false;
List<string> listType = new List<string> ();
listType.Add ("Color");
listType.Add ("Vector3");
if (listType.Contains (type.Name))
return true;
return false;
}
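/// <summary>
/// Gets a cached default instance of the given type, creating and caching one when necessary.
/// </summary>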
static public object GetCachedTypeInstance (Type type)
{
object instance = null;
if (!cachedInstance.TryGetValue (type, out instance)) {
instance = GetDefaultValue (type);
cachedInstance.Add (type, instance);
} else {
if (instance == null) {
cachedInstance.Remove (type);
instance = GetCachedTypeInstance (type);
}
}
return instance;
}
static public object GetDefaultValue (Type type)
{
try {
return Activator.CreateInstance (type);
} catch {
UnuLogger.LogError ("Cannot get default value of target type!");
return null;
}
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Microsoft.PythonTools.Interpreter;
using Microsoft.VisualStudio.ComponentModelHost;
namespace Microsoft.PythonTools.Profiling {
/// <summary>
/// Provides a view model for the StandaloneTarget class.
/// </summary>
public sealed class StandaloneTargetView : INotifyPropertyChanged {
private ReadOnlyCollection<PythonInterpreterView> _availableInterpreters;
private readonly PythonInterpreterView _customInterpreter;
private PythonInterpreterView _interpreter;
private string _interpreterPath;
private bool _canSpecifyInterpreterPath;
private string _workingDirectory;
private string _scriptPath;
private string _arguments;
private bool _isValid;
/// <summary>
/// Create a StandaloneTargetView with default values.
/// </summary>
[Obsolete("An IServiceProvider should be provided")]
public StandaloneTargetView()
: this(PythonProfilingPackage.Instance) {
}
public StandaloneTargetView(IServiceProvider serviceProvider) {
var componentService = (IComponentModel)(serviceProvider.GetService(typeof(SComponentModel)));
var interpreterService = componentService.GetService<IInterpreterOptionsService>();
var availableInterpreters = interpreterService.Interpreters.Select(factory => new PythonInterpreterView(factory)).ToList();
_customInterpreter = new PythonInterpreterView("Other...", Guid.Empty, new Version(), null);
availableInterpreters.Add(_customInterpreter);
_availableInterpreters = new ReadOnlyCollection<PythonInterpreterView>(availableInterpreters);
_interpreterPath = null;
_canSpecifyInterpreterPath = false;
_scriptPath = null;
_workingDirectory = null;
_arguments = null;
_isValid = false;
PropertyChanged += new PropertyChangedEventHandler(StandaloneTargetView_PropertyChanged);
if (IsAnyAvailableInterpreters) {
var defaultId = interpreterService.DefaultInterpreter.Id;
var defaultVersion = interpreterService.DefaultInterpreter.Configuration.Version;
Interpreter = AvailableInterpreters.FirstOrDefault(v => v.Id == defaultId && v.Version == defaultVersion);
}
}
/// <summary>
/// Create a StandaloneTargetView with values taken from a template.
/// </summary>
public StandaloneTargetView(StandaloneTarget template)
: this(PythonProfilingPackage.Instance, template) {
}
public StandaloneTargetView(IServiceProvider serviceProvider, StandaloneTarget template)
: this(serviceProvider) {
if (template.PythonInterpreter != null) {
Version version;
if (IsAnyAvailableInterpreters && Version.TryParse(template.PythonInterpreter.Version, out version)) {
Interpreter = AvailableInterpreters
.FirstOrDefault(v => v.Id == template.PythonInterpreter.Id && v.Version == version);
} else {
Interpreter = _customInterpreter;
}
} else {
InterpreterPath = template.InterpreterPath;
}
ScriptPath = template.Script;
WorkingDirectory = template.WorkingDirectory;
Arguments = template.Arguments;
}
/// <summary>
/// Returns a StandaloneTarget with values taken from the view model.
/// </summary>
/// <returns></returns>
public StandaloneTarget GetTarget() {
if (IsValid) {
return new StandaloneTarget {
PythonInterpreter = CanSpecifyInterpreterPath ? null : Interpreter.GetInterpreter(),
InterpreterPath = CanSpecifyInterpreterPath ? InterpreterPath : null,
Script = ScriptPath ?? string.Empty,
WorkingDirectory = WorkingDirectory ?? string.Empty,
Arguments = Arguments ?? string.Empty
};
} else {
return null;
}
}
/// <summary>
/// The interpreters that may be selected.
/// </summary>
public ReadOnlyCollection<PythonInterpreterView> AvailableInterpreters {
get {
return _availableInterpreters;
}
}
/// <summary>
/// True if AvailableInterpreters has at least one item.
/// </summary>
public bool IsAnyAvailableInterpreters {
get {
return _availableInterpreters.Count > 0;
}
}
/// <summary>
/// The currently selected Python interpreter. Setting this to null will select a
/// custom interpreter.
/// </summary>
public PythonInterpreterView Interpreter {
get {
return _interpreter;
}
set {
if (_interpreter != value) {
_interpreter = value ?? _customInterpreter;
OnPropertyChanged("Interpreter");
CanSpecifyInterpreterPath = (_interpreter == _customInterpreter);
}
}
}
/// <summary>
/// The current interpreter path. This can be set regardless of the value of
/// CanSpecifyInterpreterPath.
/// </summary>
public string InterpreterPath {
get {
return _interpreterPath;
}
set {
if (_interpreterPath != value) {
_interpreterPath = value;
OnPropertyChanged("InterpreterPath");
}
}
}
/// <summary>
/// True if InterpreterPath is valid; false if it will be ignored.
/// </summary>
public bool CanSpecifyInterpreterPath {
get {
return _canSpecifyInterpreterPath;
}
private set {
if (_canSpecifyInterpreterPath != value) {
_canSpecifyInterpreterPath = value;
OnPropertyChanged("CanSpecifyInterpreterPath");
}
}
}
/// <summary>
/// The current script path.
/// </summary>
public string ScriptPath {
get {
return _scriptPath;
}
set {
if (_scriptPath != value) {
_scriptPath = value;
OnPropertyChanged("ScriptPath");
//if (string.IsNullOrEmpty(WorkingDirectory)) {
// WorkingDirectory = Path.GetDirectoryName(_scriptPath);
//}
}
}
}
/// <summary>
/// The current working directory.
/// </summary>
public string WorkingDirectory {
get {
return _workingDirectory;
}
set {
if (_workingDirectory != value) {
_workingDirectory = value;
OnPropertyChanged("WorkingDirectory");
}
}
}
/// <summary>
/// The current set of arguments to pass to the script.
/// </summary>
public string Arguments {
get {
return _arguments;
}
set {
if (_arguments != value) {
_arguments = value;
OnPropertyChanged("Arguments");
}
}
}
/// <summary>
/// Receives our own property change events to update IsValid.
/// </summary>
void StandaloneTargetView_PropertyChanged(object sender, PropertyChangedEventArgs e) {
Debug.Assert(sender == this);
if (e.PropertyName != "IsValid") {
IsValid = File.Exists(ScriptPath) &&
Directory.Exists(WorkingDirectory) &&
(CanSpecifyInterpreterPath == false || File.Exists(InterpreterPath));
}
}
/// <summary>
/// True if the settings are valid and all paths exist; otherwise, false.
/// </summary>
public bool IsValid {
get {
return _isValid;
}
private set {
if (_isValid != value) {
_isValid = value;
OnPropertyChanged("IsValid");
}
}
}
private void OnPropertyChanged(string propertyName) {
var evt = PropertyChanged;
if (evt != null) {
evt(this, new PropertyChangedEventArgs(propertyName));
}
}
/// <summary>
/// Raised when the value of a property changes.
/// </summary>
public event PropertyChangedEventHandler PropertyChanged;
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using sc = System.Collections;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Cloud.ApigeeConnect.V1
{
/// <summary>Settings for <see cref="ConnectionServiceClient"/> instances.</summary>
public sealed partial class ConnectionServiceSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="ConnectionServiceSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="ConnectionServiceSettings"/>.</returns>
public static ConnectionServiceSettings GetDefault() => new ConnectionServiceSettings();
/// <summary>Constructs a new <see cref="ConnectionServiceSettings"/> object with default settings.</summary>
public ConnectionServiceSettings()
{
}
private ConnectionServiceSettings(ConnectionServiceSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
ListConnectionsSettings = existing.ListConnectionsSettings;
OnCopy(existing);
}
partial void OnCopy(ConnectionServiceSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>ConnectionServiceClient.ListConnections</c> and <c>ConnectionServiceClient.ListConnectionsAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 1000 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
/// <see cref="grpccore::StatusCode.Unknown"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 60 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ListConnectionsSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(60000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(1000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.Unknown)));
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="ConnectionServiceSettings"/> object.</returns>
public ConnectionServiceSettings Clone() => new ConnectionServiceSettings(this);
}
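// Illustrative sketch (not part of the generated code): the retry and timeout defaults
// above can be replaced before building a client, for example to shorten the
// ListConnections timeout. Only APIs already referenced in this file are used.
//
//   ConnectionServiceSettings settings = ConnectionServiceSettings.GetDefault();
//   settings.ListConnectionsSettings = gaxgrpc::CallSettings.FromExpiration(
//       gax::Expiration.FromTimeout(sys::TimeSpan.FromSeconds(30)));
//   ConnectionServiceClient client = new ConnectionServiceClientBuilder
//   {
//       Settings = settings,
//   }.Build();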
/// <summary>
/// Builder class for <see cref="ConnectionServiceClient"/> to provide simple configuration of credentials, endpoint
/// etc.
/// </summary>
public sealed partial class ConnectionServiceClientBuilder : gaxgrpc::ClientBuilderBase<ConnectionServiceClient>
{
/// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
public ConnectionServiceSettings Settings { get; set; }
/// <summary>Creates a new builder with default settings.</summary>
public ConnectionServiceClientBuilder()
{
UseJwtAccessWithScopes = ConnectionServiceClient.UseJwtAccessWithScopes;
}
partial void InterceptBuild(ref ConnectionServiceClient client);
partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<ConnectionServiceClient> task);
/// <summary>Builds the resulting client.</summary>
public override ConnectionServiceClient Build()
{
ConnectionServiceClient client = null;
InterceptBuild(ref client);
return client ?? BuildImpl();
}
/// <summary>Builds the resulting client asynchronously.</summary>
public override stt::Task<ConnectionServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
{
stt::Task<ConnectionServiceClient> task = null;
InterceptBuildAsync(cancellationToken, ref task);
return task ?? BuildAsyncImpl(cancellationToken);
}
private ConnectionServiceClient BuildImpl()
{
Validate();
grpccore::CallInvoker callInvoker = CreateCallInvoker();
return ConnectionServiceClient.Create(callInvoker, Settings);
}
private async stt::Task<ConnectionServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
{
Validate();
grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
return ConnectionServiceClient.Create(callInvoker, Settings);
}
/// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
protected override string GetDefaultEndpoint() => ConnectionServiceClient.DefaultEndpoint;
/// <summary>
/// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
/// </summary>
protected override scg::IReadOnlyList<string> GetDefaultScopes() => ConnectionServiceClient.DefaultScopes;
/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => ConnectionServiceClient.ChannelPool;
/// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>ConnectionService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service Interface for the Apigee Connect connection management APIs.
/// </remarks>
public abstract partial class ConnectionServiceClient
{
/// <summary>
/// The default endpoint for the ConnectionService service, which is a host of "apigeeconnect.googleapis.com"
/// and a port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "apigeeconnect.googleapis.com:443";
/// <summary>The default ConnectionService scopes.</summary>
/// <remarks>
/// The default ConnectionService scopes are:
/// <list type="bullet">
/// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item>
/// </list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/cloud-platform",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="ConnectionServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="ConnectionServiceClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="ConnectionServiceClient"/>.</returns>
public static stt::Task<ConnectionServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new ConnectionServiceClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="ConnectionServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="ConnectionServiceClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="ConnectionServiceClient"/>.</returns>
public static ConnectionServiceClient Create() => new ConnectionServiceClientBuilder().Build();
/// <summary>
/// Creates a <see cref="ConnectionServiceClient"/> which uses the specified call invoker for remote operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="ConnectionServiceSettings"/>.</param>
/// <returns>The created <see cref="ConnectionServiceClient"/>.</returns>
internal static ConnectionServiceClient Create(grpccore::CallInvoker callInvoker, ConnectionServiceSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
ConnectionService.ConnectionServiceClient grpcClient = new ConnectionService.ConnectionServiceClient(callInvoker);
return new ConnectionServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC ConnectionService client</summary>
public virtual ConnectionService.ConnectionServiceClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedEnumerable<ListConnectionsResponse, Connection> ListConnections(ListConnectionsRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedAsyncEnumerable<ListConnectionsResponse, Connection> ListConnectionsAsync(ListConnectionsRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="parent">
/// Required. Parent name of the form:
/// `projects/{project_number or project_id}/endpoints/{endpoint}`.
/// </param>
/// <param name="pageToken">
/// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first
/// page.
/// </param>
/// <param name="pageSize">
/// The size of page to request. The response will not be larger than this, but may be smaller. A value of
/// <c>null</c> or <c>0</c> uses a server-defined page size.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedEnumerable<ListConnectionsResponse, Connection> ListConnections(string parent, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) =>
ListConnections(new ListConnectionsRequest
{
Parent = gax::GaxPreconditions.CheckNotNullOrEmpty(parent, nameof(parent)),
PageToken = pageToken ?? "",
PageSize = pageSize ?? 0,
}, callSettings);
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="parent">
/// Required. Parent name of the form:
/// `projects/{project_number or project_id}/endpoints/{endpoint}`.
/// </param>
/// <param name="pageToken">
/// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first
/// page.
/// </param>
/// <param name="pageSize">
/// The size of page to request. The response will not be larger than this, but may be smaller. A value of
/// <c>null</c> or <c>0</c> uses a server-defined page size.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedAsyncEnumerable<ListConnectionsResponse, Connection> ListConnectionsAsync(string parent, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) =>
ListConnectionsAsync(new ListConnectionsRequest
{
Parent = gax::GaxPreconditions.CheckNotNullOrEmpty(parent, nameof(parent)),
PageToken = pageToken ?? "",
PageSize = pageSize ?? 0,
}, callSettings);
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="parent">
/// Required. Parent name of the form:
/// `projects/{project_number or project_id}/endpoints/{endpoint}`.
/// </param>
/// <param name="pageToken">
/// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first
/// page.
/// </param>
/// <param name="pageSize">
/// The size of page to request. The response will not be larger than this, but may be smaller. A value of
/// <c>null</c> or <c>0</c> uses a server-defined page size.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedEnumerable<ListConnectionsResponse, Connection> ListConnections(EndpointName parent, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) =>
ListConnections(new ListConnectionsRequest
{
ParentAsEndpointName = gax::GaxPreconditions.CheckNotNull(parent, nameof(parent)),
PageToken = pageToken ?? "",
PageSize = pageSize ?? 0,
}, callSettings);
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="parent">
/// Required. Parent name of the form:
/// `projects/{project_number or project_id}/endpoints/{endpoint}`.
/// </param>
/// <param name="pageToken">
/// The token returned from the previous request. A value of <c>null</c> or an empty string retrieves the first
/// page.
/// </param>
/// <param name="pageSize">
/// The size of page to request. The response will not be larger than this, but may be smaller. A value of
/// <c>null</c> or <c>0</c> uses a server-defined page size.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="Connection"/> resources.</returns>
public virtual gax::PagedAsyncEnumerable<ListConnectionsResponse, Connection> ListConnectionsAsync(EndpointName parent, string pageToken = null, int? pageSize = null, gaxgrpc::CallSettings callSettings = null) =>
ListConnectionsAsync(new ListConnectionsRequest
{
ParentAsEndpointName = gax::GaxPreconditions.CheckNotNull(parent, nameof(parent)),
PageToken = pageToken ?? "",
PageSize = pageSize ?? 0,
}, callSettings);
}
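// Illustrative usage sketch (not part of the generated code): listing the active
// connections for an endpoint with the convenience overload above. The project and
// endpoint names are placeholders.
//
//   ConnectionServiceClient connectionServiceClient = ConnectionServiceClient.Create();
//   foreach (Connection connection in connectionServiceClient.ListConnections(
//       "projects/my-project/endpoints/my-endpoint"))
//   {
//       // Process each connection in the paged sequence.
//   }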
/// <summary>ConnectionService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service Interface for the Apigee Connect connection management APIs.
/// </remarks>
public sealed partial class ConnectionServiceClientImpl : ConnectionServiceClient
{
private readonly gaxgrpc::ApiCall<ListConnectionsRequest, ListConnectionsResponse> _callListConnections;
/// <summary>
/// Constructs a client wrapper for the ConnectionService service, with the specified gRPC client and settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">The base <see cref="ConnectionServiceSettings"/> used within this client.</param>
public ConnectionServiceClientImpl(ConnectionService.ConnectionServiceClient grpcClient, ConnectionServiceSettings settings)
{
GrpcClient = grpcClient;
ConnectionServiceSettings effectiveSettings = settings ?? ConnectionServiceSettings.GetDefault();
gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
_callListConnections = clientHelper.BuildApiCall<ListConnectionsRequest, ListConnectionsResponse>(grpcClient.ListConnectionsAsync, grpcClient.ListConnections, effectiveSettings.ListConnectionsSettings).WithGoogleRequestParam("parent", request => request.Parent);
Modify_ApiCall(ref _callListConnections);
Modify_ListConnectionsApiCall(ref _callListConnections);
OnConstruction(grpcClient, effectiveSettings, clientHelper);
}
partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
partial void Modify_ListConnectionsApiCall(ref gaxgrpc::ApiCall<ListConnectionsRequest, ListConnectionsResponse> call);
partial void OnConstruction(ConnectionService.ConnectionServiceClient grpcClient, ConnectionServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
/// <summary>The underlying gRPC ConnectionService client</summary>
public override ConnectionService.ConnectionServiceClient GrpcClient { get; }
partial void Modify_ListConnectionsRequest(ref ListConnectionsRequest request, ref gaxgrpc::CallSettings settings);
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="Connection"/> resources.</returns>
public override gax::PagedEnumerable<ListConnectionsResponse, Connection> ListConnections(ListConnectionsRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ListConnectionsRequest(ref request, ref callSettings);
return new gaxgrpc::GrpcPagedEnumerable<ListConnectionsRequest, ListConnectionsResponse, Connection>(_callListConnections, request, callSettings);
}
/// <summary>
/// Lists connections that are currently active for the given Apigee Connect
/// endpoint.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="Connection"/> resources.</returns>
public override gax::PagedAsyncEnumerable<ListConnectionsResponse, Connection> ListConnectionsAsync(ListConnectionsRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ListConnectionsRequest(ref request, ref callSettings);
return new gaxgrpc::GrpcPagedAsyncEnumerable<ListConnectionsRequest, ListConnectionsResponse, Connection>(_callListConnections, request, callSettings);
}
}
public partial class ListConnectionsRequest : gaxgrpc::IPageRequest
{
}
public partial class ListConnectionsResponse : gaxgrpc::IPageResponse<Connection>
{
/// <summary>Returns an enumerator that iterates through the resources in this response.</summary>
public scg::IEnumerator<Connection> GetEnumerator() => Connections.GetEnumerator();
sc::IEnumerator sc::IEnumerable.GetEnumerator() => GetEnumerator();
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using log4net;
namespace NetGore.IO
{
/// <summary>
/// Base class for a collection of messages loaded from a file.
/// </summary>
/// <typeparam name="T">The Type of key.</typeparam>
public abstract class MessageCollectionBase<T> : IMessageCollection<T>
{
static readonly ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
readonly IEnumerable<CompilerError> _compilationErrors = Enumerable.Empty<CompilerError>();
/// <summary>
/// Dictionary of messages for this language.
/// </summary>
readonly Dictionary<T, string> _messages;
/// <summary>
/// Initializes a new instance of the <see cref="MessageCollectionBase{T}"/> class.
/// </summary>
/// <param name="file">Path to the file to load the messages from.</param>
/// <param name="secondary">Collection of messages to add missing messages from. If null, the
/// collection will only contain messages specified in the file. Otherwise, any message that exists
/// in this secondary collection but does not exist in the <paramref name="file"/> will be loaded
/// to this collection from this secondary collection.</param>
protected MessageCollectionBase(string file, IEnumerable<KeyValuePair<T, string>> secondary = null)
{
if (log.IsDebugEnabled)
log.DebugFormat("Loading MessageCollectionBase from file `{0}`.", file);
// Load the script messages
_messages = Load(file, secondary);
}
/// <summary>
/// Gets the <see cref="CompilerError"/>s from trying to compile the messages. Will be empty if
/// the compilation was successful or has not happened yet.
/// </summary>
public IEnumerable<CompilerError> CompilationErrors
{
get { return _compilationErrors; }
}
/// <summary>
/// Adds all messages from the <paramref name="source"/> that do not exist in the <paramref name="dest"/>.
/// </summary>
/// <param name="dest">Dictionary to add the messages to.</param>
/// <param name="source">Source to get the missing messages from.</param>
static void AddMissingMessages(IDictionary<T, string> dest, IEnumerable<KeyValuePair<T, string>> source)
{
foreach (var sourceMsg in source)
{
if (dest.ContainsKey(sourceMsg.Key))
continue;
dest.Add(sourceMsg.Key, sourceMsg.Value);
if (log.IsDebugEnabled)
log.DebugFormat("Added message `{0}` from default messages.", sourceMsg.Key);
}
}
/// <summary>
/// Gets the IEqualityComparer to use for collections created by this collection.
/// </summary>
/// <returns>The IEqualityComparer to use for collections created by this collection.</returns>
protected virtual IEqualityComparer<T> GetEqualityComparer()
{
return EqualityComparer<T>.Default;
}
/// <summary>
/// Checks if the given line is one that should be ignored. Typically, this means checking if a line starts with
/// comment characters. By default, lines starting with a hash (#), a backslash (\), or an apostrophe (') are ignored.
/// Blank lines are always ignored.
/// </summary>
/// <param name="fileLine">The line to check.</param>
/// <returns>True if the line should be ignored; otherwise false.</returns>
protected virtual bool IsLineToIgnore(string fileLine)
{
return fileLine.StartsWith("#") || fileLine.StartsWith("\\") || fileLine.StartsWith("'");
}
/// <summary>
/// Loads the messages from a file.
/// </summary>
/// <param name="filePath">The full path of the file to load the message from.</param>
/// <param name="secondary">The collection of messages to add missing messages from.</param>
/// <returns>A dictionary containing the loaded messages.</returns>
/// <exception cref="FileNotFoundException">No file was found at the <paramref name="filePath"/>.</exception>
Dictionary<T, string> Load(string filePath, IEnumerable<KeyValuePair<T, string>> secondary)
{
// Check if the file exists
if (!File.Exists(filePath))
{
const string errmsg = "Failed to load the MessageCollection because file does not exist: `{0}`";
if (log.IsErrorEnabled)
log.ErrorFormat(errmsg, filePath);
throw new FileNotFoundException(string.Format(errmsg, filePath), filePath);
}
var loadedMessages = new Dictionary<T, string>(GetEqualityComparer());
// Load all the lines in the file
var lines = File.ReadAllLines(filePath);
// Parse the lines
foreach (var fileLine in lines)
{
T id;
string msg;
if (TryParseLine(fileLine, out id, out msg))
loadedMessages.Add(id, msg);
}
// Add missing messages from the secondary collection if specified
if (secondary != null)
AddMissingMessages(loadedMessages, secondary);
return loadedMessages;
}
/// <summary>
/// Helper for parsing an enum. <typeparamref name="T"/> must be an Enum. Returns false if
/// the parse failed, or if the <paramref name="id"/> does not exist in the Enum.
/// </summary>
/// <param name="str">String to parse.</param>
/// <param name="id">Parsed ID from the <paramref name="str"/>.</param>
/// <returns>True if the ID was parsed successfully and exists in the Enum, else false.</returns>
protected bool ParseEnumHelper(string str, out T id)
{
// Parse the string, returning false (as documented above) when it is not a valid name for the enum
try
{
id = (T)Enum.Parse(typeof(T), str, true);
}
catch (ArgumentException)
{
id = default(T);
return false;
}
// Check if it is part of the enum
if (!Enum.IsDefined(typeof(T), id))
{
const string errmsg = "Languages file contains id `{0}`, but this is not in the ServerMessage enum.";
if (log.IsErrorEnabled)
log.ErrorFormat(errmsg, id);
Debug.Fail(string.Format(errmsg, id));
return false;
}
return true;
}
/// <summary>
/// When overridden in the derived class, tries to parse a string to get the ID.
/// </summary>
/// <param name="str">String to parse.</param>
/// <param name="id">Parsed ID.</param>
/// <returns>True if the ID was parsed successfully, else false.</returns>
protected abstract bool TryParseID(string str, out T id);
/// <summary>
/// Parses a single line from the file.
/// </summary>
/// <param name="fileLine">File line to parse.</param>
/// <param name="id">Parsed ID of the message.</param>
/// <param name="msg">Parsed message for the corresponding <paramref name="id"/>.</param>
/// <returns>True if the line was parsed successfully, else false.</returns>
protected virtual bool TryParseLine(string fileLine, out T id, out string msg)
{
// Check for a valid line
if (!string.IsNullOrEmpty(fileLine))
{
// Trim the line and remove tabs
fileLine = fileLine.Replace("\t", string.Empty).Trim();
// Check for a still-valid line
if (!string.IsNullOrEmpty(fileLine))
{
if (!IsLineToIgnore(fileLine))
{
var colonIndex = fileLine.IndexOf(':');
if (colonIndex > 0)
{
// Split the message identifier and text
var idStr = fileLine.Substring(0, colonIndex).Trim();
msg = fileLine.Substring(colonIndex + 1).Trim();
// Find the corresponding type T for the id
if (TryParseID(idStr, out id))
return true;
}
}
}
}
// Something went wrong somewhere
id = default(T);
msg = null;
return false;
}
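// Illustrative example (not from the original sources) of the line format TryParseLine
// expects: the text before the first colon is handed to TryParseID to produce the key of
// type T, and the remainder becomes the message template later formatted by GetMessage.
//
//   # Lines starting with a comment character are skipped by IsLineToIgnore
//   Welcome: Hello {0}, welcome to the server!
//   LevelUp: You have reached level {0}.
//
// With hypothetical keys like these, GetMessage(MessageKey.Welcome, "Alice") would
// return "Hello Alice, welcome to the server!".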
#region IMessageCollection<T> Members
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
/// </returns>
public IEnumerator<KeyValuePair<T, string>> GetEnumerator()
{
return _messages.GetEnumerator();
}
/// <summary>
/// Returns an enumerator that iterates through a collection.
/// </summary>
/// <returns>
/// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
/// </returns>
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
/// <summary>
/// Gets the specified message, parsed using the supplied parameters.
/// </summary>
/// <param name="id">ID of the message to get.</param>
/// <param name="args">Parameters used to parse the message.</param>
/// <returns>Parsed message for the <paramref name="id"/>, or null if the <paramref name="id"/>
/// is not found or invalid.</returns>
public virtual string GetMessage(T id, params string[] args)
{
string msg;
if (!_messages.TryGetValue(id, out msg))
return null;
try
{
return string.Format(msg, args);
}
catch (Exception ex)
{
if (log.IsErrorEnabled)
log.ErrorFormat("string.Format() failed on MsgId `{0}`. Exception: {1}", id, ex);
return msg;
}
}
#endregion
}
}
| |
using System;
using System.ComponentModel;
using System.Data;
using DBSchemaInfo.Base;
namespace CslaGenerator.Metadata
{
/// <summary>
/// Describes the database column that a property is bound to, including the column's origin (table, view or stored procedure result set).
/// </summary>
[Serializable]
public class DbBindColumn : ICloneable
{
#region Fields
private ColumnOriginType _columnOriginType = ColumnOriginType.None;
// these fields are used to serialize the column name so it can be loaded from a schema
private readonly string _tableName = String.Empty;
private readonly string _viewName = String.Empty;
private readonly string _spName = String.Empty;
private int _spResultSetIndex;
private string _columnName = String.Empty;
private DbType _dataType = DbType.String;
private string _nativeType = String.Empty;
private long _size;
private bool _isPrimaryKey;
private ICatalog _catalog;
private IColumnInfo _column;
private string _objectName;
private string _catalogName;
private string _schemaName;
private IDataBaseObject _databaseObject;
private IResultSet _resultSet;
private bool _isNullable;
private bool _isIdentity;
#endregion
#region Properties
public ColumnOriginType ColumnOriginType
{
set { _columnOriginType = value; }
get { return _columnOriginType; }
}
internal IColumnInfo Column
{
get { return _column; }
}
public DbType DataType
{
get
{
if (Column == null) { return _dataType; }
return Column.DbType;
}
set { _dataType = value; }
}
public string NativeType
{
get
{
if (Column == null) { return _nativeType; }
return Column.NativeTypeName;
}
set
{
if (value != null)
_nativeType = value;
}
}
public long Size
{
get
{
if (Column == null) { return _size; }
return Column.ColumnLength;
}
set { _size = value; }
}
public int SpResultIndex
{
get { return _spResultSetIndex; }
set { _spResultSetIndex = value; }
}
[Browsable(false)]
public string TableName
{
get { return _tableName; }
set
{
if (string.IsNullOrEmpty(_objectName) && !string.IsNullOrEmpty(value))
_objectName = value;
//_tableName = value;
}
}
//[Obsolete("Use Object Name instead")]
[Browsable(false)]
public string ViewName
{
get { return _viewName; }
set
{
if (string.IsNullOrEmpty(_objectName) && !string.IsNullOrEmpty(value))
_objectName = value;
//_viewName = value;
}
}
//[Obsolete("Use Object Name instead")]
[Browsable(false)]
public string SpName
{
get { return _spName; }
set
{
if (string.IsNullOrEmpty(_objectName) && !string.IsNullOrEmpty(value))
_objectName = value;
//_spName = value;
}
}
public string ColumnName
{
get { return _columnName; }
set { _columnName = value; }
}
public bool IsPrimaryKey
{
get { return _isPrimaryKey; }
set { _isPrimaryKey = value; }
}
public string ObjectName
{
get { return _objectName; }
set
{
value = value.Trim().Replace("  ", " ").Replace(' ', '_');
_objectName = value;
}
}
public string CatalogName
{
get { return _catalogName; }
set { _catalogName = value; }
}
public string SchemaName
{
get { return _schemaName; }
set { _schemaName = value; }
}
[Browsable(false)]
public IDataBaseObject DatabaseObject
{
get { return _databaseObject; }
}
[Browsable(false)]
public IResultSet ResultSet
{
get { return _resultSet; }
}
#endregion
#region Methods
internal void LoadColumn(ICatalog catalog)
{
_catalog = catalog;
_resultSet = null;
_databaseObject = null;
_column = null;
string cat = null;
if (_catalogName != null)
{
if (string.Compare(_catalogName, _catalog.CatalogName, true) != 0)
cat = null; //When connecting to a DB with a different name
else
cat = _catalogName;
}
try
{
switch (_columnOriginType)
{
case ColumnOriginType.Table:
ITableInfo tInfo = _catalog.Tables[cat, _schemaName, _objectName];
if (tInfo != null)
{
_databaseObject = tInfo;
_resultSet = tInfo;
}
break;
case ColumnOriginType.View:
//_Column = _Catalog.Views[_CatalogName, _SchemaName, _objectName].Columns[_columnName];
IViewInfo vInfo = _catalog.Views[cat, _schemaName, _objectName];
if (vInfo != null)
{
_databaseObject = vInfo;
_resultSet = vInfo;
}
break;
case ColumnOriginType.StoredProcedure:
IStoredProcedureInfo pInfo = _catalog.Procedures[cat, _schemaName, _objectName];
if (pInfo != null)
{
_databaseObject = pInfo;
if (pInfo.ResultSets.Count > _spResultSetIndex)
_resultSet = pInfo.ResultSets[_spResultSetIndex];
}
break;
case ColumnOriginType.None:
break;
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
if (_resultSet != null)
_column = _resultSet.Columns[_columnName];
ReloadColumnInfo();
}
private void ReloadColumnInfo()
{
if (_column == null)
return;
if (_catalogName == null)
_catalogName = _databaseObject.ObjectCatalog;
if (_schemaName == null)
_schemaName = _databaseObject.ObjectSchema;
_isPrimaryKey = _column.IsPrimaryKey;
_isNullable = _column.IsNullable;
_isIdentity = _column.IsIdentity;
_dataType = _column.DbType;
_nativeType = _column.NativeTypeName;
}
public bool IsNullable
{
get { return _isNullable; }
set { _isNullable = value; }
}
public bool IsIdentity
{
get { return _isIdentity; }
set { _isIdentity = value; }
}
public object Clone()
{
var clone = (DbBindColumn)Util.ObjectCloner.CloneShallow(this);
clone.LoadColumn(_catalog);
return clone;
//DbBindColumn col = new DbBindColumn();
//col._columnName = this._columnName;
//col._columnOriginType = this._columnOriginType;
//col._spName = this._spName;
//col._spResultSetIndex = this._spResultSetIndex;
//col._spSchema = this._spSchema;
//col._tableName = this._tableName;
//col._tableSchema = this._tableSchema;
//col._viewName = this._viewName;
//col._viewSchema = this._viewSchema;
//col._dataType = this._dataType;
//col._nativeType = this._nativeType;
//return col;
}
#endregion
}
}
| |
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Storage;
using System.Collections.Generic;
using System;
using System.Diagnostics;
using Microsoft.AspNetCore.Http;
using System.Linq;
using System.Security.Claims;
using Microsoft.EntityFrameworkCore.ChangeTracking;
namespace HETSAPI.Models
{
/// <summary>
/// Database Context Factory Interface
/// </summary>
public interface IDbAppContextFactory
{
/// <summary>
/// Create new database context
/// </summary>
/// <returns></returns>
IDbAppContext Create();
}
/// <summary>
/// Database Context Factory
/// </summary>
public class DbAppContextFactory : IDbAppContextFactory
{
private readonly DbContextOptions<DbAppContext> _options;
private readonly IHttpContextAccessor _httpContextAccessor;
/// <summary>
/// Database Context Factory Constructor
/// </summary>
/// <param name="httpContextAccessor"></param>
/// <param name="options"></param>
public DbAppContextFactory(IHttpContextAccessor httpContextAccessor, DbContextOptions<DbAppContext> options)
{
_options = options;
_httpContextAccessor = httpContextAccessor;
}
/// <summary>
/// Create new database context
/// </summary>
/// <returns></returns>
public IDbAppContext Create()
{
return new DbAppContext(_httpContextAccessor, _options);
}
}
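// Illustrative sketch (assumes ASP.NET Core dependency injection; not part of the
// original file): registering the factory so that request-scoped code can create a
// context on demand.
//
//   services.AddScoped<IDbAppContextFactory, DbAppContextFactory>();
//   ...
//   IDbAppContext context = dbAppContextFactory.Create();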
/// <summary>
/// Database Context Interface
/// </summary>
public interface IDbAppContext
{
/// <summary>
/// Attachment
/// </summary>
DbSet<Attachment> Attachments { get; set; }
/// <summary>
/// City
/// </summary>
DbSet<City> Cities { get; set; }
/// <summary>
/// ConditionType
/// </summary>
DbSet<ConditionType> ConditionTypes { get; set; }
/// <summary>
/// Contact
/// </summary>
DbSet<Contact> Contacts { get; set; }
/// <summary>
/// District
/// </summary>
DbSet<District> Districts { get; set; }
/// <summary>
/// District Equipment Type (district specific equipment subtypes)
/// </summary>
DbSet<DistrictEquipmentType> DistrictEquipmentTypes { get; set; }
/// <summary>
/// Equipment
/// </summary>
DbSet<Equipment> Equipments { get; set; }
/// <summary>
/// Equipment Attachment
/// </summary>
DbSet<EquipmentAttachment> EquipmentAttachments { get; set; }
/// <summary>
/// Equipment Type
/// </summary>
DbSet<EquipmentType> EquipmentTypes { get; set; }
/// <summary>
/// History (log of activity)
/// </summary>
DbSet<History> Historys { get; set; }
/// <summary>
/// Import Map
/// </summary>
DbSet<ImportMap> ImportMaps { get; set; }
/// <summary>
/// Local Area
/// </summary>
DbSet<LocalArea> LocalAreas { get; set; }
/// <summary>
/// Local Area Rotation List
/// </summary>
DbSet<LocalAreaRotationList> LocalAreaRotationLists { get; set; }
/// <summary>
/// Note
/// </summary>
DbSet<Note> Notes { get; set; }
/// <summary>
/// Owner
/// </summary>
DbSet<Owner> Owners { get; set; }
/// <summary>
/// Permission
/// </summary>
DbSet<Permission> Permissions { get; set; }
/// <summary>
/// Project
/// </summary>
DbSet<Project> Projects { get; set; }
/// <summary>
/// ProvincialRateType
/// </summary>
DbSet<ProvincialRateType> ProvincialRateTypes { get; set; }
/// <summary>
/// Region
/// </summary>
DbSet<Region> Regions { get; set; }
/// <summary>
/// Rental Agreement
/// </summary>
DbSet<RentalAgreement> RentalAgreements { get; set; }
/// <summary>
/// Rental Agreement Condition
/// </summary>
DbSet<RentalAgreementCondition> RentalAgreementConditions { get; set; }
/// <summary>
/// Rental Agreement Rate
/// </summary>
DbSet<RentalAgreementRate> RentalAgreementRates { get; set; }
/// <summary>
/// Rental Request
/// </summary>
DbSet<RentalRequest> RentalRequests { get; set; }
/// <summary>
/// Rental Request Attachment (document)
/// </summary>
DbSet<RentalRequestAttachment> RentalRequestAttachments { get; set; }
/// <summary>
/// Rental Request Rotation List
/// </summary>
DbSet<RentalRequestRotationList> RentalRequestRotationLists { get; set; }
/// <summary>
/// Role
/// </summary>
DbSet<Role> Roles { get; set; }
/// <summary>
/// Role Permissions
/// </summary>
DbSet<RolePermission> RolePermissions { get; set; }
/// <summary>
/// Seniority Audit
/// </summary>
DbSet<SeniorityAudit> SeniorityAudits { get; set; }
/// <summary>
/// Service Area
/// </summary>
DbSet<ServiceArea> ServiceAreas { get; set; }
/// <summary>
/// Time Record
/// </summary>
DbSet<TimeRecord> TimeRecords { get; set; }
/// <summary>
/// User Districts
/// </summary>
DbSet<UserDistrict> UserDistricts { get; set; }
/// <summary>
/// User
/// </summary>
DbSet<User> Users { get; set; }
/// <summary>
/// User Favourite
/// </summary>
DbSet<UserFavourite> UserFavourites { get; set; }
/// <summary>
/// User Role
/// </summary>
DbSet<UserRole> UserRoles { get; set; }
/// <summary>
/// Starts a new transaction.
/// </summary>
/// <returns>
/// A Microsoft.EntityFrameworkCore.Storage.IDbContextTransaction that represents
/// the started transaction.
/// </returns>
IDbContextTransaction BeginTransaction();
/// <summary>
/// Save changes to the database
/// </summary>
/// <returns></returns>
int SaveChanges();
}
/// <summary>
/// Database Context Interface
/// </summary>
public class DbAppContext : DbContext, IDbAppContext
{
private readonly IHttpContextAccessor _httpContextAccessor;
/// <summary>
/// Constructor for Class used for Entity Framework access.
/// </summary>
/// <param name="httpContextAccessor"></param>
/// <param name="options"></param>
public DbAppContext(IHttpContextAccessor httpContextAccessor, DbContextOptions<DbAppContext> options) : base(options)
{
_httpContextAccessor = httpContextAccessor;
// override the default timeout as some operations are time intensive
Database?.SetCommandTimeout(180);
}
/// <summary>
/// Override for OnModelCreating - used to change the database naming convention.
/// </summary>
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
// add our naming convention extension
modelBuilder.UpperCaseUnderscoreSingularConvention();
}
/// <summary>
/// Attachment
/// </summary>
public DbSet<Attachment> Attachments { get; set; }
/// <summary>
/// City
/// </summary>
public DbSet<City> Cities { get; set; }
/// <summary>
/// ConditionType
/// </summary>
public DbSet<ConditionType> ConditionTypes { get; set; }
/// <summary>
/// Contact
/// </summary>
public DbSet<Contact> Contacts { get; set; }
/// <summary>
/// District
/// </summary>
public DbSet<District> Districts { get; set; }
/// <summary>
/// Equipment
/// </summary>
public DbSet<Equipment> Equipments { get; set; }
/// <summary>
/// Equipment Attachment (hardware attached to the Equipment)
/// </summary>
public DbSet<EquipmentAttachment> EquipmentAttachments { get; set; }
/// <summary>
/// District Equipment Type (subtype of Equipment by District)
/// </summary>
public DbSet<DistrictEquipmentType> DistrictEquipmentTypes { get; set; }
/// <summary>
/// Equipment Type
/// </summary>
public DbSet<EquipmentType> EquipmentTypes { get; set; }
/// <summary>
/// History (activity log)
/// </summary>
public DbSet<History> Historys { get; set; }
/// <summary>
/// Import Map
/// </summary>
public DbSet<ImportMap> ImportMaps { get; set; }
/// <summary>
/// Local Area
/// </summary>
public DbSet<LocalArea> LocalAreas { get; set; }
/// <summary>
/// Local Area Rotation List
/// </summary>
public DbSet<LocalAreaRotationList> LocalAreaRotationLists { get; set; }
/// <summary>
/// Note
/// </summary>
public DbSet<Note> Notes { get; set; }
/// <summary>
/// Owner
/// </summary>
public DbSet<Owner> Owners { get; set; }
/// <summary>
/// Permission
/// </summary>
public DbSet<Permission> Permissions { get; set; }
/// <summary>
/// Project
/// </summary>
public DbSet<Project> Projects { get; set; }
/// <summary>
/// ProvincialRateType
/// </summary>
public DbSet<ProvincialRateType> ProvincialRateTypes { get; set; }
/// <summary>
/// Region
/// </summary>
public DbSet<Region> Regions { get; set; }
/// <summary>
/// Rental Agreement
/// </summary>
public DbSet<RentalAgreement> RentalAgreements { get; set; }
/// <summary>
/// Rental Agreement Condition
/// </summary>
public DbSet<RentalAgreementCondition> RentalAgreementConditions { get; set; }
/// <summary>
/// Rental Agreement Rate
/// </summary>
public DbSet<RentalAgreementRate> RentalAgreementRates { get; set; }
/// <summary>
/// Rental Request
/// </summary>
public DbSet<RentalRequest> RentalRequests { get; set; }
/// <summary>
/// Rental Request Attachment (document)
/// </summary>
public DbSet<RentalRequestAttachment> RentalRequestAttachments { get; set; }
/// <summary>
/// Rental Request Rotation List
/// </summary>
public DbSet<RentalRequestRotationList> RentalRequestRotationLists { get; set; }
/// <summary>
/// Role
/// </summary>
public DbSet<Role> Roles { get; set; }
/// <summary>
/// Role Permission
/// </summary>
public DbSet<RolePermission> RolePermissions { get; set; }
/// <summary>
/// Seniority Audit
/// </summary>
public DbSet<SeniorityAudit> SeniorityAudits { get; set; }
/// <summary>
/// Service Area
/// </summary>
public DbSet<ServiceArea> ServiceAreas { get; set; }
/// <summary>
/// Time Record
/// </summary>
public DbSet<TimeRecord> TimeRecords { get; set; }
/// <summary>
/// User Districts
/// </summary>
public DbSet<UserDistrict> UserDistricts { get; set; }
/// <summary>
/// User
/// </summary>
public DbSet<User> Users { get; set; }
/// <summary>
/// User Favourite
/// </summary>
public DbSet<UserFavourite> UserFavourites { get; set; }
/// <summary>
/// User Role
/// </summary>
public DbSet<UserRole> UserRoles { get; set; }
/// <summary>
/// Starts a new transaction.
/// </summary>
/// <returns>
/// A Microsoft.EntityFrameworkCore.Storage.IDbContextTransaction that represents
/// the started transaction.
/// </returns>
public IDbContextTransaction BeginTransaction()
{
bool existingTransaction = true;
IDbContextTransaction transaction = Database.CurrentTransaction;
if (transaction == null)
{
existingTransaction = false;
transaction = Database.BeginTransaction();
}
return new DbContextTransactionWrapper(transaction, existingTransaction);
}
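// Illustrative sketch (not part of the original file): typical use of the wrapper
// returned above. Because BeginTransaction reuses Database.CurrentTransaction when one
// is already open, nested callers share the outer transaction rather than starting a
// second one.
//
//   using (IDbContextTransaction transaction = dbContext.BeginTransaction())
//   {
//       // ... one or more SaveChanges() calls forming a single unit of work ...
//       transaction.Commit();
//   }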
/// <summary>
/// Returns the current web user
/// </summary>
private ClaimsPrincipal HttpContextUser => _httpContextAccessor.HttpContext.User;
/// <summary>
/// Returns the current user ID
/// </summary>
/// <returns></returns>
private string GetCurrentUserId()
{
string result;
try
{
result = HttpContextUser.FindFirst(ClaimTypes.Name).Value;
}
catch
{
result = null;
}
return result;
}
private void DoEquipmentAudit(List<SeniorityAudit> audits, EntityEntry entry, string smUserId)
{
Equipment changed = (Equipment)entry.Entity;
int tempChangedId = changed.Id;
// if this is an "empty" record - exit
if (tempChangedId <= 0)
{
return;
}
Equipment original = Equipments.AsNoTracking()
.Include(x => x.LocalArea)
.Include(x => x.Owner)
.First(a => a.Id == tempChangedId);
// compare the old and new
if (changed.IsSeniorityAuditRequired(original))
{
DateTime currentTime = DateTime.UtcNow;
// create the audit entry.
SeniorityAudit seniorityAudit = new SeniorityAudit
{
BlockNumber = original.BlockNumber,
EndDate = currentTime
};
int tempLocalAreaId = original.LocalArea.Id;
int tempOwnerId = original.Owner.Id;
changed.SeniorityEffectiveDate = currentTime;
seniorityAudit.AppCreateTimestamp = currentTime;
seniorityAudit.AppLastUpdateTimestamp = currentTime;
seniorityAudit.AppCreateUserid = smUserId;
seniorityAudit.AppLastUpdateUserid = smUserId;
seniorityAudit.EquipmentId = tempChangedId;
seniorityAudit.LocalAreaId = tempLocalAreaId;
seniorityAudit.OwnerId = tempOwnerId;
if (seniorityAudit.Owner != null)
{
seniorityAudit.OwnerOrganizationName = seniorityAudit.Owner.OrganizationName;
}
if (original.SeniorityEffectiveDate != null)
{
seniorityAudit.StartDate = (DateTime) original.SeniorityEffectiveDate;
}
seniorityAudit.Seniority = original.Seniority;
seniorityAudit.ServiceHoursLastYear = original.ServiceHoursLastYear;
seniorityAudit.ServiceHoursTwoYearsAgo = original.ServiceHoursTwoYearsAgo;
seniorityAudit.ServiceHoursThreeYearsAgo = original.ServiceHoursThreeYearsAgo;
audits.Add(seniorityAudit);
}
}
/// <summary>
/// Override for Save Changes to implement the audit log
/// </summary>
/// <returns></returns>
public override int SaveChanges()
{
// *************************************************
// update the audit fields for this record
// *************************************************
string smUserId = null;
if (_httpContextAccessor != null)
{
smUserId = GetCurrentUserId();
}
// get all of the modified records
IEnumerable<EntityEntry> modifiedEntries = ChangeTracker.Entries()
.Where(e => e.State == EntityState.Added ||
e.State == EntityState.Modified);
// manage the audit columns and the concurrency column
DateTime currentTime = DateTime.UtcNow;
List<SeniorityAudit> seniorityAudits = new List<SeniorityAudit>();
foreach (EntityEntry entry in modifiedEntries)
{
if (entry.Entity.GetType().InheritsOrImplements(typeof(AuditableEntity)))
{
AuditableEntity theObject = (AuditableEntity)entry.Entity;
theObject.AppLastUpdateUserid = smUserId;
theObject.AppLastUpdateTimestamp = currentTime;
if (entry.State == EntityState.Added)
{
theObject.AppCreateUserid = smUserId;
theObject.AppCreateTimestamp = currentTime;
theObject.ConcurrencyControlNumber = 1;
}
else
{
theObject.ConcurrencyControlNumber = theObject.ConcurrencyControlNumber + 1;
}
}
if (entry.Entity.GetType().InheritsOrImplements(typeof(Equipment)))
{
DoEquipmentAudit(seniorityAudits, entry, smUserId);
}
}
// *************************************************
// attempt to save updates
// *************************************************
int result;
try
{
result = base.SaveChanges();
}
catch (Exception e)
{
Console.WriteLine(e);
// example inner exception message: "20180: Concurrency Failure 5"
if (e.InnerException != null &&
e.InnerException.Message.StartsWith("20180"))
{
// concurrency error
throw new HetsDbConcurrencyException("This record has been updated by another user.");
}
throw;
}
// *************************************************
// manage seniority audit records
// *************************************************
if (seniorityAudits.Count > 0)
{
foreach (SeniorityAudit seniorityAudit in seniorityAudits)
{
SeniorityAudits.Add(seniorityAudit);
}
}
base.SaveChanges();
return result;
}
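// Illustrative sketch (hypothetical entity values; not part of the original file):
// callers never set the audit columns themselves - SaveChanges fills them in for any
// AuditableEntity that was added or modified.
//
//   Owner owner = new Owner { OrganizationName = "Example Org" };
//   dbContext.Owners.Add(owner);
//   dbContext.SaveChanges();   // AppCreate*/AppLastUpdate* fields and the
//                              // ConcurrencyControlNumber are populated here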
/// <summary>
/// This is for importing data only
/// </summary>
/// <returns></returns>
public int SaveChangesForImport()
{
// update the audit fields for this item.
IEnumerable<EntityEntry> modifiedEntries = ChangeTracker.Entries()
.Where(e => e.State == EntityState.Added || e.State == EntityState.Modified);
Debug.WriteLine("Saving Import Data. Total Entries: " + modifiedEntries.Count());
int result = base.SaveChanges();
return result;
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using gView.Framework.Data;
namespace gView.Framework.Data
{
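/// <summary>
/// Exposes the rows or features of an <see cref="ITableClass"/> as a forward-only
/// list and enumerator so the table can be used as a data binding source.
/// </summary>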
public class TableClassBindingSource : IListSource, IList, IEnumerator
{
private ITableClass _class;
private ICursor cursor = null;
private int _count=0;
public TableClassBindingSource(ITableClass Class)
{
_class = Class;
if (_class == null) return;
cursor = Class.Search(new QueryFilter());
}
#region IListSource Member
public bool ContainsListCollection
{
get { return false; }
}
public System.Collections.IList GetList()
{
return this;
}
#endregion
#region IList Member
public int Add(object value)
{
return -1;
}
public void Clear()
{
Reset();
}
public bool Contains(object value)
{
return false;
}
public int IndexOf(object value)
{
return -1;
}
public void Insert(int index, object value)
{
}
public bool IsFixedSize
{
get { return false; }
}
public bool IsReadOnly
{
get { return true; }
}
public void Remove(object value)
{
}
public void RemoveAt(int index)
{
}
public object this[int index]
{
get
{
return null;
}
set
{
}
}
#endregion
#region ICollection Member
public void CopyTo(Array array, int index)
{
throw new Exception("The method or operation is not implemented.");
}
public int Count
{
get { return _count; }
}
public bool IsSynchronized
{
get { return false; }
}
public object SyncRoot
{
get { throw new Exception("The method or operation is not implemented."); }
}
#endregion
#region IEnumerable Member
public IEnumerator GetEnumerator()
{
return this;
}
#endregion
#region IEnumerator Member
private object _current;
public object Current
{
get
{
return _current;
}
}
public bool MoveNext()
{
if (cursor is IFeatureCursor)
_current = ((IFeatureCursor)cursor).NextFeature;
else if (cursor is IRowCursor)
_current = ((IRowCursor)cursor).NextRow;
else
_current = null;
if(_current==null) return false;
_count++;
return true;
}
public void Reset()
{
if (_class == null) return;
if (cursor != null) cursor.Dispose();
cursor = _class.Search(new QueryFilter());
_count=0;
}
#endregion
}
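// Illustrative sketch (not part of the original sources): enumerating all rows or
// features of an ITableClass through the binding source above. The class and method
// names are examples only.
public class TableClassBindingSourceExample
{
public static int CountRows(ITableClass tableClass)
{
TableClassBindingSource binding = new TableClassBindingSource(tableClass);
int count = 0;
// Each MoveNext() call pulls the next feature/row from the underlying cursor.
foreach (object row in binding)
{
count++;
}
return count;
}
}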
public class TableClassBindingSource2 : IBindingList
{
#region IBindingList Member
public void AddIndex(PropertyDescriptor property)
{
throw new Exception("The method or operation is not implemented.");
}
public object AddNew()
{
throw new Exception("The method or operation is not implemented.");
}
public bool AllowEdit
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool AllowNew
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool AllowRemove
{
get { throw new Exception("The method or operation is not implemented."); }
}
public void ApplySort(PropertyDescriptor property, ListSortDirection direction)
{
throw new Exception("The method or operation is not implemented.");
}
public int Find(PropertyDescriptor property, object key)
{
throw new Exception("The method or operation is not implemented.");
}
public bool IsSorted
{
get { throw new Exception("The method or operation is not implemented."); }
}
public event ListChangedEventHandler ListChanged;
public void RemoveIndex(PropertyDescriptor property)
{
throw new Exception("The method or operation is not implemented.");
}
public void RemoveSort()
{
throw new Exception("The method or operation is not implemented.");
}
public ListSortDirection SortDirection
{
get { throw new Exception("The method or operation is not implemented."); }
}
public PropertyDescriptor SortProperty
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool SupportsChangeNotification
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool SupportsSearching
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool SupportsSorting
{
get { throw new Exception("The method or operation is not implemented."); }
}
#endregion
#region IList Member
public int Add(object value)
{
throw new Exception("The method or operation is not implemented.");
}
public void Clear()
{
throw new Exception("The method or operation is not implemented.");
}
public bool Contains(object value)
{
throw new Exception("The method or operation is not implemented.");
}
public int IndexOf(object value)
{
throw new Exception("The method or operation is not implemented.");
}
public void Insert(int index, object value)
{
throw new Exception("The method or operation is not implemented.");
}
public bool IsFixedSize
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool IsReadOnly
{
get { throw new Exception("The method or operation is not implemented."); }
}
public void Remove(object value)
{
throw new Exception("The method or operation is not implemented.");
}
public void RemoveAt(int index)
{
throw new Exception("The method or operation is not implemented.");
}
public object this[int index]
{
get
{
throw new Exception("The method or operation is not implemented.");
}
set
{
throw new Exception("The method or operation is not implemented.");
}
}
#endregion
#region ICollection Member
public void CopyTo(Array array, int index)
{
throw new Exception("The method or operation is not implemented.");
}
public int Count
{
get { throw new Exception("The method or operation is not implemented."); }
}
public bool IsSynchronized
{
get { throw new Exception("The method or operation is not implemented."); }
}
public object SyncRoot
{
get { throw new Exception("The method or operation is not implemented."); }
}
#endregion
#region IEnumerable Member
public IEnumerator GetEnumerator()
{
throw new Exception("The method or operation is not implemented.");
}
#endregion
}
}
| |
/*
* SHS -- The Scalable Hyperlink Store
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT
* LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR
* A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
using System;
using System.IO;
using System.Reflection;
namespace SHS {
public static class SerializerFactory {
public static Serializer Make(Type t) {
if (t == typeof(Boolean)) {
return new BooleanSerializer();
} else if (t == typeof(Byte)) {
return new ByteSerializer();
} else if (t == typeof(SByte)) {
return new SByteSerializer();
} else if (t == typeof(UInt16)) {
return new UInt16Serializer();
} else if (t == typeof(Int16)) {
return new Int16Serializer();
} else if (t == typeof(UInt32)) {
return new UInt32Serializer();
} else if (t == typeof(Int32)) {
return new Int32Serializer();
} else if (t == typeof(UInt64)) {
return new UInt64Serializer();
} else if (t == typeof(Int64)) {
return new Int64Serializer();
} else if (t == typeof(Single)) {
return new SingleSerializer();
} else if (t == typeof(Double)) {
return new DoubleSerializer();
} else if (t == typeof(Decimal)) {
return new DecimalSerializer();
} else if (t == typeof(Char)) {
return new CharSerializer();
} else if (t == typeof(String)) {
return new StringSerializer();
} else {
return new GeneralSerializer(t);
}
}
}
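// Illustrative sketch (not part of the original SHS sources): shows how a Serializer
// obtained from SerializerFactory.Make round-trips a value through a MemoryStream using
// the BinaryReader/BinaryWriter overloads declared below. The class, method and value
// here are examples only.
public static class SerializerFactoryExample {
public static object RoundTrip(Type t, object value) {
Serializer serializer = SerializerFactory.Make(t);
using (MemoryStream stream = new MemoryStream()) {
// Write the value with the type-specific serializer ...
serializer.Write(value, new BinaryWriter(stream));
stream.Position = 0;
// ... then read it back, e.g. RoundTrip(typeof(Int32), 42) returns a boxed 42.
return serializer.Read(new BinaryReader(stream));
}
}
}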
public abstract class Serializer {
public abstract object Read(Rd rd);
public abstract void Write(object val, Wr wr);
public abstract object Read(BinaryReader rd);
public abstract void Write(object val, BinaryWriter wr);
}
public class GeneralSerializer : Serializer {
private MethodInfo rdChMeth;
private MethodInfo wrChMeth;
private MethodInfo rdBrMeth;
private MethodInfo wrBwMeth;
public GeneralSerializer(Type t) {
this.rdChMeth = t.GetMethod("Read", new Type[]{t, typeof(Rd)});
if (this.rdChMeth == null) {
throw new Exception(t + " does not have a Read(Rd) method");
}
this.wrChMeth = t.GetMethod("Write", new Type[]{t, typeof(Wr)});
if (this.wrChMeth == null) {
throw new Exception(t + " does not have a Write(Wr) method");
}
this.rdBrMeth = t.GetMethod("Read", new Type[] { t, typeof(BinaryReader) });
if (this.rdBrMeth == null) {
throw new Exception(t + " does not have a Read(BinaryReader) method");
}
this.wrBwMeth = t.GetMethod("Write", new Type[] { t, typeof(BinaryWriter) });
if (this.wrBwMeth == null) {
throw new Exception(t + " does not have a Write(BinaryWriter) method");
}
}
public override object Read(Rd rd) {
return this.rdChMeth.Invoke(null, new object[]{null, rd});
}
public override object Read(BinaryReader rd) {
return this.rdBrMeth.Invoke(null, new object[]{null, rd});
}
public override void Write(object val, Wr wr) {
wrChMeth.Invoke(null, new object[]{val, wr});
}
public override void Write(object val, BinaryWriter wr) {
wrBwMeth.Invoke(null, new object[]{val, wr});
}
}
public class BooleanSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadBoolean();
}
public override object Read(BinaryReader rd) {
return rd.ReadBoolean();
}
public override void Write(object val, Wr wr) {
wr.WriteBoolean((Boolean)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Boolean)val);
}
}
public class ByteSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadByte();
}
public override object Read(BinaryReader rd) {
return rd.ReadByte();
}
public override void Write(object val, Wr wr) {
wr.WriteUInt8((Byte)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Byte)val);
}
}
public class SByteSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadSByte();
}
public override object Read(BinaryReader rd) {
return rd.ReadSByte();
}
public override void Write(object val, Wr wr) {
wr.WriteInt8((SByte)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((SByte)val);
}
}
public class UInt16Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadUInt16();
}
public override object Read(BinaryReader rd) {
return rd.ReadUInt16();
}
public override void Write(object val, Wr wr) {
wr.WriteUInt16((UInt16)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((UInt16)val);
}
}
public class Int16Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadInt16();
}
public override object Read(BinaryReader rd) {
return rd.ReadInt16();
}
public override void Write(object val, Wr wr) {
wr.WriteInt16((Int16)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Int16)val);
}
}
public class UInt32Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadUInt32();
}
public override object Read(BinaryReader rd) {
return rd.ReadUInt32();
}
public override void Write(object val, Wr wr) {
wr.WriteUInt32((UInt32)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((UInt32)val);
}
}
public class Int32Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadInt32();
}
public override object Read(BinaryReader rd) {
return rd.ReadInt32();
}
public override void Write(object val, Wr wr) {
wr.WriteInt32((Int32)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Int32)val);
}
}
public class UInt64Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadUInt64();
}
public override object Read(BinaryReader rd) {
return rd.ReadUInt64();
}
public override void Write(object val, Wr wr) {
wr.WriteUInt64((UInt64)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((UInt64)val);
}
}
public class Int64Serializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadInt64();
}
public override object Read(BinaryReader rd) {
return rd.ReadInt64();
}
public override void Write(object val, Wr wr) {
wr.WriteInt64((Int64)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Int64)val);
}
}
public class SingleSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadSingle();
}
public override object Read(BinaryReader rd) {
return rd.ReadSingle();
}
public override void Write(object val, Wr wr) {
wr.WriteSingle((Single)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Single)val);
}
}
public class DoubleSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadDouble();
}
public override object Read(BinaryReader rd) {
return rd.ReadDouble();
}
public override void Write(object val, Wr wr) {
wr.WriteDouble((Double)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Double)val);
}
}
public class DecimalSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadDecimal();
}
public override object Read(BinaryReader rd) {
return rd.ReadDecimal();
}
public override void Write(object val, Wr wr) {
wr.WriteDecimal((Decimal)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Decimal)val);
}
}
public class CharSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadChar();
}
public override object Read(BinaryReader rd) {
return rd.ReadChar();
}
public override void Write(object val, Wr wr) {
wr.WriteChar((Char)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((Char)val);
}
}
public class StringSerializer : Serializer {
public override object Read(Rd rd) {
return rd.ReadString();
}
public override object Read(BinaryReader rd) {
return rd.ReadString();
}
public override void Write(object val, Wr wr) {
wr.WriteString((String)val);
}
public override void Write(object val, BinaryWriter wr) {
wr.Write((String)val);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using FluentAssertions.Common;
using FluentAssertions.Execution;
using Xunit;
using Xunit.Sdk;
namespace FluentAssertions.Specs.Execution
{
public class AssertionScopeSpecs
{
#region Lifecycle Management
[Fact]
public void When_disposed_it_should_throw_any_failures()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("Failure1");
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
exception.Message.Should().StartWith("Failure1");
}
}
[Fact]
public void When_disposed_it_should_throw_any_failures_and_properly_format_using_args()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("Failure{0}", 1);
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
exception.Message.Should().StartWith("Failure1");
}
}
[Fact]
public void When_lazy_version_is_not_disposed_it_should_not_execute_fail_reason_function()
{
// Arrange
var scope = new AssertionScope();
bool failReasonCalled = false;
AssertionScope.Current
.ForCondition(true)
.FailWith(() =>
{
failReasonCalled = true;
return new FailReason("Failure");
});
// Act
Action act = scope.Dispose;
// Assert
act();
failReasonCalled.Should().BeFalse("the fail reason function should not be called for a successful scope");
}
[Fact]
public void When_lazy_version_is_disposed_it_should_throw_any_failures_and_properly_format_using_args()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith(() => new FailReason("Failure{0}", 1));
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
exception.Message.Should().StartWith("Failure1");
}
}
[Fact]
public void When_multiple_scopes_are_nested_it_should_throw_all_failures_from_the_outer_scope()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("Failure1");
using (var nestedScope = new AssertionScope())
{
nestedScope.FailWith("Failure2");
using var deeplyNestedScope = new AssertionScope();
deeplyNestedScope.FailWith("Failure3");
}
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
exception.Message.Should().Contain("Failure1");
exception.Message.Should().Contain("Failure2");
exception.Message.Should().Contain("Failure3");
}
}
[Fact]
public void When_a_nested_scope_is_discarded_its_failures_should_also_be_discarded()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("Failure1");
using (var nestedScope = new AssertionScope())
{
nestedScope.FailWith("Failure2");
using var deeplyNestedScope = new AssertionScope();
deeplyNestedScope.FailWith("Failure3");
deeplyNestedScope.Discard();
}
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
exception.Message.Should().Contain("Failure1");
exception.Message.Should().Contain("Failure2");
exception.Message.Should().NotContain("Failure3");
}
}
[Fact]
public async Task When_using_AssertionScope_across_thread_boundaries_it_should_work()
{
using var semaphore = new SemaphoreSlim(0, 1);
await Task.WhenAll(SemaphoreYieldAndWait(semaphore), SemaphoreYieldAndRelease(semaphore));
}
private static async Task SemaphoreYieldAndWait(SemaphoreSlim semaphore)
{
await Task.Yield();
var scope = new AssertionScope();
await semaphore.WaitAsync();
scope.Should().BeSameAs(AssertionScope.Current);
}
private static async Task SemaphoreYieldAndRelease(SemaphoreSlim semaphore)
{
await Task.Yield();
var scope = new AssertionScope();
semaphore.Release();
scope.Should().BeSameAs(AssertionScope.Current);
}
[Fact]
public void When_custom_strategy_used_respect_its_behavior()
{
// Arrange
var scope = new AssertionScope(new FailWithStupidMessageAssertionStrategy());
// Act
Action act = () => scope.FailWith("Failure 1");
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("Good luck with understanding what's going on!");
}
[Fact]
public void When_custom_strategy_is_null_it_should_throw()
{
// Arrange
IAssertionStrategy strategy = null;
// Arrange / Act
Func<AssertionScope> act = () => new AssertionScope(strategy);
// Assert
act.Should().ThrowExactly<ArgumentNullException>()
.WithParameterName("assertionStrategy");
}
[Fact]
public void When_using_a_custom_strategy_it_should_include_failure_messages_of_all_failing_assertions()
{
// Arrange
var scope = new AssertionScope(new CustomAssertionStrategy());
false.Should().BeTrue();
true.Should().BeFalse();
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("*but found false*but found true*");
}
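// Minimal IAssertionStrategy used by the test above: it collects failure messages and, on
// ThrowIfAny, throws them as one combined message together with any reportable context.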
public class CustomAssertionStrategy : IAssertionStrategy
{
private readonly List<string> failureMessages = new List<string>();
public IEnumerable<string> FailureMessages => failureMessages;
public IEnumerable<string> DiscardFailures()
{
var discardedFailures = failureMessages.ToArray();
failureMessages.Clear();
return discardedFailures;
}
public void ThrowIfAny(IDictionary<string, object> context)
{
if (failureMessages.Any())
{
var builder = new StringBuilder();
builder.AppendLine(string.Join(Environment.NewLine, failureMessages));
if (context.Any())
{
foreach (KeyValuePair<string, object> pair in context)
{
builder.AppendFormat(CultureInfo.InvariantCulture, "\nWith {0}:\n{1}", pair.Key, pair.Value);
}
}
Services.ThrowException(builder.ToString());
}
}
public void HandleFailure(string message)
{
failureMessages.Add(message);
}
}
internal class FailWithStupidMessageAssertionStrategy : IAssertionStrategy
{
public IEnumerable<string> FailureMessages => new string[0];
public void HandleFailure(string message) =>
Services.ThrowException("Good luck with understanding what's going on!");
public IEnumerable<string> DiscardFailures() => new string[0];
public void ThrowIfAny(IDictionary<string, object> context)
{
// do nothing
}
}
#endregion
#region Message Formatting
[Fact]
public void When_the_same_failure_is_handled_twice_or_more_it_should_still_report_it_once()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("Failure");
AssertionScope.Current.FailWith("Failure");
using (var nestedScope = new AssertionScope())
{
nestedScope.FailWith("Failure");
nestedScope.FailWith("Failure");
}
// Act
Action act = scope.Dispose;
// Assert
try
{
act();
}
catch (Exception exception)
{
int matches = new Regex(".*Failure.*").Matches(exception.Message).Count;
matches.Should().Be(4);
}
}
[Fact]
public void The_failure_message_should_use_the_name_of_the_scope_as_context()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope("foo");
new[] { 1, 2, 3 }.Should().Equal(3, 2, 1);
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected foo to be equal to*");
}
[Fact]
public void The_failure_message_should_use_the_lazy_name_of_the_scope_as_context()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope(new Lazy<string>(() => "lazy foo"));
new[] { 1, 2, 3 }.Should().Equal(3, 2, 1);
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected lazy foo to be equal to*");
}
[Fact]
public void When_an_assertion_fails_on_ContainKey_succeeding_message_should_be_included()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
var values = new Dictionary<int, int>();
values.Should().ContainKey(0);
values.Should().ContainKey(1);
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected*to contain key 0*Expected*to contain key 1*");
}
[Fact]
public void When_an_assertion_fails_on_ContainSingle_succeeding_message_should_be_included()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
var values = new List<int>();
values.Should().ContainSingle();
values.Should().ContainSingle();
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected*to contain a single item, but the collection is empty*" +
"Expected*to contain a single item, but the collection is empty*");
}
[Fact]
public void When_an_assertion_fails_on_BeOfType_succeeding_message_should_be_included()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
var item = string.Empty;
item.Should().BeOfType<int>();
item.Should().BeOfType<long>();
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage(
"Expected type to be System.Int32, but found System.String.*" +
"Expected type to be System.Int64, but found System.String.");
}
[Fact]
public void When_an_assertion_fails_on_BeAssignableTo_succeeding_message_should_be_included()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
var item = string.Empty;
item.Should().BeAssignableTo<int>();
item.Should().BeAssignableTo<long>();
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage(
"Expected * to be assignable to System.Int32, but System.String is not.*" +
"Expected * to be assignable to System.Int64, but System.String is not.");
}
[Fact]
public void When_parentheses_are_used_in_the_because_arguments_it_should_render_them_correctly()
{
// Act
Action act = () => 1.Should().Be(2, "can't use these in becauseArgs: {0} {1}", "{", "}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*because can't use these in becauseArgs: { }*");
}
[Fact]
public void When_becauseArgs_is_null_it_should_render_reason_correctly()
{
// Act
object[] becauseArgs = null;
Action act = () => 1.Should().Be(2, "it should still work", becauseArgs);
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*it should still work*");
}
[Fact]
public void When_invalid_format_is_used_in_because_parameter_without_becauseArgs_it_should_still_render_reason_correctly()
{
// Act
Action act = () => 1.Should().Be(2, "use of {} is okay if there are no because parameters");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*because use of {} is okay if there are no because parameters*");
}
[Fact]
public void When_invalid_format_is_used_in_because_parameter_along_with_becauseArgs_it_should_render_default_text()
{
// Act
Action act = () => 1.Should().Be(2, "use of {} is considered invalid in because parameter with becauseArgs", "additional becauseArgs parameter");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("*because message 'use of {} is considered invalid in because parameter with becauseArgs' could not be formatted with string.Format*");
}
[Fact]
public void When_an_assertion_fails_in_a_scope_with_braces_it_should_use_the_name_as_the_assertion_context()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope("{}");
default(int[]).Should().Equal(3, 2, 1);
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected {} to be equal to*");
}
[Fact]
public void When_parentheses_are_used_in_literal_values_it_should_render_them_correctly()
{
// Act
Action act = () => "{foo}".Should().Be("{bar}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected string to be \"{bar}\", but \"{foo}\" differs near*");
}
[Fact]
public void When_message_contains_double_braces_they_should_not_be_replaced_with_context()
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("{{empty}}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("*empty*");
}
[InlineData("\r")]
[InlineData("\\r")]
[InlineData("\\\r")]
[InlineData("\\\\r")]
[InlineData("\\\\\r")]
[Theory]
public void When_message_contains_backslash_followed_by_r_is_should_format_correctly(string str)
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith(str);
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage(str);
}
[InlineData("\r")]
[InlineData("\\r")]
[InlineData("\\\r")]
[InlineData("\\\\r")]
[InlineData("\\\\\r")]
[Theory]
public void When_message_argument_contains_backslash_followed_by_r_is_should_format_correctly(string str)
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("\\{0}\\A", str);
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("\\\"" + str + "\"\\A*");
}
[InlineData("\n")]
[InlineData("\\n")]
[InlineData("\\\n")]
[InlineData("\\\\n")]
[InlineData("\\\\\n")]
[Theory]
public void When_message_contains_backslash_followed_by_n_is_should_format_correctly(string str)
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith(str);
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage(str);
}
[InlineData("\n")]
[InlineData("\\n")]
[InlineData("\\\n")]
[InlineData("\\\\n")]
[InlineData("\\\\\n")]
[Theory]
public void When_message_argument_contains_backslash_followed_by_n_is_should_format_correctly(string str)
{
// Arrange
var scope = new AssertionScope();
AssertionScope.Current.FailWith("\\{0}\\A", str);
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("\\\"" + str + "\"\\A*");
}
[Fact]
public void When_subject_has_trailing_backslash_the_failure_message_should_contain_the_trailing_backslash()
{
// Arrange / Act
Action act = () => "A\\".Should().Be("A");
// Assert
act.Should().Throw<XunitException>()
.WithMessage(@"* near ""\"" *", "trailing backslashes should not be removed from failure message");
}
[Fact]
public void When_expectation_has_trailing_backslash_the_failure_message_should_contain_the_trailing_backslash()
{
// Arrange / Act
Action act = () => "A".Should().Be("A\\");
// Assert
act.Should().Throw<XunitException>()
.WithMessage(@"* to be ""A\"" *", "trailing backslashes should not be removed from failure message");
}
[Fact]
public void When_message_starts_with_single_braces_they_should_be_replaced_with_context()
{
// Arrange
var scope = new AssertionScope();
scope.AddReportable("MyKey", "MyValue");
AssertionScope.Current.FailWith("{MyKey}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("MyValue*");
}
[Fact]
public void When_message_starts_with_two_single_braces_they_should_be_replaced_with_context()
{
// Arrange
var scope = new AssertionScope();
scope.AddReportable("SomeKey", "SomeValue");
scope.AddReportable("AnotherKey", "AnotherValue");
AssertionScope.Current.FailWith("{SomeKey}{AnotherKey}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("SomeValueAnotherValue*");
}
[Fact]
public void When_adding_reportable_values_they_should_be_reported_after_the_message()
{
// Arrange
var scope = new AssertionScope();
scope.AddReportable("SomeKey", "SomeValue");
scope.AddReportable("AnotherKey", "AnotherValue");
AssertionScope.Current.FailWith("{SomeKey}{AnotherKey}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("*With SomeKey:\nSomeValue\nWith AnotherKey:\nAnotherValue");
}
[Fact]
public void When_adding_non_reportable_value_it_should_not_be_reported_after_the_message()
{
// Arrange
var scope = new AssertionScope();
scope.AddNonReportable("SomeKey", "SomeValue");
AssertionScope.Current.FailWith("{SomeKey}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.Which.Message.Should().NotContain("With SomeKey:\nSomeValue");
}
[Fact]
public void When_adding_non_reportable_value_it_should_be_retrievable_from_context()
{
// Arrange
var scope = new AssertionScope();
scope.AddNonReportable("SomeKey", "SomeValue");
// Act
var value = scope.Get<string>("SomeKey");
// Assert
value.Should().Be("SomeValue");
}
[Fact]
public void When_using_a_deferred_reportable_value_it_is_not_calculated_if_there_are_no_failures()
{
// Arrange
var scope = new AssertionScope();
var deferredValueInvoked = false;
scope.AddReportable("MyKey", () =>
{
deferredValueInvoked = true;
return "MyValue";
});
// Act
scope.Dispose();
// Assert
deferredValueInvoked.Should().BeFalse();
}
[Fact]
public void When_using_a_deferred_reportable_value_it_is_calculated_if_there_is_a_failure()
{
// Arrange
var scope = new AssertionScope();
var deferredValueInvoked = false;
scope.AddReportable("MyKey", () =>
{
deferredValueInvoked = true;
return "MyValue";
});
AssertionScope.Current.FailWith("{MyKey}");
// Act
Action act = scope.Dispose;
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("*MyValue*");
deferredValueInvoked.Should().BeTrue();
}
[Fact]
public void When_an_expectation_is_defined_it_should_be_preceding_the_failure_message()
{
// Act
Action act = () => Execute.Assertion
.WithExpectation("Expectations are the root ")
.ForCondition(false)
.FailWith("of disappointment");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expectations are the root of disappointment");
}
[Fact]
public void When_an_expectation_with_arguments_is_defined_it_should_be_preceding_the_failure_message()
{
// Act
Action act = () => Execute.Assertion
.WithExpectation("Expectations are the {0} ", "root")
.ForCondition(false)
.FailWith("of disappointment");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expectations are the \"root\" of disappointment");
}
[Fact]
public void When_no_identifier_can_be_resolved_replace_context_with_object()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(false)
.FailWith("Expected {context}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected object");
}
[Fact]
public void When_no_identifier_can_be_resolved_replace_context_with_inline_declared_fallback_identifier()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(false)
.FailWith("Expected {context:fallback}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected fallback");
}
[Fact]
public void When_no_identifier_can_be_resolved_replace_context_with_defined_default_identifier()
{
// Act
Action act = () => Execute.Assertion
.WithDefaultIdentifier("identifier")
.ForCondition(false)
.FailWith("Expected {context}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected identifier");
}
[Fact]
public void The_failure_message_should_contain_the_reason()
{
// Act
Action act = () => Execute.Assertion
.BecauseOf("because reasons")
.FailWith("Expected{reason}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because reasons");
}
[Fact]
public void The_failure_message_should_contain_the_reason_with_arguments()
{
// Act
Action act = () => Execute.Assertion
.BecauseOf("because {0}", "reasons")
.FailWith("Expected{reason}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because reasons");
}
#endregion
#region Chaining API
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_next_one()
{
bool succeeded = false;
// Act
try
{
Execute.Assertion
.ForCondition(condition: true)
.FailWith("First assertion")
.Then
.FailWith("Second assertion");
}
catch (Exception e)
{
// Assert
succeeded = (e is XunitException xUnitException) && xUnitException.Message.Contains("Second");
}
if (!succeeded)
{
throw new XunitException("Expected the second assertion to fail");
}
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_next_one_with_arguments()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.FailWith("Second {0}", "assertion");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Second \"assertion\"");
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_next_one_with_argument_providers()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.FailWith("Second {0}", () => "assertion");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Second \"assertion\"");
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_next_one_with_a_fail_reason_function()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.FailWith(() => new FailReason("Second {0}", "assertion"));
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Second \"assertion\"");
}
[Fact]
public void When_continuing_an_assertion_chain_the_reason_should_be_part_of_consecutive_failures()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.BecauseOf("because reasons")
.FailWith("Expected{reason}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because reasons");
}
[Fact]
public void When_continuing_an_assertion_chain_the_reason_with_arguments_should_be_part_of_consecutive_failures()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.BecauseOf("because {0}", "reasons")
.FailWith("Expected{reason}");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because reasons");
}
[Fact]
public void When_a_given_is_used_before_an_assertion_then_the_result_should_be_available_for_evaluation()
{
// Act
Action act = () => Execute.Assertion
.Given(() => new[] { "a", "b" })
.ForCondition(collection => collection.Length > 0)
.FailWith("First assertion");
// Assert
act.Should().NotThrow();
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_evaluate_the_succeeding_given_statement()
{
// Arrange
using var _ = new AssertionScope(new IgnoringFailuresAssertionStrategy());
// Act / Assert
Execute.Assertion
.ForCondition(false)
.FailWith("First assertion")
.Then
.Given<object>(() => throw new InvalidOperationException());
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_evaluate_the_succeeding_condition()
{
// Arrange
bool secondConditionEvaluated = false;
try
{
using var _ = new AssertionScope();
// Act
Execute.Assertion
.Given(() => (string)null)
.ForCondition(s => s is not null)
.FailWith("but is was null")
.Then
.ForCondition(s => secondConditionEvaluated = true)
.FailWith("it should be 42");
}
catch
{
// Ignore
}
// Assert
secondConditionEvaluated.Should().BeFalse("because the 2nd condition should not be invoked");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_failure()
{
// Arrange
using var scope = new AssertionScope();
// Act
Execute.Assertion
.ForCondition(false)
.FailWith("First assertion")
.Then
.ForCondition(false)
.FailWith("Second assertion");
string[] failures = scope.Discard();
scope.Dispose();
Assert.Single(failures);
Assert.Contains("First assertion", failures);
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_failure_with_arguments()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.ForCondition(false)
.FailWith("First assertion")
.Then
.FailWith("Second {0}", "assertion");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("First assertion");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_failure_with_argument_providers()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.ForCondition(false)
.FailWith("First assertion")
.Then
.FailWith("Second {0}", () => "assertion");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("First assertion");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_failure_with_a_fail_reason_function()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.ForCondition(false)
.FailWith("First assertion")
.Then
.FailWith(() => new FailReason("Second {0}", "assertion"));
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("First assertion");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_expectation()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.WithExpectation("Expectations are the root ")
.ForCondition(false)
.FailWith("of disappointment")
.Then
.WithExpectation("Assumptions are the root ")
.FailWith("of all evil");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expectations are the root of disappointment");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_expectation_with_arguments()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.WithExpectation("Expectations are the {0} ", "root")
.ForCondition(false)
.FailWith("of disappointment")
.Then
.WithExpectation("Assumptions are the {0} ", "root")
.FailWith("of all evil");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expectations are the \"root\" of disappointment");
}
[Fact]
public void When_the_previous_assertion_failed_it_should_not_execute_the_succeeding_default_identifier()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.WithDefaultIdentifier("identifier")
.ForCondition(false)
.FailWith("Expected {context}")
.Then
.WithDefaultIdentifier("other")
.FailWith("Expected {context}");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected identifier");
}
[Fact]
public void When_continuing_a_failed_assertion_chain_consecutive_reasons_are_ignored()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.BecauseOf("because {0}", "whatever")
.ForCondition(false)
.FailWith("Expected{reason}")
.Then
.BecauseOf("because reasons")
.FailWith("Expected{reason}");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because whatever");
}
[Fact]
public void When_continuing_a_failed_assertion_chain_consecutive_reasons_with_arguments_are_ignored()
{
// Act
Action act = () =>
{
using var _ = new AssertionScope();
Execute.Assertion
.BecauseOf("because {0}", "whatever")
.ForCondition(false)
.FailWith("Expected{reason}")
.Then
.BecauseOf("because {0}", "reasons")
.FailWith("Expected{reason}");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected because whatever");
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_evaluate_the_succeeding_given_statement()
{
// Act
Action act = () => Execute.Assertion
.ForCondition(true)
.FailWith("First assertion")
.Then
.Given<object>(() => throw new InvalidOperationException());
// Assert
Assert.Throws<InvalidOperationException>(act);
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_succeeding_expectation()
{
// Act
Action act = () => Execute.Assertion
.WithExpectation("Expectations are the root ")
.ForCondition(true)
.FailWith("of disappointment")
.Then
.WithExpectation("Assumptions are the root ")
.FailWith("of all evil");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Assumptions are the root of all evil");
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_succeeding_expectation_with_arguments()
{
// Act
Action act = () => Execute.Assertion
.WithExpectation("Expectations are the {0} ", "root")
.ForCondition(true)
.FailWith("of disappointment")
.Then
.WithExpectation("Assumptions are the {0} ", "root")
.FailWith("of all evil");
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Assumptions are the \"root\" of all evil");
}
[Fact]
public void When_the_previous_assertion_succeeded_it_should_not_affect_the_succeeding_default_identifier()
{
// Act
Action act = () =>
{
Execute.Assertion
.WithDefaultIdentifier("identifier")
.ForCondition(true)
.FailWith("Expected {context}")
.Then
.WithDefaultIdentifier("other")
.FailWith("Expected {context}");
};
// Assert
act.Should().Throw<XunitException>()
.WithMessage("Expected other");
}
#endregion
}
}
#pragma warning disable RCS1110, CA1050 // Declare type inside namespace.
public class AssertionScopeSpecsWithoutNamespace
#pragma warning restore RCS1110, CA1050 // Declare type inside namespace.
{
[Fact]
public void This_class_should_not_be_inside_a_namespace()
{
// Arrange
Type type = typeof(AssertionScopeSpecsWithoutNamespace);
// Act / Assert
type.Assembly.Should().DefineType(null, type.Name, "this class should not be inside a namespace");
}
[Fact]
public void When_the_test_method_is_not_inside_a_namespace_it_should_not_throw_a_NullReferenceException()
{
// Act
Action act = () => 1.Should().Be(2, "we don't want a NullReferenceException");
// Assert
act.Should().ThrowExactly<XunitException>()
.WithMessage("*we don't want a NullReferenceException*");
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Threading;
using Pk2 = PICkit2V2.PICkitFunctions;
using KONST = PICkit2V2.Constants;
using UTIL = PICkit2V2.Utilities;
using System.IO;
namespace PICkit2V2
{
public partial class DialogUART : Form
{
public DelegateVddCallback VddCallback;
public static string CustomBaud = "";
private struct baudTable
{
public string baudRate;
public uint baudValue;
}
private baudTable[] baudList;
StreamWriter logFile = null;
private bool newRX = true;
private int hex1Length = 0;
private int hex2Length = 0;
private int hex3Length = 0;
private int hex4Length = 0;
public DialogUART()
{
InitializeComponent();
this.KeyPress += new KeyPressEventHandler(OnKeyPress);
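// Lookup table of baud values sent to the PICkit 2 for the standard rates. Judging by the
// custom-rate formula in radioButtonConnect_Click_1, each value is 0x10000 minus the bit
// period (less ~3 us of overhead) expressed in 166.67 ns ticks, e.g. 9600 baud -> 0xFDA1.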
baudList = new baudTable[7];
baudList[0].baudRate = "300";
baudList[0].baudValue = 0xB1F2;
baudList[1].baudRate = "1200";
baudList[1].baudValue = 0xEC8A;
baudList[2].baudRate = "2400";
baudList[2].baudValue = 0xF64E;
baudList[3].baudRate = "4800";
baudList[3].baudValue = 0xFB30;
baudList[4].baudRate = "9600";
baudList[4].baudValue = 0xFDA1;
baudList[5].baudRate = "19200";
baudList[5].baudValue = 0xFEDA;
baudList[6].baudRate = "38400";
baudList[6].baudValue = 0xFF76;
buildBaudList();
}
public string GetBaudRate()
{
return comboBoxBaud.SelectedItem.ToString();
}
public bool IsHexMode()
{
return radioButtonHex.Checked;
}
public string GetStringMacro(int macroNum)
{
if (macroNum == 2)
{
return textBoxString2.Text;
}
else if (macroNum == 3)
{
return textBoxString3.Text;
}
else if (macroNum == 4)
{
return textBoxString4.Text;
}
else
{
return textBoxString1.Text;
}
}
public bool GetAppendCRLF()
{
return checkBoxCRLF.Checked;
}
public bool GetWrap()
{
return checkBoxWrap.Checked;
}
public bool GetEcho()
{
return checkBoxEcho.Checked;
}
public void SetBaudRate(string baudRate)
{
for (int i = 0; i < baudList.Length; i++)
{
if (baudRate == comboBoxBaud.Items[i].ToString())
{
comboBoxBaud.SelectedIndex = i;
break;
}
if ((i + 1) == baudList.Length)
{// didn't find it- must be custom
comboBoxBaud.Items.Add(baudRate);
comboBoxBaud.SelectedIndex = comboBoxBaud.Items.Count - 1;
}
}
}
public void SetStringMacro(string macro, int macroNum)
{
if (macroNum == 2)
{
textBoxString2.Text = macro;
hex2Length = macro.Length;
}
else if (macroNum == 3)
{
textBoxString3.Text = macro;
hex3Length = macro.Length;
}
else if (macroNum == 4)
{
textBoxString4.Text = macro;
hex4Length = macro.Length;
}
else
{
textBoxString1.Text = macro;
hex1Length = macro.Length;
}
}
public void SetModeHex()
{
radioButtonHex.Checked = true;
}
public void ClearAppendCRLF()
{
checkBoxCRLF.Checked = false;
}
public void ClearWrap()
{
checkBoxWrap.Checked = false;
}
public void ClearEcho()
{
checkBoxEcho.Checked = false;
}
public void SetVddBox(bool enable, bool check)
{
checkBoxVDD.Enabled = enable;
checkBoxVDD.Checked = check;
}
private const string CUSTOM_BAUD = "Custom...";
private void buildBaudList()
{
for (int i = 0; i < baudList.Length; i++)
{
comboBoxBaud.Items.Add(baudList[i].baudRate);
}
comboBoxBaud.Items.Add(CUSTOM_BAUD);
comboBoxBaud.SelectedIndex = 0;
}
private void buttonExit_Click(object sender, EventArgs e)
{
this.Close();
}
private void DialogUART_FormClosing(object sender, FormClosingEventArgs e)
{
if (logFile != null)
{
closeLogFile();
}
timerPollForData.Enabled = false;
Pk2.ExitUARTMode();
radioButtonConnect.Checked = false;
radioButtonDisconnect.Checked = true;
comboBoxBaud.Enabled = true;
buttonString1.Enabled = false;
buttonString2.Enabled = false;
buttonString3.Enabled = false;
buttonString4.Enabled = false;
panelVdd.Enabled = true; // no VDD changes when connected
}
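// Global key handler: typing in the macro textboxes is ignored, Ctrl+C/Ctrl+X copy from the
// display textbox, and everything else is forwarded to the target while connected - as hex
// nibbles in hex mode, or as ASCII (with Ctrl+V paste support) otherwise.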
public void OnKeyPress(object sender, KeyPressEventArgs e)
{
string hexChars = "0123456789ABCDEF";
if (textBoxString1.ContainsFocus || textBoxString2.ContainsFocus
|| textBoxString3.ContainsFocus || textBoxString4.ContainsFocus)
{ // ignore typing in textboxes
return;
}
// check for copy/cut
if ((e.KeyChar == 3) || (e.KeyChar == 24))
{
textBoxDisplay.Copy();
return;
}
if (radioButtonDisconnect.Checked)
{ // don't do anything else if not connected
return;
}
textBoxDisplay.Focus();
if (radioButtonHex.Checked)
{ // hex mode
string charTyped = e.KeyChar.ToString(); // get typed char
charTyped = charTyped.ToUpper();
if (charTyped.IndexOfAny(hexChars.ToCharArray()) == 0)
{ // valid Hex character
if (labelTypeHex.Visible)
{ // first nibble already typed - send byte
string dataString = labelTypeHex.Text.Substring(11,1) + charTyped;
labelTypeHex.Text = "Type Hex : ";
labelTypeHex.Visible = false;
byte[] hexByte = new byte[1];
hexByte[0] = (byte)Utilities.Convert_Value_To_Int("0x" + dataString);
dataString = "TX: " + dataString + "\r\n";
textBoxDisplay.AppendText(dataString);
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length;
textBoxDisplay.ScrollToCaret();
if (logFile != null)
{
logFile.Write(dataString);
}
Pk2.DataDownload(hexByte, 0, hexByte.Length);
}
else
{ // show first nibble
labelTypeHex.Text = "Type Hex : " + charTyped + "_";
labelTypeHex.Visible = true;
}
}
else
{ // other char - clear typed hex
labelTypeHex.Text = "Type Hex : ";
labelTypeHex.Visible = false;
}
}
else
{ // ASCII mode
// check for paste
if (e.KeyChar == 22)
{
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length; //cursor at end
TextBox tempBox = new TextBox();
tempBox.Multiline = true;
tempBox.Paste();
do
{
int pasteLength = tempBox.Text.Length;
if (pasteLength > 60)
{
pasteLength = 60;
}
sendString(tempBox.Text.Substring(0, pasteLength), false);
tempBox.Text = tempBox.Text.Substring(pasteLength);
// wait according to the baud rate so we don't overflow the download buffer
float baud = float.Parse((comboBoxBaud.SelectedItem.ToString()));
baud = (1F / baud) * 12F * (float)pasteLength; // to ensure we don't overflow, give each byte 12 bits
baud *= 1000F; // baud is now in ms.
Thread.Sleep((int)baud);
} while (tempBox.Text.Length > 0);
tempBox.Dispose();
return;
}
string charTyped = e.KeyChar.ToString();
if (charTyped == "\r")
{
charTyped = "\r\n";
}
sendString(charTyped, false);
}
}
private void radioButtonConnect_Click_1(object sender, EventArgs e)
{
if (!radioButtonConnect.Checked)
{
if (comboBoxBaud.SelectedIndex == 0)
{
MessageBox.Show("Please Select a Baud Rate.");
return;
}
uint baudValue = 0;
for (int i = 0; i < baudList.Length; i++)
{
if (comboBoxBaud.SelectedItem.ToString() == baudList[i].baudRate)
{
baudValue = baudList[i].baudValue;
break;
}
if ((i + 1) == baudList.Length)
{// didn't find it- must be custom
try
{
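// Convert the custom rate to a PICkit 2 baud value: bit period minus ~3 us of overhead,
// expressed in 166.67 ns ticks, subtracted from 0x10000 (same scheme as the table above).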
float baudRate = float.Parse(comboBoxBaud.SelectedItem.ToString());
baudRate = ((1F / baudRate) - 3e-6F) / 1.6667e-7F;
baudValue = 0x10000 - (uint)baudRate;
}
catch
{
MessageBox.Show("Error with Baud setting.");
return;
}
}
}
panelVdd.Enabled = false; // no VDD changes when connected
Pk2.EnterUARTMode(baudValue);
radioButtonConnect.Checked = true;
radioButtonDisconnect.Checked = false;
buttonString1.Enabled = true;
buttonString2.Enabled = true;
buttonString3.Enabled = true;
buttonString4.Enabled = true;
comboBoxBaud.Enabled = false; // can't change value when connected.
if (baudValue < 0xEC8A)
{// below 1200 baud: use slower polling
timerPollForData.Interval = 75;
}
else
{ // faster
timerPollForData.Interval = 15;
}
timerPollForData.Enabled = true;
}
}
private void radioButtonDisconnect_Click(object sender, EventArgs e)
{
if (!radioButtonDisconnect.Checked)
{
radioButtonConnect.Checked = false;
radioButtonDisconnect.Checked = true;
Pk2.ExitUARTMode();
comboBoxBaud.Enabled = true;
timerPollForData.Enabled = false;
buttonString1.Enabled = false;
buttonString2.Enabled = false;
buttonString3.Enabled = false;
buttonString4.Enabled = false;
panelVdd.Enabled = true; // no VDD changes when connected
// clear partial hex typing
labelTypeHex.Text = "Type Hex : ";
labelTypeHex.Visible = false;
}
}
private void buttonClearScreen_Click(object sender, EventArgs e)
{
textBoxDisplay.Text = "";
}
private void timerPollForData_Tick(object sender, EventArgs e)
{
Pk2.UploadData();
if (Pk2.Usb_read_array[1] > 0)
{
string newData = "";
if (radioButtonASCII.Checked)
{
newData = Encoding.ASCII.GetString(Pk2.Usb_read_array, 2, Pk2.Usb_read_array[1]);
}
else
{ // hex mode
if (newRX)
{
newData = "RX: ";
newRX = false;
}
for (int b = 0; b < Pk2.Usb_read_array[1]; b++)
{
newData += string.Format("{0:X2} ", Pk2.Usb_read_array[b + 2]);
}
}
if (logFile != null)
{
logFile.Write(newData);
}
textBoxDisplay.AppendText(newData);
while (textBoxDisplay.Text.Length > 16400)
{// about 200 lines
// delete a line
int endOfLine = textBoxDisplay.Text.IndexOf("\r\n") + 2;
if (endOfLine == 1)
{// no line found
endOfLine = textBoxDisplay.Text.Length - 16000; // delete several hundred chars
}
textBoxDisplay.Text = textBoxDisplay.Text.Substring(endOfLine);
}
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length;
textBoxDisplay.ScrollToCaret();
}
else
{
if (!newRX && radioButtonHex.Checked)
{
textBoxDisplay.AppendText("\r\n");
if (logFile != null)
{
logFile.Write("\r\n");
}
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length;
textBoxDisplay.ScrollToCaret();
}
newRX = true;
}
}
private int getLastLineLength(string text)
{
int lastLine = text.LastIndexOf("\r\n") + 2;
if (lastLine < 2)
{
lastLine = 0;
}
return (text.Length - lastLine);
}
private const int MaxLengthASCII = 60;
private void textBoxString1_TextChanged(object sender, EventArgs e)
{
if ((textBoxString1.Text.Length > MaxLengthASCII) && radioButtonASCII.Checked)
{
textBoxString1.Text = textBoxString1.Text.Substring(0, MaxLengthASCII);
textBoxString1.SelectionStart = MaxLengthASCII;
}
if (radioButtonHex.Checked)
{
formatHexString(textBoxString1, ref hex1Length);
}
}
private void textBoxString2_TextChanged(object sender, EventArgs e)
{
if ((textBoxString2.Text.Length > MaxLengthASCII) && radioButtonASCII.Checked)
{
textBoxString2.Text = textBoxString2.Text.Substring(0, MaxLengthASCII);
textBoxString2.SelectionStart = MaxLengthASCII;
}
if (radioButtonHex.Checked)
{
formatHexString(textBoxString2, ref hex2Length);
}
}
private void textBoxString3_TextChanged(object sender, EventArgs e)
{
if ((textBoxString3.Text.Length > MaxLengthASCII) && radioButtonASCII.Checked)
{
textBoxString3.Text = textBoxString3.Text.Substring(0, MaxLengthASCII);
textBoxString3.SelectionStart = MaxLengthASCII;
}
if (radioButtonHex.Checked)
{
formatHexString(textBoxString3, ref hex3Length);
}
}
private void textBoxString4_TextChanged(object sender, EventArgs e)
{
if ((textBoxString4.Text.Length > MaxLengthASCII) && radioButtonASCII.Checked)
{
textBoxString4.Text = textBoxString4.Text.Substring(0, MaxLengthASCII);
textBoxString4.SelectionStart = MaxLengthASCII;
}
if (radioButtonHex.Checked)
{
formatHexString(textBoxString4, ref hex4Length);
}
}
private const int MaxHexLength = 143; // 48 bytes
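// Normalizes a macro textbox for hex mode: uppercases, strips spaces, substitutes '0' for any
// non-hex character, re-inserts a space after every byte, clips to 48 bytes, and tries to keep
// the caret in a sensible position. priorLength remembers the previous text length per textbox.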
private void formatHexString(TextBox textBoxToFormat, ref int priorLength)
{
string workString = textBoxToFormat.Text.ToUpper();
workString = workString.Replace(" ", "");
string spacedString = "";
for (int i = 0; i < workString.Length; i++)
{
if (!char.IsNumber(workString, i) && (workString[i] != 'A') && (workString[i] != 'B')
&& (workString[i] != 'C') && (workString[i] != 'D') && (workString[i] != 'E') && (workString[i] != 'F'))
{ // non hex character
spacedString += '0';
}
else
{
spacedString += workString[i];
}
if (((i + 1) % 2) == 0)
{
spacedString += " ";
}
}
if (spacedString.Length > MaxHexLength)
{
spacedString = spacedString.Substring(0, MaxHexLength);
}
int selectSave = textBoxToFormat.SelectionStart;
if ((selectSave > 0) && (selectSave <= spacedString.Length) && (selectSave < textBoxToFormat.Text.Length)
&& (textBoxToFormat.Text[selectSave] == ' ') && (spacedString[selectSave -1] == ' '))
{
selectSave ++;
}
else if ((selectSave >= textBoxToFormat.Text.Length) && (priorLength < textBoxToFormat.Text.Length))
{
selectSave = spacedString.Length;
}
textBoxToFormat.Text = spacedString;
textBoxToFormat.SelectionStart = selectSave;
priorLength = textBoxToFormat.Text.Length;
}
private void buttonString1_Click(object sender, EventArgs e)
{
sendString(textBoxString1.Text, checkBoxCRLF.Checked);
}
private void buttonString2_Click(object sender, EventArgs e)
{
sendString(textBoxString2.Text, checkBoxCRLF.Checked);
}
private void buttonString3_Click(object sender, EventArgs e)
{
sendString(textBoxString3.Text, checkBoxCRLF.Checked);
}
private void buttonString4_Click(object sender, EventArgs e)
{
sendString(textBoxString4.Text, checkBoxCRLF.Checked);
}
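// Sends a macro or typed text. In ASCII mode the string (optionally with CRLF appended) is
// converted to ASCII bytes; in hex mode each space-separated pair is parsed as one byte.
// The data is echoed/logged as configured and pushed to the PICkit 2 download buffer.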
private void sendString(string dataString, bool appendCRLF)
{
if (dataString.Length == 0)
{
return;
}
if (radioButtonASCII.Checked)
{
if (appendCRLF)
{
dataString += "\r\n";
}
if (checkBoxEcho.Checked)
{
textBoxDisplay.AppendText(dataString);
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length;
textBoxDisplay.ScrollToCaret();
}
if (logFile != null)
{
logFile.Write(dataString);
}
byte[] unicodeBytes = Encoding.Unicode.GetBytes(dataString);
byte[] asciiBytes = Encoding.Convert(Encoding.Unicode, Encoding.ASCII, unicodeBytes);
Pk2.DataDownload(asciiBytes, 0, asciiBytes.Length);
}
else
{// hex data
int numBytes = 0;
if (dataString.Length > (MaxHexLength - 1))
{
numBytes = ((MaxHexLength + 1) / 3);
}
else
{
numBytes = dataString.Length / 3;
dataString = dataString.Substring(0, (numBytes * 3));
}
byte[] hexBytes = new byte[numBytes];
for (int i = 0; i < numBytes; i++)
{
hexBytes[i] = (byte)Utilities.Convert_Value_To_Int("0x" + dataString.Substring((3 * i), 2));
}
dataString = "TX: " + dataString + "\r\n";
textBoxDisplay.AppendText(dataString);
textBoxDisplay.SelectionStart = textBoxDisplay.Text.Length;
textBoxDisplay.ScrollToCaret();
if (logFile != null)
{
logFile.Write(dataString);
}
Pk2.DataDownload(hexBytes, 0, hexBytes.Length);
}
}
private void buttonLog_Click(object sender, EventArgs e)
{
if (logFile == null)
{
saveFileDialogLogFile.ShowDialog();
}
else
{
closeLogFile();
}
}
private void closeLogFile()
{
logFile.Close();
logFile = null;
buttonLog.Text = "Log to File";
buttonLog.BackColor = System.Drawing.SystemColors.ControlLight;
}
private void saveFileDialogLogFile_FileOk(object sender, CancelEventArgs e)
{
logFile = new StreamWriter(saveFileDialogLogFile.FileName);
buttonLog.Text = "Logging Data...";
buttonLog.BackColor = Color.Green;
}
private void radioButtonASCII_CheckedChanged(object sender, EventArgs e)
{
if (radioButtonASCII.Checked)
{
checkBoxCRLF.Visible = true;
checkBoxEcho.Enabled = true;
labelTypeHex.Visible = false;
labelTypeHex.Text = "Type Hex : ";
labelMacros.Text = "String Macros:";
textBoxString1.Text = convertHexSequenceToStringMacro(textBoxString1.Text);
textBoxString2.Text = convertHexSequenceToStringMacro(textBoxString2.Text);
textBoxString3.Text = convertHexSequenceToStringMacro(textBoxString3.Text);
textBoxString4.Text = convertHexSequenceToStringMacro(textBoxString4.Text);
if ((textBoxDisplay.Text.Length > 0) && (textBoxDisplay.Text[textBoxDisplay.Text.Length -1] != '\n'))
{
textBoxDisplay.AppendText("\r\n");
}
}
}
private void radioButtonHex_CheckedChanged(object sender, EventArgs e)
{
if (radioButtonHex.Checked)
{
checkBoxCRLF.Visible = false;
checkBoxEcho.Enabled = false;
labelTypeHex.Text = "Type Hex : ";
labelTypeHex.Visible = false;
labelMacros.Text = "Send Hex Sequences:";
textBoxString1.Text = convertStringMacroToHexSequence(textBoxString1.Text);
textBoxString2.Text = convertStringMacroToHexSequence(textBoxString2.Text);
textBoxString3.Text = convertStringMacroToHexSequence(textBoxString3.Text);
textBoxString4.Text = convertStringMacroToHexSequence(textBoxString4.Text);
if ((textBoxDisplay.Text.Length > 0) && (textBoxDisplay.Text[textBoxDisplay.Text.Length - 1] != '\n'))
{
textBoxDisplay.AppendText("\r\n");
}
}
}
private string convertHexSequenceToStringMacro(string hexSeq)
{
int numBytes = 0;
if (hexSeq.Length > (MaxHexLength -1))
{
numBytes = ((MaxHexLength+1) / 3);
}
else
{
numBytes = hexSeq.Length / 3;
}
byte[] hexBytes = new byte[numBytes];
for (int i = 0; i < numBytes; i++)
{
hexBytes[i] = (byte)Utilities.Convert_Value_To_Int("0x" + hexSeq.Substring((3 * i), 2));
}
return Encoding.ASCII.GetString(hexBytes, 0, hexBytes.Length);
}
private string convertStringMacroToHexSequence(string stringMacro)
{
if (stringMacro.Length > ((MaxHexLength + 1) / 3))
{
stringMacro = stringMacro.Substring(0, ((MaxHexLength + 1) / 3));
}
byte[] unicodeBytes = Encoding.Unicode.GetBytes(stringMacro);
byte[] asciiBytes = Encoding.Convert(Encoding.Unicode, Encoding.ASCII, unicodeBytes);
string hexSeq = "";
for (int i = 0; i < asciiBytes.Length; i++)
{
hexSeq += string.Format("{0:X2} ", asciiBytes[i]);
}
return hexSeq;
}
private void checkBoxWrap_CheckedChanged(object sender, EventArgs e)
{
textBoxDisplay.WordWrap = checkBoxWrap.Checked;
}
private void comboBoxBaud_SelectedIndexChanged(object sender, EventArgs e)
{
if (comboBoxBaud.SelectedItem.ToString() == CUSTOM_BAUD)
{
DialogCustomBaud baudDialog = new DialogCustomBaud();
baudDialog.ShowDialog();
if (CustomBaud == "")
{
comboBoxBaud.SelectedIndex = 0;
}
else
{
if (comboBoxBaud.Items.Count != (comboBoxBaud.SelectedIndex + 1))
{// currently another custom value.
comboBoxBaud.Items.RemoveAt(comboBoxBaud.SelectedIndex + 1);
}
comboBoxBaud.Items.Add(CustomBaud);
comboBoxBaud.SelectedIndex += 1;
}
}
}
private void pictureBoxHelp_Click(object sender, EventArgs e)
{
try
{
System.Diagnostics.Process.Start(FormPICkit2.HomeDirectory + KONST.UserGuideFileName);
}
catch
{
MessageBox.Show("Unable to open User's Guide.");
}
}
private void checkBoxVDD_Click(object sender, EventArgs e)
{
VddCallback(true, checkBoxVDD.Checked);
}
private void textBoxDisplay_Leave(object sender, EventArgs e)
{ // if the user clicks on something else, clear any pending type hex
labelTypeHex.Visible = false;
labelTypeHex.Text = "Type Hex : ";
}
}
}
| |
#define NET_2_0
/*
* $Id: IrcConnection.cs 292 2009-03-08 14:23:22Z meebey $
* $URL: svn://svn.qnetp.net/smartirc/SmartIrc4net/trunk/src/IrcConnection/IrcConnection.cs $
* $Rev: 292 $
* $Author: meebey $
* $Date: 2009-03-08 10:23:22 -0400 (Sun, 08 Mar 2009) $
*
* SmartIrc4net - the IRC library for .NET/C# <http://smartirc4net.sf.net>
*
* Copyright (c) 2003-2005 Mirco Bauer <[email protected]> <http://www.meebey.net>
*
* Full LGPL License: <http://www.gnu.org/licenses/lgpl.txt>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
using System;
using System.IO;
using System.Text;
using System.Collections;
using System.Threading;
using System.Reflection;
using System.Net.Sockets;
#if NET_2_0
using System.Net.Security;
#endif
namespace Meebey.SmartIrc4net
{
/// <summary>
/// Manages the low-level connection to an IRC server: the TCP socket, the read/write and
/// idle worker threads, the prioritized send queues and PING/PONG bookkeeping.
/// </summary>
/// <threadsafety static="true" instance="true" />
public class IrcConnection
{
private string _VersionNumber;
private string _VersionString;
private string[] _AddressList = {"localhost"};
private int _CurrentAddress;
private int _Port;
#if NET_2_0
private bool _UseSsl;
#endif
private StreamReader _Reader;
private StreamWriter _Writer;
private ReadThread _ReadThread;
private WriteThread _WriteThread;
private IdleWorkerThread _IdleWorkerThread;
private IrcTcpClient _TcpClient;
private Hashtable _SendBuffer = Hashtable.Synchronized(new Hashtable());
private int _SendDelay = 200;
private bool _IsRegistered;
private bool _IsConnected;
private bool _IsConnectionError;
private bool _IsDisconnecting;
private int _ConnectTries;
private bool _AutoRetry;
private int _AutoRetryDelay = 30;
private bool _AutoReconnect;
private Encoding _Encoding = Encoding.Default;
private int _SocketReceiveTimeout = 600;
private int _SocketSendTimeout = 600;
private int _IdleWorkerInterval = 60;
private int _PingInterval = 60;
private int _PingTimeout = 300;
private DateTime _LastPingSent;
private DateTime _LastPongReceived;
private TimeSpan _Lag;
/// <event cref="OnReadLine">
/// Raised when a \r\n terminated line is read from the socket
/// </event>
public event ReadLineEventHandler OnReadLine;
/// <event cref="OnWriteLine">
/// Raised when a \r\n terminated line is written to the socket
/// </event>
public event WriteLineEventHandler OnWriteLine;
/// <event cref="OnConnecting">
/// Raised before the connect attempt
/// </event>
public event EventHandler OnConnecting;
/// <event cref="OnConnected">
/// Raised on successful connect
/// </event>
public event EventHandler OnConnected;
/// <event cref="OnDisconnecting">
/// Raised before the connection is closed
/// </event>
public event EventHandler OnDisconnecting;
/// <event cref="OnDisconnected">
/// Raised when the connection is closed
/// </event>
public event EventHandler OnDisconnected;
/// <event cref="OnConnectionError">
/// Raised when the connection got into an error state
/// </event>
public event EventHandler OnConnectionError;
/// <event cref="OnAutoConnectError">
/// Raised when the connection got into an error state during auto connect loop
/// </event>
public event AutoConnectErrorEventHandler OnAutoConnectError;
/// <summary>
/// When a connection error is detected this property will return true
/// </summary>
protected bool IsConnectionError {
get {
lock (this) {
return _IsConnectionError;
}
}
set {
lock (this) {
_IsConnectionError = value;
}
}
}
protected bool IsDisconnecting {
get {
lock (this) {
return _IsDisconnecting;
}
}
set {
lock (this) {
_IsDisconnecting = value;
}
}
}
/// <summary>
/// Gets the current address of the connection
/// </summary>
public string Address {
get {
return _AddressList[_CurrentAddress];
}
}
/// <summary>
/// Gets the address list of the connection
/// </summary>
public string[] AddressList {
get {
return _AddressList;
}
}
/// <summary>
/// Gets the used port of the connection
/// </summary>
public int Port {
get {
return _Port;
}
}
/// <summary>
/// By default nothing is done when the library loses the connection
/// to the server.
/// Default: false
/// </summary>
/// <value>
/// true, if the library should reconnect on lost connections
/// false, if the library should not take care of it
/// </value>
public bool AutoReconnect {
get {
return _AutoReconnect;
}
set {
#if LOG4NET
if (value) {
Logger.Connection.Info("AutoReconnect enabled");
} else {
Logger.Connection.Info("AutoReconnect disabled");
}
#endif
_AutoReconnect = value;
}
}
/// <summary>
/// If the library should retry to connect when the connection fails.
/// Default: false
/// </summary>
/// <value>
/// true, if the library should retry to connect
/// false, if the library should not retry
/// </value>
public bool AutoRetry {
get {
return _AutoRetry;
}
set {
#if LOG4NET
if (value) {
Logger.Connection.Info("AutoRetry enabled");
} else {
Logger.Connection.Info("AutoRetry disabled");
}
#endif
_AutoRetry = value;
}
}
/// <summary>
/// Delay between retry attempts in Connect() in seconds.
/// Default: 30
/// </summary>
public int AutoRetryDelay {
get {
return _AutoRetryDelay;
}
set {
_AutoRetryDelay = value;
}
}
/// <summary>
/// Delay, in milliseconds, between sending messages to the IRC server,
/// used to avoid flooding it.
/// Default: 200
/// </summary>
public int SendDelay {
get {
return _SendDelay;
}
set {
_SendDelay = value;
}
}
/// <summary>
/// On successful registration on the IRC network, this is set to true.
/// </summary>
public bool IsRegistered {
get {
return _IsRegistered;
}
}
/// <summary>
/// On successful connect to the IRC server, this is set to true.
/// </summary>
public bool IsConnected {
get {
return _IsConnected;
}
}
/// <summary>
/// Gets the SmartIrc4net version number
/// </summary>
public string VersionNumber {
get {
return _VersionNumber;
}
}
/// <summary>
/// Gets the full SmartIrc4net version string
/// </summary>
public string VersionString {
get {
return _VersionString;
}
}
/// <summary>
/// Encoding which is used for reading and writing to the socket
/// Default: encoding of the system
/// </summary>
public Encoding Encoding {
get {
return _Encoding;
}
set {
_Encoding = value;
}
}
#if NET_2_0
/// <summary>
/// Enables/disables using SSL for the connection
/// Default: false
/// </summary>
public bool UseSsl {
get {
return _UseSsl;
}
set {
_UseSsl = value;
}
}
#endif
/// <summary>
/// Timeout in seconds for receiving data from the socket
/// Default: 600
/// </summary>
public int SocketReceiveTimeout {
get {
return _SocketReceiveTimeout;
}
set {
_SocketReceiveTimeout = value;
}
}
/// <summary>
/// Timeout in seconds for sending data to the socket
/// Default: 600
/// </summary>
public int SocketSendTimeout {
get {
return _SocketSendTimeout;
}
set {
_SocketSendTimeout = value;
}
}
/// <summary>
/// Interval in seconds to run the idle worker
/// Default: 60
/// </summary>
public int IdleWorkerInterval {
get {
return _IdleWorkerInterval;
}
set {
_IdleWorkerInterval = value;
}
}
/// <summary>
/// Interval in seconds to send a PING
/// Default: 60
/// </summary>
public int PingInterval {
get {
return _PingInterval;
}
set {
_PingInterval = value;
}
}
/// <summary>
/// Timeout in seconds for server response to a PING
/// Default: 600
/// </summary>
public int PingTimeout {
get {
return _PingTimeout;
}
set {
_PingTimeout = value;
}
}
/// <summary>
/// Latency between client and the server
/// </summary>
public TimeSpan Lag {
get {
if (_LastPingSent > _LastPongReceived) {
// there is an outstanding ping, thus we don't have a current lag value
return DateTime.Now - _LastPingSent;
}
return _Lag;
}
}
/// <summary>
/// Initializes the message queues, read and write thread
/// </summary>
public IrcConnection()
{
#if LOG4NET
Logger.Init();
Logger.Main.Debug("IrcConnection created");
#endif
_SendBuffer[Priority.High] = Queue.Synchronized(new Queue());
_SendBuffer[Priority.AboveMedium] = Queue.Synchronized(new Queue());
_SendBuffer[Priority.Medium] = Queue.Synchronized(new Queue());
_SendBuffer[Priority.BelowMedium] = Queue.Synchronized(new Queue());
_SendBuffer[Priority.Low] = Queue.Synchronized(new Queue());
// setup own callbacks
OnReadLine += new ReadLineEventHandler(_SimpleParser);
OnConnectionError += new EventHandler(_OnConnectionError);
_ReadThread = new ReadThread(this);
_WriteThread = new WriteThread(this);
_IdleWorkerThread = new IdleWorkerThread(this);
//Assembly assm = Assembly.GetAssembly(this.GetType());
//AssemblyName assm_name = assm.GetName(false);
//AssemblyProductAttribute pr = (AssemblyProductAttribute)assm.GetCustomAttributes(typeof(AssemblyProductAttribute), false)[0];
_VersionNumber = "0.4.5.0"; //assm_name.Version.ToString();
_VersionString = "SmartIrc4net 0.4.5.0"; //pr.Product+" "+_VersionNumber;
}
#if LOG4NET
~IrcConnection()
{
Logger.Main.Debug("IrcConnection destroyed");
}
#endif
/// <overloads>This method has 2 overloads</overloads>
/// <summary>
/// Connects to the specified server and port; when the connection fails,
/// the next server in the list will be used.
/// </summary>
/// <param name="addresslist">List of servers to connect to</param>
/// <param name="port">Port number to connect to</param>
/// <exception cref="CouldNotConnectException">The connection failed</exception>
/// <exception cref="AlreadyConnectedException">If there is already an active connection</exception>
public void Connect(string[] addresslist, int port)
{
if (_IsConnected) {
throw new AlreadyConnectedException("Already connected to: "+Address+":"+Port);
}
_ConnectTries++;
#if LOG4NET
Logger.Connection.Info(String.Format("connecting... (attempt: {0})",
_ConnectTries));
#endif
_AddressList = (string[])addresslist.Clone();
_Port = port;
if (OnConnecting != null) {
OnConnecting(this, EventArgs.Empty);
}
try {
System.Net.IPAddress ip = System.Net.Dns.Resolve(Address).AddressList[0];
_TcpClient = new IrcTcpClient();
_TcpClient.NoDelay = true;
_TcpClient.Socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.KeepAlive, 1);
// Check for forced ip config -=Derrick=-
if (HackMaineIrcBot.Irc.IrcBot.FixedEndpoint != null)
_TcpClient.Client.Bind(new System.Net.IPEndPoint(HackMaineIrcBot.Irc.IrcBot.FixedEndpoint.Address, 0));
// set timeout, after this the connection will be aborted
_TcpClient.ReceiveTimeout = _SocketReceiveTimeout*1000;
_TcpClient.SendTimeout = _SocketSendTimeout*1000;
_TcpClient.Connect(ip, port);
Stream stream = _TcpClient.GetStream();
#if NET_2_0
if (_UseSsl) {
SslStream sslStream = new SslStream(stream, false, delegate {
return true;
});
sslStream.AuthenticateAsClient(Address);
stream = sslStream;
}
#endif
_Reader = new StreamReader(stream, _Encoding);
_Writer = new StreamWriter(stream, _Encoding);
if (_Encoding.GetPreamble().Length > 0) {
// HACK: we have an encoding that has some kind of preamble
// like UTF-8 has a BOM, this will confuse the IRCd!
// Thus we send a \r\n so the IRCd can safely ignore that
// garbage.
_Writer.WriteLine();
// make sure we flush the BOM+CRLF correctly
_Writer.Flush();
}
// Connection was successful, resetting the connect counter
_ConnectTries = 0;
// updating the connection error state, so connecting is possible again
IsConnectionError = false;
_IsConnected = true;
// lets power up our threads
_ReadThread.Start();
_WriteThread.Start();
_IdleWorkerThread.Start();
#if LOG4NET
Logger.Connection.Info("connected");
#endif
if (OnConnected != null) {
OnConnected(this, EventArgs.Empty);
}
} catch (Exception e) {
if (_Reader != null) {
try {
_Reader.Close();
} catch (ObjectDisposedException) {
}
}
if (_Writer != null) {
try {
_Writer.Close();
} catch (ObjectDisposedException) {
}
}
if (_TcpClient != null) {
_TcpClient.Close();
}
_IsConnected = false;
IsConnectionError = true;
#if LOG4NET
Logger.Connection.Info("connection failed: "+e.Message);
#endif
if (_AutoRetry &&
_ConnectTries <= 3) {
if (OnAutoConnectError != null) {
OnAutoConnectError(this, new AutoConnectErrorEventArgs(Address, Port, e));
}
#if LOG4NET
Logger.Connection.Debug("delaying new connect attempt for "+_AutoRetryDelay+" sec");
#endif
System.Diagnostics.Trace.WriteLine("Sleeping IRC Connect Thread.");
Thread.Sleep(_AutoRetryDelay * 1000);
_NextAddress();
Connect(_AddressList, _Port);
} else {
throw new CouldNotConnectException("Could not connect to: "+Address+":"+Port+" "+e.Message, e);
}
}
}
/// <summary>
/// Connects to the specified server and port.
/// </summary>
/// <param name="address">Server address to connect to</param>
/// <param name="port">Port number to connect to</param>
public void Connect(string address, int port)
{
Connect(new string[] {address}, port);
}
/// <summary>
/// Reconnects to the server
/// </summary>
/// <exception cref="NotConnectedException">
/// If there was no active connection
/// </exception>
/// <exception cref="CouldNotConnectException">
/// The connection failed
/// </exception>
/// <exception cref="AlreadyConnectedException">
/// If there is already an active connection
/// </exception>
public void Reconnect()
{
#if LOG4NET
Logger.Connection.Info("reconnecting...");
#endif
Disconnect();
Connect(_AddressList, _Port);
}
/// <summary>
/// Disconnects from the server
/// </summary>
/// <exception cref="NotConnectedException">
/// If there was no active connection
/// </exception>
public void Disconnect()
{
if (!IsConnected) {
throw new NotConnectedException("The connection could not be disconnected because there is no active connection");
}
#if LOG4NET
Logger.Connection.Info("disconnecting...");
#endif
if (OnDisconnecting != null) {
OnDisconnecting(this, EventArgs.Empty);
}
IsDisconnecting = true;
_ReadThread.Stop();
_WriteThread.Stop();
_TcpClient.Close();
_IsConnected = false;
_IsRegistered = false;
IsDisconnecting = false;
if (OnDisconnected != null) {
OnDisconnected(this, EventArgs.Empty);
}
#if LOG4NET
Logger.Connection.Info("disconnected");
#endif
}
/// <summary>
/// Enters the read loop; when blocking it loops for as long as the connection is alive,
/// otherwise it processes all currently queued lines and returns
/// </summary>
/// <param name="blocking">true to block until the connection is closed, false to return once the queue is drained</param>
public void Listen(bool blocking)
{
if (blocking) {
while (IsConnected) {
ReadLine(true);
}
} else {
while (ReadLine(false).Length > 0) {
// loop as long as we receive messages
}
}
}
/// <summary>
/// Enters the blocking read loop, equivalent to Listen(true)
/// </summary>
public void Listen()
{
Listen(true);
}
/// <summary>
/// Reads and processes a single line from the server
/// </summary>
/// <param name="blocking">true to block until a line is available</param>
public void ListenOnce(bool blocking)
{
ReadLine(blocking);
}
/// <summary>
/// Reads and processes a single line from the server, blocking until one is available
/// </summary>
public void ListenOnce()
{
ListenOnce(true);
}
/// <summary>
/// Dequeues the next line received from the server and raises OnReadLine for it
/// </summary>
/// <param name="blocking">true to block until a line is available or the connection drops</param>
/// <returns>the received line, or an empty string if nothing was available</returns>
public string ReadLine(bool blocking)
{
string data = "";
if (blocking) {
// block till the queue has data, but bail out on connection error
while (IsConnected &&
!IsConnectionError &&
_ReadThread.Queue.Count == 0) {
System.Diagnostics.Trace.WriteLine("Sleeping IRC ReadLine Thread.");
Thread.Sleep(10);
}
}
if (IsConnected &&
_ReadThread.Queue.Count > 0) {
data = (string)(_ReadThread.Queue.Dequeue());
}
if (data != null && data.Length > 0) {
#if LOG4NET
Logger.Queue.Debug("read: \""+data+"\"");
#endif
if (OnReadLine != null) {
OnReadLine(this, new ReadLineEventArgs(data));
}
}
if (IsConnectionError &&
!IsDisconnecting &&
OnConnectionError != null) {
OnConnectionError(this, EventArgs.Empty);
}
return data;
}
/// <summary>
/// Sends a line to the server; Priority.Critical is written immediately, all other
/// priorities are queued and sent by the write thread
/// </summary>
/// <param name="data">the raw IRC line to send</param>
/// <param name="priority">the send priority of the message</param>
public void WriteLine(string data, Priority priority)
{
if (priority == Priority.Critical) {
if (!IsConnected) {
throw new NotConnectedException();
}
_WriteLine(data);
} else {
((Queue)_SendBuffer[priority]).Enqueue(data);
}
}
/// <summary>
/// Sends a line to the server with Priority.Medium
/// </summary>
/// <param name="data">the raw IRC line to send</param>
public void WriteLine(string data)
{
WriteLine(data, Priority.Medium);
}
private bool _WriteLine(string data)
{
if (IsConnected) {
try {
_Writer.Write(data+"\r\n");
_Writer.Flush();
} catch (IOException) {
#if LOG4NET
Logger.Socket.Warn("sending data failed, connection lost");
#endif
IsConnectionError = true;
return false;
} catch (ObjectDisposedException) {
#if LOG4NET
Logger.Socket.Warn("sending data failed (stream error), connection lost");
#endif
IsConnectionError = true;
return false;
}
#if LOG4NET
Logger.Socket.Debug("sent: \""+data+"\"");
#endif
if (OnWriteLine != null) {
OnWriteLine(this, new WriteLineEventArgs(data));
}
return true;
}
return false;
}
private void _NextAddress()
{
_CurrentAddress++;
if (_CurrentAddress >= _AddressList.Length) {
_CurrentAddress = 0;
}
#if LOG4NET
Logger.Connection.Info("set server to: "+Address);
#endif
}
private void _SimpleParser(object sender, ReadLineEventArgs args)
{
string rawline = args.Line;
string[] rawlineex = rawline.Split(new char[] {' '});
string messagecode = "";
if (rawline[0] == ':') {
messagecode = rawlineex[1];
ReplyCode replycode = ReplyCode.Null;
try {
replycode = (ReplyCode)int.Parse(messagecode);
} catch (FormatException) {
}
if (replycode != ReplyCode.Null) {
switch (replycode) {
case ReplyCode.Welcome:
_IsRegistered = true;
#if LOG4NET
Logger.Connection.Info("logged in");
#endif
break;
}
} else {
switch (rawlineex[1]) {
case "PONG":
DateTime now = DateTime.Now;
_LastPongReceived = now;
_Lag = now - _LastPingSent;
#if LOG4NET
Logger.Connection.Debug("PONG received, took: "+_Lag.TotalMilliseconds+" ms");
#endif
break;
}
}
} else {
messagecode = rawlineex[0];
switch (messagecode) {
case "ERROR":
// FIXME: handle server errors differently than connection errors!
//IsConnectionError = true;
break;
}
}
}
private void _OnConnectionError(object sender, EventArgs e)
{
try {
if (AutoReconnect) {
// lets try to recover the connection
Reconnect();
} else {
// make sure we clean up
Disconnect();
}
} catch (ConnectionException) {
}
}
/// <summary>
/// Background thread that reads lines from the socket and queues them for ReadLine()
/// </summary>
private class ReadThread
{
#if LOG4NET
private static readonly log4net.ILog _Logger = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
#endif
private IrcConnection _Connection;
private Thread _Thread;
private Queue _Queue = Queue.Synchronized(new Queue());
public Queue Queue {
get {
return _Queue;
}
}
/// <summary>
///
/// </summary>
/// <param name="connection"></param>
public ReadThread(IrcConnection connection)
{
_Connection = connection;
}
/// <summary>
///
/// </summary>
public void Start()
{
_Thread = new Thread(new ThreadStart(_Worker));
_Thread.Name = "ReadThread ("+_Connection.Address+":"+_Connection.Port+")";
_Thread.IsBackground = true;
System.Diagnostics.Trace.WriteLine("Starting IRC Read Thread.");
_Thread.Start();
}
/// <summary>
///
/// </summary>
public void Stop()
{
#if LOG4NET
_Logger.Debug("Stop()");
#endif
#if LOG4NET
_Logger.Debug("Stop(): aborting thread...");
#endif
_Thread.Abort();
// make sure we close the stream after the thread is gone, else
// the thread will think the connection is broken!
#if LOG4NET
_Logger.Debug("Stop(): joining thread...");
#endif
_Thread.Join();
#if LOG4NET
_Logger.Debug("Stop(): closing reader...");
#endif
try {
_Connection._Reader.Close();
} catch (ObjectDisposedException) {
}
}
private void _Worker()
{
#if LOG4NET
Logger.Socket.Debug("ReadThread started");
#endif
try {
string data = "";
try {
while (_Connection.IsConnected &&
((data = _Connection._Reader.ReadLine()) != null)) {
_Queue.Enqueue(data);
#if LOG4NET
Logger.Socket.Debug("received: \""+data+"\"");
#endif
}
} catch (IOException e) {
#if LOG4NET
Logger.Socket.Warn("IOException: "+e.Message);
#endif
} finally {
#if LOG4NET
Logger.Socket.Warn("connection lost");
#endif
// only flag this as connection error if we are not
// cleanly disconnecting
if (!_Connection.IsDisconnecting) {
_Connection.IsConnectionError = true;
}
}
} catch (ThreadAbortException) {
Thread.ResetAbort();
#if LOG4NET
Logger.Socket.Debug("ReadThread aborted");
#endif
} catch (Exception ex) {
#if LOG4NET
Logger.Socket.Error(ex);
#endif
}
}
}
/// <summary>
/// Background thread that drains the priority send buffers and writes them to the socket,
/// throttled by SendDelay
/// </summary>
private class WriteThread
{
private IrcConnection _Connection;
private Thread _Thread;
private int _HighCount;
private int _AboveMediumCount;
private int _MediumCount;
private int _BelowMediumCount;
private int _LowCount;
private int _AboveMediumSentCount;
private int _MediumSentCount;
private int _BelowMediumSentCount;
private int _AboveMediumThresholdCount = 4;
private int _MediumThresholdCount = 2;
private int _BelowMediumThresholdCount = 1;
private int _BurstCount;
/// <summary>
///
/// </summary>
/// <param name="connection"></param>
public WriteThread(IrcConnection connection)
{
_Connection = connection;
}
/// <summary>
///
/// </summary>
public void Start()
{
_Thread = new Thread(new ThreadStart(_Worker));
_Thread.Name = "WriteThread ("+_Connection.Address+":"+_Connection.Port+")";
_Thread.IsBackground = true;
System.Diagnostics.Trace.WriteLine("Starting IRC Write Thread.");
_Thread.Start();
}
/// <summary>
///
/// </summary>
public void Stop()
{
#if LOG4NET
Logger.Connection.Debug("Stopping WriteThread...");
#endif
_Thread.Abort();
// make sure we close the stream after the thread is gone, else
// the thread will think the connection is broken!
_Thread.Join();
try {
_Connection._Writer.Close();
} catch (ObjectDisposedException) {
}
}
private void _Worker()
{
#if LOG4NET
Logger.Socket.Debug("WriteThread started");
#endif
try {
try {
while (_Connection.IsConnected) {
_CheckBuffer();
System.Diagnostics.Trace.WriteLine("Sleeping IRC Write Worker Thread.");
Thread.Sleep(_Connection._SendDelay);
}
} catch (IOException e) {
#if LOG4NET
Logger.Socket.Warn("IOException: "+e.Message);
#endif
} finally {
#if LOG4NET
Logger.Socket.Warn("connection lost");
#endif
// only flag this as connection error if we are not
// cleanly disconnecting
if (!_Connection.IsDisconnecting) {
_Connection.IsConnectionError = true;
}
}
} catch (ThreadAbortException) {
Thread.ResetAbort();
#if LOG4NET
Logger.Socket.Debug("WriteThread aborted");
#endif
} catch (Exception ex) {
#if LOG4NET
Logger.Socket.Error(ex);
#endif
}
}
#region WARNING: complex scheduler, don't even think about changing it!
// WARNING: complex scheduler, don't even think about changing it!
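// Scheduling (as implemented by the checks below): each pass sends at most one message per
// priority level; High always goes first and blocks the lower levels while more High data is
// queued; between counter resets AboveMedium/Medium/BelowMedium are limited to 4/2/1 messages;
// Low is only sent when all other buffers are empty.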
private void _CheckBuffer()
{
// only send data if we are successfully registered on the IRC network
if (!_Connection._IsRegistered) {
return;
}
_HighCount = ((Queue)_Connection._SendBuffer[Priority.High]).Count;
_AboveMediumCount = ((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Count;
_MediumCount = ((Queue)_Connection._SendBuffer[Priority.Medium]).Count;
_BelowMediumCount = ((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Count;
_LowCount = ((Queue)_Connection._SendBuffer[Priority.Low]).Count;
if (_CheckHighBuffer() &&
_CheckAboveMediumBuffer() &&
_CheckMediumBuffer() &&
_CheckBelowMediumBuffer() &&
_CheckLowBuffer()) {
// everything is sent, resetting all counters
_AboveMediumSentCount = 0;
_MediumSentCount = 0;
_BelowMediumSentCount = 0;
_BurstCount = 0;
}
if (_BurstCount < 3) {
_BurstCount++;
//_CheckBuffer();
}
}
private bool _CheckHighBuffer()
{
if (_HighCount > 0) {
string data = (string)((Queue)_Connection._SendBuffer[Priority.High]).Dequeue();
if (_Connection._WriteLine(data) == false) {
#if LOG4NET
Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
((Queue)_Connection._SendBuffer[Priority.High]).Enqueue(data);
}
if (_HighCount > 1) {
// there is more data to send
return false;
}
}
return true;
}
private bool _CheckAboveMediumBuffer()
{
if ((_AboveMediumCount > 0) &&
(_AboveMediumSentCount < _AboveMediumThresholdCount)) {
string data = (string)((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Dequeue();
if (_Connection._WriteLine(data) == false) {
#if LOG4NET
Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
((Queue)_Connection._SendBuffer[Priority.AboveMedium]).Enqueue(data);
}
_AboveMediumSentCount++;
if (_AboveMediumSentCount < _AboveMediumThresholdCount) {
return false;
}
}
return true;
}
private bool _CheckMediumBuffer()
{
if ((_MediumCount > 0) &&
(_MediumSentCount < _MediumThresholdCount)) {
string data = (string)((Queue)_Connection._SendBuffer[Priority.Medium]).Dequeue();
if (_Connection._WriteLine(data) == false) {
#if LOG4NET
Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
((Queue)_Connection._SendBuffer[Priority.Medium]).Enqueue(data);
}
_MediumSentCount++;
if (_MediumSentCount < _MediumThresholdCount) {
return false;
}
}
return true;
}
private bool _CheckBelowMediumBuffer()
{
if ((_BelowMediumCount > 0) &&
(_BelowMediumSentCount < _BelowMediumThresholdCount)) {
string data = (string)((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Dequeue();
if (_Connection._WriteLine(data) == false) {
#if LOG4NET
Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
((Queue)_Connection._SendBuffer[Priority.BelowMedium]).Enqueue(data);
}
_BelowMediumSentCount++;
if (_BelowMediumSentCount < _BelowMediumThresholdCount) {
return false;
}
}
return true;
}
private bool _CheckLowBuffer()
{
if (_LowCount > 0) {
if ((_HighCount > 0) ||
(_AboveMediumCount > 0) ||
(_MediumCount > 0) ||
(_BelowMediumCount > 0)) {
return true;
}
string data = (string)((Queue)_Connection._SendBuffer[Priority.Low]).Dequeue();
if (_Connection._WriteLine(data) == false) {
#if LOG4NET
Logger.Queue.Warn("Sending data was not sucessful, data is requeued!");
#endif
((Queue)_Connection._SendBuffer[Priority.Low]).Enqueue(data);
}
if (_LowCount > 1) {
return false;
}
}
return true;
}
// END OF WARNING, below this you can read/change again ;)
#endregion
}
/// <summary>
/// Background thread that periodically sends a PING to the server and flags a connection
/// error when no PONG arrives within PingTimeout
/// </summary>
private class IdleWorkerThread
{
private IrcConnection _Connection;
private Thread _Thread;
/// <summary>
///
/// </summary>
/// <param name="connection"></param>
public IdleWorkerThread(IrcConnection connection)
{
_Connection = connection;
}
/// <summary>
///
/// </summary>
public void Start()
{
DateTime now = DateTime.Now;
_Connection._LastPingSent = now;
_Connection._LastPongReceived = now;
_Thread = new Thread(new ThreadStart(_Worker));
_Thread.Name = "IdleWorkerThread ("+_Connection.Address+":"+_Connection.Port+")";
_Thread.IsBackground = true;
System.Diagnostics.Trace.WriteLine("Starting IRC Idle Worker Thread.");
_Thread.Start();
}
/// <summary>
///
/// </summary>
public void Stop()
{
_Thread.Abort();
}
private void _Worker()
{
#if LOG4NET
Logger.Socket.Debug("IdleWorkerThread started");
#endif
try {
while (_Connection.IsConnected ) {
System.Diagnostics.Trace.WriteLine("Sleeping IRC Idle Thread Thread.");
// _IdleWorkerInterval is documented in seconds, Thread.Sleep expects milliseconds
Thread.Sleep(_Connection._IdleWorkerInterval * 1000);
// only send active pings if we are registered
if (!_Connection.IsRegistered) {
continue;
}
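// Compare how long ago the last PING was sent and the last PONG was received; as long as the
// server answered within _PingTimeout we may send a new PING once _PingInterval has elapsed,
// otherwise the connection is considered dead.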
DateTime now = DateTime.Now;
int last_ping_sent = (int)(now - _Connection._LastPingSent).TotalSeconds;
int last_pong_rcvd = (int)(now - _Connection._LastPongReceived).TotalSeconds;
// determines if the response time is ok
if (last_ping_sent < _Connection._PingTimeout) {
if (_Connection._LastPingSent > _Connection._LastPongReceived) {
// there is a pending ping request, we have to wait
continue;
}
// determines if we need to send another ping yet
if (last_pong_rcvd > _Connection._PingInterval) {
_Connection.WriteLine(Rfc2812.Ping(_Connection.Address), Priority.Critical);
_Connection._LastPingSent = now;
//_Connection._LastPongReceived = now;
} // else connection is fine, just continue
} else {
if (_Connection.IsDisconnecting) {
break;
}
#if LOG4NET
Logger.Socket.Warn("ping timeout, connection lost");
#endif
// only flag this as connection error if we are not
// cleanly disconnecting
_Connection.IsConnectionError = true;
break;
}
}
} catch (ThreadAbortException) {
Thread.ResetAbort();
#if LOG4NET
Logger.Socket.Debug("IdleWorkerThread aborted");
#endif
} catch (Exception ex) {
#if LOG4NET
Logger.Socket.Error(ex);
#endif
}
}
}
}
}
| |
using System;
using Server.Network;
using Server.Gumps;
namespace Server.Menus.Questions
{
public class StuckMenuEntry
{
private int m_Name;
private Point3D[] m_Locations;
public int Name { get { return m_Name; } }
public Point3D[] Locations{ get{ return m_Locations; } }
public StuckMenuEntry( int name, Point3D[] locations)
{
m_Name = name;
m_Locations = locations;
}
}
public class StuckMenu : Gump
{
private static StuckMenuEntry[] m_Entries = new StuckMenuEntry[]
{
new StuckMenuEntry( 1078608, new Point3D[]
{
new Point3D( 3503, 2574, 14 )
/*new Point3D( 1519, 1619, 10 ),
new Point3D( 1457, 1538, 30 ),
new Point3D( 1607, 1568, 20 ),
new Point3D( 1643, 1680, 18 )*/
} ),
/*// Britain
new StuckMenuEntry( 1011028, new Point3D[]
{
new Point3D( 1522, 1757, 28 ),
new Point3D( 1519, 1619, 10 ),
new Point3D( 1457, 1538, 30 ),
new Point3D( 1607, 1568, 20 ),
new Point3D( 1643, 1680, 18 )
} ),
// Trinsic
new StuckMenuEntry( 1011029, new Point3D[]
{
new Point3D( 2005, 2754, 30 ),
new Point3D( 1993, 2827, 0 ),
new Point3D( 2044, 2883, 0 ),
new Point3D( 1876, 2859, 20 ),
new Point3D( 1865, 2687, 0 )
} ),
// Vesper
new StuckMenuEntry( 1011030, new Point3D[]
{
new Point3D( 2973, 891, 0 ),
new Point3D( 3003, 776, 0 ),
new Point3D( 2910, 727, 0 ),
new Point3D( 2865, 804, 0 ),
new Point3D( 2832, 927, 0 )
} ),
// Minoc
new StuckMenuEntry( 1011031, new Point3D[]
{
new Point3D( 2498, 392, 0 ),
new Point3D( 2433, 541, 0 ),
new Point3D( 2445, 501, 15 ),
new Point3D( 2501, 469, 15 ),
new Point3D( 2444, 420, 15 )
} ),
// Yew
new StuckMenuEntry( 1011032, new Point3D[]
{
new Point3D( 490, 1166, 0 ),
new Point3D( 652, 1098, 0 ),
new Point3D( 650, 1013, 0 ),
new Point3D( 536, 979, 0 ),
new Point3D( 464, 970, 0 )
} ),
// Cove
new StuckMenuEntry( 1011033, new Point3D[]
{
new Point3D( 2230, 1159, 0 ),
new Point3D( 2218, 1203, 0 ),
new Point3D( 2247, 1194, 0 ),
new Point3D( 2236, 1224, 0 ),
new Point3D( 2273, 1231, 0 )
} )*/
};
private static StuckMenuEntry[] m_T2AEntries = new StuckMenuEntry[]
{
/*// Papua
new StuckMenuEntry( 1011057, new Point3D[]
{
new Point3D( 5720, 3109, -1 ),
new Point3D( 5677, 3176, -3 ),
new Point3D( 5678, 3227, 0 ),
new Point3D( 5769, 3206, -2 ),
new Point3D( 5777, 3270, -1 )
} ),
// Delucia
new StuckMenuEntry( 1011058, new Point3D[]
{
new Point3D( 5216, 4033, 37 ),
new Point3D( 5262, 4049, 37 ),
new Point3D( 5284, 4006, 37 ),
new Point3D( 5189, 3971, 39 ),
new Point3D( 5243, 3960, 37 )
} )*/
};
private static bool IsInSecondAgeArea( Mobile m )
{
if ( m.Map != Map.Trammel && m.Map != Map.Felucca )
return false;
if ( m.X >= 5120 && m.Y >= 2304 )
return true;
if ( m.Region.IsPartOf( "Terathan Keep" ) )
return true;
return false;
}
private Mobile m_Mobile, m_Sender;
private bool m_MarkUse;
private Timer m_Timer;
public StuckMenu( Mobile beholder, Mobile beheld, bool markUse ) : base( 150, 50 )
{
m_Sender = beholder;
m_Mobile = beheld;
m_MarkUse = markUse;
Closable = false;
Dragable = false;
Disposable = false;
AddBackground( 0, 0, 270, 320, 2600 );
AddHtmlLocalized( 50, 20, 250, 35, 1011027, false, false ); // Chose a town:
StuckMenuEntry[] entries = IsInSecondAgeArea( beheld ) ? m_T2AEntries : m_Entries;
for ( int i = 0; i < entries.Length; i++ )
{
StuckMenuEntry entry = entries[i];
AddButton( 50, 55 + 35 * i, 208, 209, i + 1, GumpButtonType.Reply, 0 );
AddHtmlLocalized( 75, 55 + 35 * i, 335, 40, entry.Name, false, false );
}
AddButton( 55, 263, 4005, 4007, 0, GumpButtonType.Reply, 0 );
AddHtmlLocalized( 90, 265, 200, 35, 1011012, false, false ); // CANCEL
}
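// BeginClose freezes the player and starts a timer that unfreezes them and closes this gump
// if they disconnect or fail to answer within three minutes.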
public void BeginClose()
{
StopClose();
m_Timer = new CloseTimer( m_Mobile );
m_Timer.Start();
m_Mobile.Frozen = true;
}
public void StopClose()
{
if ( m_Timer != null )
m_Timer.Stop();
m_Mobile.Frozen = false;
}
public override void OnResponse( NetState state, RelayInfo info )
{
StopClose();
if ( Factions.Sigil.ExistsOn( m_Mobile ) )
{
m_Mobile.SendLocalizedMessage( 1061632 ); // You can't do that while carrying the sigil.
}
else if ( info.ButtonID == 0 )
{
if ( m_Mobile == m_Sender )
m_Mobile.SendLocalizedMessage( 1010588 ); // You choose not to go to any city.
}
else if (m_Mobile.Map == Map.TerMur)
{
m_Mobile.SendMessage("Vous ne pourrez sortir d'ici aussi facilement");
}
else
{
int index = info.ButtonID - 1;
StuckMenuEntry[] entries = IsInSecondAgeArea( m_Mobile ) ? m_T2AEntries : m_Entries;
if ( index >= 0 && index < entries.Length )
Teleport( entries[index] );
}
}
private void Teleport( StuckMenuEntry entry )
{
if ( m_MarkUse )
{
m_Mobile.SendLocalizedMessage( 1010589 ); // You will be teleported within the next two minutes.
new TeleportTimer( m_Mobile, entry, TimeSpan.FromSeconds( 10.0 + (Utility.RandomDouble() * 110.0) ) ).Start();
m_Mobile.UsedStuckMenu();
}
else
{
new TeleportTimer( m_Mobile, entry, TimeSpan.Zero ).Start();
}
}
private class CloseTimer : Timer
{
private Mobile m_Mobile;
private DateTime m_End;
public CloseTimer( Mobile m ) : base( TimeSpan.Zero, TimeSpan.FromSeconds( 1.0 ) )
{
m_Mobile = m;
m_End = DateTime.Now + TimeSpan.FromMinutes( 3.0 );
}
protected override void OnTick()
{
if ( m_Mobile.NetState == null || DateTime.Now > m_End )
{
m_Mobile.Frozen = false;
m_Mobile.CloseGump( typeof( StuckMenu ) );
Stop();
}
else
{
m_Mobile.Frozen = true;
}
}
}
private class TeleportTimer : Timer
{
private Mobile m_Mobile;
private StuckMenuEntry m_Destination;
private DateTime m_End;
public TeleportTimer( Mobile mobile, StuckMenuEntry destination, TimeSpan delay ) : base( TimeSpan.Zero, TimeSpan.FromSeconds( 1.0 ) )
{
Priority = TimerPriority.TwoFiftyMS;
m_Mobile = mobile;
m_Destination = destination;
m_End = DateTime.Now + delay;
}
protected override void OnTick()
{
if ( DateTime.Now < m_End )
{
m_Mobile.Frozen = true;
}
else
{
m_Mobile.Frozen = false;
Stop();
if ( Factions.Sigil.ExistsOn( m_Mobile ) )
{
m_Mobile.SendLocalizedMessage( 1061632 ); // You can't do that while carrying the sigil.
return;
}
int idx = Utility.Random( m_Destination.Locations.Length );
Point3D dest = m_Destination.Locations[idx];
Map destMap;
//if ( m_Mobile.Map == Map.Trammel )
destMap = Map.Trammel;
/*else if ( m_Mobile.Map == Map.Felucca )
destMap = Map.Felucca;
else
destMap = m_Mobile.Kills >= 5 ? Map.Felucca : Map.Trammel;*/
Mobiles.BaseCreature.TeleportPets( m_Mobile, dest, destMap );
m_Mobile.MoveToWorld( dest, destMap );
}
}
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System;
namespace AmplifyShaderEditor
{
[Serializable]
[NodeAttributes( "Triplanar Sampler", "Textures", "Triplanar Mapping" )]
public sealed class TriplanarNode : ParentNode
{
[SerializeField]
private string m_uniqueName;
private bool m_editPropertyNameMode = false;
[SerializeField]
private string m_propertyInspectorName = "Triplanar Sampler";
private enum TriplanarType { Spherical, Cylindrical }
[SerializeField]
private TriplanarType m_selectedTriplanarType = TriplanarType.Spherical;
private enum TriplanarSpace { Object, World }
[SerializeField]
private TriplanarSpace m_selectedTriplanarSpace = TriplanarSpace.World;
[SerializeField]
private bool m_normalCorrection = false;
[SerializeField]
private TexturePropertyNode m_topTexture;
[SerializeField]
private TexturePropertyNode m_midTexture;
[SerializeField]
private TexturePropertyNode m_botTexture;
private string m_tempTopInspectorName = string.Empty;
private string m_tempTopName = string.Empty;
private TexturePropertyValues m_tempTopDefaultValue = TexturePropertyValues.white;
private int m_tempTopOrderIndex = -1;
private Texture2D m_tempTopDefaultTexture = null;
private string m_tempMidInspectorName = string.Empty;
private string m_tempMidName = string.Empty;
private TexturePropertyValues m_tempMidDefaultValue = TexturePropertyValues.white;
private int m_tempMidOrderIndex = -1;
private Texture2D m_tempMidDefaultTexture = null;
private string m_tempBotInspectorName = string.Empty;
private string m_tempBotName = string.Empty;
private TexturePropertyValues m_tempBotDefaultValue = TexturePropertyValues.white;
private int m_tempBotOrderIndex = -1;
private Texture2D m_tempBotDefaultTexture = null;
private TexturePropertyNode m_topTexPropRef = null;
private TexturePropertyNode m_midTexPropRef = null;
private TexturePropertyNode m_botTexPropRef = null;
public bool m_firstFrame = true;
private bool m_topTextureFoldout = true;
private bool m_midTextureFoldout = true;
private bool m_botTextureFoldout = true;
private string m_functionNormalCall = "TriplanarNormal( {0}, {1}, {2}, {3}, {4}, {5}, {6}, {7} )";
private string m_functionNormalHeader = "inline float3 TriplanarNormal( sampler2D topBumpMap, sampler2D midBumpMap, sampler2D botBumpMap, float3 worldPos, float3 worldNormal, float falloff, float tilling, float vertex )";
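// The generated function bodies below blend three planar projections (ZY, ZX and XY planes)
// with weights pow( abs( worldNormal ), falloff ) normalized to sum to 1; the "vertex" flag
// switches between tex2D and tex2Dlod so the same code works in vertex/tessellation stages.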
private string[] m_functionNormalBody = {
"float3 projNormal = ( pow( abs( worldNormal ), falloff ) );",
"projNormal /= projNormal.x + projNormal.y + projNormal.z;",
"float3 nsign = sign(worldNormal);",
"half3 xNorm; half3 yNorm; half3 zNorm;",
"if(vertex == 1){",
"xNorm = UnpackNormal( tex2Dlod( topBumpMap, float4((tilling * worldPos.zy * float2( nsign.x, 1.0 )).xy,0,0) ) );",
"yNorm = UnpackNormal( tex2Dlod( topBumpMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"zNorm = UnpackNormal( tex2Dlod( topBumpMap, float4((tilling * worldPos.xy * float2( -nsign.z, 1.0 )).xy,0,0) ) );",
"} else {",
"xNorm = UnpackNormal( tex2D( topBumpMap, tilling * worldPos.zy * float2( nsign.x, 1.0 ) ) );",
"yNorm = UnpackNormal( tex2D( topBumpMap, tilling * worldPos.zx ) );",
"zNorm = UnpackNormal( tex2D( topBumpMap, tilling * worldPos.xy * float2( -nsign.z, 1.0 ) ) );",
"}",
"xNorm = normalize( half3( xNorm.xy * float2( nsign.x, 1.0 ) + worldNormal.zy, worldNormal.x ) );",
"yNorm = normalize( half3( yNorm.xy + worldNormal.zx, worldNormal.y));",
"zNorm = normalize( half3( zNorm.xy * float2( -nsign.z, 1.0 ) + worldNormal.xy, worldNormal.z ) );",
"xNorm = xNorm.zyx;",
"yNorm = yNorm.yzx;",
"zNorm = zNorm.xyz;",
"return xNorm * projNormal.x + yNorm * projNormal.y + zNorm * projNormal.z;"
};
private string[] m_functionNormalBodyTMB = {
"float3 projNormal = ( pow( abs( worldNormal ), falloff ) );",
"projNormal /= projNormal.x + projNormal.y + projNormal.z;",
"float3 nsign = sign(worldNormal);",
"float negProjNormalY = max( 0, projNormal.y * -nsign.y );",
"projNormal.y = max( 0, projNormal.y * nsign.y );",
"half3 xNorm; half3 yNorm; half3 yNormN; half3 zNorm;",
"if(vertex == 1){",
"xNorm = UnpackNormal( tex2Dlod( midBumpMap, float4((tilling * worldPos.zy * float2( nsign.x, 1.0 )).xy,0,0) ) );",
"yNorm = UnpackNormal( tex2Dlod( topBumpMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"yNormN = UnpackNormal( tex2Dlod( botBumpMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"zNorm = UnpackNormal( tex2Dlod( midBumpMap, float4((tilling * worldPos.xy * float2( -nsign.z, 1.0 )).xy,0,0) ) );",
"} else {",
"xNorm = UnpackNormal( tex2D( midBumpMap, tilling * worldPos.zy * float2( nsign.x, 1.0 ) ) );",
"yNorm = UnpackNormal( tex2D( topBumpMap, tilling * worldPos.zx ) );",
"yNormN = UnpackNormal( tex2D( botBumpMap, tilling * worldPos.zx ) );",
"zNorm = UnpackNormal( tex2D( midBumpMap, tilling * worldPos.xy * float2( -nsign.z, 1.0 ) ) );",
"}",
"xNorm = normalize( half3( xNorm.xy * float2( nsign.x, 1.0 ) + worldNormal.zy, worldNormal.x ) );",
"yNorm = normalize( half3( yNorm.xy + worldNormal.zx, worldNormal.y));",
"yNormN = normalize( half3( yNormN.xy + worldNormal.zx, worldNormal.y));",
"zNorm = normalize( half3( zNorm.xy * float2( -nsign.z, 1.0 ) + worldNormal.xy, worldNormal.z ) );",
"xNorm = xNorm.zyx;",
"yNorm = yNorm.yzx;",
"yNormN = yNormN.yzx;",
"zNorm = zNorm.xyz;",
"return xNorm * projNormal.x + yNorm * projNormal.y + yNormN * negProjNormalY + zNorm * projNormal.z;"
};
private string m_functionSamplingCall = "TriplanarSampling( {0}, {1}, {2}, {3}, {4}, {5}, {6}, {7} )";
private string m_functionSamplingHeader = "inline float4 TriplanarSampling( sampler2D topTexMap, sampler2D midTexMap, sampler2D botTexMap, float3 worldPos, float3 worldNormal, float falloff, float tilling, float vertex )";
private string[] m_functionSamplingBody = {
"float3 projNormal = ( pow( abs( worldNormal ), falloff ) );",
"projNormal /= projNormal.x + projNormal.y + projNormal.z;",
"float3 nsign = sign( worldNormal );",
"half4 xNorm; half4 yNorm; half4 zNorm;",
"if(vertex == 1){",
"xNorm = ( tex2Dlod( topTexMap, float4((tilling * worldPos.zy * float2( nsign.x, 1.0 )).xy,0,0) ) );",
"yNorm = ( tex2Dlod( topTexMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"zNorm = ( tex2Dlod( topTexMap, float4((tilling * worldPos.xy * float2( -nsign.z, 1.0 )).xy,0,0) ) );",
"} else {",
"xNorm = ( tex2D( topTexMap, tilling * worldPos.zy * float2( nsign.x, 1.0 ) ) );",
"yNorm = ( tex2D( topTexMap, tilling * worldPos.zx ) );",
"zNorm = ( tex2D( topTexMap, tilling * worldPos.xy * float2( -nsign.z, 1.0 ) ) );",
"}",
"return xNorm* projNormal.x + yNorm* projNormal.y + zNorm* projNormal.z;"
};
private string[] m_functionSamplingBodyTMB = {
"float3 projNormal = ( pow( abs( worldNormal ), falloff ) );",
"projNormal /= projNormal.x + projNormal.y + projNormal.z;",
"float3 nsign = sign( worldNormal );",
"float negProjNormalY = max( 0, projNormal.y * -nsign.y );",
"projNormal.y = max( 0, projNormal.y * nsign.y );",
"half4 xNorm; half4 yNorm; half4 yNormN; half4 zNorm;",
"if(vertex == 1){",
"xNorm = ( tex2Dlod( midTexMap, float4((tilling * worldPos.zy * float2( nsign.x, 1.0 )).xy,0,0) ) );",
"yNorm = ( tex2Dlod( topTexMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"yNormN = ( tex2Dlod( botTexMap, float4((tilling * worldPos.zx).xy,0,0) ) );",
"zNorm = ( tex2Dlod( midTexMap, float4((tilling * worldPos.xy * float2( -nsign.z, 1.0 )).xy,0,0) ) );",
"} else {",
"xNorm = ( tex2D( midTexMap, tilling * worldPos.zy * float2( nsign.x, 1.0 ) ) );",
"yNorm = ( tex2D( topTexMap, tilling * worldPos.zx ) );",
"yNormN = ( tex2D( botTexMap, tilling * worldPos.zx ) );",
"zNorm = ( tex2D( midTexMap, tilling * worldPos.xy * float2( -nsign.z, 1.0 ) ) );",
"}",
"return xNorm* projNormal.x + yNorm* projNormal.y + yNormN * negProjNormalY + zNorm* projNormal.z;"
};
protected override void CommonInit( int uniqueId )
{
base.CommonInit( uniqueId );
AddInputPort( WirePortDataType.SAMPLER2D, true, "Top" );
AddInputPort( WirePortDataType.SAMPLER2D, true, "Middle" );
AddInputPort( WirePortDataType.SAMPLER2D, true, "Bottom" );
AddInputPort( WirePortDataType.FLOAT, true, "Tiling" );
AddInputPort( WirePortDataType.FLOAT, true, "Falloff" );
AddOutputColorPorts( "RGBA" );
m_useInternalPortData = true;
InputPorts[ 3 ].FloatInternalData = 1;
InputPorts[ 4 ].FloatInternalData = 1;
m_selectedLocation = PreviewLocation.TopCenter;
m_marginPreviewLeft = 43;
m_drawPreviewAsSphere = true;
m_drawPreviewExpander = false;
m_drawPreview = true;
m_showPreview = true;
m_autoDrawInternalPortData = false;
m_textLabelWidth = 120;
//m_propertyInspectorName = "Triplanar Sampler";
m_previewShaderGUID = "8723015ec59743143aadfbe480e34391";
//ConfigurePorts();
}
public void Init()
{
m_topTexture = ScriptableObject.CreateInstance<TexturePropertyNode>();
m_topTexture.CustomPrefix = "Top Texture ";
m_topTexture.UniqueId = m_uniqueId;
if( UIUtils.IsUniformNameAvailable( m_tempTopName ) )
{
UIUtils.ReleaseUniformName( m_uniqueId, m_topTexture.PropertyName );
if ( !string.IsNullOrEmpty( m_tempTopInspectorName ) )
{
m_topTexture.SetInspectorName( m_tempTopInspectorName );
}
if ( !string.IsNullOrEmpty( m_tempTopName ) )
m_topTexture.SetPropertyName( m_tempTopName );
UIUtils.RegisterUniformName( m_uniqueId, m_topTexture.PropertyName );
}
m_topTexture.DefaultTextureValue = m_tempTopDefaultValue;
m_topTexture.OrderIndex = m_tempTopOrderIndex;
m_topTexture.DrawAutocast = false;
m_topTexture.CurrentParameterType = PropertyType.Property;
m_topTexture.DefaultValue = m_tempTopDefaultTexture;
UIUtils.RegisterPropertyNode( m_topTexture );
UIUtils.RegisterTexturePropertyNode( m_topTexture );
m_midTexture = ScriptableObject.CreateInstance<TexturePropertyNode>();
m_midTexture.CustomPrefix = "Mid Texture ";
m_midTexture.UniqueId = m_uniqueId;
if ( UIUtils.IsUniformNameAvailable( m_tempMidName ) )
{
UIUtils.ReleaseUniformName( m_uniqueId, m_midTexture.PropertyName );
if ( !string.IsNullOrEmpty( m_tempMidInspectorName ) )
m_midTexture.SetInspectorName( m_tempMidInspectorName );
if ( !string.IsNullOrEmpty( m_tempMidName ) )
m_midTexture.SetPropertyName( m_tempMidName );
UIUtils.RegisterUniformName( m_uniqueId, m_midTexture.PropertyName );
}
m_midTexture.DefaultTextureValue = m_tempMidDefaultValue;
m_midTexture.OrderIndex = m_tempMidOrderIndex;
m_midTexture.DrawAutocast = false;
m_midTexture.CurrentParameterType = PropertyType.Property;
m_midTexture.DefaultValue = m_tempMidDefaultTexture;
m_botTexture = ScriptableObject.CreateInstance<TexturePropertyNode>();
m_botTexture.CustomPrefix = "Bot Texture ";
m_botTexture.UniqueId = m_uniqueId;
if ( UIUtils.IsUniformNameAvailable( m_tempBotName ) )
{
UIUtils.ReleaseUniformName( m_uniqueId, m_botTexture.PropertyName );
if ( !string.IsNullOrEmpty( m_tempBotInspectorName ) )
m_botTexture.SetInspectorName( m_tempBotInspectorName );
if ( !string.IsNullOrEmpty( m_tempBotName ) )
m_botTexture.SetPropertyName( m_tempBotName );
UIUtils.RegisterUniformName( m_uniqueId, m_botTexture.PropertyName );
}
m_botTexture.DefaultTextureValue = m_tempBotDefaultValue;
m_botTexture.OrderIndex = m_tempBotOrderIndex;
m_botTexture.DrawAutocast = false;
m_botTexture.CurrentParameterType = PropertyType.Property;
m_botTexture.DefaultValue = m_tempBotDefaultTexture;
if (m_materialMode)
SetDelayedMaterialMode(UIUtils.CurrentWindow.CurrentGraph.CurrentMaterial);
if ( m_nodeAttribs != null )
m_uniqueName = m_nodeAttribs.Name + m_uniqueId;
ConfigurePorts();
ReRegisterPorts();
}
public override void Destroy()
{
base.Destroy();
//UIUtils.UnregisterPropertyNode( m_topTexture );
//UIUtils.UnregisterTexturePropertyNode( m_topTexture );
//UIUtils.UnregisterPropertyNode( m_midTexture );
//UIUtils.UnregisterTexturePropertyNode( m_midTexture );
//UIUtils.UnregisterPropertyNode( m_botTexture );
//UIUtils.UnregisterTexturePropertyNode( m_botTexture );
m_topTexture.Destroy();
m_topTexture = null;
m_midTexture.Destroy();
m_midTexture = null;
m_botTexture.Destroy();
m_botTexture = null;
m_tempTopDefaultTexture = null;
m_tempMidDefaultTexture = null;
m_tempBotDefaultTexture = null;
m_topTexPropRef = null;
m_midTexPropRef = null;
m_botTexPropRef = null;
}
public override void SetPreviewInputs()
{
base.SetPreviewInputs();
if ( m_topTexPropRef == null )
return;
if ( m_materialMode )
{
PreviewMaterial.SetTexture( "_A", m_topTexPropRef.MaterialValue );
if(m_selectedTriplanarType == TriplanarType.Cylindrical && m_midTexPropRef != null )
{
PreviewMaterial.SetTexture( "_B", m_midTexPropRef.MaterialValue );
PreviewMaterial.SetTexture( "_C", m_botTexPropRef.MaterialValue );
}
} else
{
PreviewMaterial.SetTexture( "_A", m_topTexPropRef.DefaultValue );
if ( m_selectedTriplanarType == TriplanarType.Cylindrical && m_midTexPropRef != null )
{
PreviewMaterial.SetTexture( "_B", m_midTexPropRef.DefaultValue );
PreviewMaterial.SetTexture( "_C", m_botTexPropRef.DefaultValue );
}
}
PreviewMaterial.SetFloat( "_IsNormal", (m_normalCorrection ? 1 : 0));
PreviewMaterial.SetFloat( "_IsSpherical", ( m_selectedTriplanarType == TriplanarType.Spherical ? 1 : 0 ) );
}
public void ReRegisterPorts()
{
switch ( m_selectedTriplanarType )
{
case TriplanarType.Spherical:
UIUtils.UnregisterPropertyNode( m_midTexture );
UIUtils.UnregisterTexturePropertyNode( m_midTexture );
UIUtils.UnregisterPropertyNode( m_botTexture );
UIUtils.UnregisterTexturePropertyNode( m_botTexture );
break;
case TriplanarType.Cylindrical:
UIUtils.UnregisterPropertyNode( m_midTexture );
UIUtils.UnregisterTexturePropertyNode( m_midTexture );
UIUtils.UnregisterPropertyNode( m_botTexture );
UIUtils.UnregisterTexturePropertyNode( m_botTexture );
UIUtils.RegisterPropertyNode( m_midTexture );
UIUtils.RegisterTexturePropertyNode( m_midTexture );
UIUtils.RegisterPropertyNode( m_botTexture );
UIUtils.RegisterTexturePropertyNode( m_botTexture );
break;
}
}
public void ConfigurePorts()
{
switch ( m_selectedTriplanarType )
{
case TriplanarType.Spherical:
InputPorts[ 0 ].Name = "Tex";
InputPorts[ 1 ].Visible = false;
InputPorts[ 2 ].Visible = false;
break;
case TriplanarType.Cylindrical:
InputPorts[ 0 ].Name = "Top";
InputPorts[ 1 ].Visible = true;
InputPorts[ 2 ].Visible = true;
break;
}
if ( m_normalCorrection )
{
m_outputPorts[ 0 ].ChangeProperties( "XYZ", WirePortDataType.FLOAT3, false );
m_outputPorts[ 1 ].ChangeProperties( "X", WirePortDataType.FLOAT, false );
m_outputPorts[ 2 ].ChangeProperties( "Y", WirePortDataType.FLOAT, false );
m_outputPorts[ 3 ].ChangeProperties( "Z", WirePortDataType.FLOAT, false );
m_outputPorts[ 4 ].Visible = false;
} else
{
m_outputPorts[ 0 ].ChangeProperties( "RGBA", WirePortDataType.FLOAT4, false );
m_outputPorts[ 1 ].ChangeProperties( "R", WirePortDataType.FLOAT, false );
m_outputPorts[ 2 ].ChangeProperties( "G", WirePortDataType.FLOAT, false );
m_outputPorts[ 3 ].ChangeProperties( "B", WirePortDataType.FLOAT, false );
m_outputPorts[ 4 ].ChangeProperties( "A", WirePortDataType.FLOAT, false );
m_outputPorts[ 4 ].Visible = true;
}
m_outputPorts[ 0 ].DirtyLabelSize = true;
}
public override void PropagateNodeData( NodeData nodeData )
{
base.PropagateNodeData( nodeData );
UIUtils.CurrentDataCollector.DirtyNormal = true;
}
public override void DrawProperties()
{
base.DrawProperties();
NodeUtils.DrawPropertyGroup( ref m_propertiesFoldout, "Parameters", DrawMainOptions);
DrawInternalDataGroup();
if ( m_selectedTriplanarType == TriplanarType.Spherical )
NodeUtils.DrawPropertyGroup( ref m_topTextureFoldout, "Texture", DrawTopTextureOptions );
else
NodeUtils.DrawPropertyGroup( ref m_topTextureFoldout, "Top Texture", DrawTopTextureOptions );
if ( m_selectedTriplanarType == TriplanarType.Cylindrical )
{
NodeUtils.DrawPropertyGroup( ref m_midTextureFoldout, "Middle Texture", DrawMidTextureOptions );
NodeUtils.DrawPropertyGroup( ref m_botTextureFoldout, "Bottom Texture", DrawBotTextureOptions );
}
}
void DrawMainOptions()
{
EditorGUI.BeginChangeCheck();
m_propertyInspectorName = EditorGUILayoutTextField( "Name", m_propertyInspectorName );
m_selectedTriplanarType = ( TriplanarType ) EditorGUILayoutEnumPopup( "Mapping", m_selectedTriplanarType );
m_selectedTriplanarSpace = ( TriplanarSpace ) EditorGUILayoutEnumPopup( "Space", m_selectedTriplanarSpace );
m_normalCorrection = EditorGUILayoutToggle( "Normal Map", m_normalCorrection );
if ( EditorGUI.EndChangeCheck() )
{
SetTitleText( m_propertyInspectorName );
ConfigurePorts();
ReRegisterPorts();
}
}
void DrawTopTextureOptions()
{
EditorGUI.BeginChangeCheck();
m_topTexture.ShowPropertyInspectorNameGUI();
m_topTexture.ShowPropertyNameGUI( true );
m_topTexture.ShowToolbar();
if ( EditorGUI.EndChangeCheck() )
{
m_topTexture.BeginPropertyFromInspectorCheck();
if ( m_materialMode )
m_requireMaterialUpdate = true;
}
m_topTexture.CheckPropertyFromInspector();
}
void DrawMidTextureOptions()
{
if ( m_midTexture == null )
return;
EditorGUI.BeginChangeCheck();
m_midTexture.ShowPropertyInspectorNameGUI();
m_midTexture.ShowPropertyNameGUI( true );
m_midTexture.ShowToolbar();
if ( EditorGUI.EndChangeCheck() )
{
m_midTexture.BeginPropertyFromInspectorCheck();
if ( m_materialMode )
m_requireMaterialUpdate = true;
}
m_midTexture.CheckPropertyFromInspector();
}
void DrawBotTextureOptions()
{
if ( m_botTexture == null )
return;
EditorGUI.BeginChangeCheck();
m_botTexture.ShowPropertyInspectorNameGUI();
m_botTexture.ShowPropertyNameGUI( true );
m_botTexture.ShowToolbar();
if ( EditorGUI.EndChangeCheck() )
{
m_botTexture.BeginPropertyFromInspectorCheck();
if ( m_materialMode )
m_requireMaterialUpdate = true;
}
m_botTexture.CheckPropertyFromInspector();
}
public override void OnEnable()
{
base.OnEnable();
m_firstFrame = true;
if ( m_topTexture != null )
m_topTexture.ReRegisterName = true;
if(m_selectedTriplanarType == TriplanarType.Cylindrical )
{
if ( m_midTexture != null )
m_midTexture.ReRegisterName = true;
if ( m_botTexture != null )
m_botTexture.ReRegisterName = true;
}
}
public override void Draw( DrawInfo drawInfo )
{
if( m_firstFrame )
{
m_firstFrame = false;
Init();
}
if ( m_topTexture.ReRegisterName )
{
m_topTexture.ReRegisterName = false;
UIUtils.RegisterUniformName( m_uniqueId, m_topTexture.PropertyName );
}
m_topTexture.CheckDelayedDirtyProperty();
m_topTexture.CheckPropertyFromInspector();
if ( m_selectedTriplanarType == TriplanarType.Cylindrical )
{
if ( m_midTexture.ReRegisterName )
{
m_midTexture.ReRegisterName = false;
UIUtils.RegisterUniformName( m_uniqueId, m_midTexture.PropertyName );
}
m_midTexture.CheckDelayedDirtyProperty();
m_midTexture.CheckPropertyFromInspector();
if ( m_botTexture.ReRegisterName )
{
m_botTexture.ReRegisterName = false;
UIUtils.RegisterUniformName( m_uniqueId, m_botTexture.PropertyName );
}
m_botTexture.CheckDelayedDirtyProperty();
m_botTexture.CheckPropertyFromInspector();
}
base.Draw( drawInfo );
//return;
Rect startPicker = m_previewRect;
startPicker.x -= 43 * drawInfo.InvertedZoom;
startPicker.width = 43 * drawInfo.InvertedZoom;
startPicker.height = 43 * drawInfo.InvertedZoom;
m_topTexPropRef = m_topTexture;
if ( m_inputPorts[ 0 ].IsConnected )
{
m_topTexPropRef = m_inputPorts[ 0 ].GetOutputNode( 0 ) as TexturePropertyNode;
if ( GUI.Button( startPicker, string.Empty, UIUtils.GetCustomStyle( CustomStyle.SamplerTextureRef ) ) )
UIUtils.FocusOnNode( m_topTexPropRef, 1, true );
if( m_topTexPropRef.Value != null)
EditorGUI.DrawPreviewTexture( startPicker, m_topTexPropRef.Value );
} else
{
if ( m_materialMode )
{
EditorGUI.BeginChangeCheck();
m_topTexPropRef.MaterialValue = EditorGUIObjectField( startPicker, m_topTexPropRef.MaterialValue, typeof( Texture ), false ) as Texture;
if ( EditorGUI.EndChangeCheck() )
m_requireMaterialUpdate = true;
}
else
{
m_topTexPropRef.DefaultValue = EditorGUIObjectField( startPicker, m_topTexPropRef.DefaultValue, typeof( Texture ), false ) as Texture;
}
}
// Mid
if ( m_selectedTriplanarType == TriplanarType.Cylindrical )
{
startPicker.y += startPicker.height;
m_midTexPropRef = m_midTexture;
if ( m_inputPorts[ 1 ].IsConnected )
{
m_midTexPropRef = m_inputPorts[ 1 ].GetOutputNode( 0 ) as TexturePropertyNode;
if ( GUI.Button( startPicker, string.Empty, UIUtils.GetCustomStyle( CustomStyle.SamplerTextureRef ) ) )
UIUtils.FocusOnNode( m_midTexPropRef, 1, true );
if ( m_midTexPropRef.Value != null )
EditorGUI.DrawPreviewTexture( startPicker, m_midTexPropRef.Value );
}
else
{
if ( m_materialMode )
{
EditorGUI.BeginChangeCheck();
m_midTexPropRef.MaterialValue = EditorGUIObjectField( startPicker, m_midTexPropRef.MaterialValue, typeof( Texture ), false ) as Texture;
if ( EditorGUI.EndChangeCheck() )
m_requireMaterialUpdate = true;
}
else
{
m_midTexPropRef.DefaultValue = EditorGUIObjectField( startPicker, m_midTexPropRef.DefaultValue, typeof( Texture ), false ) as Texture;
}
}
startPicker.y += startPicker.height;
startPicker.height = 42 * drawInfo.InvertedZoom;
m_botTexPropRef = m_botTexture;
if ( m_inputPorts[ 2 ].IsConnected )
{
m_botTexPropRef = m_inputPorts[ 2 ].GetOutputNode( 0 ) as TexturePropertyNode;
if ( GUI.Button( startPicker, string.Empty, UIUtils.GetCustomStyle( CustomStyle.SamplerTextureRef ) ) )
UIUtils.FocusOnNode( m_botTexPropRef, 1, true );
if ( m_botTexPropRef.Value != null )
EditorGUI.DrawPreviewTexture( startPicker, m_botTexPropRef.Value );
}
else
{
if ( m_materialMode )
{
EditorGUI.BeginChangeCheck();
m_botTexPropRef.MaterialValue = EditorGUIObjectField( startPicker, m_botTexPropRef.MaterialValue, typeof( Texture ), false ) as Texture;
if ( EditorGUI.EndChangeCheck() )
m_requireMaterialUpdate = true;
}
else
{
m_botTexPropRef.DefaultValue = EditorGUIObjectField( startPicker, m_botTexPropRef.DefaultValue, typeof( Texture ), false ) as Texture;
}
}
}
}
public override void OnNodeDoubleClicked( Vector2 currentMousePos2D )
{
if ( currentMousePos2D.y - m_globalPosition.y > Constants.NODE_HEADER_HEIGHT + Constants.NODE_HEADER_EXTRA_HEIGHT )
{
UIUtils.CurrentWindow.ParametersWindow.IsMaximized = !UIUtils.CurrentWindow.ParametersWindow.IsMaximized;
}
else
{
m_editPropertyNameMode = true;
GUI.FocusControl( m_uniqueName );
TextEditor te = ( TextEditor )GUIUtility.GetStateObject( typeof( TextEditor ), GUIUtility.keyboardControl );
if ( te != null )
{
te.SelectAll();
}
}
}
public override void OnNodeSelected( bool value )
{
base.OnNodeSelected( value );
if ( !value )
m_editPropertyNameMode = false;
}
public override void DrawTitle( Rect titlePos )
{
if ( m_editPropertyNameMode )
{
titlePos.height = Constants.NODE_HEADER_HEIGHT;
EditorGUI.BeginChangeCheck();
GUI.SetNextControlName( m_uniqueName );
m_propertyInspectorName = GUITextField( titlePos, m_propertyInspectorName, UIUtils.GetCustomStyle( CustomStyle.NodeTitle ) );
if ( EditorGUI.EndChangeCheck() )
{
SetTitleText( m_propertyInspectorName );
}
if ( Event.current.isKey && ( Event.current.keyCode == KeyCode.Return || Event.current.keyCode == KeyCode.KeypadEnter ) )
{
m_editPropertyNameMode = false;
GUIUtility.keyboardControl = 0;
}
}
else
{
base.DrawTitle( titlePos );
}
}
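// Emits the triplanar sampling (or normal reconstruction) function into the shader, feeding it
// the three texture properties or connected ports, the tiling and falloff inputs and the world-
// or object-space position and normal; with "Normal Map" enabled the result is transformed back
// to tangent space before being returned.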
public override string GenerateShaderForOutput( int outputId, ref MasterNodeDataCollector dataCollector, bool ignoreLocalvar )
{
//ConfigureFunctions();
dataCollector.AddPropertyNode( m_topTexture );
dataCollector.AddPropertyNode( m_midTexture );
dataCollector.AddPropertyNode( m_botTexture );
bool isVertex = ( dataCollector.PortCategory == MasterNodePortCategory.Tessellation || dataCollector.PortCategory == MasterNodePortCategory.Vertex );
string texTop = string.Empty;
string texMid = string.Empty;
string texBot = string.Empty;
if ( m_inputPorts[ 0 ].IsConnected )
{
texTop = m_inputPorts[ 0 ].GeneratePortInstructions( ref dataCollector );
}
else
{
dataCollector.AddToUniforms( m_uniqueId, m_topTexture.GetTexture2DUniformValue() );
dataCollector.AddToProperties( m_uniqueId, m_topTexture.GetTexture2DPropertyValue(), m_topTexture.OrderIndex );
texTop = m_topTexture.PropertyName;
}
if ( m_selectedTriplanarType == TriplanarType.Spherical )
{
texMid = texTop;
texBot = texTop;
}
else
{
if ( m_inputPorts[ 1 ].IsConnected )
{
texMid = m_inputPorts[ 1 ].GeneratePortInstructions( ref dataCollector );
}
else
{
dataCollector.AddToUniforms( m_uniqueId, m_midTexture.GetTexture2DUniformValue() );
dataCollector.AddToProperties( m_uniqueId, m_midTexture.GetTexture2DPropertyValue(), m_midTexture.OrderIndex );
texMid = m_midTexture.PropertyName;
}
if ( m_inputPorts[ 2 ].IsConnected )
{
texBot = m_inputPorts[ 2 ].GeneratePortInstructions( ref dataCollector );
}
else
{
dataCollector.AddToUniforms( m_uniqueId, m_botTexture.GetTexture2DUniformValue() );
dataCollector.AddToProperties( m_uniqueId, m_botTexture.GetTexture2DPropertyValue(), m_botTexture.OrderIndex );
texBot = m_botTexture.PropertyName;
}
}
dataCollector.AddToInput( m_uniqueId, UIUtils.GetInputDeclarationFromType( m_currentPrecisionType, AvailableSurfaceInputs.WORLD_POS ), true );
dataCollector.AddToInput( m_uniqueId, UIUtils.GetInputDeclarationFromType( m_currentPrecisionType, AvailableSurfaceInputs.WORLD_NORMAL ), true );
string tilling = m_inputPorts[ 3 ].GeneratePortInstructions( ref dataCollector );
string falloff = m_inputPorts[ 4 ].GeneratePortInstructions( ref dataCollector );
dataCollector.ForceNormal = true;
dataCollector.AddToInput( m_uniqueId, Constants.InternalData, false );
if ( m_normalCorrection )
{
string worldToTangent = GeneratorUtils.GenerateWorldToTangentMatrix( ref dataCollector, m_uniqueId, m_currentPrecisionType );
string pos = GeneratorUtils.GenerateWorldPosition( ref dataCollector, m_uniqueId );
string norm = GeneratorUtils.GenerateWorldNormal( ref dataCollector, m_uniqueId );
if ( m_selectedTriplanarSpace == TriplanarSpace.Object )
{
dataCollector.AddLocalVariable( m_uniqueId, "float3 localTangent = mul( unity_WorldToObject, float4( " + GeneratorUtils.WorldTangentStr + ", 0 ) );" );
dataCollector.AddLocalVariable( m_uniqueId, "float3 localBitangent = mul( unity_WorldToObject, float4( " + GeneratorUtils.WorldBitangentStr + ", 0 ) );" );
dataCollector.AddLocalVariable( m_uniqueId, "float3 localNormal = mul( unity_WorldToObject, float4( "+ GeneratorUtils.WorldNormalStr + ", 0 ) );" );
norm = "localNormal";
dataCollector.AddLocalVariable( m_uniqueId, "float3x3 objectToTangent = float3x3(localTangent, localBitangent, localNormal);" );
dataCollector.AddLocalVariable( m_uniqueId, "float3 localPos = mul( unity_WorldToObject, float4( " + pos + ", 1 ) );" );
pos = "localPos";
worldToTangent = "objectToTangent";
}
string normalTriplanar = string.Empty;
IOUtils.AddFunctionHeader( ref normalTriplanar, m_functionNormalHeader );
if ( m_selectedTriplanarType == TriplanarType.Spherical )
{
for ( int i = 0; i < m_functionNormalBody.Length; i++ )
{
IOUtils.AddFunctionLine( ref normalTriplanar, m_functionNormalBody[ i ] );
}
}
else
{
for ( int i = 0; i < m_functionNormalBodyTMB.Length; i++ )
{
IOUtils.AddFunctionLine( ref normalTriplanar, m_functionNormalBodyTMB[ i ] );
}
}
IOUtils.CloseFunctionBody( ref normalTriplanar );
string call = dataCollector.AddFunctions( m_functionNormalCall, normalTriplanar, texTop, texMid, texBot, pos, norm, falloff, tilling, ( isVertex ? "1" : "0" ) );
dataCollector.AddToLocalVariables( dataCollector.PortCategory, m_uniqueId, "float3 worldTriplanarNormal" + m_uniqueId + " = " + call + ";" );
dataCollector.AddToLocalVariables( dataCollector.PortCategory, m_uniqueId, "float3 tanTriplanarNormal" + m_uniqueId + " = mul( " + worldToTangent + ", worldTriplanarNormal" + m_uniqueId + " );" );
return GetOutputVectorItem( 0, outputId, "tanTriplanarNormal" + m_uniqueId );
} else
{
string samplingTriplanar = string.Empty;
IOUtils.AddFunctionHeader( ref samplingTriplanar, m_functionSamplingHeader );
if(m_selectedTriplanarType == TriplanarType.Spherical )
{
for ( int i = 0; i < m_functionSamplingBody.Length; i++ )
{
IOUtils.AddFunctionLine( ref samplingTriplanar, m_functionSamplingBody[ i ] );
}
}
else
{
for ( int i = 0; i < m_functionSamplingBodyTMB.Length; i++ )
{
IOUtils.AddFunctionLine( ref samplingTriplanar, m_functionSamplingBodyTMB[ i ] );
}
}
IOUtils.CloseFunctionBody( ref samplingTriplanar );
string pos = GeneratorUtils.GenerateWorldPosition( ref dataCollector, m_uniqueId );
string norm = GeneratorUtils.GenerateWorldNormal( ref dataCollector, m_uniqueId );
if (m_selectedTriplanarSpace == TriplanarSpace.Object )
{
dataCollector.AddToLocalVariables( dataCollector.PortCategory, m_uniqueId, "float3 localPos = mul( unity_WorldToObject, float4( " + pos + ", 1 ) );" );
pos = "localPos";
dataCollector.AddToLocalVariables( dataCollector.PortCategory, m_uniqueId, "float3 localNormal = mul( unity_WorldToObject, float4( " + norm + ", 0 ) );" );
norm = "localNormal";
}
string call = dataCollector.AddFunctions( m_functionSamplingCall, samplingTriplanar, texTop, texMid, texBot, pos, norm, falloff, tilling, ( isVertex ? "1" : "0") );
dataCollector.AddToLocalVariables( dataCollector.PortCategory, m_uniqueId, "float4 triplanar" + m_uniqueId + " = " + call + ";" );
return GetOutputVectorItem( 0, outputId, "triplanar" + m_uniqueId );
}
}
public override void UpdateMaterial( Material mat )
{
base.UpdateMaterial( mat );
m_topTexture.OnPropertyNameChanged();
if ( mat.HasProperty( m_topTexture.PropertyName ) )
{
mat.SetTexture( m_topTexture.PropertyName, m_topTexture.MaterialValue );
}
m_midTexture.OnPropertyNameChanged();
if ( mat.HasProperty( m_midTexture.PropertyName ) )
{
mat.SetTexture( m_midTexture.PropertyName, m_midTexture.MaterialValue );
}
m_botTexture.OnPropertyNameChanged();
if ( mat.HasProperty( m_botTexture.PropertyName ) )
{
mat.SetTexture( m_botTexture.PropertyName, m_botTexture.MaterialValue );
}
}
public void SetDelayedMaterialMode( Material mat )
{
if ( mat.HasProperty( m_topTexture.PropertyName ) )
{
m_topTexture.MaterialValue = mat.GetTexture( m_topTexture.PropertyName );
}
if ( mat.HasProperty( m_midTexture.PropertyName ) )
{
m_midTexture.MaterialValue = mat.GetTexture( m_midTexture.PropertyName );
}
if ( mat.HasProperty( m_botTexture.PropertyName ) )
{
m_botTexture.MaterialValue = mat.GetTexture( m_botTexture.PropertyName );
}
}
public override void ForceUpdateFromMaterial( Material material )
{
base.ForceUpdateFromMaterial( material );
if ( material.HasProperty( m_topTexture.PropertyName ) )
{
m_topTexture.MaterialValue = material.GetTexture( m_topTexture.PropertyName );
}
if ( material.HasProperty( m_midTexture.PropertyName ) )
{
m_midTexture.MaterialValue = material.GetTexture( m_midTexture.PropertyName );
}
if ( material.HasProperty( m_botTexture.PropertyName ) )
{
m_botTexture.MaterialValue = material.GetTexture( m_botTexture.PropertyName );
}
}
public override void ReadFromString( ref string[] nodeParams )
{
base.ReadFromString( ref nodeParams );
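// Fields must be read in exactly the same order WriteToString writes them.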
m_selectedTriplanarType = ( TriplanarType )Enum.Parse( typeof( TriplanarType ), GetCurrentParam( ref nodeParams ) );
m_selectedTriplanarSpace = ( TriplanarSpace )Enum.Parse( typeof( TriplanarSpace ), GetCurrentParam( ref nodeParams ) );
m_normalCorrection = Convert.ToBoolean( GetCurrentParam( ref nodeParams ) );
m_tempTopInspectorName = GetCurrentParam( ref nodeParams );
m_tempTopName = GetCurrentParam( ref nodeParams );
m_tempTopDefaultValue = ( TexturePropertyValues )Enum.Parse( typeof( TexturePropertyValues ), GetCurrentParam( ref nodeParams ) );
m_tempTopOrderIndex = Convert.ToInt32( GetCurrentParam( ref nodeParams ) );
m_tempTopDefaultTexture = AssetDatabase.LoadAssetAtPath<Texture2D>( GetCurrentParam( ref nodeParams ) );
m_tempMidInspectorName = GetCurrentParam( ref nodeParams );
m_tempMidName = GetCurrentParam( ref nodeParams );
m_tempMidDefaultValue = ( TexturePropertyValues )Enum.Parse( typeof( TexturePropertyValues ), GetCurrentParam( ref nodeParams ) );
m_tempMidOrderIndex = Convert.ToInt32( GetCurrentParam( ref nodeParams ) );
m_tempMidDefaultTexture = AssetDatabase.LoadAssetAtPath<Texture2D>( GetCurrentParam( ref nodeParams ) );
m_tempBotInspectorName = GetCurrentParam( ref nodeParams );
m_tempBotName = GetCurrentParam( ref nodeParams );
m_tempBotDefaultValue = ( TexturePropertyValues )Enum.Parse( typeof( TexturePropertyValues ), GetCurrentParam( ref nodeParams ) );
m_tempBotOrderIndex = Convert.ToInt32( GetCurrentParam( ref nodeParams ) );
m_tempBotDefaultTexture = AssetDatabase.LoadAssetAtPath<Texture2D>( GetCurrentParam( ref nodeParams ) );
if ( UIUtils.CurrentShaderVersion() > 6102 )
m_propertyInspectorName = GetCurrentParam( ref nodeParams );
SetTitleText( m_propertyInspectorName );
ConfigurePorts();
}
public override void WriteToString( ref string nodeInfo, ref string connectionsInfo )
{
base.WriteToString( ref nodeInfo, ref connectionsInfo );
IOUtils.AddFieldValueToString( ref nodeInfo, m_selectedTriplanarType );
IOUtils.AddFieldValueToString( ref nodeInfo, m_selectedTriplanarSpace );
IOUtils.AddFieldValueToString( ref nodeInfo, m_normalCorrection );
IOUtils.AddFieldValueToString( ref nodeInfo, m_topTexture.PropertyInspectorName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_topTexture.PropertyName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_topTexture.DefaultTextureValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_topTexture.OrderIndex.ToString() );
IOUtils.AddFieldValueToString( ref nodeInfo, ( m_topTexture.DefaultValue != null ) ? AssetDatabase.GetAssetPath( m_topTexture.DefaultValue ) : Constants.NoStringValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_midTexture.PropertyInspectorName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_midTexture.PropertyName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_midTexture.DefaultTextureValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_midTexture.OrderIndex.ToString() );
IOUtils.AddFieldValueToString( ref nodeInfo, ( m_midTexture.DefaultValue != null ) ? AssetDatabase.GetAssetPath( m_midTexture.DefaultValue ) : Constants.NoStringValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_botTexture.PropertyInspectorName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_botTexture.PropertyName );
IOUtils.AddFieldValueToString( ref nodeInfo, m_botTexture.DefaultTextureValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_botTexture.OrderIndex.ToString() );
IOUtils.AddFieldValueToString( ref nodeInfo, ( m_botTexture.DefaultValue != null ) ? AssetDatabase.GetAssetPath( m_botTexture.DefaultValue ) : Constants.NoStringValue );
IOUtils.AddFieldValueToString( ref nodeInfo, m_propertyInspectorName );
}
public override void RefreshOnUndo()
{
base.RefreshOnUndo();
if ( m_topTexture != null )
{
m_topTexture.BeginPropertyFromInspectorCheck();
}
if ( m_midTexture != null )
{
m_midTexture.BeginPropertyFromInspectorCheck();
}
if ( m_botTexture != null )
{
m_botTexture.BeginPropertyFromInspectorCheck();
}
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Sql
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for VirtualNetworkRulesOperations.
/// </summary>
public static partial class VirtualNetworkRulesOperationsExtensions
{
/// <summary>
/// Gets a virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
public static VirtualNetworkRule Get(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName)
{
return operations.GetAsync(resourceGroupName, serverName, virtualNetworkRuleName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<VirtualNetworkRule> GetAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, serverName, virtualNetworkRuleName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates an existing virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='parameters'>
/// The requested virtual Network Rule Resource state.
/// </param>
public static VirtualNetworkRule CreateOrUpdate(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, VirtualNetworkRule parameters)
{
return operations.CreateOrUpdateAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates an existing virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='parameters'>
/// The requested virtual Network Rule Resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<VirtualNetworkRule> CreateOrUpdateAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, VirtualNetworkRule parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the virtual network rule with the given name.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
public static void Delete(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName)
{
operations.DeleteAsync(resourceGroupName, serverName, virtualNetworkRuleName).GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the virtual network rule with the given name.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.DeleteWithHttpMessagesAsync(resourceGroupName, serverName, virtualNetworkRuleName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets a list of virtual network rules in a server.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
public static IPage<VirtualNetworkRule> ListByServer(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName)
{
return operations.ListByServerAsync(resourceGroupName, serverName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual network rules in a server.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<VirtualNetworkRule>> ListByServerAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByServerWithHttpMessagesAsync(resourceGroupName, serverName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
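// A minimal paging sketch (assumptions: an authenticated SqlManagementClient named "client" and
// existing resource group / server names; not part of the generated API surface):
//   IPage<VirtualNetworkRule> page = client.VirtualNetworkRules.ListByServer(resourceGroupName, serverName);
//   while (true)
//   {
//       foreach (VirtualNetworkRule rule in page) { /* use rule */ }
//       if (string.IsNullOrEmpty(page.NextPageLink)) break;
//       page = client.VirtualNetworkRules.ListByServerNext(page.NextPageLink);
//   }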
/// <summary>
/// Creates or updates an existing virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='parameters'>
/// The requested virtual Network Rule Resource state.
/// </param>
public static VirtualNetworkRule BeginCreateOrUpdate(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, VirtualNetworkRule parameters)
{
return operations.BeginCreateOrUpdateAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates an existing virtual network rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='parameters'>
/// The requested virtual Network Rule Resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<VirtualNetworkRule> BeginCreateOrUpdateAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, VirtualNetworkRule parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the virtual network rule with the given name.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
public static void BeginDelete(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName)
{
operations.BeginDeleteAsync(resourceGroupName, serverName, virtualNetworkRuleName).GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the virtual network rule with the given name.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='virtualNetworkRuleName'>
/// The name of the virtual network rule.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this IVirtualNetworkRulesOperations operations, string resourceGroupName, string serverName, string virtualNetworkRuleName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, serverName, virtualNetworkRuleName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets a list of virtual network rules in a server.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<VirtualNetworkRule> ListByServerNext(this IVirtualNetworkRulesOperations operations, string nextPageLink)
{
return operations.ListByServerNextAsync(nextPageLink).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual network rules in a server.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<VirtualNetworkRule>> ListByServerNextAsync(this IVirtualNetworkRulesOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByServerNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
// Copyright (c) Umbraco.
// See LICENSE for more details.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Castle.Core.Logging;
using Microsoft.Extensions.Logging;
using Moq;
using NUnit.Framework;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Infrastructure.Persistence;
using Umbraco.Cms.Tests.Common;
using Umbraco.Cms.Tests.Common.Testing;
using Umbraco.Cms.Tests.Integration.Testing;
namespace Umbraco.Cms.Tests.Integration.Umbraco.Infrastructure.Scoping
{
[TestFixture]
[UmbracoTest(Database = UmbracoTestOptions.Database.NewEmptyPerFixture)]
public class ScopeTests : UmbracoIntegrationTest
{
private new ScopeProvider ScopeProvider => (ScopeProvider)base.ScopeProvider;
[SetUp]
public void SetUp() => Assert.IsNull(ScopeProvider.AmbientScope); // gone
protected override AppCaches GetAppCaches()
{
// Need to have a mockable request cache for tests
var appCaches = new AppCaches(
NoAppCache.Instance,
Mock.Of<IRequestCache>(x => x.IsAvailable == false),
new IsolatedCaches(_ => NoAppCache.Instance));
return appCaches;
}
[Test]
public void GivenUncompletedScopeOnChildThread_WhenTheParentCompletes_TheTransactionIsRolledBack()
{
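// The child task flows the ambient scope and pushes a nested scope on top of it; that nested scope
// is still alive when the parent completes, so disposing the parent throws and nothing is committed.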
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(ScopeProvider.AmbientScope);
IScope mainScope = scopeProvider.CreateScope();
var t = Task.Run(() =>
{
IScope nested = scopeProvider.CreateScope();
Thread.Sleep(2000);
nested.Dispose();
});
Thread.Sleep(1000); // mimic some long running operation that is shorter than the other thread
mainScope.Complete();
Assert.Throws<InvalidOperationException>(() => mainScope.Dispose());
Task.WaitAll(t);
}
[Test]
public void GivenNonDisposedChildScope_WhenTheParentDisposes_ThenInvalidOperationExceptionThrows()
{
// this all runs in the same execution context so the AmbientScope reference isn't a copy
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(ScopeProvider.AmbientScope);
IScope mainScope = scopeProvider.CreateScope();
IScope nested = scopeProvider.CreateScope(); // not disposing
InvalidOperationException ex = Assert.Throws<InvalidOperationException>(() => mainScope.Dispose());
Console.WriteLine(ex);
}
[Test]
public void GivenChildThread_WhenParentDisposedBeforeChild_ParentScopeThrows()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(ScopeProvider.AmbientScope);
IScope mainScope = scopeProvider.CreateScope();
var t = Task.Run(() =>
{
Console.WriteLine("Child Task start: " + scopeProvider.AmbientScope.InstanceId);
// This will push the child scope to the top of the Stack
IScope nested = scopeProvider.CreateScope();
Console.WriteLine("Child Task scope created: " + scopeProvider.AmbientScope.InstanceId);
Thread.Sleep(5000); // block for a bit to ensure the parent task is disposed first
Console.WriteLine("Child Task before dispose: " + scopeProvider.AmbientScope.InstanceId);
nested.Dispose();
Console.WriteLine("Child Task after dispose: " + scopeProvider.AmbientScope.InstanceId);
});
// provide some time for the child thread to start so the ambient context is copied in AsyncLocal
Thread.Sleep(2000);
// now dispose the main without waiting for the child thread to join
Console.WriteLine("Parent Task disposing: " + scopeProvider.AmbientScope.InstanceId);
// This will throw because the child thread's scope is now the ambient (top) scope,
// and here we are trying to dispose a scope that is no longer at the top of the stack.
Assert.Throws<InvalidOperationException>(() => mainScope.Dispose());
Task.WaitAll(t); // wait for the child to dispose
mainScope.Dispose(); // now it's ok
Console.WriteLine("Parent Task disposed: " + scopeProvider.AmbientScope?.InstanceId);
}
[Test]
public void GivenChildThread_WhenChildDisposedBeforeParent_OK()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(ScopeProvider.AmbientScope);
IScope mainScope = scopeProvider.CreateScope();
// Task.Run will flow the execution context unless ExecutionContext.SuppressFlow() is explicitly called.
// This is what occurs in normal async behavior since it is expected to await (and join) the main thread,
// but if Task.Run is used as a fire and forget thread without being done correctly then the Scope will
// flow to that thread.
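// A minimal sketch of opting out of that flow (hypothetical, not used by this test):
//   using (ExecutionContext.SuppressFlow())
//   {
//       Task detached = Task.Run(() => { /* no ambient scope flows here */ });
//   }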
var t = Task.Run(() =>
{
Console.WriteLine("Child Task start: " + scopeProvider.AmbientScope.InstanceId);
IScope nested = scopeProvider.CreateScope();
Console.WriteLine("Child Task before dispose: " + scopeProvider.AmbientScope.InstanceId);
nested.Dispose();
Console.WriteLine("Child Task after disposed: " + scopeProvider.AmbientScope.InstanceId);
});
Console.WriteLine("Parent Task waiting: " + scopeProvider.AmbientScope?.InstanceId);
Task.WaitAll(t);
Console.WriteLine("Parent Task disposing: " + scopeProvider.AmbientScope.InstanceId);
mainScope.Dispose();
Console.WriteLine("Parent Task disposed: " + scopeProvider.AmbientScope?.InstanceId);
Assert.Pass();
}
[Test]
public void SimpleCreateScope()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(ScopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
}
Assert.IsNull(scopeProvider.AmbientScope);
}
[Test]
public void SimpleCreateScopeContext()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
Assert.IsNotNull(scopeProvider.Context);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
}
[Test]
public void SimpleCreateScopeDatabase()
{
ScopeProvider scopeProvider = ScopeProvider;
IUmbracoDatabase database;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
database = scope.Database; // populates scope's database
Assert.IsNotNull(database);
Assert.IsNotNull(database.Connection); // in a transaction
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(database.Connection); // poof gone
}
[Test]
public void NestedCreateScope()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
using (IScope nested = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(nested);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(nested, scopeProvider.AmbientScope);
Assert.AreSame(scope, ((Scope)nested).ParentScope);
}
}
Assert.IsNull(scopeProvider.AmbientScope);
}
[Test]
public void NestedMigrateScope()
{
// Get the request cache mock and re-configure it to be available and used
var requestCacheDictionary = new Dictionary<string, object>();
IRequestCache requestCache = AppCaches.RequestCache;
var requestCacheMock = Mock.Get(requestCache);
requestCacheMock
.Setup(x => x.IsAvailable)
.Returns(true);
requestCacheMock
.Setup(x => x.Set(It.IsAny<string>(), It.IsAny<object>()))
.Returns((string key, object val) =>
{
requestCacheDictionary.Add(key, val);
return true;
});
requestCacheMock
.Setup(x => x.Get(It.IsAny<string>()))
.Returns((string key) => requestCacheDictionary.TryGetValue(key, out var val) ? val : null);
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
using (IScope nested = scopeProvider.CreateScope(callContext: true))
{
Assert.IsInstanceOf<Scope>(nested);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(nested, scopeProvider.AmbientScope);
Assert.AreSame(scope, ((Scope)nested).ParentScope);
// it's moved over to call context
ConcurrentStack<IScope> callContextScope = scopeProvider.GetCallContextScopeValue();
Assert.IsNotNull(callContextScope);
Assert.AreEqual(2, callContextScope.Count);
}
// it's naturally back in http context
}
Assert.IsNull(scopeProvider.AmbientScope);
}
[Test]
public void NestedCreateScopeContext()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
IScopeContext context;
using (IScope nested = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(nested);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(nested, scopeProvider.AmbientScope);
Assert.AreSame(scope, ((Scope)nested).ParentScope);
Assert.IsNotNull(scopeProvider.Context);
Assert.IsNotNull(scopeProvider.AmbientContext);
context = scopeProvider.Context;
}
Assert.IsNotNull(scopeProvider.AmbientContext);
Assert.AreSame(context, scopeProvider.AmbientContext);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
}
[Test]
public void NestedCreateScopeInnerException()
{
ScopeProvider scopeProvider = ScopeProvider;
bool? scopeCompleted = null;
Assert.IsNull(scopeProvider.AmbientScope);
try
{
using (IScope scope = scopeProvider.CreateScope())
{
scopeProvider.Context.Enlist("test", completed => scopeCompleted = completed);
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
using (IScope nested = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(nested);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(nested, scopeProvider.AmbientScope);
Assert.AreSame(scope, ((Scope)nested).ParentScope);
nested.Complete();
throw new Exception("bang!");
}
scope.Complete();
}
Assert.Fail("Expected exception.");
}
catch (Exception e)
{
if (e.Message != "bang!")
{
Assert.Fail("Wrong exception.");
}
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNotNull(scopeCompleted);
Assert.IsFalse(scopeCompleted.Value);
}
[Test]
public void NestedCreateScopeDatabase()
{
ScopeProvider scopeProvider = ScopeProvider;
IUmbracoDatabase database;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
database = scope.Database; // populates scope's database
Assert.IsNotNull(database);
Assert.IsNotNull(database.Connection); // in a transaction
using (IScope nested = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(nested);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(nested, scopeProvider.AmbientScope);
Assert.AreSame(scope, ((Scope)nested).ParentScope);
Assert.AreSame(database, nested.Database);
}
Assert.IsNotNull(database.Connection); // still
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(database.Connection); // poof gone
}
[Test]
public void Transaction()
{
ScopeProvider scopeProvider = ScopeProvider;
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("CREATE TABLE tmp3 (id INT, name NVARCHAR(64))");
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("INSERT INTO tmp3 (id, name) VALUES (1, 'a')");
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp3 WHERE id=1");
Assert.AreEqual("a", n);
}
using (IScope scope = scopeProvider.CreateScope())
{
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp3 WHERE id=1");
Assert.IsNull(n);
}
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("INSERT INTO tmp3 (id, name) VALUES (1, 'a')");
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp3 WHERE id=1");
Assert.AreEqual("a", n);
}
}
[Test]
public void NestedTransactionInnerFail()
{
ScopeProvider scopeProvider = ScopeProvider;
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute($"CREATE TABLE tmp1 (id INT, name NVARCHAR(64))");
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("INSERT INTO tmp1 (id, name) VALUES (1, 'a')");
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp1 WHERE id=1");
Assert.AreEqual("a", n);
using (IScope nested = scopeProvider.CreateScope())
{
nested.Database.Execute("INSERT INTO tmp1 (id, name) VALUES (2, 'b')");
string nn = nested.Database.ExecuteScalar<string>("SELECT name FROM tmp1 WHERE id=2");
Assert.AreEqual("b", nn);
}
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp1 WHERE id=2");
Assert.AreEqual("b", n);
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp1 WHERE id=1");
Assert.IsNull(n);
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp1 WHERE id=2");
Assert.IsNull(n);
}
}
[Test]
public void NestedTransactionOuterFail()
{
ScopeProvider scopeProvider = ScopeProvider;
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("CREATE TABLE tmp2 (id INT, name NVARCHAR(64))");
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("INSERT INTO tmp2 (id, name) VALUES (1, 'a')");
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp2 WHERE id=1");
Assert.AreEqual("a", n);
using (IScope nested = scopeProvider.CreateScope())
{
nested.Database.Execute("INSERT INTO tmp2 (id, name) VALUES (2, 'b')");
string nn = nested.Database.ExecuteScalar<string>("SELECT name FROM tmp2 WHERE id=2");
Assert.AreEqual("b", nn);
nested.Complete();
}
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp2 WHERE id=2");
Assert.AreEqual("b", n);
}
using (IScope scope = scopeProvider.CreateScope())
{
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp2 WHERE id=1");
Assert.IsNull(n);
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp2 WHERE id=2");
Assert.IsNull(n);
}
}
[Test]
public void NestedTransactionComplete()
{
ScopeProvider scopeProvider = ScopeProvider;
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("CREATE TABLE tmp (id INT, name NVARCHAR(64))");
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
scope.Database.Execute("INSERT INTO tmp (id, name) VALUES (1, 'a')");
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp WHERE id=1");
Assert.AreEqual("a", n);
using (IScope nested = scopeProvider.CreateScope())
{
nested.Database.Execute("INSERT INTO tmp (id, name) VALUES (2, 'b')");
string nn = nested.Database.ExecuteScalar<string>("SELECT name FROM tmp WHERE id=2");
Assert.AreEqual("b", nn);
nested.Complete();
}
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp WHERE id=2");
Assert.AreEqual("b", n);
scope.Complete();
}
using (IScope scope = scopeProvider.CreateScope())
{
string n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp WHERE id=1");
Assert.AreEqual("a", n);
n = scope.Database.ExecuteScalar<string>("SELECT name FROM tmp WHERE id=2");
Assert.AreEqual("b", n);
}
}
[Test]
public void CallContextScope1()
{
var taskHelper = new TaskHelper(Mock.Of<ILogger<TaskHelper>>());
ScopeProvider scopeProvider = ScopeProvider;
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
// Run on another thread without a flowed context
Task t = taskHelper.ExecuteBackgroundTask(() =>
{
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
using (IScope newScope = scopeProvider.CreateScope())
{
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientScope.ParentScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
return Task.CompletedTask;
});
Task.WaitAll(t);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
}
[Test]
public void CallContextScope2()
{
var taskHelper = new TaskHelper(Mock.Of<ILogger<TaskHelper>>());
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
// Run on another thread without a flowed context
Task t = taskHelper.ExecuteBackgroundTask(() =>
{
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
using (IScope newScope = scopeProvider.CreateScope())
{
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientScope.ParentScope);
Assert.IsNotNull(scopeProvider.AmbientContext);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
return Task.CompletedTask;
});
Task.WaitAll(t);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
}
[Test]
public void ScopeReference()
{
ScopeProvider scopeProvider = ScopeProvider;
IScope scope = scopeProvider.CreateScope();
IScope nested = scopeProvider.CreateScope();
Assert.IsNotNull(scopeProvider.AmbientScope);
var scopeRef = new HttpScopeReference(scopeProvider);
scopeRef.Register();
scopeRef.Dispose();
Assert.IsNull(scopeProvider.AmbientScope);
Assert.Throws<ObjectDisposedException>(() =>
{
IUmbracoDatabase db = scope.Database;
});
Assert.Throws<ObjectDisposedException>(() =>
{
IUmbracoDatabase db = nested.Database;
});
}
[TestCase(true)]
[TestCase(false)]
public void ScopeContextEnlist(bool complete)
{
ScopeProvider scopeProvider = ScopeProvider;
bool? completed = null;
IScope ambientScope = null;
IScopeContext ambientContext = null;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
scopeProvider.Context.Enlist("name", c =>
{
completed = c;
ambientScope = scopeProvider.AmbientScope;
ambientContext = scopeProvider.AmbientContext;
});
if (complete)
{
scope.Complete();
}
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
Assert.IsNotNull(completed);
Assert.AreEqual(complete, completed.Value);
Assert.IsNull(ambientScope); // the scope is gone
Assert.IsNotNull(ambientContext); // the context is still there
}
[TestCase(true)]
[TestCase(false)]
public void ScopeContextEnlistAgain(bool complete)
{
ScopeProvider scopeProvider = ScopeProvider;
bool? completed = null;
bool? completed2 = null;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
scopeProvider.Context.Enlist("name", c =>
{
completed = c;
// at that point the scope is gone, but the context is still there
IScopeContext ambientContext = scopeProvider.AmbientContext;
ambientContext.Enlist("another", c2 => completed2 = c2);
});
if (complete)
{
scope.Complete();
}
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
Assert.IsNotNull(completed);
Assert.AreEqual(complete, completed.Value);
Assert.AreEqual(complete, completed2.Value);
}
[Test]
public void ScopeContextException()
{
ScopeProvider scopeProvider = ScopeProvider;
bool? completed = null;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
IScope detached = scopeProvider.CreateDetachedScope();
scopeProvider.AttachScope(detached);
// the exception does not prevent other enlisted items from running
// *and* it does not prevent the scope from properly going down
scopeProvider.Context.Enlist("name", c => throw new Exception("bang"));
scopeProvider.Context.Enlist("other", c => completed = c);
detached.Complete();
Assert.Throws<AggregateException>(() => detached.Dispose());
// even though disposing of the scope has thrown, it has exited
// properly, i.e. it has removed itself, and the app remains clean
Assert.AreSame(scope, scopeProvider.AmbientScope);
scope.Complete();
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
Assert.IsNotNull(completed);
Assert.AreEqual(true, completed);
}
[Test]
public void DetachableScope()
{
ScopeProvider scopeProvider = ScopeProvider;
Assert.IsNull(scopeProvider.AmbientScope);
using (IScope scope = scopeProvider.CreateScope())
{
Assert.IsInstanceOf<Scope>(scope);
Assert.IsNotNull(scopeProvider.AmbientScope);
Assert.AreSame(scope, scopeProvider.AmbientScope);
Assert.IsNotNull(scopeProvider.AmbientContext); // the ambient context
Assert.IsNotNull(scopeProvider.Context); // the ambient context too (getter only)
IScopeContext context = scopeProvider.Context;
IScope detached = scopeProvider.CreateDetachedScope();
scopeProvider.AttachScope(detached);
Assert.AreEqual(detached, scopeProvider.AmbientScope);
Assert.AreNotSame(context, scopeProvider.Context);
// nesting under detached!
using (IScope nested = scopeProvider.CreateScope())
{
Assert.Throws<InvalidOperationException>(() =>
// cannot detach a non-detachable scope
scopeProvider.DetachScope());
nested.Complete();
}
Assert.AreEqual(detached, scopeProvider.AmbientScope);
Assert.AreNotSame(context, scopeProvider.Context);
// can detach
Assert.AreSame(detached, scopeProvider.DetachScope());
Assert.AreSame(scope, scopeProvider.AmbientScope);
Assert.AreSame(context, scopeProvider.AmbientContext);
Assert.Throws<InvalidOperationException>(() =>
// cannot dispose a non-attached scope
// in fact, only the ambient scope can be disposed
detached.Dispose());
scopeProvider.AttachScope(detached);
detached.Complete();
detached.Dispose();
// has self-detached, and is gone!
Assert.AreSame(scope, scopeProvider.AmbientScope);
Assert.AreSame(context, scopeProvider.AmbientContext);
}
Assert.IsNull(scopeProvider.AmbientScope);
Assert.IsNull(scopeProvider.AmbientContext);
}
}
}
using System;
using System.Collections.Generic;
#if RAWR3
using System.Windows.Media;
#endif
using System.Text;
namespace Rawr.Cat
{
[Rawr.Calculations.RawrModelInfo("Cat", "Ability_Druid_CatForm", CharacterClass.Druid)]
public class CalculationsCat : CalculationsBase
{
//my insides all turned to ash / so slow
//and blew away as i collapsed / so cold
public override List<GemmingTemplate> DefaultGemmingTemplates
{
get
{
// Relevant Gem IDs for Cats
//Red
int[] delicate = { 39905, 39997, 40112, 42143 }; // Agi
int[] fractured = { 39909, 40002, 40117, 42153 }; // ArP
//Purple
int[] shifting = { 39935, 40023, 40130, 40130 }; // Agi/Sta
int[] puissant = { 39933, 40033, 40140, 40140 }; // ArP/Sta
int[] forceful = { 39978, 40091, 40169, 40169 }; // Haste/Sta
//Blue
//Green
//Yellow
//Orange
int[] glinting = { 39953, 40044, 40148, 40148 }; // Agi/Hit
int[] deadly = { 39952, 40043, 40147, 40147 }; // Agi/Crit
int[] deft = { 39955, 40046, 40150, 40150 }; // Agi/Haste
// Prismatic
int[] nightmare = { 49110, 49110, 49110, 49110 };
//Meta
int relentless = 41398;
List<GemmingTemplate> list = new List<GemmingTemplate>();
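// One template set per gem quality tier: Uncommon, Rare, Epic, Jeweler (see CreateCatGemmingTemplate).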
for (int tier = 0; tier < 4; tier++)
{
list.AddRange(new GemmingTemplate[]
{
CreateCatGemmingTemplate(tier, fractured, fractured, fractured, fractured, relentless), //Max ArP
CreateCatGemmingTemplate(tier, fractured, deadly, puissant, fractured, relentless), //ArP/Crit
CreateCatGemmingTemplate(tier, fractured, deft, puissant, fractured, relentless), //ArP/Haste
CreateCatGemmingTemplate(tier, fractured, glinting, puissant, fractured, relentless), //ArP/Hit
CreateCatGemmingTemplate(tier, fractured, deadly, nightmare, fractured, relentless), //ArP/Crit/Nightmare
CreateCatGemmingTemplate(tier, fractured, deft, nightmare, fractured, relentless), //ArP/Haste/Nightmare
CreateCatGemmingTemplate(tier, fractured, glinting, nightmare, fractured, relentless), //ArP/Hit/Nightmare
CreateCatGemmingTemplate(tier, delicate, delicate, delicate, delicate, relentless), //Max Agi
CreateCatGemmingTemplate(tier, delicate, deadly, shifting, delicate, relentless), //Agi/Crit
CreateCatGemmingTemplate(tier, delicate, deft, shifting, delicate, relentless), //Agi/Haste
CreateCatGemmingTemplate(tier, delicate, glinting, shifting, delicate, relentless), //Agi/Hit
CreateCatGemmingTemplate(tier, delicate, deadly, nightmare, delicate, relentless), //Agi/Crit/Nightmare
CreateCatGemmingTemplate(tier, delicate, deft, nightmare, delicate, relentless), //Agi/Haste/Nightmare
CreateCatGemmingTemplate(tier, delicate, glinting, nightmare, delicate, relentless), //Agi/Hit/Nightmare
});
}
return list;
}
}
private const int DEFAULT_GEMMING_TIER = 2;
private GemmingTemplate CreateCatGemmingTemplate(int tier, int[] red, int[] yellow, int[] blue, int[] prismatic, int meta)
{
return new GemmingTemplate()
{
Model = "Cat",
Group = (new string[] { "Uncommon", "Rare", "Epic", "Jeweler" })[tier],
Enabled = (tier == DEFAULT_GEMMING_TIER),
RedId = red[tier],
YellowId = yellow[tier],
BlueId = blue[tier],
PrismaticId = prismatic[tier],
MetaId = meta
};
}
#if RAWR3
private ICalculationOptionsPanel _calculationOptionsPanel = null;
public override ICalculationOptionsPanel CalculationOptionsPanel
#else
private CalculationOptionsPanelBase _calculationOptionsPanel = null;
public override CalculationOptionsPanelBase CalculationOptionsPanel
#endif
{
get
{
if (_calculationOptionsPanel == null)
{
_calculationOptionsPanel = new CalculationOptionsPanelCat();
}
return _calculationOptionsPanel;
}
}
private string[] _characterDisplayCalculationLabels = null;
public override string[] CharacterDisplayCalculationLabels
{
get
{
if (_characterDisplayCalculationLabels == null)
_characterDisplayCalculationLabels = new string[] {
"Summary:Overall Points*Sum of your DPS Points and Survivability Points",
"Summary:DPS Points*DPS Points is your theoretical DPS.",
"Summary:Survivability Points*One hundredth of your health.",
"Basic Stats:Health",
"Basic Stats:Attack Power",
"Basic Stats:Agility",
"Basic Stats:Strength",
"Basic Stats:Crit Rating",
"Basic Stats:Hit Rating",
"Basic Stats:Expertise Rating",
"Basic Stats:Haste Rating",
"Basic Stats:Armor Penetration Rating",
"Basic Stats:Weapon Damage",
"Complex Stats:Avoided Attacks",
"Complex Stats:Crit Chance",
"Complex Stats:Attack Speed",
"Complex Stats:Armor Mitigation",
"Abilities:Optimal Rotation",
"Abilities:Optimal Rotation DPS",
"Abilities:Custom Rotation DPS",
"Abilities:Melee",
"Abilities:Mangle",
"Abilities:Shred",
"Abilities:Rake",
"Abilities:Rip",
"Abilities:Bite",
//"Abilities:Melee Usage",
//"Abilities:Melee Stats",
//"Abilities:Mangle Usage",
//"Abilities:Mangle Stats",
//"Abilities:Shred Usage",
//"Abilities:Shred Stats",
//"Abilities:Rake Usage",
//"Abilities:Rake Stats",
//"Abilities:Rip Usage",
//"Abilities:Rip Stats",
//"Abilities:Bite Usage",
//"Abilities:Bite Stats",
};
return _characterDisplayCalculationLabels;
}
}
private string[] _optimizableCalculationLabels = null;
public override string[] OptimizableCalculationLabels
{
get
{
if (_optimizableCalculationLabels == null)
_optimizableCalculationLabels = new string[] {
"Custom Rotation DPS",
"Health",
"Avoided Attacks %",
"Nature Resist",
"Fire Resist",
"Frost Resist",
"Shadow Resist",
"Arcane Resist",
};
return _optimizableCalculationLabels;
}
}
private string[] _customChartNames = null;
public override string[] CustomChartNames
{
get
{
if (_customChartNames == null)
_customChartNames = new string[] {
//"Hit Test",
};
return _customChartNames;
}
}
#if RAWR3
private Dictionary<string, Color> _subPointNameColors = null;
public override Dictionary<string, Color> SubPointNameColors
{
get
{
if (_subPointNameColors == null)
{
_subPointNameColors = new Dictionary<string, Color>();
_subPointNameColors.Add("DPS", Color.FromArgb(255, 160, 0, 224));
_subPointNameColors.Add("Survivability", Color.FromArgb(255, 64, 128, 32));
}
return _subPointNameColors;
}
}
#else
private Dictionary<string, System.Drawing.Color> _subPointNameColors = null;
public override Dictionary<string, System.Drawing.Color> SubPointNameColors
{
get
{
if (_subPointNameColors == null)
{
_subPointNameColors = new Dictionary<string, System.Drawing.Color>();
_subPointNameColors.Add("DPS", System.Drawing.Color.FromArgb(160, 0, 224));
_subPointNameColors.Add("Survivability", System.Drawing.Color.FromArgb(64, 128, 32));
}
return _subPointNameColors;
}
}
#endif
private List<ItemType> _relevantItemTypes = null;
public override List<ItemType> RelevantItemTypes
{
get
{
if (_relevantItemTypes == null)
{
_relevantItemTypes = new List<ItemType>(new ItemType[]
{
ItemType.None,
ItemType.Leather,
ItemType.Idol,
ItemType.Staff,
ItemType.TwoHandMace,
ItemType.Polearm
});
}
return _relevantItemTypes;
}
}
public override CharacterClass TargetClass { get { return CharacterClass.Druid; } }
public override ComparisonCalculationBase CreateNewComparisonCalculation() { return new ComparisonCalculationCat(); }
public override CharacterCalculationsBase CreateNewCharacterCalculations() { return new CharacterCalculationsCat(); }
public override ICalculationOptionBase DeserializeDataObject(string xml)
{
System.Xml.Serialization.XmlSerializer serializer =
new System.Xml.Serialization.XmlSerializer(typeof(CalculationOptionsCat));
System.IO.StringReader reader = new System.IO.StringReader(xml);
CalculationOptionsCat calcOpts = serializer.Deserialize(reader) as CalculationOptionsCat;
return calcOpts;
}
public override CharacterCalculationsBase GetCharacterCalculations(Character character, Item additionalItem, bool referenceCalculation, bool significantChange, bool needsDisplayCalculations)
{
CalculationOptionsCat calcOpts = character.CalculationOptions as CalculationOptionsCat;
if (calcOpts == null) calcOpts = new CalculationOptionsCat();
int targetLevel = calcOpts.TargetLevel;
float targetArmor = calcOpts.TargetArmor;
StatsCat stats = GetCharacterStats(character, additionalItem) as StatsCat;
float levelDifference = (targetLevel - 80f) * 0.2f;
CharacterCalculationsCat calculatedStats = new CharacterCalculationsCat();
calculatedStats.BasicStats = stats;
calculatedStats.TargetLevel = targetLevel;
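// If the 30% bleed-damage debuff is not already supplied externally, the cat must maintain Mangle itself;
// either way the damage formulas below assume the debuff is active.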
bool maintainMangle = stats.BonusBleedDamageMultiplier == 0f;
stats.BonusBleedDamageMultiplier = 0.3f;
#region Basic Chances and Constants
float modArmor = 0f;
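// Average the armor damage reduction over the uptime of each temporary armor-penetration proc,
// then convert it into the fraction of physical damage that gets through (modArmor).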
for (int i = 0; i < stats.TemporaryArPenRatingUptimes.Length; i++)
{
modArmor += stats.TemporaryArPenRatingUptimes[i].Chance * StatConversion.GetArmorDamageReduction(character.Level, calcOpts.TargetLevel, calcOpts.TargetArmor, stats.ArmorPenetration, 0f, stats.ArmorPenetrationRating + stats.TemporaryArPenRatingUptimes[i].Value);
}
modArmor = 1f - modArmor;
float critMultiplier = 2f * (1f + stats.BonusCritMultiplier);
float critMultiplierBleed = 2f * (1f + stats.BonusCritMultiplier);
float hasteBonus = StatConversion.GetPhysicalHasteFromRating(stats.HasteRating, CharacterClass.Druid);
float attackSpeed = 1f / (1f + hasteBonus);
attackSpeed = attackSpeed / (1f + stats.PhysicalHaste);
float hitBonus = stats.PhysicalHit + StatConversion.GetPhysicalHitFromRating(stats.HitRating, CharacterClass.Druid);
float expertiseBonus = StatConversion.GetDodgeParryReducFromExpertise(StatConversion.GetExpertiseFromRating(stats.ExpertiseRating, CharacterClass.Druid) + stats.Expertise, CharacterClass.Druid);
float chanceDodge = Math.Max(0f, StatConversion.WHITE_DODGE_CHANCE_CAP[targetLevel - 80] - expertiseBonus);
float chanceParry = 0f; //Math.Max(0f, StatConversion.WHITE_PARRY_CHANCE_CAP[targetLevel - 80] - expertiseBonus);
float chanceMiss = Math.Max(0f, StatConversion.WHITE_MISS_CHANCE_CAP[targetLevel - 80] - hitBonus);
float glanceMultiplier = 0.7f;
float chanceAvoided = chanceMiss + chanceDodge + chanceParry;
float chanceNonAvoided = 1f - chanceAvoided;
////Crit Chances
float chanceCritYellow = 0f;
float chanceHitYellow = 0f;
float cpPerCPG = 0f;
float chanceCritBite = 0f;
float chanceHitBite = 0f;
float chanceCritBleed = 0f;
float chanceCritRip = 0f;
float chanceCritRake = 0f;
float chanceGlance = 0f;
float chanceCritWhite = 0f;
float chanceHitWhite = 0f;
for (int i = 0; i < stats.TemporaryCritRatingUptimes.Length; i++)
{ //Sum up the weighted chances for each crit value
WeightedStat iStat = stats.TemporaryCritRatingUptimes[i];
//Yellow - 2-roll system: avoidance is rolled first; among landed hits, crit and hit chances sum to 1
float chanceCritYellowTemp = Math.Min(1f, StatConversion.GetCritFromRating(stats.CritRating + iStat.Value, CharacterClass.Druid)
+ StatConversion.GetCritFromAgility(stats.Agility, CharacterClass.Druid)
+ stats.PhysicalCrit
+ StatConversion.NPC_LEVEL_CRIT_MOD[targetLevel - 80]);
float chanceHitYellowTemp = 1f - chanceCritYellowTemp;
float cpPerCPGTemp = chanceHitYellowTemp + chanceCritYellowTemp * (1f + stats.BonusCPOnCrit);
//Bite - Identical to Yellow, with higher crit chance
float chanceCritBiteTemp = Math.Min(1f, chanceCritYellowTemp + stats.BonusFerociousBiteCrit);
float chanceHitBiteTemp = 1f - chanceCritBiteTemp;
//Bleeds - 1 Roll, no avoidance, total of 1 chance to crit and hit
float chanceCritBleedTemp = character.DruidTalents.PrimalGore > 0 ? chanceCritYellowTemp : 0f;
float chanceCritRipTemp = Math.Min(1f, chanceCritBleedTemp > 0f ? chanceCritBleedTemp + stats.BonusRipCrit : 0f);
float chanceCritRakeTemp = stats.BonusRakeCrit > 0 ? chanceCritBleedTemp : 0;
//White
float chanceGlanceTemp = StatConversion.WHITE_GLANCE_CHANCE_CAP[targetLevel - 80];
float chanceCritWhiteTemp = Math.Min(chanceCritYellowTemp, 1f - chanceGlanceTemp - chanceAvoided);
float chanceHitWhiteTemp = 1f - chanceCritWhiteTemp - chanceAvoided - chanceGlanceTemp;
chanceCritYellow += iStat.Chance * chanceCritYellowTemp;
chanceHitYellow += iStat.Chance * chanceHitYellowTemp;
cpPerCPG += iStat.Chance * cpPerCPGTemp;
chanceCritBite += iStat.Chance * chanceCritBiteTemp;
chanceHitBite += iStat.Chance * chanceHitBiteTemp;
chanceCritBleed += iStat.Chance * chanceCritBleedTemp;
chanceCritRip += iStat.Chance * chanceCritRipTemp;
chanceCritRake += iStat.Chance * chanceCritRakeTemp;
chanceGlance += iStat.Chance * chanceGlanceTemp;
chanceCritWhite += iStat.Chance * chanceCritWhiteTemp;
chanceHitWhite += iStat.Chance * chanceHitWhiteTemp;
}
calculatedStats.DodgedAttacks = chanceDodge * 100f;
calculatedStats.ParriedAttacks = chanceParry * 100f;
calculatedStats.MissedAttacks = chanceMiss * 100f;
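// Expected number of extra, avoided attempts before a debuff reapplication lands:
// 1 / (1 - chanceAvoided) attempts on average, minus the one that actually hits.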
float timeToReapplyDebuffs = 1f / (1f - chanceAvoided) - 1f;
float lagVariance = (float)calcOpts.LagVariance / 1000f;
float mangleDurationUptime = 60f; // (character.DruidTalents.GlyphOfMangle ? 18f : 12f);
float mangleDurationAverage = mangleDurationUptime - timeToReapplyDebuffs - lagVariance;
float rakeDurationUptime = 9f + stats.BonusRakeDuration;
float rakeDurationAverage = rakeDurationUptime + timeToReapplyDebuffs + lagVariance;
float ripDurationUptime = 12f + stats.BonusRipDuration; //Doesn't include Glyph of Shred
float ripDurationAverage = ripDurationUptime + timeToReapplyDebuffs + lagVariance; //Doesn't include Glyph of Shred
float roarBonusDuration = stats.BonusSavageRoarDuration - lagVariance;
float berserkDuration = character.DruidTalents.Berserk > 0 ? (character.DruidTalents.GlyphOfBerserk ? 20f : 15f) : 0f;
#endregion
#region Attack Damages
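// Average "paw" hit: cat form swings on a normalized 1.0s timer, so attack power adds damage
// at the standard 1 DPS per 14 AP on top of the 55 base and any bonus weapon damage.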
float baseDamage = 55f + (stats.AttackPower / 14f) + stats.WeaponDamage;
float meleeDamageRaw = (baseDamage) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * modArmor;
float mangleDamageRaw = (baseDamage * 2f + 566f) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusMangleDamageMultiplier) * modArmor;
float shredDamageRaw = (baseDamage * 2.25f + 666f + stats.BonusShredDamage) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusShredDamageMultiplier) * (1f + stats.BonusBleedDamageMultiplier) * modArmor;
float rakeDamageRaw = (176f + stats.AttackPower * 0.01f) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusRakeDamageMultiplier) * (1f + stats.BonusBleedDamageMultiplier);
float rakeDamageDot = (1074f + stats.AttackPower * 0.18f) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusRakeDamageMultiplier) * (1f + stats.BonusBleedDamageMultiplier) * ((9f + stats.BonusRakeDuration) / 9f);
float ripDamageRaw = (3006f + stats.AttackPower * 0.3f + (stats.BonusRipDamagePerCPPerTick * 5f * 6f)) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusRipDamageMultiplier) * (1f + stats.BonusBleedDamageMultiplier);
float biteBaseDamageRaw = 190f * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusFerociousBiteDamageMultiplier) * modArmor;
float biteCPDamageRaw = (290f + stats.AttackPower * 0.07f) * (1f + stats.BonusPhysicalDamageMultiplier) * (1f + stats.BonusDamageMultiplier) * (1f + stats.BonusFerociousBiteDamageMultiplier) * modArmor;
float meleeDamageAverage = chanceGlance * meleeDamageRaw * glanceMultiplier +
chanceCritWhite * meleeDamageRaw * critMultiplier +
chanceHitWhite * meleeDamageRaw;
float mangleDamageAverage = (1f - chanceCritYellow) * mangleDamageRaw + chanceCritYellow * mangleDamageRaw * critMultiplier;
float shredDamageAverage = (1f - chanceCritYellow) * shredDamageRaw + chanceCritYellow * shredDamageRaw * critMultiplier;
float rakeDamageAverage = ((1f - chanceCritYellow) * rakeDamageRaw + chanceCritYellow * rakeDamageRaw * critMultiplier) + ((1f - chanceCritRake) * rakeDamageDot + chanceCritRake * rakeDamageDot * critMultiplierBleed);
float ripDamageAverage = ((1f - chanceCritRip) * ripDamageRaw + chanceCritRip * ripDamageRaw * critMultiplierBleed);
float biteBaseDamageAverage = (1f - chanceCritBite) * biteBaseDamageRaw + chanceCritBite * biteBaseDamageRaw * critMultiplier;
float biteCPDamageAverage = (1f - chanceCritBite) * biteCPDamageRaw + chanceCritBite * biteCPDamageRaw * critMultiplier;
//if (needsDisplayCalculations)
//{
// Console.WriteLine("White: {0:P} Avoided, {1:P} Glance, {2:P} Hit, {3:P} Crit - Swing: {4}", chanceAvoided, chanceGlance, chanceHitWhite, chanceCritWhite, meleeDamageAverage);
// Console.WriteLine("Yellow: {0:P} Avoided, {1:P} NonAvoided, {2:P} Hit, {3:P} Crit - Swing: {4}", chanceAvoided, chanceNonAvoided, 1f - chanceCritYellow, chanceCritYellow, mangleDamageAverage);
// Console.WriteLine("Bite: {0:P} Avoided, {1:P} NonAvoided, {2:P} Hit, {3:P} Crit - Swing: {4}", chanceAvoided, chanceNonAvoided, 1f - chanceCritBite, chanceCritBite, biteBaseDamageAverage);
// Console.WriteLine("RipBleed: {0:P} Hit, {1:P} Crit - Swing: {2}", 1f - chanceCritRip, chanceCritRip, ripDamageAverage);
// Console.WriteLine();
//}
#endregion
#region Energy Costs
float mangleEnergyRaw = 45f - stats.MangleCatCostReduction;
float shredEnergyRaw = 60f - stats.ShredCostReduction;
float rakeEnergyRaw = 40f - stats.RakeCostReduction;
float ripEnergyRaw = 30f - stats.RipCostReduction;
float biteEnergyRaw = 35f; //Assuming no wasted energy
float roarEnergyRaw = 25f;
//[rawCost + ((1/chance_to_land) - 1) * rawCost/5]
float cpgEnergyCostMultiplier = 1f + ((1f / chanceNonAvoided) - 1f) * 0.2f;
float finisherEnergyCostMultiplier = 1f + ((1f / chanceNonAvoided) - 1f) * (1f - stats.FinisherEnergyOnAvoid);
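// Illustration: with 8% total avoidance (chanceNonAvoided = 0.92) the CPG multiplier is 1 + (1/0.92 - 1) * 0.2 ≈ 1.017, i.e. roughly 1.7% extra energy per combo-point generator.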
float mangleEnergyAverage = mangleEnergyRaw * cpgEnergyCostMultiplier;
float shredEnergyAverage = shredEnergyRaw * cpgEnergyCostMultiplier;
float rakeEnergyAverage = rakeEnergyRaw * cpgEnergyCostMultiplier;
float ripEnergyAverage = ripEnergyRaw * finisherEnergyCostMultiplier;
float biteEnergyAverage = biteEnergyRaw * finisherEnergyCostMultiplier;
float roarEnergyAverage = roarEnergyRaw;
#endregion
#region Ability Stats
CatAbilityStats meleeStats = new CatMeleeStats()
{
DamagePerHit = meleeDamageRaw,
DamagePerSwing = meleeDamageAverage,
};
CatAbilityStats mangleStats = new CatMangleStats()
{
DamagePerHit = mangleDamageRaw,
DamagePerSwing = mangleDamageAverage,
DurationUptime = mangleDurationUptime,
DurationAverage = mangleDurationAverage,
EnergyCost = mangleEnergyAverage,
};
CatAbilityStats shredStats = new CatShredStats()
{
DamagePerHit = shredDamageRaw,
DamagePerSwing = shredDamageAverage,
EnergyCost = shredEnergyAverage,
};
CatAbilityStats rakeStats = new CatRakeStats()
{
DamagePerHit = rakeDamageRaw + rakeDamageDot,
DamagePerSwing = rakeDamageAverage,
DurationUptime = rakeDurationUptime,
DurationAverage = rakeDurationAverage,
EnergyCost = rakeEnergyAverage,
};
CatAbilityStats ripStats = new CatRipStats()
{
DamagePerHit = ripDamageRaw,
DamagePerSwing = ripDamageAverage,
DurationUptime = ripDurationUptime,
DurationAverage = ripDurationAverage,
EnergyCost = ripEnergyAverage,
};
CatAbilityStats biteStats = new CatBiteStats()
{
DamagePerHit = biteBaseDamageRaw,
DamagePerSwing = biteBaseDamageAverage,
DamagePerHitPerCP = biteCPDamageRaw,
DamagePerSwingPerCP = biteCPDamageAverage,
EnergyCost = biteEnergyAverage,
};
CatAbilityStats roarStats = new CatRoarStats()
{
DurationUptime = roarBonusDuration,
DurationAverage = 9f + roarBonusDuration,
EnergyCost = roarEnergyAverage,
DurationPerCP = 5f,
};
#endregion
#region Rotations
CatRotationCalculator rotationCalculator = new CatRotationCalculator(stats, calcOpts.Duration, cpPerCPG,
maintainMangle, berserkDuration, attackSpeed, character.DruidTalents.OmenOfClarity > 0,
character.DruidTalents.GlyphOfShred, chanceAvoided, chanceCritYellow * stats.BonusCPOnCrit,
cpgEnergyCostMultiplier, stats.ClearcastOnBleedChance, meleeStats, mangleStats, shredStats,
rakeStats, ripStats, biteStats, roarStats);
CatRotationCalculator.CatRotationCalculation rotationCalculationDPS = new CatRotationCalculator.CatRotationCalculation();
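// Brute-force all 240 rotation variants (Savage Roar at 1-5 CP, Ferocious Bite at 0-5 CP, and Rake/Shred/Rip each on or off) and keep whichever yields the highest DPS.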
for (int roarCP = 1; roarCP < 6; roarCP++)
for (int biteCP = 0; biteCP < 6; biteCP++)
for (int useRake = 0; useRake < 2; useRake++)
for (int useShred = 0; useShred < 2; useShred++)
for (int useRip = 0; useRip < 2; useRip++)
{
CatRotationCalculator.CatRotationCalculation rotationCalculation =
rotationCalculator.GetRotationCalculations(
useRake == 1, useShred == 1, useRip == 1, biteCP, roarCP);
if (rotationCalculation.DPS > rotationCalculationDPS.DPS)
rotationCalculationDPS = rotationCalculation;
}
calculatedStats.HighestDPSRotation = rotationCalculationDPS;
calculatedStats.CustomRotation = rotationCalculator.GetRotationCalculations(
calcOpts.CustomUseRake, calcOpts.CustomUseShred, calcOpts.CustomUseRip, calcOpts.CustomCPFerociousBite, calcOpts.CustomCPSavageRoar);
if (character.DruidTalents.GlyphOfShred && rotationCalculationDPS.ShredCount > 0)
{
ripStats.DurationUptime += 6f;
ripStats.DurationAverage += 6f;
}
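// Rip's per-application damage above assumes the base 12s duration; scale it by the actual (glyph-extended) duration, since a longer Rip gets more ticks.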
ripStats.DamagePerHit *= ripStats.DurationUptime / 12f;
ripStats.DamagePerSwing *= ripStats.DurationUptime / 12f;
#endregion
calculatedStats.AvoidedAttacks = chanceAvoided * 100f;
calculatedStats.DodgedAttacks = chanceDodge * 100f;
calculatedStats.ParriedAttacks = chanceParry * 100f;
calculatedStats.MissedAttacks = chanceMiss * 100f;
calculatedStats.CritChance = chanceCritYellow * 100f;
calculatedStats.AttackSpeed = attackSpeed;
calculatedStats.ArmorMitigation = (1f - modArmor) * 100f;
calculatedStats.Duration = calcOpts.Duration;
calculatedStats.MeleeStats = meleeStats;
calculatedStats.MangleStats = mangleStats;
calculatedStats.ShredStats = shredStats;
calculatedStats.RakeStats = rakeStats;
calculatedStats.RipStats = ripStats;
calculatedStats.RoarStats = roarStats;
calculatedStats.BiteStats = biteStats;
float magicDPS = (stats.ShadowDamage + stats.ArcaneDamage) * (1f + chanceCritYellow);
float abomDPS = (stats.MoteOfAnger * meleeDamageAverage);
calculatedStats.DPSPoints = calculatedStats.HighestDPSRotation.DPS + magicDPS + abomDPS;
calculatedStats.SurvivabilityPoints = stats.Health / 100f;
calculatedStats.OverallPoints = calculatedStats.DPSPoints + calculatedStats.SurvivabilityPoints;
return calculatedStats;
}
public override Stats GetCharacterStats(Character character, Item additionalItem)
{
CalculationOptionsCat calcOpts = character.CalculationOptions as CalculationOptionsCat;
int targetLevel = calcOpts.TargetLevel;
Stats statsItems = GetItemStats(character, additionalItem);
statsItems.Armor *= new float[] { 1f, 1.04f, 1.07f, 1.1f }[character.DruidTalents.ThickHide];
DruidTalents talents = character.DruidTalents;
StatsCat statsTotal = new StatsCat()
{
PhysicalCrit = 0.02f * talents.SharpenedClaws
+ 0.02f * talents.MasterShapeshifter
+ ((character.ActiveBuffsContains("Leader of the Pack") ||
character.ActiveBuffsContains("Rampage"))
? 0 : 0.05f * talents.LeaderOfThePack),
Dodge = 0.02f * talents.FeralSwiftness,
BonusStaminaMultiplier = (1f + 0.02f * talents.SurvivalOfTheFittest) * (1f + 0.01f * talents.ImprovedMarkOfTheWild) - 1f,
BonusAgilityMultiplier = (1f + 0.02f * talents.SurvivalOfTheFittest) * (1f + 0.01f * talents.ImprovedMarkOfTheWild) - 1f,
BonusStrengthMultiplier = (1f + 0.02f * talents.SurvivalOfTheFittest) * (1f + 0.01f * talents.ImprovedMarkOfTheWild) - 1f,
BonusAttackPowerMultiplier = 0.02f * talents.HeartOfTheWild,
CritChanceReduction = 0.02f * talents.SurvivalOfTheFittest,
BonusPhysicalDamageMultiplier = (1f + 0.02f * talents.Naturalist) * (talents.GlyphOfSavageRoar ? 1.33f : 1.3f) - 1f,
BonusMangleDamageMultiplier = (1f + 0.1f * talents.SavageFury) * (talents.GlyphOfMangle ? 1.1f : 1.0f) - 1f,
BonusRakeDamageMultiplier = 0.1f * talents.SavageFury,
BonusShredDamageMultiplier = 0.04f * talents.RendAndTear,
BonusFerociousBiteCrit = 0.05f * talents.RendAndTear,
BonusEnergyOnTigersFury = 20f * talents.KingOfTheJungle,
MangleCatCostReduction = 1f * talents.Ferocity + 2f * talents.ImprovedMangle,
RakeCostReduction = 1f * talents.Ferocity,
ShredCostReduction = 9f * talents.ShreddingAttacks,
BonusCPOnCrit = 0.5f * talents.PrimalFury,
Expertise = 5f * talents.PrimalPrecision,
FinisherEnergyOnAvoid = 0.4f * talents.PrimalPrecision,
AttackPower = (character.Level / 2f) * talents.PredatoryStrikes,
BonusCritMultiplier = 0.1f * ((float)talents.PredatoryInstincts / 3f),
BonusFerociousBiteDamageMultiplier = 0.03f * talents.FeralAggression,
BonusRipDuration = talents.GlyphOfRip ? 4f : 0f,
};
statsTotal.Accumulate(BaseStats.GetBaseStats(80, character.Class, character.Race, BaseStats.DruidForm.Cat));
statsTotal.Accumulate(statsItems);
statsTotal.Accumulate(GetBuffsStats(character, calcOpts));
float predatoryStrikesAP = 0f;
float fap = 0f;
if (character.MainHand != null)
{
fap = Math.Max(0f, (character.MainHand.Item.DPS - 54.8f) * 14f); //TODO Find a more accurate number for this?
predatoryStrikesAP = (fap + character.MainHand.Item.Stats.AttackPower) * 0.2f * (talents.PredatoryStrikes / 3f);
if (character.MainHand.Enchant != null)
{
predatoryStrikesAP += character.MainHand.Enchant.Stats.AttackPower * 0.2f * (talents.PredatoryStrikes / 3f);
}
}
statsTotal.Stamina = (float)Math.Floor(statsTotal.Stamina * (1f + statsTotal.BonusStaminaMultiplier));
statsTotal.Strength = (float)Math.Floor((statsTotal.Strength + statsTotal.CatFormStrength) * (1f + statsTotal.BonusStrengthMultiplier));
statsTotal.Agility = (float)Math.Floor(statsTotal.Agility * (1f + statsTotal.BonusAgilityMultiplier));
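// Feral attack power: 2 AP per point of Strength, 1 AP per point of Agility, plus feral AP from the weapon's DPS and the Predatory Strikes contribution computed above.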
statsTotal.AttackPower += statsTotal.Strength * 2f + statsTotal.Agility + fap + predatoryStrikesAP;
statsTotal.AttackPower = (float)Math.Floor(statsTotal.AttackPower * (1f + statsTotal.BonusAttackPowerMultiplier));
statsTotal.Health += (float)Math.Floor((statsTotal.Stamina - 20f) * 10f + 20f);
statsTotal.Health = (float)Math.Floor(statsTotal.Health * (1f + statsTotal.BonusHealthMultiplier));
statsTotal.Armor += 2f * statsTotal.Agility;
statsTotal.Armor = (float)Math.Floor(statsTotal.Armor * (1f + statsTotal.BonusArmorMultiplier));
statsTotal.NatureResistance += statsTotal.NatureResistanceBuff;
statsTotal.FireResistance += statsTotal.FireResistanceBuff;
statsTotal.FrostResistance += statsTotal.FrostResistanceBuff;
statsTotal.ShadowResistance += statsTotal.ShadowResistanceBuff;
statsTotal.ArcaneResistance += statsTotal.ArcaneResistanceBuff;
statsTotal.WeaponDamage += 16f; //Tiger's Fury
float hasteBonus = StatConversion.GetPhysicalHasteFromRating(statsTotal.HasteRating, CharacterClass.Druid);
hasteBonus = (1f + hasteBonus) * (1f + statsTotal.PhysicalHaste) - 1f;
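// Average interval between proc-capable attacks: presumably (1 + haste) white swings per second at the 1.0s cat-form swing speed, plus roughly one special attack every 3.5 seconds.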
float meleeHitInterval = 1f / ((1f + hasteBonus) + 1f / 3.5f);
float hitBonus = StatConversion.GetPhysicalHitFromRating(statsTotal.HitRating) + statsTotal.PhysicalHit;
float expertiseBonus = StatConversion.GetDodgeParryReducFromExpertise(StatConversion.GetExpertiseFromRating(statsTotal.ExpertiseRating, CharacterClass.Druid) + statsTotal.Expertise, CharacterClass.Druid);
float chanceDodge = Math.Max(0f, StatConversion.WHITE_DODGE_CHANCE_CAP[targetLevel-80] - expertiseBonus);
float chanceParry = 0f;// Math.Max(0f, StatConversion.WHITE_PARRY_CHANCE_CAP[targetLevel - 80] - expertiseBonus);
float chanceMiss = Math.Max(0f, StatConversion.WHITE_MISS_CHANCE_CAP[ targetLevel-80] - hitBonus);
float chanceAvoided = chanceMiss + chanceDodge + chanceParry;
float rawChanceCrit = StatConversion.GetPhysicalCritFromRating(statsTotal.CritRating)
+ StatConversion.GetPhysicalCritFromAgility(statsTotal.Agility, CharacterClass.Druid)
+ statsTotal.PhysicalCrit
+ StatConversion.NPC_LEVEL_CRIT_MOD[targetLevel - 80];
float chanceCrit = rawChanceCrit * (1f - chanceAvoided);
float chanceHit = 1f - chanceAvoided;
bool usesMangle = (talents.Mangle > 0 && !character.ActiveBuffsContains("Mangle") && !character.ActiveBuffsContains("Trauma"));
Dictionary<Trigger, float> triggerIntervals = new Dictionary<Trigger, float>();
Dictionary<Trigger, float> triggerChances = new Dictionary<Trigger, float>();
triggerIntervals[Trigger.Use] = 0f;
triggerIntervals[Trigger.MeleeAttack] = meleeHitInterval;
triggerIntervals[Trigger.MeleeHit] = meleeHitInterval;
triggerIntervals[Trigger.PhysicalHit] = meleeHitInterval;
triggerIntervals[Trigger.MeleeCrit] = meleeHitInterval;
triggerIntervals[Trigger.PhysicalCrit] = meleeHitInterval;
triggerIntervals[Trigger.DoTTick] = 1.5f;
triggerIntervals[Trigger.DamageDone] = meleeHitInterval / 2f;
triggerIntervals[Trigger.DamageOrHealingDone] = meleeHitInterval / 2f; // Need to Add Self-Heals
triggerIntervals[Trigger.RakeTick] = 3f + (float)calcOpts.LagVariance / 3000f;
if (usesMangle)
triggerIntervals[Trigger.MangleCatHit] = 60f;
triggerIntervals[Trigger.MangleCatOrShredHit] = usesMangle ? 3.76f : 3.87f;
triggerIntervals[Trigger.MangleCatOrShredOrInfectedWoundsHit] = triggerIntervals[Trigger.MangleCatOrShredHit] / ((talents.InfectedWounds > 0) ? 2f : 1f);
triggerChances[Trigger.Use] = 1f;
triggerChances[Trigger.MeleeAttack] = 1f;
triggerChances[Trigger.MeleeHit] = Math.Max(0f, chanceHit);
triggerChances[Trigger.PhysicalHit] = Math.Max(0f, chanceHit);
triggerChances[Trigger.MeleeCrit] = Math.Max(0f, chanceCrit);
triggerChances[Trigger.PhysicalCrit] = Math.Max(0f, chanceCrit);
triggerChances[Trigger.DoTTick] = 1f;
triggerChances[Trigger.DamageDone] = 1f - chanceAvoided / 2f;
triggerChances[Trigger.DamageOrHealingDone] = 1f - chanceAvoided / 2f; // Need to Add Self-Heals
triggerChances[Trigger.RakeTick] = 1f;
if (talents.Mangle > 0 && !character.ActiveBuffsContains("Mangle") && !character.ActiveBuffsContains("Trauma"))
triggerChances[Trigger.MangleCatHit] = chanceHit;
triggerChances[Trigger.MangleCatOrShredHit] = chanceHit;
triggerChances[Trigger.MangleCatOrShredOrInfectedWoundsHit] = chanceHit;
// Handle Trinket procs
Stats statsProcs = new Stats();
foreach (SpecialEffect effect in statsTotal.SpecialEffects(se => triggerIntervals.ContainsKey(se.Trigger)))
{
// JOTHAY's NOTE: The following is an ugly hack to add Recursive Effects to Cat
// so Victor's Call and similar trinkets can be given more appropriate value
if (effect.Trigger == Trigger.Use && effect.Stats._rawSpecialEffectDataSize == 1
&& triggerIntervals.ContainsKey(effect.Stats._rawSpecialEffectData[0].Trigger))
{
float upTime = effect.GetAverageUptime(triggerIntervals[effect.Trigger],
triggerChances[effect.Trigger], 1f, calcOpts.Duration);
statsProcs.Accumulate(effect.Stats._rawSpecialEffectData[0].GetAverageStats(
triggerIntervals[effect.Stats._rawSpecialEffectData[0].Trigger],
triggerChances[effect.Stats._rawSpecialEffectData[0].Trigger], 1f, calcOpts.Duration),
upTime);
}
else if (effect.Stats.MoteOfAnger > 0)
{
// When in effect stats, MoteOfAnger is % of melee hits
// When in character stats, MoteOfAnger is average procs per second
statsProcs.MoteOfAnger = effect.Stats.MoteOfAnger * effect.GetAverageProcsPerSecond(triggerIntervals[effect.Trigger],
triggerChances[effect.Trigger], 1f, calcOpts.Duration) / effect.MaxStack;
}
else
{
statsProcs.Accumulate(effect.GetAverageStats(triggerIntervals[effect.Trigger],
triggerChances[effect.Trigger], 1f, calcOpts.Duration),
effect.Stats.DeathbringerProc > 0 ? 1f / 3f : 1f);
}
}
statsProcs.Agility += statsProcs.HighestStat + statsProcs.Paragon + statsProcs.DeathbringerProc;
statsProcs.Strength += statsProcs.DeathbringerProc;
statsProcs.Stamina = (float)Math.Floor(statsProcs.Stamina * (1f + statsTotal.BonusStaminaMultiplier));
statsProcs.Strength = (float)Math.Floor(statsProcs.Strength * (1f + statsTotal.BonusStrengthMultiplier));
statsProcs.Agility = (float)Math.Floor(statsProcs.Agility * (1f + statsTotal.BonusAgilityMultiplier));
statsProcs.AttackPower += statsProcs.Strength * 2f + statsProcs.Agility;
statsProcs.AttackPower = (float)Math.Floor(statsProcs.AttackPower * (1f + statsTotal.BonusAttackPowerMultiplier));
statsProcs.HasteRating += statsProcs.DeathbringerProc;
statsProcs.Health += (float)Math.Floor(statsProcs.Stamina * 10f);
statsProcs.Armor += 2f * statsProcs.Agility;
statsProcs.Armor = (float)Math.Floor(statsProcs.Armor * (1f + statsTotal.BonusArmorMultiplier));
//Agility is only used for crit from here on out; we'll be converting Agility to CritRating,
//and calculating CritRating separately, so don't add any Agility or CritRating from procs here.
//Also calculating ArPen separately, so don't add that either.
statsProcs.CritRating = statsProcs.Agility = statsProcs.ArmorPenetrationRating = 0;
statsTotal.Accumulate(statsProcs);
//Handle Crit procs
statsTotal.TemporaryCritRatingUptimes = new WeightedStat[0];
List<SpecialEffect> tempCritEffects = new List<SpecialEffect>();
List<float> tempCritEffectIntervals = new List<float>();
List<float> tempCritEffectChances = new List<float>();
List<float> tempCritEffectScales = new List<float>();
foreach (SpecialEffect effect in statsTotal.SpecialEffects(se => triggerIntervals.ContainsKey(se.Trigger) && (se.Stats.CritRating + se.Stats.Agility + se.Stats.DeathbringerProc + se.Stats.HighestStat + se.Stats.Paragon) > 0))
{
tempCritEffects.Add(effect);
tempCritEffectIntervals.Add(triggerIntervals[effect.Trigger]);
tempCritEffectChances.Add(triggerChances[effect.Trigger]);
tempCritEffectScales.Add(effect.Stats.DeathbringerProc > 0 ? 1f / 3f : 1f);
}
if (tempCritEffects.Count == 0)
{
statsTotal.TemporaryCritRatingUptimes = new WeightedStat[] { new WeightedStat() { Chance = 1f, Value = 0f } };
}
else if (tempCritEffects.Count == 1)
{ //Only one temporary crit effect; add its uptime and value directly
SpecialEffect effect = tempCritEffects[0];
float uptime = effect.GetAverageUptime(triggerIntervals[effect.Trigger], triggerChances[effect.Trigger], 1f, calcOpts.Duration) * tempCritEffectScales[0];
float totalAgi = (float)effect.MaxStack * (effect.Stats.Agility + effect.Stats.DeathbringerProc + effect.Stats.HighestStat + effect.Stats.Paragon) * (1f + statsTotal.BonusAgilityMultiplier);
statsTotal.TemporaryCritRatingUptimes = new WeightedStat[] { new WeightedStat() { Chance = uptime, Value =
effect.Stats.CritRating + StatConversion.GetCritFromAgility(totalAgi,
CharacterClass.Druid) * StatConversion.RATING_PER_PHYSICALCRIT },
new WeightedStat() { Chance = 1f - uptime, Value = 0f }};
}
else if (tempCritEffects.Count > 1)
{
List<float> tempCritEffectsValues = new List<float>();
foreach (SpecialEffect effect in tempCritEffects)
{
float totalAgi = (float)effect.MaxStack * (effect.Stats.Agility + effect.Stats.DeathbringerProc + effect.Stats.HighestStat + effect.Stats.Paragon) * (1f + statsTotal.BonusAgilityMultiplier);
tempCritEffectsValues.Add(effect.Stats.CritRating +
StatConversion.GetCritFromAgility(totalAgi, CharacterClass.Druid) *
StatConversion.RATING_PER_PHYSICALCRIT);
}
float[] intervals = new float[tempCritEffects.Count];
float[] chances = new float[tempCritEffects.Count];
float[] offset = new float[tempCritEffects.Count];
for (int i = 0; i < tempCritEffects.Count; i++)
{
intervals[i] = triggerIntervals[tempCritEffects[i].Trigger];
chances[i] = triggerChances[tempCritEffects[i].Trigger];
}
if (tempCritEffects.Count >= 2)
{
offset[0] = calcOpts.TrinketOffset;
}
WeightedStat[] critWeights = SpecialEffect.GetAverageCombinedUptimeCombinations(tempCritEffects.ToArray(), intervals, chances, offset, tempCritEffectScales.ToArray(), 1f, calcOpts.Duration, tempCritEffectsValues.ToArray());
statsTotal.TemporaryCritRatingUptimes = critWeights;
}
//Handle ArPen procs
statsTotal.TemporaryArPenRatingUptimes = new WeightedStat[0];
List<SpecialEffect> tempArPenEffects = new List<SpecialEffect>();
List<float> tempArPenEffectIntervals = new List<float>();
List<float> tempArPenEffectChances = new List<float>();
List<float> tempArPenEffectScales = new List<float>();
foreach (SpecialEffect effect in statsTotal.SpecialEffects(se => triggerIntervals.ContainsKey(se.Trigger) && se.Stats.ArmorPenetrationRating > 0))
{
tempArPenEffects.Add(effect);
tempArPenEffectIntervals.Add(triggerIntervals[effect.Trigger]);
tempArPenEffectChances.Add(triggerChances[effect.Trigger]);
tempArPenEffectScales.Add(effect.Stats.DeathbringerProc > 0 ? 1f / 3f : 1f);
}
if (tempArPenEffects.Count == 0)
{
statsTotal.TemporaryArPenRatingUptimes = new WeightedStat[] { new WeightedStat() { Chance = 1f, Value = 0f } };
}
else if (tempArPenEffects.Count == 1)
{ //Only one temporary ArPen effect; add its uptime and value directly
SpecialEffect effect = tempArPenEffects[0];
float uptime = effect.GetAverageUptime(triggerIntervals[effect.Trigger], triggerChances[effect.Trigger], 1f, calcOpts.Duration) * tempArPenEffectScales[0];
statsTotal.TemporaryArPenRatingUptimes = new WeightedStat[] { new WeightedStat() { Chance = uptime, Value = effect.Stats.ArmorPenetrationRating },
new WeightedStat() { Chance = 1f - uptime, Value = 0f }};
}
else if (tempArPenEffects.Count > 1)
{
List<float> tempArPenEffectValues = new List<float>();
foreach (SpecialEffect effect in tempArPenEffects)
{
tempArPenEffectValues.Add(effect.Stats.ArmorPenetrationRating);
}
float[] intervals = new float[tempArPenEffects.Count];
float[] chances = new float[tempArPenEffects.Count];
float[] offset = new float[tempArPenEffects.Count];
for (int i = 0; i < tempArPenEffects.Count; i++)
{
intervals[i] = triggerIntervals[tempArPenEffects[i].Trigger];
chances[i] = triggerChances[tempArPenEffects[i].Trigger];
}
if (tempArPenEffects.Count >= 2)
{
offset[0] = calcOpts.TrinketOffset;
}
WeightedStat[] arPenWeights = SpecialEffect.GetAverageCombinedUptimeCombinations(tempArPenEffects.ToArray(), intervals, chances, offset, tempArPenEffectScales.ToArray(), 1f, calcOpts.Duration, tempArPenEffectValues.ToArray());
statsTotal.TemporaryArPenRatingUptimes = arPenWeights;
}
return statsTotal;
}
public override ComparisonCalculationBase[] GetCustomChartData(Character character, string chartName)
{
switch (chartName)
{
case "White Combat Table":
CharacterCalculationsCat calcs = (CharacterCalculationsCat)GetCharacterCalculations(character);
float[] ct = null;//calcs.MeleeStats.CombatTable;
if (ct == null) // Combat table data is not currently computed; return an empty chart rather than throwing a NullReferenceException
    return new ComparisonCalculationBase[0];
return new ComparisonCalculationBase[]
{
new ComparisonCalculationCat() { Name = "Miss", OverallPoints = ct[0], DPSPoints = ct[0]},
new ComparisonCalculationCat() { Name = "Dodge", OverallPoints = ct[1], DPSPoints = ct[1]},
new ComparisonCalculationCat() { Name = "Parry", OverallPoints = ct[2], DPSPoints = ct[2]},
new ComparisonCalculationCat() { Name = "Glance", OverallPoints = ct[3], DPSPoints = ct[3]},
new ComparisonCalculationCat() { Name = "Hit", OverallPoints = ct[4], DPSPoints = ct[4]},
new ComparisonCalculationCat() { Name = "Crit", OverallPoints = ct[5], DPSPoints = ct[5]},
};
default:
return new ComparisonCalculationBase[0];
}
}
public override bool IsItemRelevant(Item item)
{
if (item.Slot == ItemSlot.OffHand ||
(item.Slot == ItemSlot.Ranged && item.Type != ItemType.Idol) ||
item.Stats.SpellPower > 0)
return false;
foreach (var effect in item.Stats.SpecialEffects(s => s.Stats.SpellPower > 0))
return false;
return base.IsItemRelevant(item);
}
public override Stats GetRelevantStats(Stats stats)
{
Stats s = new Stats()
{
Agility = stats.Agility,
Strength = stats.Strength,
CatFormStrength = stats.CatFormStrength,
AttackPower = stats.AttackPower,
CritRating = stats.CritRating,
HitRating = stats.HitRating,
Stamina = stats.Stamina,
HasteRating = stats.HasteRating,
ExpertiseRating = stats.ExpertiseRating,
ArmorPenetration = stats.ArmorPenetration,
ArmorPenetrationRating = stats.ArmorPenetrationRating,
BonusShredDamage = stats.BonusShredDamage,
BonusRipDamagePerCPPerTick = stats.BonusRipDamagePerCPPerTick,
WeaponDamage = stats.WeaponDamage,
BonusAgilityMultiplier = stats.BonusAgilityMultiplier,
BonusAttackPowerMultiplier = stats.BonusAttackPowerMultiplier,
BonusCritMultiplier = stats.BonusCritMultiplier,
BonusDamageMultiplier = stats.BonusDamageMultiplier,
BonusHealthMultiplier = stats.BonusHealthMultiplier,
BonusRipDamageMultiplier = stats.BonusRipDamageMultiplier,
BonusStaminaMultiplier = stats.BonusStaminaMultiplier,
BonusStrengthMultiplier = stats.BonusStrengthMultiplier,
Health = stats.Health,
MangleCatCostReduction = stats.MangleCatCostReduction,
TigersFuryCooldownReduction = stats.TigersFuryCooldownReduction,
ThreatReductionMultiplier = stats.ThreatReductionMultiplier,
PhysicalHaste = stats.PhysicalHaste,
PhysicalHit = stats.PhysicalHit,
BonusBleedDamageMultiplier = stats.BonusBleedDamageMultiplier,
PhysicalCrit = stats.PhysicalCrit,
BonusSavageRoarDuration = stats.BonusSavageRoarDuration,
ClearcastOnBleedChance = stats.ClearcastOnBleedChance,
ArcaneDamage = stats.ArcaneDamage,
ShadowDamage = stats.ShadowDamage,
HighestStat = stats.HighestStat,
Paragon = stats.Paragon,
DeathbringerProc = stats.DeathbringerProc,
BonusRakeDuration = stats.BonusRakeDuration,
BonusRipCrit = stats.BonusRipCrit,
BonusRakeCrit = stats.BonusRakeCrit,
RipCostReduction = stats.RipCostReduction,
MoteOfAnger = stats.MoteOfAnger,
ArcaneResistance = stats.ArcaneResistance,
NatureResistance = stats.NatureResistance,
FireResistance = stats.FireResistance,
FrostResistance = stats.FrostResistance,
ShadowResistance = stats.ShadowResistance,
ArcaneResistanceBuff = stats.ArcaneResistanceBuff,
NatureResistanceBuff = stats.NatureResistanceBuff,
FireResistanceBuff = stats.FireResistanceBuff,
FrostResistanceBuff = stats.FrostResistanceBuff,
ShadowResistanceBuff = stats.ShadowResistanceBuff,
BonusRipDuration = stats.BonusRipDuration,
};
foreach (SpecialEffect effect in stats.SpecialEffects())
{
if (effect.Trigger == Trigger.Use || effect.Trigger == Trigger.MeleeCrit || effect.Trigger == Trigger.MeleeHit || effect.Trigger == Trigger.MeleeAttack
|| effect.Trigger == Trigger.PhysicalCrit || effect.Trigger == Trigger.PhysicalHit || effect.Trigger == Trigger.DoTTick
|| effect.Trigger == Trigger.DamageDone || effect.Trigger == Trigger.MangleCatHit || effect.Trigger == Trigger.RakeTick
|| effect.Trigger == Trigger.MangleCatOrShredHit || effect.Trigger == Trigger.MangleCatOrShredOrInfectedWoundsHit || effect.Trigger == Trigger.DamageOrHealingDone)
{
if (HasRelevantStats(effect.Stats))
{
s.AddSpecialEffect(effect);
}
}
}
return s;
}
public override bool HasRelevantStats(Stats stats)
{
bool relevant = (stats.Agility + stats.ArmorPenetration + stats.AttackPower + stats.PhysicalCrit +
stats.BonusAgilityMultiplier + stats.BonusAttackPowerMultiplier + stats.BonusCritMultiplier +
stats.ClearcastOnBleedChance + stats.BonusSavageRoarDuration + stats.BonusRakeCrit + stats.RipCostReduction +
stats.BonusDamageMultiplier + stats.BonusRipDamageMultiplier + stats.BonusShredDamage +
stats.BonusStaminaMultiplier + stats.BonusStrengthMultiplier + stats.CritRating + stats.ExpertiseRating +
stats.HasteRating + stats.Health + stats.HitRating + stats.MangleCatCostReduction + /*stats.Stamina +*/
stats.Strength + stats.CatFormStrength + stats.WeaponDamage + stats.DeathbringerProc +
stats.PhysicalHit + stats.BonusRipDamagePerCPPerTick + stats.BonusRipCrit + stats.MoteOfAnger +
stats.PhysicalHaste + stats.ArmorPenetrationRating + stats.BonusRipDuration + stats.BonusRakeDuration +
stats.ThreatReductionMultiplier + stats.ArcaneDamage + stats.ShadowDamage +
stats.ArcaneResistance + stats.NatureResistance + stats.FireResistance + stats.BonusBleedDamageMultiplier + stats.Paragon +
stats.FrostResistance + stats.ShadowResistance + stats.ArcaneResistanceBuff + stats.TigersFuryCooldownReduction + stats.HighestStat +
stats.NatureResistanceBuff + stats.FireResistanceBuff + stats.BonusPhysicalDamageMultiplier +
stats.FrostResistanceBuff + stats.ShadowResistanceBuff) > 0 || (stats.Stamina > 0 && stats.SpellPower == 0);
foreach (SpecialEffect effect in stats.SpecialEffects())
{
if (effect.Trigger == Trigger.Use || effect.Trigger == Trigger.MeleeCrit || effect.Trigger == Trigger.MeleeHit || effect.Trigger == Trigger.MeleeAttack
|| effect.Trigger == Trigger.PhysicalCrit || effect.Trigger == Trigger.PhysicalHit || effect.Trigger == Trigger.RakeTick
|| effect.Trigger == Trigger.MangleCatHit || effect.Trigger == Trigger.MangleCatOrShredHit
|| effect.Trigger == Trigger.MangleCatOrShredOrInfectedWoundsHit || effect.Trigger == Trigger.DamageOrHealingDone)
{
relevant |= HasRelevantStats(effect.Stats);
if (relevant) break;
}
}
return relevant;
}
public Stats GetBuffsStats(Character character, CalculationOptionsCat calcOpts) {
List<Buff> removedBuffs = new List<Buff>();
List<Buff> addedBuffs = new List<Buff>();
//float hasRelevantBuff;
#region Passive Ability Auto-Fixing
// Removes the Trueshot Aura buff and its equivalents, Unleashed Rage and Abomination's Might, if you are
// maintaining it yourself. We are now calculating this internally for better accuracy and to provide
// value to relevant talents.
/*{
hasRelevantBuff = character.HunterTalents.TrueshotAura;
Buff a = Buff.GetBuffByName("Trueshot Aura");
Buff b = Buff.GetBuffByName("Unleashed Rage");
Buff c = Buff.GetBuffByName("Abomination's Might");
if (hasRelevantBuff > 0)
{
if (character.ActiveBuffs.Contains(a)) { character.ActiveBuffs.Remove(a); removedBuffs.Add(a); }
if (character.ActiveBuffs.Contains(b)) { character.ActiveBuffs.Remove(b); removedBuffs.Add(b); }
if (character.ActiveBuffs.Contains(c)) { character.ActiveBuffs.Remove(c); removedBuffs.Add(c); }
}
}*/
#endregion
Stats statsBuffs = GetBuffsStats(character.ActiveBuffs);
foreach (Buff b in removedBuffs) {
character.ActiveBuffsAdd(b);
}
foreach (Buff b in addedBuffs) {
character.ActiveBuffs.Remove(b);
}
return statsBuffs;
}
public override void SetDefaults(Character character)
{
character.ActiveBuffsAdd(("Horn of Winter"));
character.ActiveBuffsAdd(("Battle Shout"));
character.ActiveBuffsAdd(("Unleashed Rage"));
character.ActiveBuffsAdd(("Improved Moonkin Form"));
character.ActiveBuffsAdd(("Leader of the Pack"));
character.ActiveBuffsAdd(("Improved Icy Talons"));
character.ActiveBuffsAdd(("Power Word: Fortitude"));
character.ActiveBuffsAdd(("Mark of the Wild"));
character.ActiveBuffsAdd(("Blessing of Kings"));
character.ActiveBuffsAdd(("Sunder Armor"));
character.ActiveBuffsAdd(("Faerie Fire"));
character.ActiveBuffsAdd(("Totem of Wrath"));
character.ActiveBuffsAdd(("Flask of Endless Rage"));
character.ActiveBuffsAdd(("Agility Food"));
character.ActiveBuffsAdd(("Heroism/Bloodlust"));
if (character.PrimaryProfession == Profession.Alchemy ||
character.SecondaryProfession == Profession.Alchemy)
character.ActiveBuffsAdd(("Flask of Endless Rage (Mixology)"));
character.DruidTalents.GlyphOfSavageRoar = true;
character.DruidTalents.GlyphOfShred = true;
character.DruidTalents.GlyphOfRip = true;
}
private static List<string> _relevantGlyphs;
public override List<string> GetRelevantGlyphs()
{
if (_relevantGlyphs == null)
{
_relevantGlyphs = new List<string>();
_relevantGlyphs.Add("Glyph of Mangle");
_relevantGlyphs.Add("Glyph of Shred");
_relevantGlyphs.Add("Glyph of Rip");
_relevantGlyphs.Add("Glyph of Berserk");
_relevantGlyphs.Add("Glyph of Savage Roar");
}
return _relevantGlyphs;
}
}
public class ComparisonCalculationCat : ComparisonCalculationBase
{
private string _name = string.Empty;
public override string Name
{
get { return _name; }
set { _name = value; }
}
private string _desc = string.Empty;
public override string Description
{
get { return _desc; }
set { _desc = value; }
}
private float _overallPoints = 0f;
public override float OverallPoints
{
get { return _overallPoints; }
set { _overallPoints = value; }
}
private float[] _subPoints = new float[] { 0f, 0f };
public override float[] SubPoints
{
get { return _subPoints; }
set { _subPoints = value; }
}
public float DPSPoints
{
get { return _subPoints[0]; }
set { _subPoints[0] = value; }
}
public float SurvivabilityPoints
{
get { return _subPoints[1]; }
set { _subPoints[1] = value; }
}
private Item _item = null;
public override Item Item
{
get { return _item; }
set { _item = value; }
}
private ItemInstance _itemInstance = null;
public override ItemInstance ItemInstance
{
get { return _itemInstance; }
set { _itemInstance = value; }
}
private bool _equipped = false;
public override bool Equipped
{
get { return _equipped; }
set { _equipped = value; }
}
public override bool PartEquipped { get; set; }
public override string ToString()
{
return string.Format("{0}: ({1}O {2}DPS)", Name, Math.Round(OverallPoints), Math.Round(DPSPoints));
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Drawing;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using Karbon.Cms.Core.IO;
using Karbon.Cms.Core.Mapping;
using Karbon.Cms.Core.Models;
using Karbon.Cms.Core.Serialization;
using Karbon.Cms.Core.Threading;
namespace Karbon.Cms.Core.Stores
{
internal class ContentStore : IContentStore
{
private readonly FileStore _fileStore;
private readonly DataSerializer _dataSerializer;
private readonly DataMapper _dataMapper;
private static readonly Regex VisibleRegex = new Regex(@"^\d+-");
private readonly ReaderWriterLockSlim _cacheLock = new ReaderWriterLockSlim();
private IDictionary<string, IContent> _contentCache = new ConcurrentDictionary<string, IContent>();
private bool _cacheDirty = true;
/// <summary>
/// Initializes a new instance of the <see cref="ContentStore"/> class.
/// </summary>
public ContentStore()
{
// Setup required components
_fileStore = FileStoreManager.Default;
_dataSerializer = DataSerializerManager.Default;
_dataMapper = new DataMapper();
// Setup file store event listener
_fileStore.FileChanged += (sender, args) => _cacheDirty = true;
}
#region Public API
/// <summary>
/// Gets a content item by URL.
/// </summary>
/// <param name="url">The URL.</param>
/// <returns></returns>
public IContent GetByUrl(string url)
{
// Make sure URL isn't null
if (url == null)
return null;
if (!_contentCache.ContainsKey(url))
return null;
return _contentCache[url];
}
/// <summary>
/// Gets the ancestor content.
/// </summary>
/// <param name="content">The content.</param>
/// <returns></returns>
public IEnumerable<IContent> GetAncestors(IContent content)
{
content = GetParent(content);
while (content != null)
{
yield return content;
content = GetParent(content);
}
}
/// <summary>
/// Gets the parent content.
/// </summary>
/// <param name="content">The content.</param>
/// <returns></returns>
public IContent GetParent(IContent content)
{
if (content.IsHomePage())
return null;
var parentUrl = content.RelativeUrl.Substring(0,
content.RelativeUrl.LastIndexOf("/", StringComparison.InvariantCulture));
if (parentUrl == "~")
parentUrl = "~/";
if (!_contentCache.ContainsKey(parentUrl))
return null;
return _contentCache[parentUrl];
}
/// <summary>
/// Gets the children.
/// </summary>
/// <param name="content">The content.</param>
/// <returns></returns>
public IEnumerable<IContent> GetChildren(IContent content)
{
return GetDescendants(content)
.Where(x => x.Depth == content.Depth + 1)
.OrderBy(x => x.SortOrder).ThenBy(x => x.Slug);
}
/// <summary>
/// Gets the descendants.
/// </summary>
/// <param name="content">The content.</param>
/// <returns></returns>
public IEnumerable<IContent> GetDescendants(IContent content)
{
var url = content.RelativeUrl.EnsureTrailingForwardSlash();
return _contentCache.Keys
.Where(x => x != url && x.StartsWith(url))
.Select(x => _contentCache[x])
.OrderBy(x => x.SortOrder).ThenBy(x => x.Slug);
}
#endregion
#region Cache Control
/// <summary>
/// Syncs the content cache, rebuilding it from the file store when it has been marked dirty.
/// </summary>
public void SyncCache()
{
if(_cacheDirty)
{
using (new WriteLock(_cacheLock))
{
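// Re-check the dirty flag inside the write lock (double-checked locking) so only the first writer rebuilds the cache.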
if(_cacheDirty)
{
var data = LoadContent();
_contentCache = new ConcurrentDictionary<string, IContent>(data);
_cacheDirty = false;
}
}
}
}
/// <summary>
/// Loads the content.
/// </summary>
/// <returns></returns>
private IDictionary<string, IContent> LoadContent()
{
return LoadContentRecursive(new[] { "" }, new Dictionary<string, IContent>());
}
/// <summary>
/// Loads the content recursively.
/// </summary>
/// <param name="dirs">The dirs.</param>
/// <param name="data">The data.</param>
/// <returns></returns>
private IDictionary<string, IContent> LoadContentRecursive(IEnumerable<string> dirs, IDictionary<string, IContent> data)
{
foreach (var dir in dirs)
{
var content = GetByPath(dir);
if (content != null)
data.Add(content.RelativeUrl, content);
LoadContentRecursive(_fileStore.GetDirectories(dir), data);
}
return data;
}
#endregion
#region Helper Methods
/// <summary>
/// Gets a content item by relative file path.
/// </summary>
/// <param name="path">The path.</param>
/// <returns></returns>
private IContent GetByPath(string path)
{
if (path == null)
return null;
// Check directory exists
if (!_fileStore.DirectoryExists(path))
return null;
// Parse directory name
var directoryNameInfo = ParseDirectoryName(_fileStore.GetName(path));
// Grab the files in the directory
var filePaths = _fileStore.GetFiles(path).ToList();
// Find the content file
var contentFilePath = filePaths
.FirstOrDefault(x => x.Count(y => y == '.') == 1
&& x.EndsWith("." + _dataSerializer.FileExtension));
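// e.g. assuming the serializer's FileExtension is "md", a file such as "Article.md" (exactly one dot) is picked as the content file and its base name selects the model type below.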
// Create model object based on file name
var fileName = contentFilePath != null
? _fileStore.GetNameWithoutExtension(contentFilePath)
: "Content";
var type = TypeFinder.FindTypes<Content>()
.SingleOrDefault(x => x.Name == fileName)
?? typeof(Content);
var model = Activator.CreateInstance(type) as IContent;
if (model == null)
return null;
// Deserialize data
var data = contentFilePath != null
? _dataSerializer.Deserialize(_fileStore.OpenFile(contentFilePath))
: new Dictionary<string, string>();
// Map data to model
model.RelativePath = path;
model.TypeName = fileName;
model.Slug = directoryNameInfo.Name;
model.Name = GetNameFromSlug(directoryNameInfo.Name);
model.RelativeUrl = GetUrlFromPath(path);
model.SortOrder = directoryNameInfo.SortOrder;
model.Created = _fileStore.GetCreated(contentFilePath ?? path);
model.Modified = _fileStore.GetLastModified(contentFilePath ?? path);
model.Depth = model.RelativeUrl == "~/" ? 1 : model.RelativeUrl.Count(x => x == '/') + 1;
//model.Data = data;
model = (IContent)_dataMapper.Map(model.GetType(), model, data);
// Parse files
model.AllFiles = LoadFiles(filePaths.Where(x => x != contentFilePath), model.RelativeUrl);
// Return model
return model;
}
/// <summary>
/// Loads the files from the file paths list provided.
/// </summary>
/// <param name="filePaths">The file paths.</param>
/// <param name="contentUrl">The content relative URL.</param>
/// <returns></returns>
private IEnumerable<IFile> LoadFiles(IEnumerable<string> filePaths, string contentUrl)
{
var files = new List<IFile>();
var noneContentFilePaths = filePaths.Where(x => !x.EndsWith("." + _dataSerializer.FileExtension));
foreach (var noneContentFilePath in noneContentFilePaths)
{
// Parse file name info
var fileNameInfo = ParseFileName(_fileStore.GetName(noneContentFilePath));
// See if there is a meta data file
var contentFilePath =
filePaths.SingleOrDefault(x => x == noneContentFilePath + "." + _dataSerializer.FileExtension);
// Find type for the file
var type = TypeFinder.FindTypes<File>()
.SingleOrDefault(x => x.Name == fileNameInfo.TypeName)
?? typeof(File);
// Create the file
var model = Activator.CreateInstance(type) as IFile;
if (model == null)
continue;
// Map data to the file
model.RelativePath = noneContentFilePath;
model.TypeName = fileNameInfo.TypeName;
model.Slug = fileNameInfo.Name;
model.Name = GetNameFromSlug(_fileStore.GetNameWithoutExtension(fileNameInfo.Name));
model.RelativeUrl = "~/media/" + contentUrl.TrimStart("~/") + "/" + model.Slug;
model.ContentRelativeUrl = contentUrl;
model.SortOrder = fileNameInfo.SortOrder;
model.Extension = fileNameInfo.Extension;
model.Created = _fileStore.GetCreated(noneContentFilePath);
model.Modified = _fileStore.GetLastModified(noneContentFilePath);
model.Size = _fileStore.GetSize(noneContentFilePath);
var data = contentFilePath != null
? _dataSerializer.Deserialize(_fileStore.OpenFile(contentFilePath))
: new Dictionary<string, string>();
// TODO: Make this bit provider-driven so people can retrieve their own data from a file
if(model.IsImage())
{
// Parse width height
var imageSize = GetImageSize(noneContentFilePath);
if(imageSize != Size.Empty)
{
data.Add("Width", imageSize.Width.ToString());
data.Add("Height", imageSize.Height.ToString());
}
}
//model.Data = data;
model = (IFile)_dataMapper.Map(model.GetType(), model, data);
// Return the file
files.Add(model);
}
return files.OrderBy(x => x.SortOrder).ThenBy(x => x.Slug);
}
/// <summary>
/// Gets the URL from a relative file path.
/// </summary>
/// <param name="path">The path.</param>
/// <returns></returns>
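/// <example>For example, a path such as "01-about/02-team" maps to "~/about/team" (numeric sort-order prefixes are stripped from each segment).</example>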
private string GetUrlFromPath(string path)
{
if (path == null)
return null;
var pathParts = _fileStore.GetPathParts(path);
var urlParts = pathParts
.Select(ParseDirectoryName)
.Select(nameInfo => nameInfo.Name)
.ToList();
return "~/" + string.Join("/", urlParts);
}
/// <summary>
/// Parses a directory name into its constituent parts.
/// </summary>
/// <param name="name">The name.</param>
/// <returns></returns>
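/// <example>"01-about" yields Name = "about" and SortOrder = 1; "about" yields Name = "about" and SortOrder = -1.</example>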
private DirectoryNameInfo ParseDirectoryName(string name)
{
var dirNameInfo = new DirectoryNameInfo
{
FullName = name,
Name = name,
SortOrder = -1
};
if (name.IndexOf('-') > 0)
{
var hyphenIndex = name.IndexOf('-');
var possibleSortOrder = name.Substring(0, hyphenIndex);
int parsedSortOrder;
if(int.TryParse(possibleSortOrder, out parsedSortOrder))
{
dirNameInfo.Name = name.Substring(hyphenIndex + 1);
dirNameInfo.SortOrder = parsedSortOrder;
return dirNameInfo;
}
}
return dirNameInfo;
}
/// <summary>
/// Parses a file name into its constituent parts.
/// </summary>
/// <param name="name">The name.</param>
/// <returns></returns>
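/// <example>"01-banner.ImageFile.jpg" yields Name = "banner.jpg", TypeName = "ImageFile", Extension = ".jpg" and SortOrder = 1.</example>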
private FileNameInfo ParseFileName(string name)
{
var ext = System.IO.Path.GetExtension(name);
var fileNameInfo = new FileNameInfo
{
FullName = name,
SortOrder = -1,
Extension = ext,
TypeName = GetDefaultFileType(ext).Name
};
var nameParts = name.Split('.');
if(nameParts.Length > 2)
{
fileNameInfo.TypeName = nameParts[1];
}
int parsedSortOrder;
if (VisibleRegex.IsMatch(nameParts[0]))
{
var hyphenIndex = nameParts[0].IndexOf('-');
var possibleSortOrder = nameParts[0].Substring(0, hyphenIndex);
if (int.TryParse(possibleSortOrder, out parsedSortOrder))
{
fileNameInfo.Name = nameParts[0].Substring(hyphenIndex + 1) + fileNameInfo.Extension;
fileNameInfo.SortOrder = parsedSortOrder;
}
}
else
{
fileNameInfo.Name = nameParts[0] + fileNameInfo.Extension;
if(int.TryParse(nameParts[0], out parsedSortOrder))
{
fileNameInfo.SortOrder = parsedSortOrder;
}
}
return fileNameInfo;
}
/// <summary>
/// Gets the name from slug.
/// </summary>
/// <param name="slug">The slug.</param>
/// <returns></returns>
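/// <example>"my-first_post" becomes "My First Post".</example>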
private string GetNameFromSlug(string slug)
{
var spaced = string.Join(" ", slug.Split('-', '_'));
var titleCased = CultureInfo.CurrentCulture.TextInfo.ToTitleCase(spaced);
return titleCased;
}
/// <summary>
/// Gets the default type of the file based on the supplied file extension.
/// </summary>
/// <param name="ext">The ext.</param>
/// <returns></returns>
private Type GetDefaultFileType(string ext)
{
if (IOHelper.IsImageExtension(ext))
return typeof (ImageFile);
if (IOHelper.IsVideoExtension(ext))
return typeof(VideoFile);
if (IOHelper.IsSoundExtension(ext))
return typeof(SoundFile);
if (IOHelper.IsDocumentExtension(ext))
return typeof(DocumentFile);
return typeof (File);
}
/// <summary>
/// Gets the size of the image.
/// </summary>
/// <param name="path">The path.</param>
/// <returns></returns>
private Size GetImageSize(string path)
{
var size = Size.Empty;
if (!string.IsNullOrEmpty(path) && _fileStore.FileExists(path))
{
using (var stream = _fileStore.OpenFile(path))
using (var img = Image.FromStream(stream))
{
size = img.Size;
}
}
return size;
}
#endregion
}
}
| |
#if !WINDOWS_8
#define IMPLEMENT_INTERNALS
#endif
using System;
using System.Collections.Generic;
using System.Text;
//#if SILVERLIGHT
//using SilverArcade.SilverSprite;
//using SilverArcade.SilverSprite.Input;
//using Vector2 = SilverArcade.SilverSprite.Vector2;
//#else
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Input;
//#endif
namespace FlatRedBall.Input
{
public class Xbox360GamePad
{
#region Enums
/// <summary>
/// Enumeration representing the buttons on the Xbox360 controller. The values for each
/// entry matches the value of the Xbox 360 button index in Managed DirectX. This improves
/// portability between FlatRedBall Managed DirectX and FlatRedBall XNA.
/// </summary>
public enum Button
{
A, // = 0
B,
X,
Y,
LeftShoulder,
RightShoulder, // = 5
Back,
Start,
LeftStick,
RightStick,
DPadUp, // = 10
DPadDown,
DPadLeft,
DPadRight,
LeftTrigger,
RightTrigger // = 15
}
// Total number of buttons in the enum above; used to size arrays and bound loops.
const int NumberOfButtons = 16;
public enum DPadDirection
{
Up,
Down,
Left,
Right
}
public enum DirectionalControlDevice
{
LeftStick,
RightStick,
DPad
}
#if SILVERLIGHT
//PULLED FROM Microsoft.Xna.Framework.Input.GamePadDeadZone
// Summary:
// Specifies a type of dead zone processing to apply to Xbox 360 controller
// analog sticks when calling GamePad.GetState.
public enum GamePadDeadZone
{
// Summary:
// The values of each stick are not processed and are returned by GamePad.GetState
// as "raw" values. This is best if you intend to implement your own dead zone
// processing.
None = 0,
//
// Summary:
// The X and Y positions of each stick are compared against the dead zone independently.
// This setting is the default when calling GamePad.GetState.
IndependentAxes = 1,
//
// Summary:
// The combined X and Y position of each stick is compared against the dead
// zone. This provides better control than IndependentAxes when the stick is
// used as a two-dimensional control surface, such as when controlling a character's
// view in a first-person game.
Circular = 2,
}
#endif
#endregion
#region Fields
DelegateBased1DInput dPadHorizontal;
DelegateBased1DInput dPadVertical;
DelegateBased2DInput dPad;
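// Analog position at or above which a trigger is treated as a pressed digital button.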
const float AnalogOnThreshold = .5f;
//GamePadDeadZone mGamePadDeadZone;
GamePadState mGamePadState;
GamePadState mLastGamePadState;
AnalogStick mLeftStick;
AnalogStick mRightStick;
AnalogButton mLeftTrigger;
AnalogButton mRightTrigger;
PlayerIndex mPlayerIndex;
KeyboardButtonMap mButtonMap;
double[] mLastButtonPush = new double[NumberOfButtons];
double[] mLastRepeatRate = new double[NumberOfButtons];
#if !SILVERLIGHT
GamePadCapabilities mCapabilities;
#endif
bool[] mButtonsIgnoredForThisFrame = new bool[NumberOfButtons];
#endregion
#region Properties
public I1DInput DPadHorizontal
{
get
{
if(dPadHorizontal == null)
{
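// The first delegate reports position (-1 while DPadLeft is held, +1 while DPadRight is held, otherwise 0); the second reports an approximate velocity: the full swing divided by the frame time on the frame a direction is pushed.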
dPadHorizontal = new DelegateBased1DInput(
() =>
{
if(this.ButtonDown(Button.DPadLeft))
{
return -1;
}
else if(this.ButtonDown(Button.DPadRight))
{
return 1;
}
else
{
return 0;
}
},
() =>
{
if (this.ButtonPushed(Button.DPadLeft))
{
return -1 / TimeManager.SecondDifference;
}
else if (this.ButtonPushed(Button.DPadRight))
{
return 1 / TimeManager.SecondDifference;
}
else
{
return 0;
}
}
);
}
return dPadHorizontal;
}
}
public I1DInput DPadVertical
{
get
{
if (dPadVertical == null)
{
dPadVertical = new DelegateBased1DInput(
() =>
{
if (this.ButtonDown(Button.DPadDown))
{
return -1;
}
else if (this.ButtonDown(Button.DPadUp))
{
return 1;
}
else
{
return 0;
}
},
() =>
{
if (this.ButtonPushed(Button.DPadDown))
{
return -1 / TimeManager.SecondDifference;
}
else if (this.ButtonPushed(Button.DPadUp))
{
return 1 / TimeManager.SecondDifference;
}
else
{
return 0;
}
}
);
}
return dPadVertical;
}
}
public I2DInput DPad
{
get
{
if(this.dPad == null)
{
Func<float> getX = () =>
{
if (this.ButtonDown(Button.DPadLeft))
{
return -1;
}
else if (this.ButtonDown(Button.DPadRight))
{
return 1;
}
else
{
return 0;
}
};
Func<float> getXVelocity = () =>
{
if (this.ButtonPushed(Button.DPadLeft))
{
return -1 / TimeManager.SecondDifference;
}
else if (this.ButtonPushed(Button.DPadRight))
{
return 1 / TimeManager.SecondDifference;
}
else
{
return 0;
}
};
Func<float> getY = () =>
{
if (this.ButtonDown(Button.DPadDown))
{
return -1;
}
else if (this.ButtonDown(Button.DPadUp))
{
return 1;
}
else
{
return 0;
}
};
Func<float> getYVelocity = () =>
{
if (this.ButtonPushed(Button.DPadDown))
{
return -1 / TimeManager.SecondDifference;
}
else if (this.ButtonPushed(Button.DPadUp))
{
return 1 / TimeManager.SecondDifference;
}
else
{
return 0;
}
};
this.dPad = new DelegateBased2DInput(
getX,
getY,
getXVelocity,
getYVelocity);
}
return this.dPad;
}
}
public bool IsConnected
{
get
{
if (this.FakeIsConnected == true)
{
return true;
}
else
{
return mGamePadState.IsConnected;
}
}
}
#region XML Docs
/// <summary>
/// This value can force an Xbox360GamePad's
/// IsConnected to be true even if the controller
/// is not connected. This can be used if game logic
/// requires a certain number of GamePads to be connected.
/// </summary>
#endregion
public bool FakeIsConnected
{
set;
get;
}
public AnalogStick LeftStick
{
get { return mLeftStick; }
}
public AnalogStick RightStick
{
get { return mRightStick; }
}
public KeyboardButtonMap ButtonMap
{
set { mButtonMap = value; }
get { return mButtonMap; }
}
/// <summary>
/// Returns the left trigger as an AnalogButton. Its Position is 0.0f when the trigger is
/// not pressed and 1.0f when it is fully pressed.
/// </summary>
public AnalogButton LeftTrigger
{
get { return mLeftTrigger; }
}
#region XML Docs
/// <summary>
/// Returns the right trigger as an AnalogButton. Its Position is 0.0f when the trigger is
/// not pressed and 1.0f when it is fully pressed.
/// </summary>
#endregion
public AnalogButton RightTrigger
{
get { return mRightTrigger; }
}
public bool WasConnectedThisFrame
{
get
{
#if !SILVERLIGHT && !MONODROID
if (mLastGamePadState == null)
{
return false;
}
else
#endif
{
return !mLastGamePadState.IsConnected && mGamePadState.IsConnected;
}
}
}
public bool WasDisconnectedThisFrame
{
get
{
#if !SILVERLIGHT && !MONODROID
if (mLastGamePadState == null)
{
return false;
}
else
#endif
{
return mLastGamePadState.IsConnected && !mGamePadState.IsConnected;
}
}
}
#if !SILVERLIGHT && !MONODROID
public GamePadType GamePadType
{
get
{
return mCapabilities.GamePadType;
}
}
#endif
#endregion
#region Methods
#region Constructor
internal Xbox360GamePad(PlayerIndex playerIndex)
{
for (int i = 0; i < mLastButtonPush.Length; i++)
{
mLastButtonPush[i] = -1;
}
mPlayerIndex = playerIndex;
mLeftStick = new AnalogStick();
mRightStick = new AnalogStick();
mLeftTrigger = new AnalogButton();
mRightTrigger = new AnalogButton();
}
#endregion
#region Public Methods
#region Button States
/// <summary>
/// Returns whether any button was pushed on this Xbox360GamePad. This considers face buttons, trigger buttons, shoulder buttons, and the D-Pad.
/// </summary>
/// <returns>Whether any button was pushed.</returns>
public bool AnyButtonPushed()
{
for (int i = 0; i < NumberOfButtons; i++)
{
if (ButtonPushed((Button)i))
{
return true;
}
}
return false;
}
public bool ButtonDown(Button button)
{
if (mButtonsIgnoredForThisFrame[(int)button] || InputManager.CurrentFrameInputSuspended)
return false;
bool returnValue = false;
#region If there is a ButtonMap
if (this.mButtonMap != null)
{
switch (button)
{
case Button.A:
returnValue |= mButtonMap.A != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.A);
break;
case Button.B:
returnValue |= mButtonMap.B != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.B);
break;
case Button.X:
returnValue |= mButtonMap.X != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.X);
break;
case Button.Y:
returnValue |= mButtonMap.Y != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.Y);
break;
case Button.LeftShoulder:
returnValue |= mButtonMap.LeftShoulder != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftShoulder);
break;
case Button.RightShoulder:
returnValue |= mButtonMap.RightShoulder != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightShoulder);
break;
case Button.Back:
returnValue |= mButtonMap.Back != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.Back);
break;
case Button.Start:
returnValue |= mButtonMap.Start != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.Start);
break;
case Button.LeftStick:
returnValue |= mButtonMap.LeftStick != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftStick);
break;
case Button.RightStick:
returnValue |= mButtonMap.RightStick != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightStick);
break;
case Button.DPadUp:
returnValue |= mButtonMap.DPadUp != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.DPadUp);
break;
case Button.DPadDown:
returnValue |= mButtonMap.DPadDown != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.DPadDown);
break;
case Button.DPadLeft:
returnValue |= mButtonMap.DPadLeft != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.DPadLeft);
break;
case Button.DPadRight:
returnValue |= mButtonMap.DPadRight != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.DPadRight);
break;
case Button.LeftTrigger:
returnValue |= mButtonMap.LeftTrigger != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftTrigger);
break;
case Button.RightTrigger:
returnValue |= mButtonMap.RightTrigger != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightTrigger);
break;
//default:
// return false;
}
}
#endregion
#region Handle the buttons if there isn't a ButtonMap (this can happen even if there is a ButtonMap)
switch (button)
{
case Button.A:
returnValue |= mGamePadState.Buttons.A == ButtonState.Pressed;
break;
case Button.B:
returnValue |= mGamePadState.Buttons.B == ButtonState.Pressed;
break;
case Button.X:
returnValue |= mGamePadState.Buttons.X == ButtonState.Pressed;
break;
case Button.Y:
returnValue |= mGamePadState.Buttons.Y == ButtonState.Pressed;
break;
case Button.LeftShoulder:
returnValue |= mGamePadState.Buttons.LeftShoulder == ButtonState.Pressed;
break;
case Button.RightShoulder:
returnValue |= mGamePadState.Buttons.RightShoulder == ButtonState.Pressed;
break;
case Button.Back:
returnValue |= mGamePadState.Buttons.Back == ButtonState.Pressed;
break;
case Button.Start:
returnValue |= mGamePadState.Buttons.Start == ButtonState.Pressed;
break;
case Button.LeftStick:
returnValue |= mGamePadState.Buttons.LeftStick == ButtonState.Pressed;
break;
case Button.RightStick:
returnValue |= mGamePadState.Buttons.RightStick == ButtonState.Pressed;
break;
case Button.DPadUp:
returnValue |= mGamePadState.DPad.Up == ButtonState.Pressed;
break;
case Button.DPadDown:
returnValue |= mGamePadState.DPad.Down == ButtonState.Pressed;
break;
case Button.DPadLeft:
returnValue |= mGamePadState.DPad.Left == ButtonState.Pressed;
break;
case Button.DPadRight:
returnValue |= mGamePadState.DPad.Right == ButtonState.Pressed;
break;
case Button.LeftTrigger:
returnValue |= mLeftTrigger.Position >= AnalogOnThreshold;
break;
case Button.RightTrigger:
returnValue |= mRightTrigger.Position >= AnalogOnThreshold;
break;
}
#endregion
return returnValue;
}
/// <summary>
/// Returns whether the argument button type is pushed. For analog buttons, such as LeftTrigger
/// and RightTrigger, the AnalogOnThreshold value is used to determine if the button is pressed.
/// </summary>
/// <param name="button">The button to check.</param>
/// <returns>true if the button was pushed this frame (down now, but not down last frame); otherwise false.</returns>
public bool ButtonPushed(Button button)
{
#if SILVERLIGHT
return false;
#else
if (InputManager.mIgnorePushesThisFrame || mButtonsIgnoredForThisFrame[(int)button] || InputManager.CurrentFrameInputSuspended)
return false;
bool returnValue = false;
if (this.mButtonMap != null)
{
switch (button)
{
case Button.A:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.A);
break;
case Button.B:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.B);
break;
case Button.X:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.X);
break;
case Button.Y:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.Y);
break;
case Button.LeftShoulder:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.LeftShoulder);
break;
case Button.RightShoulder:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.RightShoulder);
break;
case Button.Back:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.Back);
break;
case Button.Start:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.Start);
break;
case Button.LeftStick:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.LeftStick);
break;
case Button.RightStick:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.RightStick);
break;
case Button.DPadUp:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.DPadUp);
break;
case Button.DPadDown:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.DPadDown);
break;
case Button.DPadLeft:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.DPadLeft);
break;
case Button.DPadRight:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.DPadRight);
break;
case Button.LeftTrigger:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.LeftTrigger);
break;
case Button.RightTrigger:
returnValue |= InputManager.Keyboard.KeyPushed(mButtonMap.RightTrigger);
break;
}
}
switch (button)
{
case Button.A:
returnValue |= mGamePadState.Buttons.A == ButtonState.Pressed && mLastGamePadState.Buttons.A == ButtonState.Released;
break;
case Button.B:
returnValue |= mGamePadState.Buttons.B == ButtonState.Pressed && mLastGamePadState.Buttons.B == ButtonState.Released;
break;
case Button.X:
returnValue |= mGamePadState.Buttons.X == ButtonState.Pressed && mLastGamePadState.Buttons.X == ButtonState.Released;
break;
case Button.Y:
returnValue |= mGamePadState.Buttons.Y == ButtonState.Pressed && mLastGamePadState.Buttons.Y == ButtonState.Released;
break;
case Button.LeftShoulder:
returnValue |= mGamePadState.Buttons.LeftShoulder == ButtonState.Pressed && mLastGamePadState.Buttons.LeftShoulder == ButtonState.Released;
break;
case Button.RightShoulder:
returnValue |= mGamePadState.Buttons.RightShoulder == ButtonState.Pressed && mLastGamePadState.Buttons.RightShoulder == ButtonState.Released;
break;
case Button.Back:
returnValue |= mGamePadState.Buttons.Back == ButtonState.Pressed && mLastGamePadState.Buttons.Back == ButtonState.Released;
break;
case Button.Start:
returnValue |= mGamePadState.Buttons.Start == ButtonState.Pressed && mLastGamePadState.Buttons.Start == ButtonState.Released;
break;
case Button.LeftStick:
returnValue |= mGamePadState.Buttons.LeftStick == ButtonState.Pressed && mLastGamePadState.Buttons.LeftStick == ButtonState.Released;
break;
case Button.RightStick:
returnValue |= mGamePadState.Buttons.RightStick == ButtonState.Pressed && mLastGamePadState.Buttons.RightStick == ButtonState.Released;
break;
case Button.DPadUp:
returnValue |= mGamePadState.DPad.Up == ButtonState.Pressed && mLastGamePadState.DPad.Up == ButtonState.Released;
break;
case Button.DPadDown:
returnValue |= mGamePadState.DPad.Down == ButtonState.Pressed && mLastGamePadState.DPad.Down == ButtonState.Released;
break;
case Button.DPadLeft:
returnValue |= mGamePadState.DPad.Left == ButtonState.Pressed && mLastGamePadState.DPad.Left == ButtonState.Released;
break;
case Button.DPadRight:
returnValue |= mGamePadState.DPad.Right == ButtonState.Pressed && mLastGamePadState.DPad.Right == ButtonState.Released;
break;
case Button.LeftTrigger:
returnValue |= mLeftTrigger.Position >= AnalogOnThreshold && mLeftTrigger.LastPosition < AnalogOnThreshold;
break;
case Button.RightTrigger:
returnValue |= mRightTrigger.Position >= AnalogOnThreshold && mRightTrigger.LastPosition < AnalogOnThreshold;
break;
}
return returnValue;
#endif
}
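/// <summary>
/// Returns whether the argument button was released this frame - that is, the button is up this
/// frame but was down last frame. For analog buttons, such as LeftTrigger and RightTrigger,
/// the AnalogOnThreshold value is used to determine whether the button counts as down.
/// </summary>
/// <param name="button">The button to check.</param>
/// <returns>True if the button was released this frame; otherwise false.</returns>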
public bool ButtonReleased(Button button)
{
if (mButtonsIgnoredForThisFrame[(int)button] || InputManager.CurrentFrameInputSuspended)
return false;
bool returnValue = false;
if (this.mButtonMap != null)
{
switch (button)
{
case Button.A:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.A);
break;
case Button.B:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.B);
break;
case Button.X:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.X);
break;
case Button.Y:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.Y);
break;
case Button.LeftShoulder:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.LeftShoulder);
break;
case Button.RightShoulder:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.RightShoulder);
break;
case Button.Back:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.Back);
break;
case Button.Start:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.Start);
break;
case Button.LeftStick:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.LeftStick);
break;
case Button.RightStick:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.RightStick);
break;
case Button.DPadUp:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.DPadUp);
break;
case Button.DPadDown:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.DPadDown);
break;
case Button.DPadLeft:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.DPadLeft);
break;
case Button.DPadRight:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.DPadRight);
break;
case Button.LeftTrigger:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.LeftTrigger);
break;
case Button.RightTrigger:
returnValue |= InputManager.Keyboard.KeyReleased(mButtonMap.RightTrigger);
break;
}
}
switch (button)
{
case Button.A:
returnValue |= mGamePadState.Buttons.A == ButtonState.Released && mLastGamePadState.Buttons.A == ButtonState.Pressed;
break;
case Button.B:
returnValue |= mGamePadState.Buttons.B == ButtonState.Released && mLastGamePadState.Buttons.B == ButtonState.Pressed;
break;
case Button.X:
returnValue |= mGamePadState.Buttons.X == ButtonState.Released && mLastGamePadState.Buttons.X == ButtonState.Pressed;
break;
case Button.Y:
returnValue |= mGamePadState.Buttons.Y == ButtonState.Released && mLastGamePadState.Buttons.Y == ButtonState.Pressed;
break;
case Button.LeftShoulder:
returnValue |= mGamePadState.Buttons.LeftShoulder == ButtonState.Released && mLastGamePadState.Buttons.LeftShoulder == ButtonState.Pressed;
break;
case Button.RightShoulder:
returnValue |= mGamePadState.Buttons.RightShoulder == ButtonState.Released && mLastGamePadState.Buttons.RightShoulder == ButtonState.Pressed;
break;
case Button.Back:
returnValue |= mGamePadState.Buttons.Back == ButtonState.Released && mLastGamePadState.Buttons.Back == ButtonState.Pressed;
break;
case Button.Start:
returnValue |= mGamePadState.Buttons.Start == ButtonState.Released && mLastGamePadState.Buttons.Start == ButtonState.Pressed;
break;
case Button.LeftStick:
returnValue |= mGamePadState.Buttons.LeftStick == ButtonState.Released && mLastGamePadState.Buttons.LeftStick == ButtonState.Pressed;
break;
case Button.RightStick:
returnValue |= mGamePadState.Buttons.RightStick == ButtonState.Released && mLastGamePadState.Buttons.RightStick == ButtonState.Pressed;
break;
case Button.DPadUp:
returnValue |= mGamePadState.DPad.Up == ButtonState.Released && mLastGamePadState.DPad.Up == ButtonState.Pressed;
break;
case Button.DPadDown:
returnValue |= mGamePadState.DPad.Down == ButtonState.Released && mLastGamePadState.DPad.Down == ButtonState.Pressed;
break;
case Button.DPadLeft:
returnValue |= mGamePadState.DPad.Left == ButtonState.Released && mLastGamePadState.DPad.Left == ButtonState.Pressed;
break;
case Button.DPadRight:
returnValue |= mGamePadState.DPad.Right == ButtonState.Released && mLastGamePadState.DPad.Right == ButtonState.Pressed;
break;
case Button.LeftTrigger:
returnValue |= mLeftTrigger.Position < AnalogOnThreshold && mLeftTrigger.LastPosition >= AnalogOnThreshold;
break;
case Button.RightTrigger:
returnValue |= mRightTrigger.Position < AnalogOnThreshold && mRightTrigger.LastPosition >= AnalogOnThreshold;
break;
}
return returnValue;
}
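/// <summary>
/// Returns whether the argument button should report a repeat "push" this frame. The method
/// returns true on the initial push, then repeatedly while the button is held: first after a
/// default delay of .35 seconds, then every .12 seconds thereafter.
/// </summary>
/// <remarks>
/// Illustrative usage sketch (the gamePad and selectedIndex names are hypothetical, not part of this API):
/// <code>
/// // Scroll a menu selection while DPadDown is held, honoring the repeat rate.
/// if (gamePad.ButtonRepeatRate(Button.DPadDown))
/// {
///     selectedIndex++;
/// }
/// </code>
/// </remarks>
/// <param name="button">The button to check.</param>
/// <returns>True if the button was pushed or is repeating this frame; otherwise false.</returns>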
public bool ButtonRepeatRate(Button button)
{
// Ignoring is performed inside this call.
return ButtonRepeatRate(button, .35, .12);
}
public bool ButtonRepeatRate(Button button, double timeAfterPush, double timeBetweenRepeating)
{
if (mButtonsIgnoredForThisFrame[(int)button])
return false;
if (ButtonPushed(button))
return true;
// If this method is called multiple times per frame, this check
// guarantees that every call returns true until the next
// TimeManager.Update (that is, until the next frame).
bool repeatedThisFrame = mLastButtonPush[(int)button] == TimeManager.CurrentTime;
if (repeatedThisFrame ||
(
ButtonDown(button) &&
TimeManager.CurrentTime - mLastButtonPush[(int)button] > timeAfterPush &&
TimeManager.CurrentTime - mLastRepeatRate[(int)button] > timeBetweenRepeating)
)
{
mLastRepeatRate[(int)button] = TimeManager.CurrentTime;
return true;
}
return false;
}
public Xbox360ButtonReference GetButton(Button button)
{
var toReturn = new Xbox360ButtonReference();
toReturn.Button = button;
toReturn.GamePad = this;
return toReturn;
}
#endregion
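/// <summary>
/// Clears the stored game pad state. Any button that reports a push at the time of the call is
/// ignored for the remainder of the frame, and the analog sticks and triggers are reset.
/// </summary>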
public void Clear()
{
mGamePadState = new GamePadState();
mLastGamePadState = new GamePadState();
for (int i = 0; i < NumberOfButtons; i++)
{
if (ButtonPushed((Button)i))
{
IgnoreButtonForOneFrame((Button)i);
}
}
mLeftStick.Clear();
mRightStick.Clear();
mLeftTrigger.Clear();
mRightTrigger.Clear();
}
#region Control Positioned Object
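/// <summary>
/// Sets the argument PositionedObject's X and Y velocity from the left stick using a default
/// maximum speed of 10 units per second. The shoulder buttons move the object along the Z axis.
/// </summary>
/// <param name="positionedObject">The object to control.</param>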
public void ControlPositionedObject(PositionedObject positionedObject)
{
ControlPositionedObject(positionedObject, 10);
}
public void ControlPositionedObject(PositionedObject positionedObject, float velocity)
{
positionedObject.XVelocity = this.LeftStick.Position.X * velocity;
positionedObject.YVelocity = this.LeftStick.Position.Y * velocity;
if (ButtonDown(Button.LeftShoulder))
positionedObject.ZVelocity = velocity;
else if (ButtonDown(Button.RightShoulder))
positionedObject.ZVelocity = -velocity;
else
positionedObject.ZVelocity = 0;
}
public void ControlPositionedObjectDPad(PositionedObject positionedObject, float velocity)
{
if (ButtonDown(Button.DPadLeft))
positionedObject.XVelocity = -velocity;
else if (ButtonDown(Button.DPadRight))
positionedObject.XVelocity = velocity;
else
positionedObject.XVelocity = 0;
if (ButtonDown(Button.DPadUp))
positionedObject.YVelocity = velocity;
else if (ButtonDown(Button.DPadDown))
positionedObject.YVelocity = -velocity;
else
positionedObject.YVelocity = 0;
}
public void ControlPositionedObjectAcceleration(PositionedObject positionedObject, float acceleration)
{
positionedObject.Acceleration.X = this.LeftStick.Position.X * acceleration;
positionedObject.Acceleration.Y = this.LeftStick.Position.Y * acceleration;
}
public void ControlPositionedObjectFpsStyle(PositionedObject positionedObject, Vector3 up)
{
positionedObject.Velocity = new Vector3();
positionedObject.Velocity += positionedObject.RotationMatrix.Forward * LeftStick.Position.Y * 7;
positionedObject.Velocity += positionedObject.RotationMatrix.Right * LeftStick.Position.X * 7;
positionedObject.RotationMatrix *= Matrix.CreateFromAxisAngle(positionedObject.RotationMatrix.Right, TimeManager.SecondDifference * RightStick.Position.Y);
positionedObject.RotationMatrix *= Matrix.CreateFromAxisAngle(up, -TimeManager.SecondDifference * RightStick.Position.X);
}
#endregion
/// <summary>
/// Creates a ButtonMap for this controller using the default bindings. This is
/// a quick way to simulate an Xbox360 controller using the keyboard.
/// </summary>
/// <remarks>
/// This creates the following bindings:
/// * Left analog stick = arrow keys
/// * A button = A key
/// * B button = S key
/// * X button = Q key
/// * Y button = W key
/// * Left trigger = E key
/// * Right trigger = R key
/// * Left shoulder = D key
/// * Right shoulder = F key
/// * Back button = Backspace key
/// * Start button = Enter key
///
/// This will not simulate that the controller is connected, so you will have to set
/// FakeIsConnected to true if your game checks the connected state.
/// </remarks>
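/// <example>
/// A minimal sketch of simulating player one's gamepad with the keyboard, assuming the gamepads
/// are accessed through InputManager.Xbox360GamePads (the FakeIsConnected assignment is only
/// needed if your game checks the connected state):
/// <code>
/// InputManager.Xbox360GamePads[0].CreateDefaultButtonMap();
/// InputManager.Xbox360GamePads[0].FakeIsConnected = true;
/// </code>
/// </example>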
public void CreateDefaultButtonMap()
{
this.ButtonMap = new KeyboardButtonMap();
ButtonMap.LeftAnalogLeft = Keys.Left;
ButtonMap.LeftAnalogRight = Keys.Right;
ButtonMap.LeftAnalogUp = Keys.Up;
ButtonMap.LeftAnalogDown = Keys.Down;
ButtonMap.A = Keys.A;
ButtonMap.B = Keys.S;
ButtonMap.X = Keys.Q;
ButtonMap.Y = Keys.W;
ButtonMap.LeftTrigger = Keys.E;
ButtonMap.RightTrigger = Keys.R;
ButtonMap.LeftShoulder = Keys.D;
ButtonMap.RightShoulder = Keys.F;
ButtonMap.Back = Keys.Back;
ButtonMap.Start = Keys.Enter;
}
/// <summary>
/// Makes this Xbox360Gamepad ignore the argument button for the rest of the current frame.
/// </summary>
/// <param name="buttonToIgnore">The button that should be ignored for the rest of the current frame.</param>
public void IgnoreButtonForOneFrame(Button buttonToIgnore)
{
mButtonsIgnoredForThisFrame[(int)buttonToIgnore] = true;
}
/// <summary>
/// Updates the Xbox360Gamepad according to the argument gamepadState. This is publicly available for games
/// which need to simulate Xbox360Gamepads.
/// </summary>
/// <remarks>
/// This function is normally called automatically by the FlatRedBall Engine
/// in its regular update loop. You only need to call this function if you want
/// to override the behavior of the gamepad. Be sure to call this function after
/// FlatRedBallServices.Update, but before any custom game logic (such as ScreenManager.Activity).
/// </remarks>
/// <param name="gamepadState">The state containing the data for this frame.</param>
public void Update(GamePadState gamepadState)
{
UpdateInputManagerBack();
mLastGamePadState = mGamePadState;
mGamePadState = gamepadState;
UpdateAnalogStickAndTriggerValues();
UpdateLastButtonPushedValues();
}
private void UpdateLastButtonPushedValues()
{
// Set the last pushed and clear the ignored input
for (int i = 0; i < NumberOfButtons; i++)
{
mButtonsIgnoredForThisFrame[i] = false;
if (ButtonPushed((Button)i))
{
mLastButtonPush[i] = TimeManager.CurrentTime;
}
}
}
private void UpdateAnalogStickAndTriggerValues()
{
if (mButtonMap == null)
{
mLeftStick.Update(mGamePadState.ThumbSticks.Left);
mRightStick.Update(mGamePadState.ThumbSticks.Right);
mLeftTrigger.Update(mGamePadState.Triggers.Left);
mRightTrigger.Update(mGamePadState.Triggers.Right);
}
else
{
Vector2 newPosition = new Vector2();
#region Set the left analog stick position
if (mButtonMap.LeftAnalogLeft != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftAnalogLeft))
{
newPosition.X = -1;
}
else if (mButtonMap.LeftAnalogRight != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftAnalogRight))
{
newPosition.X = 1;
}
if (mButtonMap.LeftAnalogUp != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftAnalogUp))
{
newPosition.Y = 1;
}
else if (mButtonMap.LeftAnalogDown != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftAnalogDown))
{
newPosition.Y = -1;
}
// Cap for diagonal presses so the combined keyboard input does not exceed the analog stick's maximum magnitude
if (System.Math.Abs(newPosition.X) > 0.7071068f
&& System.Math.Abs(newPosition.Y) > 0.7071068f)
{
newPosition.X = System.Math.Sign(newPosition.X) * 0.7071068f;
newPosition.Y = System.Math.Sign(newPosition.Y) * 0.7071068f;
}
mLeftStick.Update(newPosition);
#endregion
#region Set the right analog stick position
newPosition = new Vector2();
if (mButtonMap.RightAnalogLeft != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightAnalogLeft))
{
newPosition.X = -1;
}
else if (mButtonMap.RightAnalogRight != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightAnalogRight))
{
newPosition.X = 1;
}
if (mButtonMap.RightAnalogUp != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightAnalogUp))
{
newPosition.Y = 1;
}
else if (mButtonMap.RightAnalogDown != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightAnalogDown))
{
newPosition.Y = -1;
}
// Cap for diagonal presses so the combined keyboard input does not exceed the analog stick's maximum magnitude
if (System.Math.Abs(newPosition.X) > 0.7071068f
&& System.Math.Abs(newPosition.Y) > 0.7071068f)
{
newPosition.X = System.Math.Sign(newPosition.X) * 0.7071068f;
newPosition.Y = System.Math.Sign(newPosition.Y) * 0.7071068f;
}
mRightStick.Update(newPosition);
#endregion
#region Set the trigger positions
float newAnalogPosition = 0;
if (mButtonMap.LeftTrigger != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.LeftTrigger))
{
newAnalogPosition = 1;
}
else
{
newAnalogPosition = 0;
}
mLeftTrigger.Update(newAnalogPosition);
if (mButtonMap.RightTrigger != Keys.None && InputManager.Keyboard.KeyDown(mButtonMap.RightTrigger))
{
newAnalogPosition = 1;
}
else
{
newAnalogPosition = 0;
}
mRightTrigger.Update(newAnalogPosition);
#endregion
// Button remapping is used when the methods for push, release, and down are called.
// Nothing to do here.
}
}
private void UpdateInputManagerBack()
{
#if WINDOWS_PHONE || MONOGAME
if (mGamePadState.IsButtonDown(Buttons.Back) && !mLastGamePadState.IsButtonDown(Buttons.Back))
{
InputManager.BackPressed = true;
}
#endif
}
#region XML Docs
/// <summary>
/// Sets the vibration of the game pad.
/// </summary>
/// <param name="leftMotor">The low-frequency motor. Set between 0.0f and 1.0f</param>
/// <param name="rightMotor">The high-frequency motor. Set between 0.0f and 1.0f</param>
/// <returns>True if the vibration motors were successfully set; false if the controller
/// was unable to process the request.
/// </returns>
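/// <example>
/// An illustrative rumble pulse (the gamePad name and the 0.5f values are arbitrary placeholders):
/// <code>
/// gamePad.SetVibration(0.5f, 0.5f);   // start vibrating both motors at half strength
/// gamePad.SetVibration(0f, 0f);       // stop vibrating
/// </code>
/// </example>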
#endregion
public bool SetVibration(float leftMotor, float rightMotor)
{
#if IMPLEMENT_INTERNALS
return Microsoft.Xna.Framework.Input.GamePad.SetVibration(
mPlayerIndex, leftMotor, rightMotor);
#else
return false;
#endif
}
public override string ToString()
{
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.Append("Left Stick").Append(mLeftStick);
return stringBuilder.ToString();
}
#endregion
#region Internal Methods
internal void Update()
{
GamePadState gamepadState;
#if SILVERLIGHT || MONOGAME
gamepadState = Microsoft.Xna.Framework.Input.GamePad.GetState(mPlayerIndex);//, GamePadDeadZone.Circular);
#else
gamepadState = Microsoft.Xna.Framework.Input.GamePad.GetState(mPlayerIndex, GamePadDeadZone.Circular);
mCapabilities = Microsoft.Xna.Framework.Input.GamePad.GetCapabilities(mPlayerIndex);
#endif
Update(gamepadState);
}
#endregion
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Pipelines;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Connections.Features;
using Microsoft.AspNetCore.Http.Features;
namespace ClientSample
{
public class TcpConnection : ConnectionContext, IConnectionInherentKeepAliveFeature
{
private readonly Socket _socket;
private volatile bool _aborted;
private readonly EndPoint _endPoint;
private IDuplexPipe _application;
private readonly SocketSender _sender;
private readonly SocketReceiver _receiver;
public TcpConnection(EndPoint endPoint)
{
_socket = new Socket(SocketType.Stream, ProtocolType.Tcp);
_endPoint = endPoint;
_sender = new SocketSender(_socket, PipeScheduler.ThreadPool);
_receiver = new SocketReceiver(_socket, PipeScheduler.ThreadPool);
// Add IConnectionInherentKeepAliveFeature to the TCP connection implementation since Kestrel
// doesn't implement the IConnectionHeartbeatFeature.
Features.Set<IConnectionInherentKeepAliveFeature>(this);
}
public override IDuplexPipe Transport { get; set; }
public override IFeatureCollection Features { get; } = new FeatureCollection();
public override string ConnectionId { get; set; } = Guid.NewGuid().ToString();
public override IDictionary<object, object> Items { get; set; } = new ConnectionItems();
// We claim to have inherent keep-alive so the client doesn't kill the connection when it hasn't seen ping frames.
public bool HasInherentKeepAlive { get; } = true;
public override ValueTask DisposeAsync()
{
Transport?.Output.Complete();
Transport?.Input.Complete();
_socket?.Dispose();
return default;
}
public async ValueTask<ConnectionContext> StartAsync()
{
await _socket.ConnectAsync(_endPoint);
var pair = DuplexPipe.CreateConnectionPair(PipeOptions.Default, PipeOptions.Default);
Transport = pair.Transport;
_application = pair.Application;
_ = ExecuteAsync();
return this;
}
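// Usage sketch (illustrative only; the endpoint and payload below are placeholders):
//
//   var connection = new TcpConnection(new IPEndPoint(IPAddress.Loopback, 5000));
//   await connection.StartAsync();
//   // Write raw bytes through the transport pipe...
//   await connection.Transport.Output.WriteAsync(new byte[] { 1, 2, 3 });
//   // ...and read whatever the server sends back.
//   ReadResult result = await connection.Transport.Input.ReadAsync();
//   connection.Transport.Input.AdvanceTo(result.Buffer.End);
//   await connection.DisposeAsync();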
private async Task ExecuteAsync()
{
Exception sendError = null;
try
{
// Spawn send and receive logic
var receiveTask = DoReceive();
var sendTask = DoSend();
// If the sending task completes then close the receive side.
// We don't need to do this in the other direction because Kestrel
// will trigger the output closing once the input is complete.
if (await Task.WhenAny(receiveTask, sendTask) == sendTask)
{
// Tell the reader it's being aborted
_socket.Dispose();
}
// Now wait for both to complete
await receiveTask;
sendError = await sendTask;
// Dispose the socket (should be a no-op if already disposed)
_socket.Dispose();
}
catch (Exception ex)
{
Console.WriteLine($"Unexpected exception in {nameof(TcpConnection)}.{nameof(StartAsync)}: " + ex);
}
finally
{
// Complete the output after disposing the socket
_application.Input.Complete(sendError);
}
}
private async Task DoReceive()
{
Exception error = null;
try
{
await ProcessReceives();
}
catch (SocketException ex) when (ex.SocketErrorCode == SocketError.ConnectionReset)
{
error = new ConnectionResetException(ex.Message, ex);
}
catch (SocketException ex) when (ex.SocketErrorCode == SocketError.OperationAborted ||
ex.SocketErrorCode == SocketError.ConnectionAborted ||
ex.SocketErrorCode == SocketError.Interrupted ||
ex.SocketErrorCode == SocketError.InvalidArgument)
{
if (!_aborted)
{
// Calling Dispose after ReceiveAsync can cause an "InvalidArgument" error on *nix.
error = new ConnectionAbortedException();
}
}
catch (ObjectDisposedException)
{
if (!_aborted)
{
error = new ConnectionAbortedException();
}
}
catch (IOException ex)
{
error = ex;
}
catch (Exception ex)
{
error = new IOException(ex.Message, ex);
}
finally
{
if (_aborted)
{
error = error ?? new ConnectionAbortedException();
}
_application.Output.Complete(error);
}
}
private async Task ProcessReceives()
{
while (true)
{
// Ensure we have some reasonable amount of buffer space
var buffer = _application.Output.GetMemory();
var bytesReceived = await _receiver.ReceiveAsync(buffer);
if (bytesReceived == 0)
{
// FIN
break;
}
_application.Output.Advance(bytesReceived);
var result = await _application.Output.FlushAsync();
if (result.IsCompleted)
{
// Pipe consumer is shut down, so we stop writing
break;
}
}
}
private async Task<Exception> DoSend()
{
Exception error = null;
try
{
await ProcessSends();
}
catch (SocketException ex) when (ex.SocketErrorCode == SocketError.OperationAborted)
{
error = null;
}
catch (ObjectDisposedException)
{
error = null;
}
catch (IOException ex)
{
error = ex;
}
catch (Exception ex)
{
error = new IOException(ex.Message, ex);
}
finally
{
_aborted = true;
_socket.Shutdown(SocketShutdown.Both);
}
return error;
}
private async Task ProcessSends()
{
while (true)
{
// Wait for data to write from the pipe producer
var result = await _application.Input.ReadAsync();
var buffer = result.Buffer;
if (result.IsCanceled)
{
break;
}
var end = buffer.End;
var isCompleted = result.IsCompleted;
if (!buffer.IsEmpty)
{
await _sender.SendAsync(buffer);
}
_application.Input.AdvanceTo(end);
if (isCompleted)
{
break;
}
}
}
}
}
| |
//
// Mono.Cxxi.Abi.VTable.cs: Managed VTable Implementation
//
// Author:
// Alexander Corrado ([email protected])
// Andreia Gaita ([email protected])
//
// Copyright (C) 2010-2011 Alexander Corrado
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Diagnostics;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.InteropServices;
namespace Mono.Cxxi.Abi {
// TODO: RTTI .. support virtual inheritance
public class VTable : IDisposable {
protected bool initialized;
protected CppTypeInfo type_info;
protected IntPtr vtPtr;
public virtual int EntryCount {
get { return type_info.VirtualMethods.Count; }
}
public virtual int EntrySize {
get { return IntPtr.Size; }
}
// Subclasses should allocate vtPtr and then call WriteOverrides
public VTable (CppTypeInfo typeInfo)
{
this.initialized = false;
this.type_info = typeInfo;
this.vtPtr = Marshal.AllocHGlobal ((EntryCount * EntrySize) + typeInfo.VTableTopPadding + typeInfo.VTableBottomPadding);
WriteOverrides ();
CppInstancePtr.RegisterManagedVTable (this);
}
protected virtual void WriteOverrides ()
{
IntPtr vtEntryPtr;
int currentOffset = type_info.VTableTopPadding;
for (int i = 0; i < EntryCount; i++) {
Delegate currentOverride = type_info.VTableOverrides [i];
if (currentOverride != null) // managed override
vtEntryPtr = Marshal.GetFunctionPointerForDelegate (currentOverride);
else
vtEntryPtr = IntPtr.Zero;
Marshal.WriteIntPtr (vtPtr, currentOffset, vtEntryPtr);
currentOffset += EntrySize;
}
}
public virtual T GetVirtualCallDelegate<T> (CppInstancePtr instance, int index)
where T : class /*Delegate*/
{
var vtable = instance.NativeVTable;
var ftnptr = Marshal.ReadIntPtr (vtable, (index * EntrySize) + type_info.VTableTopPadding);
if (ftnptr == IntPtr.Zero)
throw new NullReferenceException ("Native VTable contains a null entry... possibly an abstract class?");
var del = Marshal.GetDelegateForFunctionPointer (ftnptr, typeof (T));
return del as T;
}
// FIXME: Make this method unsafe; it would probably be much faster
public virtual void InitInstance (ref CppInstancePtr instance)
{
var basePtr = Marshal.ReadIntPtr (instance.Native);
Debug.Assert (basePtr != IntPtr.Zero);
if (basePtr == vtPtr)
return;
instance.NativeVTable = basePtr;
if (!initialized) {
// FIXME: This could probably be a more efficient memcpy
for (int i = 0; i < type_info.VTableTopPadding; i++)
Marshal.WriteByte(vtPtr, i, Marshal.ReadByte(basePtr, i));
int currentOffset = type_info.VTableTopPadding;
for (int i = 0; i < EntryCount; i++) {
if (Marshal.ReadIntPtr (vtPtr, currentOffset) == IntPtr.Zero)
Marshal.WriteIntPtr (vtPtr, currentOffset, Marshal.ReadIntPtr (basePtr, currentOffset));
currentOffset += EntrySize;
}
// FIXME: This could probably be a more efficient memcpy
for (int i = 0; i < type_info.VTableBottomPadding; i++)
Marshal.WriteByte(vtPtr, currentOffset + i, Marshal.ReadByte(basePtr, currentOffset + i));
initialized = true;
}
Marshal.WriteIntPtr (instance.Native, vtPtr);
}
public virtual void ResetInstance (CppInstancePtr instance)
{
Marshal.WriteIntPtr (instance.Native, instance.NativeVTable);
}
public CppTypeInfo TypeInfo {
get { return type_info; }
}
public IntPtr Pointer {
get { return vtPtr; }
}
protected virtual void Dispose (bool disposing)
{
if (vtPtr != IntPtr.Zero) {
Marshal.FreeHGlobal (vtPtr);
vtPtr = IntPtr.Zero;
}
}
// TODO: This WON'T usually be called because VTables are associated with classes
// (not instances) and managed C++ class wrappers are statically held?
public void Dispose ()
{
Dispose (true);
GC.SuppressFinalize (this);
}
~VTable ()
{
Dispose (false);
}
public static bool BindToSignatureAndAttribute (MemberInfo member, object obj)
{
var overrideNative = member.GetCustomAttributes (typeof (OverrideNativeAttribute), true);
if (overrideNative.Length == 0)
return false;
var name = ((OverrideNativeAttribute)overrideNative [0]).NativeMethod ?? member.Name;
return BindToSignature (member, obj, name);
}
public static bool BindToSignature (MemberInfo member, object obj)
{
return BindToSignature (member, obj, member.Name);
}
public static bool BindToSignature (MemberInfo member, object obj, string nativeMethod)
{
MethodInfo imethod = (MethodInfo) obj;
MethodInfo candidate = (MethodInfo) member;
if (nativeMethod != imethod.Name)
return false;
ParameterInfo[] invokeParams = imethod.GetParameters ();
ParameterInfo[] methodParams = candidate.GetParameters ();
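// Two shapes are accepted: an exact parameter-for-parameter match, or a delegate whose first
// parameter is the extra native instance ("this") pointer followed by the managed parameters.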
if (invokeParams.Length == methodParams.Length) {
for (int i = 0; i < invokeParams.Length; i++) {
if (!invokeParams [i].ParameterType.IsAssignableFrom (methodParams [i].ParameterType))
return false;
}
} else if (invokeParams.Length == methodParams.Length + 1) {
for (int i = 1; i < invokeParams.Length; i++) {
if (!invokeParams [i].ParameterType.IsAssignableFrom (methodParams [i - 1].ParameterType))
return false;
}
} else
return false;
return true;
}
}
}
| |
using System.Threading;
using MetaDslx.Compiler;
using MetaDslx.Compiler.Syntax;
using MetaDslx.Compiler.Text;
namespace MetaDslx.Languages.Soal.Syntax
{
public enum SoalTokenKind : int
{
None = 0,
Comment,
Identifier,
Keyword,
Number,
String,
Whitespace
}
public enum SoalLexerMode : int
{
None = 0,
DEFAULT_MODE = 0,
MULTILINE_COMMENT = 1,
DOUBLEQUOTE_VERBATIM_STRING = 2,
SINGLEQUOTE_VERBATIM_STRING = 3
}
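/// <summary>
/// Provides token classification (keyword, identifier, number, string, whitespace, comment) and
/// fixed-token text lookup for the Soal language.
/// </summary>
/// <example>
/// An illustrative round trip between token text and syntax kind:
/// <code>
/// var facts = SoalSyntaxFacts.Instance;
/// SoalSyntaxKind kind = facts.GetKind("namespace");   // SoalSyntaxKind.KNamespace
/// string text = facts.GetText(kind);                  // "namespace"
/// bool isKeyword = facts.IsKeyword(kind);             // true
/// </code>
/// </example>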
public class SoalSyntaxFacts : SyntaxFacts
{
public static readonly SoalSyntaxFacts Instance = new SoalSyntaxFacts();
protected override int DefaultEndOfLineSyntaxKindCore
{
get { return (int)SoalSyntaxKind.LCrLf; }
}
protected override int DefaultWhitespaceSyntaxKindCore
{
get { return (int)SoalSyntaxKind.LWhiteSpace; }
}
public override bool IsToken(int rawKind)
{
return this.IsToken((SoalSyntaxKind)rawKind);
}
public bool IsToken(SoalSyntaxKind kind)
{
switch (kind)
{
case SoalSyntaxKind.Eof:
case SoalSyntaxKind.KNamespace:
case SoalSyntaxKind.KEnum:
case SoalSyntaxKind.KException:
case SoalSyntaxKind.KStruct:
case SoalSyntaxKind.KInterface:
case SoalSyntaxKind.KThrows:
case SoalSyntaxKind.KOneway:
case SoalSyntaxKind.KReturn:
case SoalSyntaxKind.KBinding:
case SoalSyntaxKind.KTransport:
case SoalSyntaxKind.KEncoding:
case SoalSyntaxKind.KProtocol:
case SoalSyntaxKind.KEndpoint:
case SoalSyntaxKind.KAddress:
case SoalSyntaxKind.KDatabase:
case SoalSyntaxKind.KEntity:
case SoalSyntaxKind.KAbstract:
case SoalSyntaxKind.KComponent:
case SoalSyntaxKind.KComposite:
case SoalSyntaxKind.KReference:
case SoalSyntaxKind.KService:
case SoalSyntaxKind.KWire:
case SoalSyntaxKind.KTo:
case SoalSyntaxKind.KImplementation:
case SoalSyntaxKind.KLanguage:
case SoalSyntaxKind.KAssembly:
case SoalSyntaxKind.KDeployment:
case SoalSyntaxKind.KEnvironment:
case SoalSyntaxKind.KRuntime:
case SoalSyntaxKind.KNull:
case SoalSyntaxKind.KTrue:
case SoalSyntaxKind.KFalse:
case SoalSyntaxKind.KObject:
case SoalSyntaxKind.KString:
case SoalSyntaxKind.KInt:
case SoalSyntaxKind.KLong:
case SoalSyntaxKind.KFloat:
case SoalSyntaxKind.KDouble:
case SoalSyntaxKind.KByte:
case SoalSyntaxKind.KBool:
case SoalSyntaxKind.KAny:
case SoalSyntaxKind.KTypeof:
case SoalSyntaxKind.KVoid:
case SoalSyntaxKind.TSemicolon:
case SoalSyntaxKind.TColon:
case SoalSyntaxKind.TDot:
case SoalSyntaxKind.TComma:
case SoalSyntaxKind.TAssign:
case SoalSyntaxKind.TOpenParen:
case SoalSyntaxKind.TCloseParen:
case SoalSyntaxKind.TOpenBracket:
case SoalSyntaxKind.TCloseBracket:
case SoalSyntaxKind.TOpenBrace:
case SoalSyntaxKind.TCloseBrace:
case SoalSyntaxKind.TLessThan:
case SoalSyntaxKind.TGreaterThan:
case SoalSyntaxKind.TQuestion:
case SoalSyntaxKind.TQuestionQuestion:
case SoalSyntaxKind.TAmpersand:
case SoalSyntaxKind.THat:
case SoalSyntaxKind.TBar:
case SoalSyntaxKind.TAndAlso:
case SoalSyntaxKind.TOrElse:
case SoalSyntaxKind.TPlusPlus:
case SoalSyntaxKind.TMinusMinus:
case SoalSyntaxKind.TPlus:
case SoalSyntaxKind.TMinus:
case SoalSyntaxKind.TTilde:
case SoalSyntaxKind.TExclamation:
case SoalSyntaxKind.TSlash:
case SoalSyntaxKind.TAsterisk:
case SoalSyntaxKind.TPercent:
case SoalSyntaxKind.TLessThanOrEqual:
case SoalSyntaxKind.TGreaterThanOrEqual:
case SoalSyntaxKind.TEqual:
case SoalSyntaxKind.TNotEqual:
case SoalSyntaxKind.TAsteriskAssign:
case SoalSyntaxKind.TSlashAssign:
case SoalSyntaxKind.TPercentAssign:
case SoalSyntaxKind.TPlusAssign:
case SoalSyntaxKind.TMinusAssign:
case SoalSyntaxKind.TLeftShiftAssign:
case SoalSyntaxKind.TRightShiftAssign:
case SoalSyntaxKind.TAmpersandAssign:
case SoalSyntaxKind.THatAssign:
case SoalSyntaxKind.TBarAssign:
case SoalSyntaxKind.IDate:
case SoalSyntaxKind.ITime:
case SoalSyntaxKind.IDateTime:
case SoalSyntaxKind.ITimeSpan:
case SoalSyntaxKind.IVersion:
case SoalSyntaxKind.IStyle:
case SoalSyntaxKind.IMTOM:
case SoalSyntaxKind.ISSL:
case SoalSyntaxKind.IHTTP:
case SoalSyntaxKind.IREST:
case SoalSyntaxKind.IWebSocket:
case SoalSyntaxKind.ISOAP:
case SoalSyntaxKind.IXML:
case SoalSyntaxKind.IJSON:
case SoalSyntaxKind.IClientAuthentication:
case SoalSyntaxKind.IWsAddressing:
case SoalSyntaxKind.IdentifierNormal:
case SoalSyntaxKind.IdentifierVerbatim:
case SoalSyntaxKind.LInteger:
case SoalSyntaxKind.LDecimal:
case SoalSyntaxKind.LScientific:
case SoalSyntaxKind.LDateTimeOffset:
case SoalSyntaxKind.LDateTime:
case SoalSyntaxKind.LDate:
case SoalSyntaxKind.LTime:
case SoalSyntaxKind.LRegularString:
case SoalSyntaxKind.LGuid:
case SoalSyntaxKind.LUtf8Bom:
case SoalSyntaxKind.LWhiteSpace:
case SoalSyntaxKind.LCrLf:
case SoalSyntaxKind.LLineEnd:
case SoalSyntaxKind.LSingleLineComment:
case SoalSyntaxKind.LMultiLineComment:
case SoalSyntaxKind.LDoubleQuoteVerbatimString:
case SoalSyntaxKind.LSingleQuoteVerbatimString:
case SoalSyntaxKind.DoubleQuoteVerbatimStringLiteralStart:
case SoalSyntaxKind.SingleQuoteVerbatimStringLiteralStart:
case SoalSyntaxKind.LCommentStart:
return true;
default:
return false;
}
}
public override bool IsFixedToken(int rawKind)
{
return this.IsFixedToken((SoalSyntaxKind)rawKind);
}
public bool IsFixedToken(SoalSyntaxKind kind)
{
switch (kind)
{
case SoalSyntaxKind.Eof:
case SoalSyntaxKind.KNamespace:
case SoalSyntaxKind.KEnum:
case SoalSyntaxKind.KException:
case SoalSyntaxKind.KStruct:
case SoalSyntaxKind.KInterface:
case SoalSyntaxKind.KThrows:
case SoalSyntaxKind.KOneway:
case SoalSyntaxKind.KReturn:
case SoalSyntaxKind.KBinding:
case SoalSyntaxKind.KTransport:
case SoalSyntaxKind.KEncoding:
case SoalSyntaxKind.KProtocol:
case SoalSyntaxKind.KEndpoint:
case SoalSyntaxKind.KAddress:
case SoalSyntaxKind.KDatabase:
case SoalSyntaxKind.KEntity:
case SoalSyntaxKind.KAbstract:
case SoalSyntaxKind.KComponent:
case SoalSyntaxKind.KComposite:
case SoalSyntaxKind.KReference:
case SoalSyntaxKind.KService:
case SoalSyntaxKind.KWire:
case SoalSyntaxKind.KTo:
case SoalSyntaxKind.KImplementation:
case SoalSyntaxKind.KLanguage:
case SoalSyntaxKind.KAssembly:
case SoalSyntaxKind.KDeployment:
case SoalSyntaxKind.KEnvironment:
case SoalSyntaxKind.KRuntime:
case SoalSyntaxKind.KNull:
case SoalSyntaxKind.KTrue:
case SoalSyntaxKind.KFalse:
case SoalSyntaxKind.KObject:
case SoalSyntaxKind.KString:
case SoalSyntaxKind.KInt:
case SoalSyntaxKind.KLong:
case SoalSyntaxKind.KFloat:
case SoalSyntaxKind.KDouble:
case SoalSyntaxKind.KByte:
case SoalSyntaxKind.KBool:
case SoalSyntaxKind.KAny:
case SoalSyntaxKind.KTypeof:
case SoalSyntaxKind.KVoid:
case SoalSyntaxKind.TSemicolon:
case SoalSyntaxKind.TColon:
case SoalSyntaxKind.TDot:
case SoalSyntaxKind.TComma:
case SoalSyntaxKind.TAssign:
case SoalSyntaxKind.TOpenParen:
case SoalSyntaxKind.TCloseParen:
case SoalSyntaxKind.TOpenBracket:
case SoalSyntaxKind.TCloseBracket:
case SoalSyntaxKind.TOpenBrace:
case SoalSyntaxKind.TCloseBrace:
case SoalSyntaxKind.TLessThan:
case SoalSyntaxKind.TGreaterThan:
case SoalSyntaxKind.TQuestion:
case SoalSyntaxKind.TQuestionQuestion:
case SoalSyntaxKind.TAmpersand:
case SoalSyntaxKind.THat:
case SoalSyntaxKind.TBar:
case SoalSyntaxKind.TAndAlso:
case SoalSyntaxKind.TOrElse:
case SoalSyntaxKind.TPlusPlus:
case SoalSyntaxKind.TMinusMinus:
case SoalSyntaxKind.TPlus:
case SoalSyntaxKind.TMinus:
case SoalSyntaxKind.TTilde:
case SoalSyntaxKind.TExclamation:
case SoalSyntaxKind.TSlash:
case SoalSyntaxKind.TPercent:
case SoalSyntaxKind.TLessThanOrEqual:
case SoalSyntaxKind.TGreaterThanOrEqual:
case SoalSyntaxKind.TEqual:
case SoalSyntaxKind.TNotEqual:
case SoalSyntaxKind.TAsteriskAssign:
case SoalSyntaxKind.TSlashAssign:
case SoalSyntaxKind.TPercentAssign:
case SoalSyntaxKind.TPlusAssign:
case SoalSyntaxKind.TMinusAssign:
case SoalSyntaxKind.TLeftShiftAssign:
case SoalSyntaxKind.TRightShiftAssign:
case SoalSyntaxKind.TAmpersandAssign:
case SoalSyntaxKind.THatAssign:
case SoalSyntaxKind.TBarAssign:
case SoalSyntaxKind.IDate:
case SoalSyntaxKind.ITime:
case SoalSyntaxKind.IDateTime:
case SoalSyntaxKind.ITimeSpan:
case SoalSyntaxKind.IVersion:
case SoalSyntaxKind.IStyle:
case SoalSyntaxKind.IMTOM:
case SoalSyntaxKind.ISSL:
case SoalSyntaxKind.IHTTP:
case SoalSyntaxKind.IREST:
case SoalSyntaxKind.IWebSocket:
case SoalSyntaxKind.ISOAP:
case SoalSyntaxKind.IXML:
case SoalSyntaxKind.IJSON:
case SoalSyntaxKind.IClientAuthentication:
case SoalSyntaxKind.IWsAddressing:
case SoalSyntaxKind.DoubleQuoteVerbatimStringLiteralStart:
case SoalSyntaxKind.SingleQuoteVerbatimStringLiteralStart:
case SoalSyntaxKind.LCommentStart:
case SoalSyntaxKind.LDoubleQuoteVerbatimString:
case SoalSyntaxKind.LSingleQuoteVerbatimString:
return true;
default:
return false;
}
}
public override string GetText(int rawKind)
{
return this.GetText((SoalSyntaxKind)rawKind);
}
public string GetText(SoalSyntaxKind kind)
{
switch (kind)
{
case SoalSyntaxKind.KNamespace:
return "namespace";
case SoalSyntaxKind.KEnum:
return "enum";
case SoalSyntaxKind.KException:
return "exception";
case SoalSyntaxKind.KStruct:
return "struct";
case SoalSyntaxKind.KInterface:
return "interface";
case SoalSyntaxKind.KThrows:
return "throws";
case SoalSyntaxKind.KOneway:
return "oneway";
case SoalSyntaxKind.KReturn:
return "return";
case SoalSyntaxKind.KBinding:
return "binding";
case SoalSyntaxKind.KTransport:
return "transport";
case SoalSyntaxKind.KEncoding:
return "encoding";
case SoalSyntaxKind.KProtocol:
return "protocol";
case SoalSyntaxKind.KEndpoint:
return "endpoint";
case SoalSyntaxKind.KAddress:
return "address";
case SoalSyntaxKind.KDatabase:
return "database";
case SoalSyntaxKind.KEntity:
return "entity";
case SoalSyntaxKind.KAbstract:
return "abstract";
case SoalSyntaxKind.KComponent:
return "component";
case SoalSyntaxKind.KComposite:
return "composite";
case SoalSyntaxKind.KReference:
return "reference";
case SoalSyntaxKind.KService:
return "service";
case SoalSyntaxKind.KWire:
return "wire";
case SoalSyntaxKind.KTo:
return "to";
case SoalSyntaxKind.KImplementation:
return "implementation";
case SoalSyntaxKind.KLanguage:
return "language";
case SoalSyntaxKind.KAssembly:
return "assembly";
case SoalSyntaxKind.KDeployment:
return "deployment";
case SoalSyntaxKind.KEnvironment:
return "environment";
case SoalSyntaxKind.KRuntime:
return "runtime";
case SoalSyntaxKind.KNull:
return "null";
case SoalSyntaxKind.KTrue:
return "true";
case SoalSyntaxKind.KFalse:
return "false";
case SoalSyntaxKind.KObject:
return "object";
case SoalSyntaxKind.KString:
return "string";
case SoalSyntaxKind.KInt:
return "int";
case SoalSyntaxKind.KLong:
return "long";
case SoalSyntaxKind.KFloat:
return "float";
case SoalSyntaxKind.KDouble:
return "double";
case SoalSyntaxKind.KByte:
return "byte";
case SoalSyntaxKind.KBool:
return "bool";
case SoalSyntaxKind.KAny:
return "any";
case SoalSyntaxKind.KTypeof:
return "typeof";
case SoalSyntaxKind.KVoid:
return "void";
case SoalSyntaxKind.TSemicolon:
return ";";
case SoalSyntaxKind.TColon:
return ":";
case SoalSyntaxKind.TDot:
return ".";
case SoalSyntaxKind.TComma:
return ",";
case SoalSyntaxKind.TAssign:
return "=";
case SoalSyntaxKind.TOpenParen:
return "(";
case SoalSyntaxKind.TCloseParen:
return ")";
case SoalSyntaxKind.TOpenBracket:
return "[";
case SoalSyntaxKind.TCloseBracket:
return "]";
case SoalSyntaxKind.TOpenBrace:
return "{";
case SoalSyntaxKind.TCloseBrace:
return "}";
case SoalSyntaxKind.TLessThan:
return "<";
case SoalSyntaxKind.TGreaterThan:
return ">";
case SoalSyntaxKind.TQuestion:
return "?";
case SoalSyntaxKind.TQuestionQuestion:
return "??";
case SoalSyntaxKind.TAmpersand:
return "&";
case SoalSyntaxKind.THat:
return "^";
case SoalSyntaxKind.TBar:
return "|";
case SoalSyntaxKind.TAndAlso:
return "&&";
case SoalSyntaxKind.TOrElse:
return "||";
case SoalSyntaxKind.TPlusPlus:
return "++";
case SoalSyntaxKind.TMinusMinus:
return "--";
case SoalSyntaxKind.TPlus:
return "+";
case SoalSyntaxKind.TMinus:
return "-";
case SoalSyntaxKind.TTilde:
return "~";
case SoalSyntaxKind.TExclamation:
return "!";
case SoalSyntaxKind.TSlash:
return "/";
case SoalSyntaxKind.TPercent:
return "%";
case SoalSyntaxKind.TLessThanOrEqual:
return "<=";
case SoalSyntaxKind.TGreaterThanOrEqual:
return ">=";
case SoalSyntaxKind.TEqual:
return "==";
case SoalSyntaxKind.TNotEqual:
return "!=";
case SoalSyntaxKind.TAsteriskAssign:
return "*=";
case SoalSyntaxKind.TSlashAssign:
return "/=";
case SoalSyntaxKind.TPercentAssign:
return "%=";
case SoalSyntaxKind.TPlusAssign:
return "+=";
case SoalSyntaxKind.TMinusAssign:
return "-=";
case SoalSyntaxKind.TLeftShiftAssign:
return "<<=";
case SoalSyntaxKind.TRightShiftAssign:
return ">>=";
case SoalSyntaxKind.TAmpersandAssign:
return "&=";
case SoalSyntaxKind.THatAssign:
return "^=";
case SoalSyntaxKind.TBarAssign:
return "|=";
case SoalSyntaxKind.IDate:
return "Date";
case SoalSyntaxKind.ITime:
return "Time";
case SoalSyntaxKind.IDateTime:
return "DateTime";
case SoalSyntaxKind.ITimeSpan:
return "TimeSpan";
case SoalSyntaxKind.IVersion:
return "Version";
case SoalSyntaxKind.IStyle:
return "Style";
case SoalSyntaxKind.IMTOM:
return "MTOM";
case SoalSyntaxKind.ISSL:
return "SSL";
case SoalSyntaxKind.IHTTP:
return "HTTP";
case SoalSyntaxKind.IREST:
return "REST";
case SoalSyntaxKind.IWebSocket:
return "WebSocket";
case SoalSyntaxKind.ISOAP:
return "SOAP";
case SoalSyntaxKind.IXML:
return "XML";
case SoalSyntaxKind.IJSON:
return "JSON";
case SoalSyntaxKind.IClientAuthentication:
return "ClientAuthentication";
case SoalSyntaxKind.IWsAddressing:
return "WsAddressing";
case SoalSyntaxKind.DoubleQuoteVerbatimStringLiteralStart:
return "@\"";
case SoalSyntaxKind.SingleQuoteVerbatimStringLiteralStart:
return "@\'";
case SoalSyntaxKind.LCommentStart:
return "/*";
case SoalSyntaxKind.LDoubleQuoteVerbatimString:
return "\"";
case SoalSyntaxKind.LSingleQuoteVerbatimString:
return "\'";
default:
return string.Empty;
}
}
public SoalSyntaxKind GetKind(string text)
{
switch (text)
{
case "namespace":
return SoalSyntaxKind.KNamespace;
case "enum":
return SoalSyntaxKind.KEnum;
case "exception":
return SoalSyntaxKind.KException;
case "struct":
return SoalSyntaxKind.KStruct;
case "interface":
return SoalSyntaxKind.KInterface;
case "throws":
return SoalSyntaxKind.KThrows;
case "oneway":
return SoalSyntaxKind.KOneway;
case "return":
return SoalSyntaxKind.KReturn;
case "binding":
return SoalSyntaxKind.KBinding;
case "transport":
return SoalSyntaxKind.KTransport;
case "encoding":
return SoalSyntaxKind.KEncoding;
case "protocol":
return SoalSyntaxKind.KProtocol;
case "endpoint":
return SoalSyntaxKind.KEndpoint;
case "address":
return SoalSyntaxKind.KAddress;
case "database":
return SoalSyntaxKind.KDatabase;
case "entity":
return SoalSyntaxKind.KEntity;
case "abstract":
return SoalSyntaxKind.KAbstract;
case "component":
return SoalSyntaxKind.KComponent;
case "composite":
return SoalSyntaxKind.KComposite;
case "reference":
return SoalSyntaxKind.KReference;
case "service":
return SoalSyntaxKind.KService;
case "wire":
return SoalSyntaxKind.KWire;
case "to":
return SoalSyntaxKind.KTo;
case "implementation":
return SoalSyntaxKind.KImplementation;
case "language":
return SoalSyntaxKind.KLanguage;
case "assembly":
return SoalSyntaxKind.KAssembly;
case "deployment":
return SoalSyntaxKind.KDeployment;
case "environment":
return SoalSyntaxKind.KEnvironment;
case "runtime":
return SoalSyntaxKind.KRuntime;
case "null":
return SoalSyntaxKind.KNull;
case "true":
return SoalSyntaxKind.KTrue;
case "false":
return SoalSyntaxKind.KFalse;
case "object":
return SoalSyntaxKind.KObject;
case "string":
return SoalSyntaxKind.KString;
case "int":
return SoalSyntaxKind.KInt;
case "long":
return SoalSyntaxKind.KLong;
case "float":
return SoalSyntaxKind.KFloat;
case "double":
return SoalSyntaxKind.KDouble;
case "byte":
return SoalSyntaxKind.KByte;
case "bool":
return SoalSyntaxKind.KBool;
case "any":
return SoalSyntaxKind.KAny;
case "typeof":
return SoalSyntaxKind.KTypeof;
case "void":
return SoalSyntaxKind.KVoid;
case ";":
return SoalSyntaxKind.TSemicolon;
case ":":
return SoalSyntaxKind.TColon;
case ".":
return SoalSyntaxKind.TDot;
case ",":
return SoalSyntaxKind.TComma;
case "=":
return SoalSyntaxKind.TAssign;
case "(":
return SoalSyntaxKind.TOpenParen;
case ")":
return SoalSyntaxKind.TCloseParen;
case "[":
return SoalSyntaxKind.TOpenBracket;
case "]":
return SoalSyntaxKind.TCloseBracket;
case "{":
return SoalSyntaxKind.TOpenBrace;
case "}":
return SoalSyntaxKind.TCloseBrace;
case "<":
return SoalSyntaxKind.TLessThan;
case ">":
return SoalSyntaxKind.TGreaterThan;
case "?":
return SoalSyntaxKind.TQuestion;
case "??":
return SoalSyntaxKind.TQuestionQuestion;
case "&":
return SoalSyntaxKind.TAmpersand;
case "^":
return SoalSyntaxKind.THat;
case "|":
return SoalSyntaxKind.TBar;
case "&&":
return SoalSyntaxKind.TAndAlso;
case "||":
return SoalSyntaxKind.TOrElse;
case "++":
return SoalSyntaxKind.TPlusPlus;
case "--":
return SoalSyntaxKind.TMinusMinus;
case "+":
return SoalSyntaxKind.TPlus;
case "-":
return SoalSyntaxKind.TMinus;
case "~":
return SoalSyntaxKind.TTilde;
case "!":
return SoalSyntaxKind.TExclamation;
case "/":
return SoalSyntaxKind.TSlash;
case "%":
return SoalSyntaxKind.TPercent;
case "<=":
return SoalSyntaxKind.TLessThanOrEqual;
case ">=":
return SoalSyntaxKind.TGreaterThanOrEqual;
case "==":
return SoalSyntaxKind.TEqual;
case "!=":
return SoalSyntaxKind.TNotEqual;
case "*=":
return SoalSyntaxKind.TAsteriskAssign;
case "/=":
return SoalSyntaxKind.TSlashAssign;
case "%=":
return SoalSyntaxKind.TPercentAssign;
case "+=":
return SoalSyntaxKind.TPlusAssign;
case "-=":
return SoalSyntaxKind.TMinusAssign;
case "<<=":
return SoalSyntaxKind.TLeftShiftAssign;
case ">>=":
return SoalSyntaxKind.TRightShiftAssign;
case "&=":
return SoalSyntaxKind.TAmpersandAssign;
case "^=":
return SoalSyntaxKind.THatAssign;
case "|=":
return SoalSyntaxKind.TBarAssign;
case "Date":
return SoalSyntaxKind.IDate;
case "Time":
return SoalSyntaxKind.ITime;
case "DateTime":
return SoalSyntaxKind.IDateTime;
case "TimeSpan":
return SoalSyntaxKind.ITimeSpan;
case "Version":
return SoalSyntaxKind.IVersion;
case "Style":
return SoalSyntaxKind.IStyle;
case "MTOM":
return SoalSyntaxKind.IMTOM;
case "SSL":
return SoalSyntaxKind.ISSL;
case "HTTP":
return SoalSyntaxKind.IHTTP;
case "REST":
return SoalSyntaxKind.IREST;
case "WebSocket":
return SoalSyntaxKind.IWebSocket;
case "SOAP":
return SoalSyntaxKind.ISOAP;
case "XML":
return SoalSyntaxKind.IXML;
case "JSON":
return SoalSyntaxKind.IJSON;
case "ClientAuthentication":
return SoalSyntaxKind.IClientAuthentication;
case "WsAddressing":
return SoalSyntaxKind.IWsAddressing;
case "@\"":
return SoalSyntaxKind.DoubleQuoteVerbatimStringLiteralStart;
case "@\'":
return SoalSyntaxKind.SingleQuoteVerbatimStringLiteralStart;
case "/*":
return SoalSyntaxKind.LCommentStart;
case "\"":
return SoalSyntaxKind.LDoubleQuoteVerbatimString;
case "\'":
return SoalSyntaxKind.LSingleQuoteVerbatimString;
default:
return SoalSyntaxKind.None;
}
}
public override string GetKindText(int rawKind)
{
return this.GetKindText((SoalSyntaxKind)rawKind);
}
public string GetKindText(SoalSyntaxKind kind)
{
return kind.ToString();
}
public override bool IsTriviaWithEndOfLine(int rawKind)
{
return this.IsTriviaWithEndOfLine((SoalSyntaxKind)rawKind);
}
public bool IsTriviaWithEndOfLine(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.LCrLf:
return true;
case SoalSyntaxKind.LLineEnd:
return true;
default:
return false;
}
}
public bool IsKeyword(int rawKind)
{
return this.IsKeyword((SoalSyntaxKind)rawKind);
}
public bool IsKeyword(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.KNamespace:
case SoalSyntaxKind.KEnum:
case SoalSyntaxKind.KException:
case SoalSyntaxKind.KStruct:
case SoalSyntaxKind.KInterface:
case SoalSyntaxKind.KThrows:
case SoalSyntaxKind.KOneway:
case SoalSyntaxKind.KReturn:
case SoalSyntaxKind.KBinding:
case SoalSyntaxKind.KTransport:
case SoalSyntaxKind.KEncoding:
case SoalSyntaxKind.KProtocol:
case SoalSyntaxKind.KEndpoint:
case SoalSyntaxKind.KAddress:
case SoalSyntaxKind.KDatabase:
case SoalSyntaxKind.KEntity:
case SoalSyntaxKind.KAbstract:
case SoalSyntaxKind.KComponent:
case SoalSyntaxKind.KComposite:
case SoalSyntaxKind.KReference:
case SoalSyntaxKind.KService:
case SoalSyntaxKind.KWire:
case SoalSyntaxKind.KTo:
case SoalSyntaxKind.KImplementation:
case SoalSyntaxKind.KLanguage:
case SoalSyntaxKind.KAssembly:
case SoalSyntaxKind.KDeployment:
case SoalSyntaxKind.KEnvironment:
case SoalSyntaxKind.KRuntime:
case SoalSyntaxKind.KNull:
case SoalSyntaxKind.KTrue:
case SoalSyntaxKind.KFalse:
case SoalSyntaxKind.KObject:
case SoalSyntaxKind.KString:
case SoalSyntaxKind.KInt:
case SoalSyntaxKind.KLong:
case SoalSyntaxKind.KFloat:
case SoalSyntaxKind.KDouble:
case SoalSyntaxKind.KByte:
case SoalSyntaxKind.KBool:
case SoalSyntaxKind.KAny:
case SoalSyntaxKind.KTypeof:
case SoalSyntaxKind.KVoid:
return true;
default:
return false;
}
}
public bool IsIdentifier(int rawKind)
{
return this.IsIdentifier((SoalSyntaxKind)rawKind);
}
public bool IsIdentifier(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.IdentifierNormal:
return true;
case SoalSyntaxKind.IdentifierVerbatim:
return true;
default:
return false;
}
}
public bool IsNumber(int rawKind)
{
return this.IsNumber((SoalSyntaxKind)rawKind);
}
public bool IsNumber(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.LInteger:
return true;
case SoalSyntaxKind.LDecimal:
return true;
case SoalSyntaxKind.LScientific:
return true;
case SoalSyntaxKind.LDateTimeOffset:
return true;
case SoalSyntaxKind.LDateTime:
return true;
case SoalSyntaxKind.LDate:
return true;
case SoalSyntaxKind.LTime:
return true;
default:
return false;
}
}
public bool IsString(int rawKind)
{
return this.IsString((SoalSyntaxKind)rawKind);
}
public bool IsString(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.LRegularString:
return true;
case SoalSyntaxKind.LGuid:
return true;
case SoalSyntaxKind.LDoubleQuoteVerbatimString:
return true;
case SoalSyntaxKind.LSingleQuoteVerbatimString:
return true;
default:
return false;
}
}
public bool IsWhitespace(int rawKind)
{
return this.IsWhitespace((SoalSyntaxKind)rawKind);
}
public bool IsWhitespace(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.LUtf8Bom:
return true;
case SoalSyntaxKind.LWhiteSpace:
return true;
case SoalSyntaxKind.LCrLf:
return true;
case SoalSyntaxKind.LLineEnd:
return true;
default:
return false;
}
}
public bool IsComment(int rawKind)
{
return this.IsComment((SoalSyntaxKind)rawKind);
}
public bool IsComment(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.LSingleLineComment:
return true;
case SoalSyntaxKind.LMultiLineComment:
return true;
default:
return false;
}
}
public SoalTokenKind GetTokenKind(int rawKind)
{
return this.GetTokenKind((SoalSyntaxKind)rawKind);
}
public SoalTokenKind GetTokenKind(SoalSyntaxKind kind)
{
switch(kind)
{
case SoalSyntaxKind.KNamespace:
case SoalSyntaxKind.KEnum:
case SoalSyntaxKind.KException:
case SoalSyntaxKind.KStruct:
case SoalSyntaxKind.KInterface:
case SoalSyntaxKind.KThrows:
case SoalSyntaxKind.KOneway:
case SoalSyntaxKind.KReturn:
case SoalSyntaxKind.KBinding:
case SoalSyntaxKind.KTransport:
case SoalSyntaxKind.KEncoding:
case SoalSyntaxKind.KProtocol:
case SoalSyntaxKind.KEndpoint:
case SoalSyntaxKind.KAddress:
case SoalSyntaxKind.KDatabase:
case SoalSyntaxKind.KEntity:
case SoalSyntaxKind.KAbstract:
case SoalSyntaxKind.KComponent:
case SoalSyntaxKind.KComposite:
case SoalSyntaxKind.KReference:
case SoalSyntaxKind.KService:
case SoalSyntaxKind.KWire:
case SoalSyntaxKind.KTo:
case SoalSyntaxKind.KImplementation:
case SoalSyntaxKind.KLanguage:
case SoalSyntaxKind.KAssembly:
case SoalSyntaxKind.KDeployment:
case SoalSyntaxKind.KEnvironment:
case SoalSyntaxKind.KRuntime:
case SoalSyntaxKind.KNull:
case SoalSyntaxKind.KTrue:
case SoalSyntaxKind.KFalse:
case SoalSyntaxKind.KObject:
case SoalSyntaxKind.KString:
case SoalSyntaxKind.KInt:
case SoalSyntaxKind.KLong:
case SoalSyntaxKind.KFloat:
case SoalSyntaxKind.KDouble:
case SoalSyntaxKind.KByte:
case SoalSyntaxKind.KBool:
case SoalSyntaxKind.KAny:
case SoalSyntaxKind.KTypeof:
case SoalSyntaxKind.KVoid:
return SoalTokenKind.Keyword;
case SoalSyntaxKind.IdentifierNormal:
return SoalTokenKind.Identifier;
case SoalSyntaxKind.IdentifierVerbatim:
return SoalTokenKind.Identifier;
case SoalSyntaxKind.LInteger:
return SoalTokenKind.Number;
case SoalSyntaxKind.LDecimal:
return SoalTokenKind.Number;
case SoalSyntaxKind.LScientific:
return SoalTokenKind.Number;
case SoalSyntaxKind.LDateTimeOffset:
return SoalTokenKind.Number;
case SoalSyntaxKind.LDateTime:
return SoalTokenKind.Number;
case SoalSyntaxKind.LDate:
return SoalTokenKind.Number;
case SoalSyntaxKind.LTime:
return SoalTokenKind.Number;
case SoalSyntaxKind.LRegularString:
return SoalTokenKind.String;
case SoalSyntaxKind.LGuid:
return SoalTokenKind.String;
case SoalSyntaxKind.LUtf8Bom:
return SoalTokenKind.Whitespace;
case SoalSyntaxKind.LWhiteSpace:
return SoalTokenKind.Whitespace;
case SoalSyntaxKind.LCrLf:
return SoalTokenKind.Whitespace;
case SoalSyntaxKind.LLineEnd:
return SoalTokenKind.Whitespace;
case SoalSyntaxKind.LSingleLineComment:
return SoalTokenKind.Comment;
case SoalSyntaxKind.LMultiLineComment:
return SoalTokenKind.Comment;
case SoalSyntaxKind.LDoubleQuoteVerbatimString:
return SoalTokenKind.String;
case SoalSyntaxKind.LSingleQuoteVerbatimString:
return SoalTokenKind.String;
default:
return SoalTokenKind.None;
}
}
public SoalTokenKind GetModeTokenKind(int rawKind)
{
return this.GetModeTokenKind((SoalLexerMode)rawKind);
}
public SoalTokenKind GetModeTokenKind(SoalLexerMode kind)
{
switch(kind)
{
case SoalLexerMode.MULTILINE_COMMENT:
return SoalTokenKind.Comment;
case SoalLexerMode.DOUBLEQUOTE_VERBATIM_STRING:
return SoalTokenKind.String;
case SoalLexerMode.SINGLEQUOTE_VERBATIM_STRING:
return SoalTokenKind.String;
default:
return SoalTokenKind.None;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Management.Automation;
using EnvDTE;
using NuGet.VisualStudio;
namespace NuGet.PowerShell.Commands
{
/// <summary>
/// This is the base class for all NuGet cmdlets.
/// </summary>
public abstract class NuGetBaseCommand : PSCmdlet, ILogger, IErrorHandler
{
// User Agent. Do NOT localize
private const string PSCommandsUserAgentClient = "NuGet Package Manager Console";
private Lazy<string> _psCommandsUserAgent = new Lazy<string>(() => HttpUtility.CreateUserAgentString(PSCommandsUserAgentClient));
private IVsPackageManager _packageManager;
private readonly ISolutionManager _solutionManager;
private readonly IVsPackageManagerFactory _vsPackageManagerFactory;
private ProgressRecordCollection _progressRecordCache;
private readonly IHttpClientEvents _httpClientEvents;
protected NuGetBaseCommand(ISolutionManager solutionManager, IVsPackageManagerFactory vsPackageManagerFactory, IHttpClientEvents httpClientEvents)
{
_solutionManager = solutionManager;
_vsPackageManagerFactory = vsPackageManagerFactory;
_httpClientEvents = httpClientEvents;
}
private ProgressRecordCollection ProgressRecordCache
{
get
{
if (_progressRecordCache == null)
{
_progressRecordCache = new ProgressRecordCollection();
}
return _progressRecordCache;
}
}
protected IErrorHandler ErrorHandler
{
get
{
return this;
}
}
protected ISolutionManager SolutionManager
{
get
{
return _solutionManager;
}
}
protected IVsPackageManagerFactory PackageManagerFactory
{
get
{
return _vsPackageManagerFactory;
}
}
internal bool IsSyncMode
{
get
{
if (Host == null || Host.PrivateData == null)
{
return false;
}
PSObject privateData = Host.PrivateData;
var syncModeProp = privateData.Properties["IsSyncMode"];
return syncModeProp != null && (bool)syncModeProp.Value;
}
}
/// <summary>
/// Gets an instance of VSPackageManager to be used throughout the execution of this command.
/// </summary>
/// <value>The package manager.</value>
protected internal IVsPackageManager PackageManager
{
get
{
if (_packageManager == null)
{
_packageManager = CreatePackageManager();
}
return _packageManager;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We want to display friendly message to the console.")]
protected sealed override void ProcessRecord()
{
try
{
ProcessRecordCore();
}
catch (Exception ex)
{
// unhandled exceptions should be terminating
ErrorHandler.HandleException(ex, terminating: true);
}
}
/// <summary>
/// Derived classes must implement this method instead of ProcessRecord(), which is sealed by NuGetBaseCommand.
/// </summary>
protected abstract void ProcessRecordCore();
void ILogger.Log(MessageLevel level, string message, params object[] args)
{
string formattedMessage = String.Format(CultureInfo.CurrentCulture, message, args);
Log(level, formattedMessage);
}
internal void Execute()
{
BeginProcessing();
ProcessRecord();
EndProcessing();
}
protected override void BeginProcessing()
{
if (_httpClientEvents != null)
{
_httpClientEvents.SendingRequest += OnSendingRequest;
}
}
protected override void EndProcessing()
{
if (_httpClientEvents != null)
{
_httpClientEvents.SendingRequest -= OnSendingRequest;
}
}
protected void SubscribeToProgressEvents()
{
if (!IsSyncMode && _httpClientEvents != null)
{
_httpClientEvents.ProgressAvailable += OnProgressAvailable;
}
}
protected void UnsubscribeFromProgressEvents()
{
if (_httpClientEvents != null)
{
_httpClientEvents.ProgressAvailable -= OnProgressAvailable;
}
}
protected virtual void Log(MessageLevel level, string formattedMessage)
{
switch (level)
{
case MessageLevel.Debug:
WriteVerbose(formattedMessage);
break;
case MessageLevel.Warning:
WriteWarning(formattedMessage);
break;
case MessageLevel.Info:
WriteLine(formattedMessage);
break;
case MessageLevel.Error:
WriteError(formattedMessage);
break;
}
}
protected virtual IVsPackageManager CreatePackageManager()
{
if (!SolutionManager.IsSolutionOpen)
{
return null;
}
return PackageManagerFactory.CreatePackageManager();
}
/// <summary>
/// Return all projects in the solution matching the provided names. Wildcards are supported.
/// This method will automatically generate error records for non-wildcarded project names that
/// are not found.
/// </summary>
/// <param name="projectNames">An array of project names that may or may not include wildcards.</param>
/// <returns>Projects matching the project name(s) provided.</returns>
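/// <example>
/// A minimal usage sketch from a derived cmdlet; the project names below are hypothetical:
/// <code>
/// foreach (Project project in GetProjectsByName(new[] { "MvcApp.*", "ClassLibrary1" }))
/// {
///     WriteObject(project.Name);
/// }
/// </code>
/// </example>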
protected IEnumerable<Project> GetProjectsByName(string[] projectNames)
{
var allValidProjectNames = GetAllValidProjectNames().ToList();
foreach (string projectName in projectNames)
{
// if ctrl+c hit, leave immediately
if (Stopping)
{
break;
}
// Treat every name as a wildcard; results in simpler code
var pattern = new WildcardPattern(projectName, WildcardOptions.IgnoreCase);
var matches = from s in allValidProjectNames
where pattern.IsMatch(s)
select _solutionManager.GetProject(s);
int count = 0;
foreach (var project in matches)
{
count++;
yield return project;
}
// We only emit non-terminating error record if a non-wildcarded name was not found.
// This is consistent with built-in cmdlets that support wildcarded search.
// A search with a wildcard that returns nothing should not be considered an error.
if ((count == 0) && !WildcardPattern.ContainsWildcardCharacters(projectName))
{
ErrorHandler.WriteProjectNotFoundError(projectName, terminating: false);
}
}
}
/// <summary>
/// Return all possibly valid project names in the current solution. This includes all
/// unique names and safe names.
/// </summary>
/// <returns>All unique names and safe names of the projects in the current solution.</returns>
private IEnumerable<string> GetAllValidProjectNames()
{
var safeNames = _solutionManager.GetProjects().Select(p => _solutionManager.GetProjectSafeName(p));
var uniqueNames = _solutionManager.GetProjects().Select(p => p.GetCustomUniqueName());
return uniqueNames.Concat(safeNames).Distinct();
}
/// <summary>
/// Translate a PSPath into a System.IO.* friendly Win32 path.
/// Does not resolve/glob wildcards.
/// </summary>
/// <param name="psPath">The PowerShell PSPath to translate which may reference PSDrives or have provider-qualified paths which are syntactically invalid for .NET APIs.</param>
/// <param name="path">The translated PSPath in a format understandable to .NET APIs.</param>
/// <param name="exists">Returns null if not tested, or a bool representing path existence.</param>
/// <param name="errorMessage">If translation failed, contains the reason.</param>
/// <returns>True if successfully translated, false if not.</returns>
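/// <example>
/// A minimal sketch; the PSDrive-qualified path shown is a hypothetical value:
/// <code>
/// string fullPath;
/// bool? exists;
/// string error;
/// if (TryTranslatePSPath(@"MyDrive:\packages", out fullPath, out exists, out error))
/// {
///     // fullPath is now a Win32-style path usable with System.IO APIs (when exists == true).
/// }
/// </code>
/// </example>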
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", MessageId = "1#", Justification = "Following TryParse pattern in BCL", Target = "path")]
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", MessageId = "2#", Justification = "Following TryParse pattern in BCL", Target = "exists")]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "ps", Justification = "ps is a common powershell prefix")]
protected bool TryTranslatePSPath(string psPath, out string path, out bool? exists, out string errorMessage)
{
return PSPathUtility.TryTranslatePSPath(SessionState, psPath, out path, out exists, out errorMessage);
}
/// <summary>
/// Create a package repository from the source by trying to resolve relative paths.
/// </summary>
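/// <example>
/// A minimal usage sketch; the factory, provider, and relative source shown are assumptions:
/// <code>
/// IPackageRepository repository = CreateRepositoryFromSource(repositoryFactory, sourceProvider, @"..\LocalPackages");
/// </code>
/// </example>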
protected IPackageRepository CreateRepositoryFromSource(IPackageRepositoryFactory repositoryFactory, IPackageSourceProvider sourceProvider, string source)
{
if (source == null)
{
throw new ArgumentNullException("source");
}
UriFormatException uriException = null;
string resolvedSource = sourceProvider.ResolveSource(source);
try
{
IPackageRepository repository = repositoryFactory.CreateRepository(resolvedSource);
if (repository != null)
{
return repository;
}
}
catch (UriFormatException ex)
{
// if the source is a relative path, it can result in an invalid URI exception
uriException = ex;
}
Uri uri;
// if it's not an absolute path, treat it as a relative path
if (Uri.TryCreate(source, UriKind.Relative, out uri))
{
string outputPath;
bool? exists;
string errorMessage;
// translate relative path to absolute path
if (TryTranslatePSPath(source, out outputPath, out exists, out errorMessage) && exists == true)
{
return repositoryFactory.CreateRepository(outputPath);
}
else
{
return repositoryFactory.CreateRepository(source);
}
}
else
{
// if this is not a valid relative path either,
// we rethrow the UriFormatException that we caught earlier.
if (uriException != null)
{
throw uriException;
}
}
return null;
}
[SuppressMessage("Microsoft.Usage", "CA2201:DoNotRaiseReservedExceptionTypes", Justification = "This exception is passed to PowerShell. We really don't care about the type of exception here.")]
protected void WriteError(string message)
{
if (!String.IsNullOrEmpty(message))
{
WriteError(new Exception(message));
}
}
protected void WriteError(Exception exception)
{
ErrorHandler.HandleException(exception, terminating: false);
}
void IErrorHandler.WriteProjectNotFoundError(string projectName, bool terminating)
{
var notFoundException =
new ItemNotFoundException(
String.Format(
CultureInfo.CurrentCulture,
Resources.Cmdlet_ProjectNotFound, projectName));
ErrorHandler.HandleError(
new ErrorRecord(
notFoundException,
NuGetErrorId.ProjectNotFound, // This is your locale-agnostic error id.
ErrorCategory.ObjectNotFound,
projectName),
terminating: terminating);
}
void IErrorHandler.ThrowSolutionNotOpenTerminatingError()
{
ErrorHandler.HandleException(
new InvalidOperationException(Resources.Cmdlet_NoSolution),
terminating: true,
errorId: NuGetErrorId.NoActiveSolution,
category: ErrorCategory.InvalidOperation);
}
void IErrorHandler.ThrowNoCompatibleProjectsTerminatingError()
{
ErrorHandler.HandleException(
new InvalidOperationException(Resources.Cmdlet_NoCompatibleProjects),
terminating: true,
errorId: NuGetErrorId.NoCompatibleProjects,
category: ErrorCategory.InvalidOperation);
}
void IErrorHandler.HandleError(ErrorRecord errorRecord, bool terminating)
{
if (terminating)
{
ThrowTerminatingError(errorRecord);
}
else
{
WriteError(errorRecord);
}
}
void IErrorHandler.HandleException(Exception exception, bool terminating,
string errorId, ErrorCategory category, object target)
{
exception = ExceptionUtility.Unwrap(exception);
var error = new ErrorRecord(exception, errorId, category, target);
ErrorHandler.HandleError(error, terminating: terminating);
}
protected void WriteLine(string message = null)
{
if (Host == null)
{
// Host is null when running unit tests. Simply return in this case
return;
}
if (message == null)
{
Host.UI.WriteLine();
}
else
{
Host.UI.WriteLine(message);
}
}
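/// <summary>
/// Write progress for an activity. A ProgressRecord is cached per activity id so repeated calls
/// update a single progress bar; nothing is written in synchronous mode.
/// </summary>
/// <example>
/// Illustrative only; the operation text is a hypothetical value:
/// <code>
/// WriteProgress(ProgressActivityIds.DownloadPackageId, "Downloading 'jQuery 1.9.1'", 40);
/// WriteProgress(ProgressActivityIds.DownloadPackageId, "Downloading 'jQuery 1.9.1'", 80);
/// </code>
/// </example>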
protected void WriteProgress(int activityId, string operation, int percentComplete)
{
if (IsSyncMode)
{
// don't bother to show progress if we are in synchronous mode
return;
}
ProgressRecord progressRecord;
// retrieve the ProgressRecord object for this particular activity id from the cache.
if (ProgressRecordCache.Contains(activityId))
{
progressRecord = ProgressRecordCache[activityId];
}
else
{
progressRecord = new ProgressRecord(activityId, operation, operation);
ProgressRecordCache.Add(progressRecord);
}
progressRecord.CurrentOperation = operation;
progressRecord.PercentComplete = percentComplete;
WriteProgress(progressRecord);
}
private void OnProgressAvailable(object sender, ProgressEventArgs e)
{
WriteProgress(ProgressActivityIds.DownloadPackageId, e.Operation, e.PercentComplete);
}
private class ProgressRecordCollection : KeyedCollection<int, ProgressRecord>
{
protected override int GetKeyForItem(ProgressRecord item)
{
return item.ActivityId;
}
}
private void OnSendingRequest(object sender, WebRequestEventArgs e)
{
HttpUtility.SetUserAgent(e.Request, _psCommandsUserAgent.Value);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Controls;
//http://treemaps.codeplex.com/
namespace Magic.Controls
{
public class TreeMapsPanel : Panel
{
#region fields
private Rect _emptyArea;
private double _weightSum = 0;
private List<WeightUIElement> _items = new List<WeightUIElement>();
#endregion
#region dependency properties
public static readonly DependencyProperty WeightProperty =
DependencyProperty.RegisterAttached("Weight", typeof(double), typeof(TreeMapsPanel),
new FrameworkPropertyMetadata(1.0, FrameworkPropertyMetadataOptions.AffectsParentArrange | FrameworkPropertyMetadataOptions.AffectsParentMeasure));
#endregion
#region enum
protected enum RowOrientation
{
Horizontal,
Vertical
}
#endregion
#region properties
public static double GetWeight(DependencyObject uiElement)
{
if (uiElement == null)
return 0;
else
return (double)uiElement.GetValue(TreeMapsPanel.WeightProperty);
}
public static void SetWeight(DependencyObject uiElement, double value)
{
if (uiElement != null)
uiElement.SetValue(TreeMapsPanel.WeightProperty, value);
}
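// Example usage (a minimal sketch; the panel and buttons are hypothetical):
//
// var panel = new TreeMapsPanel();
// var large = new Button { Content = "Large" };
// var small = new Button { Content = "Small" };
// TreeMapsPanel.SetWeight(large, 3.0);
// TreeMapsPanel.SetWeight(small, 1.0);
// panel.Children.Add(large);
// panel.Children.Add(small);
// // On the next layout pass "Large" receives three quarters of the panel area.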
protected Rect EmptyArea
{
get { return _emptyArea; }
set { _emptyArea = value; }
}
protected List<WeightUIElement> ManagedItems
{
get { return _items; }
}
#endregion
#region protected methods
protected override Size ArrangeOverride(Size arrangeSize)
{
foreach (WeightUIElement child in this.ManagedItems)
child.UIElement.Arrange(new Rect(child.ComputedLocation, child.ComputedSize));
return arrangeSize;
}
protected override Size MeasureOverride(Size constraint)
{
this.EmptyArea = new Rect(0, 0, constraint.Width, constraint.Height);
this.PrepareItems();
double area = this.EmptyArea.Width * this.EmptyArea.Height;
foreach (WeightUIElement item in this.ManagedItems)
item.RealArea = area * item.Weight / _weightSum;
this.ComputeBounds();
foreach (WeightUIElement child in this.ManagedItems)
{
if (this.IsValidSize(child.ComputedSize))
child.UIElement.Measure(child.ComputedSize);
else
child.UIElement.Measure(new Size(0, 0));
}
return constraint;
}
protected virtual void ComputeBounds()
{
this.ComputeTreeMaps(this.ManagedItems);
}
protected double GetShortestSide()
{
return Math.Min(this.EmptyArea.Width, this.EmptyArea.Height);
}
protected RowOrientation GetOrientation()
{
return (this.EmptyArea.Width > this.EmptyArea.Height ? RowOrientation.Horizontal : RowOrientation.Vertical);
}
protected virtual Rect GetRectangle(RowOrientation orientation, WeightUIElement item, double x, double y, double width, double height)
{
if (orientation == RowOrientation.Horizontal)
return new Rect(x, y, item.RealArea / height, height);
else
return new Rect(x, y, width, item.RealArea / width);
}
protected virtual void ComputeNextPosition(RowOrientation orientation, ref double xPos, ref double yPos, double width, double height)
{
if (orientation == RowOrientation.Horizontal)
xPos += width;
else
yPos += height;
}
protected void ComputeTreeMaps(List<WeightUIElement> items)
{
RowOrientation orientation = this.GetOrientation();
double areaSum = 0;
foreach (WeightUIElement item in items)
areaSum += item.RealArea;
Rect currentRow;
if (orientation == RowOrientation.Horizontal)
{
currentRow = new Rect(_emptyArea.X, _emptyArea.Y, areaSum / _emptyArea.Height, _emptyArea.Height);
_emptyArea = new Rect(_emptyArea.X + currentRow.Width, _emptyArea.Y, Math.Max(0, _emptyArea.Width - currentRow.Width), _emptyArea.Height);
}
else
{
currentRow = new Rect(_emptyArea.X, _emptyArea.Y, _emptyArea.Width, areaSum / _emptyArea.Width);
_emptyArea = new Rect(_emptyArea.X, _emptyArea.Y + currentRow.Height, _emptyArea.Width, Math.Max(0, _emptyArea.Height - currentRow.Height));
}
double prevX = currentRow.X;
double prevY = currentRow.Y;
foreach (WeightUIElement item in items)
{
Rect rect = this.GetRectangle(orientation, item, prevX, prevY, currentRow.Width, currentRow.Height);
item.AspectRatio = rect.Width / rect.Height;
item.ComputedSize = rect.Size;
item.ComputedLocation = rect.Location;
this.ComputeNextPosition(orientation, ref prevX, ref prevY, rect.Width, rect.Height);
}
}
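// Worked example (assuming a 200x100 panel with two children of weights 3 and 1):
// the total area is 20000, so RealArea is 15000 and 5000. The panel is wider than it is
// tall, so the row is horizontal with width (15000 + 5000) / 100 = 200. The first child
// gets a 150x100 rectangle (15000 / 100) and the second a 50x100 rectangle, i.e. strips
// whose widths are proportional to their weights.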
#endregion
#region private methods
private bool IsValidSize(Size size)
{
return (!size.IsEmpty && size.Width > 0 && !double.IsNaN(size.Width) && size.Height > 0 && !double.IsNaN(size.Height));
}
private bool IsValidItem(WeightUIElement item)
{
return (item != null && !double.IsNaN(item.Weight) && Math.Round(item.Weight, 0) != 0);
}
private void PrepareItems()
{
_weightSum = 0;
this.ManagedItems.Clear();
foreach (UIElement child in this.Children)
{
WeightUIElement element = new WeightUIElement(child, TreeMapsPanel.GetWeight(child));
if (this.IsValidItem(element))
{
_weightSum += element.Weight;
this.ManagedItems.Add(element);
}
else
{
element.ComputedSize = Size.Empty;
element.ComputedLocation = new Point(0, 0);
element.UIElement.Measure(element.ComputedSize);
element.UIElement.Visibility = Visibility.Collapsed;
}
}
this.ManagedItems.Sort(WeightUIElement.CompareByValueDecreasing);
}
#endregion
#region inner classes
protected class WeightUIElement
{
#region fields
private double _weight;
private double _area;
private UIElement _element;
private Size _desiredSize;
private Point _desiredLocation;
private double _ratio;
#endregion
#region ctors
public WeightUIElement(UIElement element, double weight)
{
_element = element;
_weight = weight;
}
#endregion
#region properties
internal Size ComputedSize
{
get { return _desiredSize; }
set { _desiredSize = value; }
}
internal Point ComputedLocation
{
get { return _desiredLocation; }
set { _desiredLocation = value; }
}
public double AspectRatio
{
get { return _ratio; }
set { _ratio = value; }
}
public double Weight
{
get { return _weight; }
}
public double RealArea
{
get { return _area; }
set { _area = value; }
}
public UIElement UIElement
{
get { return _element; }
}
#endregion
#region static members
public static int CompareByValueDecreasing(WeightUIElement x, WeightUIElement y)
{
// Heavier items sort first; null items sort after non-null items.
if (x == null)
return (y == null) ? 0 : 1;
if (y == null)
return -1;
return x.Weight.CompareTo(y.Weight) * -1;
}
#endregion
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Windows;
using Caliburn.Micro;
using csGeoLayers.Content.Panoramio;
using csGeoLayers.Content.RainRadar;
using csGeoLayers.GeoRSS;
using csShared;
using csShared.FloatingElements;
using csShared.Interfaces;
namespace csGeoLayers.Plugins.ContentDirectory
{
public interface IContentDirectory
{
IPlugin Plugin { get; set; }
}
[Export(typeof (IPlugin))]
public class LayerDirectoryPlugin : PropertyChangedBase, IPlugin
{
private bool hideFromSettings;
private bool isRunning;
private MenuItem menuItem;
private IPluginScreen screen;
private ISettingsScreen settings;
public FloatingElement Element { get; set; }
#region IPlugin Members
public bool CanStop
{
get { return true; }
}
public ISettingsScreen Settings
{
get { return settings; }
set
{
settings = value;
NotifyOfPropertyChange(() => Settings);
}
}
public IPluginScreen Screen
{
get { return screen; }
set
{
screen = value;
NotifyOfPropertyChange(() => Screen);
}
}
public bool HideFromSettings
{
get { return hideFromSettings; }
set
{
hideFromSettings = value;
NotifyOfPropertyChange(() => HideFromSettings);
}
}
public int Priority
{
get { return 5; }
}
public bool IsRunning
{
get { return isRunning; }
set
{
isRunning = value;
NotifyOfPropertyChange(() => IsRunning);
}
}
public string Icon
{
get { return @"/csCommon;component/Resources/Icons/mapcontent.png"; }
}
public AppStateSettings AppState { get; set; }
public string Name
{
get { return "ContentDirectory"; }
}
public void Init()
{
//AppState.ViewDef.Content.Add(new FlightTrackerContent {Name = "Flight Tracker Netherlands"});
AppState.ViewDef.Content.Add(new PanoramioContent {Name = "Panoramio"});
AppState.ViewDef.Content.Add(new GeoRSSContent
{
Name = "Earthquakes Past Hour (M1+)",
Location =
new Uri(
"http://earthquake.usgs.gov/earthquakes/catalogs/eqs1hour-M1.xml"),
Folder = "Disaster/Earthquakes",
IconUri =
new Uri(
@"http://maps.google.com/mapfiles/kml/shapes/earthquake.png")
});
AppState.ViewDef.Content.Add(new GeoRSSContent
{
Name = "Earthquakes Past Day (M1+)",
Location =
new Uri(
"http://earthquake.usgs.gov/earthquakes/catalogs/eqs1day-M1.xml"),
Folder = "Disaster/Earthquakes",
IconUri =
new Uri(
@"http://maps.google.com/mapfiles/kml/shapes/earthquake.png")
});
AppState.ViewDef.Content.Add(new GeoRSSContent
{
Name = "Earthquakes 7 Days (M5+)",
Location =
new Uri(
"http://earthquake.usgs.gov/earthquakes/catalogs/eqs7day-M5.xml"),
Folder = "Disaster/Earthquakes",
IconUri =
new Uri(
@"http://maps.google.com/mapfiles/kml/shapes/earthquake.png")
});
AppState.ViewDef.Content.Add(new GeoRSSContent
{
Name = "Reuters News",
Location =
new Uri(
"http://ws.geonames.org/rssToGeoRSS?feedUrl=http://feeds.reuters.com/reuters/worldNews"),
Folder = "News",
IconUri =
new Uri(@"http://www.borealbirds.org/images/icon-reuters-logo.gif"),
IconSize = 32
});
AppState.ViewDef.Content.Add(new WmsContent()
{
Name="BAG Data",
Location = "http://geodata.nationaalgeoregister.nl/bagviewer/wms?",
Layers = new[] { "pand", "ligplaats", "standplaats", "verblijfsobject"}
});
AppState.ViewDef.Content.Add(new WmsContent()
{
Name="Vaarwegen",
Location = "http://www.vaarweginformatie.nl/wfswms/services?",
Layers = new[] { "Bridge", "Fairway" }
});
AppState.ViewDef.Content.Add(new KmlContent
{
Name = "Global Disaster Alert and Coordination system",
Location = new Uri("http://www.gdacs.org/xml/gdacs.kml"),
Folder = "Disaster"
});
AppState.ViewDef.Content.Add(new ArcgisOnlineContentPlugin {Name = "ArcGis"});
AppState.ViewDef.Content.Add(new FlexibleRainRadarContent
{
Name = "RainRadar EU",
Config =
new Dictionary<string, string>
{
{"interval", "15"},
{"tlLat", "-14.9515"},
{"tlLon", "41.4389"},
{"brLat", "20.4106"},
{"brLon", "59.9934"},
{
"baseUrl",
"http://134.221.210.43:8000/BuienRadarService/RainImage/eu/warped/"
}
}
});
#region old
//AppState.ViewDef.Content.Add(new AirportContent() { Name = "Airports Worldwide" });
//AppState.ViewDef.Content.Add(new GeoRSSContent() { Name = "C2000 masten", Location = new Uri("file://" + Directory.GetCurrentDirectory() + @"\Content\Data\c2000.rss"), Folder = "Disaster/Earthquakes", IconUri = new Uri(@"http://maps.google.com/mapfiles/kml/shapes/earthquake.png") });
//AppState.ViewDef.Content.Add(new KmlContent()
// {
// Name="Tno",
// Location = new Uri("http://nationaalgeoregister.nl/geonetwork/srv/nl/google.kml?uuid=f646dfb9-5bf6-eab9-042b-cab6ff2dc275&layers=M11M0561"),
// Folder = "Ondergrond"
// });
//AppState.ViewDef.Content.Add(new WmsContent()
// {
// //http://geoservices.cbs.nl/arcgis/services/BestandBodemGebruik2008/MapServer/WMSServer
// Name = "Bodemgebruik 2008",
// Location =
// "http://mesonet.agron.iastate.edu/cgi-bin/wms/nexrad/n0r.cgi",
// Folder = "Bodemgebruik",
// });
//AppState.ViewDef.Content.Add(new NO2Content());
//AppState.ViewDef.Content.Add(new FlexibleRainRadarContent()
//{
// Name = "RainRadar NL",
// Config = new Dictionary<string, string> { { "interval", "5" }, { "tlLat", "0" }, { "tlLon", "55.974" }, { "brLat", "10.856" }, { "brLon", "48.895" }, { "baseUrl", "http://134.221.210.43:8000/BuienRadarService/RainImage/nl/warped/" } }
//});
//AppState.ViewDef.Content.Add(new TwitterContent()
// {
// Name = "Twitter"
// });
#endregion
}
public void Start()
{
IsRunning = true;
menuItem = new MenuItem();
menuItem.Clicked += MenuItemClicked;
menuItem.Name = "Add Content";
AppState.MainMenuItems.Add(menuItem);
}
public void Pause()
{
IsRunning = false;
}
public void Stop()
{
menuItem.Clicked -= MenuItemClicked;
AppState.MainMenuItems.Remove(menuItem);
IsRunning = false;
}
#endregion
private void MenuItemClicked(object sender, EventArgs e)
{
object viewModel = IoC.GetInstance(typeof (IContentDirectory), null);
if (viewModel != null)
{
Element = FloatingHelpers.CreateFloatingElement("Content Plugin", new Point(400, 400),
new Size(550, 475), viewModel);
AppState.FloatingItems.AddFloatingElement(Element);
}
}
}
}
| |
using System;
using System.Linq;
using System.ComponentModel.DataAnnotations;
using Csla;
using System.ComponentModel;
using System.Threading.Tasks;
namespace ProjectTracker.Library
{
[Serializable()]
public class ResourceEdit : CslaBaseTypes.BusinessBase<ResourceEdit>
{
public static readonly PropertyInfo<byte[]> TimeStampProperty = RegisterProperty<byte[]>(c => c.TimeStamp);
[Browsable(false)]
[EditorBrowsable(EditorBrowsableState.Never)]
public byte[] TimeStamp
{
get { return GetProperty(TimeStampProperty); }
set { SetProperty(TimeStampProperty, value); }
}
public static readonly PropertyInfo<int> IdProperty = RegisterProperty<int>(c => c.Id);
[Display(Name = "Resource id")]
public int Id
{
get { return GetProperty(IdProperty); }
set { SetProperty(IdProperty, value); }
}
public static readonly PropertyInfo<string> LastNameProperty =
RegisterProperty<string>(c => c.LastName);
[Display(Name = "Last name")]
[Required]
[StringLength(50)]
public string LastName
{
get { return GetProperty(LastNameProperty); }
set { SetProperty(LastNameProperty, value); }
}
public static readonly PropertyInfo<string> FirstNameProperty =
RegisterProperty<string>(c => c.FirstName);
[Display(Name = "First name")]
[Required]
[StringLength(50)]
public string FirstName
{
get { return GetProperty(FirstNameProperty); }
set { SetProperty(FirstNameProperty, value); }
}
[Display(Name = "Full name")]
public string FullName
{
get { return LastName + ", " + FirstName; }
}
public static readonly PropertyInfo<ResourceAssignments> AssignmentsProperty =
RegisterProperty<ResourceAssignments>(c => c.Assignments);
public ResourceAssignments Assignments
{
get { return GetProperty(AssignmentsProperty); }
private set { LoadProperty(AssignmentsProperty, value); }
}
public override string ToString()
{
return Id.ToString();
}
protected override void AddBusinessRules()
{
base.AddBusinessRules();
BusinessRules.AddRule(new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.WriteProperty, LastNameProperty, Security.Roles.ProjectManager));
BusinessRules.AddRule(new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.WriteProperty, FirstNameProperty, Security.Roles.ProjectManager));
BusinessRules.AddRule(new NoDuplicateProject { PrimaryProperty = AssignmentsProperty });
}
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public static void AddObjectAuthorizationRules()
{
Csla.Rules.BusinessRules.AddRule(typeof(ResourceEdit), new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.CreateObject, Security.Roles.ProjectManager));
Csla.Rules.BusinessRules.AddRule(typeof(ResourceEdit), new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.EditObject, Security.Roles.ProjectManager));
Csla.Rules.BusinessRules.AddRule(typeof(ResourceEdit), new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.DeleteObject, Security.Roles.ProjectManager, Security.Roles.Administrator));
}
protected override void OnChildChanged(Csla.Core.ChildChangedEventArgs e)
{
if (e.ChildObject is ResourceAssignments)
{
BusinessRules.CheckRules(AssignmentsProperty);
OnPropertyChanged(AssignmentsProperty);
}
base.OnChildChanged(e);
}
private class NoDuplicateProject : Csla.Rules.BusinessRule
{
protected override void Execute(Csla.Rules.IRuleContext context)
{
var target = (ResourceEdit)context.Target;
foreach (var item in target.Assignments)
{
var count = target.Assignments.Count(r => r.ProjectId == item.ProjectId);
if (count > 1)
{
context.AddErrorResult("Duplicate projects not allowed");
return;
}
}
}
}
public static async Task<ResourceEdit> NewResourceEditAsync()
{
return await DataPortal.CreateAsync<ResourceEdit>();
}
public static async Task<ResourceEdit> GetResourceEditAsync(int id)
{
return await DataPortal.FetchAsync<ResourceEdit>(id);
}
public static async Task<bool> ExistsAsync(int id)
{
var cmd = await DataPortal.CreateAsync<ResourceExistsCommand>(id);
cmd = await DataPortal.ExecuteAsync(cmd);
return cmd.ResourceExists;
}
public static ResourceEdit NewResourceEdit()
{
return DataPortal.Create<ResourceEdit>();
}
public static ResourceEdit GetResourceEdit(int id)
{
return DataPortal.Fetch<ResourceEdit>(id);
}
public static void DeleteResourceEdit(int id)
{
DataPortal.Delete<ResourceEdit>(id);
}
[RunLocal]
protected override void DataPortal_Create()
{
LoadProperty(AssignmentsProperty, DataPortal.CreateChild<ResourceAssignments>());
base.DataPortal_Create();
}
[Fetch]
private void Fetch(int id)
{
using (var ctx = ProjectTracker.Dal.DalFactory.GetManager())
{
var dal = ctx.GetProvider<ProjectTracker.Dal.IResourceDal>();
var data = dal.Fetch(id);
using (BypassPropertyChecks)
{
Id = data.Id;
FirstName = data.FirstName;
LastName = data.LastName;
TimeStamp = data.LastChanged;
Assignments = DataPortal.FetchChild<ResourceAssignments>(id);
}
}
}
[Insert]
private void Insert()
{
using (var ctx = ProjectTracker.Dal.DalFactory.GetManager())
{
var dal = ctx.GetProvider<ProjectTracker.Dal.IResourceDal>();
using (BypassPropertyChecks)
{
var item = new ProjectTracker.Dal.ResourceDto
{
FirstName = this.FirstName,
LastName = this.LastName
};
dal.Insert(item);
Id = item.Id;
TimeStamp = item.LastChanged;
}
FieldManager.UpdateChildren(this);
}
}
[Update]
private void Update()
{
using (var ctx = ProjectTracker.Dal.DalFactory.GetManager())
{
var dal = ctx.GetProvider<ProjectTracker.Dal.IResourceDal>();
using (BypassPropertyChecks)
{
var item = new ProjectTracker.Dal.ResourceDto
{
Id = this.Id,
FirstName = this.FirstName,
LastName = this.LastName,
LastChanged = this.TimeStamp
};
dal.Update(item);
TimeStamp = item.LastChanged;
}
FieldManager.UpdateChildren(this);
}
}
[DeleteSelf]
private void DeleteSelf()
{
using (BypassPropertyChecks)
Delete(this.Id);
}
[Delete]
private void Delete(int id)
{
using (var ctx = ProjectTracker.Dal.DalFactory.GetManager())
{
Assignments.Clear();
FieldManager.UpdateChildren(this);
var dal = ctx.GetProvider<ProjectTracker.Dal.IResourceDal>();
dal.Delete(id);
}
}
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Microsoft.WindowsAzure.Storage.Blob;
using Microsoft.WindowsAzure.Storage.Queue;
using Microsoft.WindowsAzure.Storage.Table;
using MS.Test.Common.MsTestLib;
using System.Collections;
namespace Management.Storage.ScenarioTest
{
public abstract class Agent
{
/// <summary>
/// output data returned after agent operation
/// </summary>
public Collection<Dictionary<string, object>> Output { get { return _Output; } }
/// <summary>
/// error messages returned after agent operation
/// </summary>
public Collection<string> ErrorMessages { get { return _ErrorMessages; } }
public bool UseContextParam
{
set {_UseContextParam = value;}
get {return _UseContextParam;}
}
/// <summary>
/// Return true if the operation succeeds; otherwise return false.
/// </summary>
public abstract bool NewAzureStorageContainer(string ContainerName);
/// <summary>
/// Parameters:
/// ContainerName:
/// 1. Could be empty if no Container parameter specified
/// 2. Could contain wildcards
/// </summary>
public abstract bool GetAzureStorageContainer(string ContainerName);
public abstract bool GetAzureStorageContainerByPrefix(string Prefix);
public abstract bool SetAzureStorageContainerACL(string ContainerName, BlobContainerPublicAccessType PublicAccess, bool PassThru = true);
public abstract bool RemoveAzureStorageContainer(string ContainerName, bool Force = true);
/// <summary>
/// For pipeline, new/remove a list of container names
/// </summary>
public abstract bool NewAzureStorageContainer(string[] ContainerNames);
public abstract bool RemoveAzureStorageContainer(string[] ContainerNames, bool Force = true);
public abstract bool NewAzureStorageQueue(string QueueName);
/// <summary>
/// Parameters:
///     QueueName:
/// 1. Could be empty if no Queue parameter specified
/// 2. Could contain wildcards
/// </summary>
public abstract bool GetAzureStorageQueue(string QueueName);
public abstract bool GetAzureStorageQueueByPrefix(string Prefix);
public abstract bool RemoveAzureStorageQueue(string QueueName, bool Force = true);
/// <summary>
/// For pipeline, new/remove a list of queue names
/// </summary>
public abstract bool NewAzureStorageQueue(string[] QueueNames);
public abstract bool RemoveAzureStorageQueue(string[] QueueNames, bool Force = true);
/// <summary>
/// Parameters:
///     Type:
///         BlobType.BlockBlob or BlobType.PageBlob
/// ConcurrentCount:
/// -1 means use the default value
/// </summary>
public abstract bool SetAzureStorageBlobContent(string FileName, string ContainerName, BlobType Type, string BlobName = "",
bool Force = true, int ConcurrentCount = -1, Hashtable properties = null, Hashtable metadata = null);
public abstract bool GetAzureStorageBlobContent(string Blob, string FileName, string ContainerName,
bool Force = true, int ConcurrentCount = -1);
public abstract bool GetAzureStorageBlob(string BlobName, string ContainerName);
public abstract bool GetAzureStorageBlobByPrefix(string Prefix, string ContainerName);
/// <summary>
/// Remove a blob from the specified container.
/// </summary>
public abstract bool RemoveAzureStorageBlob(string BlobName, string ContainerName, bool onlySnapshot = false, bool force = true);
public abstract bool NewAzureStorageTable(string TableName);
public abstract bool NewAzureStorageTable(string[] TableNames);
public abstract bool GetAzureStorageTable(string TableName);
public abstract bool GetAzureStorageTableByPrefix(string Prefix);
public abstract bool RemoveAzureStorageTable(string TableName, bool Force = true);
public abstract bool RemoveAzureStorageTable(string[] TableNames, bool Force = true);
public abstract bool NewAzureStorageContext(string StorageAccountName, string StorageAccountKey, string endPoint = "");
public abstract bool NewAzureStorageContext(string ConnectionString);
public abstract bool StartAzureStorageBlobCopy(string sourceUri, string destContainerName, string destBlobName, object destContext, bool force = true);
public abstract bool StartAzureStorageBlobCopy(string srcContainerName, string srcBlobName, string destContainerName, string destBlobName, object destContext = null, bool force = true);
public abstract bool StartAzureStorageBlobCopy(ICloudBlob srcBlob, string destContainerName, string destBlobName, object destContext = null, bool force = true);
public abstract bool GetAzureStorageBlobCopyState(string containerName, string blobName, bool waitForComplete);
public abstract bool GetAzureStorageBlobCopyState(ICloudBlob blob, object context, bool waitForComplete);
public abstract bool StopAzureStorageBlobCopy(string containerName, string blobName, string copyId, bool force);
/// <summary>
/// Compare the output collection data with comp
///
/// Parameters:
///     comp: comparison data
/// </summary>
public void OutputValidation(Collection<Dictionary<string, object>> comp)
{
Test.Info("Validate Dictionary objects");
Test.Assert(comp.Count == Output.Count, "Comparison size: {0} = {1} Output size", comp.Count, Output.Count);
if (comp.Count != Output.Count)
return;
// first check whether Key exists and then check value if it's not null
for (int i = 0; i < comp.Count; ++i)
{
foreach (string str in comp[i].Keys)
{
Test.Assert(Output[i].ContainsKey(str), "{0} should be in the output columns", str);
switch(str)
{
case "Context":
break;
case "CloudTable":
Test.Assert(Utility.CompareEntity((CloudTable)comp[i][str], (CloudTable)Output[i][str]),
"CloudTable Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
break;
case "CloudQueue":
Test.Assert(Utility.CompareEntity((CloudQueue)comp[i][str], (CloudQueue)Output[i][str]),
"CloudQueue Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
break;
case "CloudBlobContainer":
Test.Assert(Utility.CompareEntity((CloudBlobContainer)comp[i][str], (CloudBlobContainer)Output[i][str]),
"CloudBlobContainer Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
break;
case "ICloudBlob":
Test.Assert(Utility.CompareEntity((ICloudBlob)comp[i][str], (ICloudBlob)Output[i][str]),
"ICloudBlob Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
break;
case "Permission":
Test.Assert(Utility.CompareEntity((BlobContainerPermissions)comp[i][str], (BlobContainerPermissions)Output[i][str]),
"Permission Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
break;
default:
if(comp[i][str] == null)
{
Test.Assert(Output[i][str] == null, "Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
}
else
{
Test.Assert(comp[i][str].Equals(Output[i][str]), "Column {0}: {1} = {2}", str, comp[i][str], Output[i][str]);
}
break;
}
}
}
}
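// Example comparison data (illustrative; the key/value pairs and the agent instance are hypothetical):
//
// var comp = new Collection<Dictionary<string, object>>();
// comp.Add(new Dictionary<string, object> { { "Name", "testcontainer" }, { "PublicAccess", BlobContainerPublicAccessType.Off } });
// agent.OutputValidation(comp);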
/// <summary>
/// Compare the output collection data with containers
///
/// Parameters:
///     containers: comparison data
/// </summary>
public void OutputValidation(IEnumerable<CloudBlobContainer> containers)
{
Test.Info("Validate CloudBlobContainer objects");
Test.Assert(containers.Count() == Output.Count, "Comparison size: {0} = {1} Output size", containers.Count(), Output.Count);
if (containers.Count() != Output.Count)
return;
int count = 0;
foreach (CloudBlobContainer container in containers)
{
container.FetchAttributes();
Test.Assert(Utility.CompareEntity(container, (CloudBlobContainer)Output[count]["CloudBlobContainer"]), "container equality checking: {0}", container.Name);
++count;
}
}
/// <summary>
/// Compare the output collection data with container permissions
/// </summary>
/// <param name="containers">a list of cloudblobcontainer objects</param>
public void OutputValidation(IEnumerable<BlobContainerPermissions> permissions)
{
Test.Info("Validate BlobContainerPermissions");
Test.Assert(permissions.Count() == Output.Count, "Comparison size: {0} = {1} Output size", permissions.Count(), Output.Count);
if (permissions.Count() != Output.Count)
return;
int count = 0;
foreach (BlobContainerPermissions permission in permissions)
{
Test.Assert(Utility.CompareEntity(permission, (BlobContainerPermissions)Output[count]["Permission"]), "container permission equality checking");
++count;
}
}
/// <summary>
/// Compare the output collection data with ICloudBlob
/// </summary>
/// <param name="containers">a list of cloudblobcontainer objects</param>
public void OutputValidation(IEnumerable<ICloudBlob> blobs)
{
Test.Info("Validate ICloudBlob objects");
Test.Assert(blobs.Count() == Output.Count, "Comparison size: {0} = {1} Output size", blobs.Count(), Output.Count);
if (blobs.Count() != Output.Count)
return;
int count = 0;
foreach (ICloudBlob blob in blobs)
{
Test.Assert(Utility.CompareEntity(blob, (ICloudBlob)Output[count]["ICloudBlob"]), string.Format("ICloudBlob equality checking for blob '{0}'", blob.Name));
++count;
}
}
/// <summary>
/// Compare the output collection data with queues
///
/// Parameters:
///     queues: comparison data
/// </summary>
public void OutputValidation(IEnumerable<CloudQueue> queues)
{
Test.Info("Validate CloudQueue objects");
Test.Assert(queues.Count() == Output.Count, "Comparison size: {0} = {1} Output size", queues.Count(), Output.Count);
if (queues.Count() != Output.Count)
return;
int count = 0;
foreach (CloudQueue queue in queues)
{
queue.FetchAttributes();
Test.Assert(Utility.CompareEntity(queue, (CloudQueue)Output[count]["CloudQueue"]), "queue equality checking: {0}", queue.Name);
++count;
}
}
/// <summary>
/// Compare the output collection data with tables
///
/// Parameters:
///     tables: comparison data
/// </summary>
public void OutputValidation(IEnumerable<CloudTable> tables)
{
Test.Info("Validate CloudTable objects");
Test.Assert(tables.Count() == Output.Count, "Comparison size: {0} = {1} Output size", tables.Count(), Output.Count);
if (tables.Count() != Output.Count)
return;
int count = 0;
foreach (CloudTable table in tables)
{
Test.Assert(Utility.CompareEntity(table, (CloudTable)Output[count]["CloudTable"]), "table equality checking: {0}", table.Name);
++count;
}
}
protected static Random _random = new Random((int)(DateTime.Now.Ticks)); // for generating random object names
protected Collection<Dictionary<string, object>> _Output = new Collection<Dictionary<string, object>>();
protected Collection<string> _ErrorMessages = new Collection<string>();
protected bool _UseContextParam = true; // decide whether to specify the Context parameter
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.DotNet.RemoteExecutor;
using Xunit;
namespace System.Diagnostics.Tests
{
public class ProcessWaitingTests : ProcessTestBase
{
[Fact]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public void MultipleProcesses_StartAllKillAllWaitAll()
{
const int Iters = 10;
Process[] processes = Enumerable.Range(0, Iters).Select(_ => CreateProcessLong()).ToArray();
foreach (Process p in processes) p.Start();
foreach (Process p in processes) p.Kill();
foreach (Process p in processes) Assert.True(p.WaitForExit(WaitInMS));
}
[Fact]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public void MultipleProcesses_SerialStartKillWait()
{
const int Iters = 10;
for (int i = 0; i < Iters; i++)
{
Process p = CreateProcessLong();
p.Start();
p.Kill();
p.WaitForExit(WaitInMS);
}
}
[Fact]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public void MultipleProcesses_ParallelStartKillWait()
{
const int Tasks = 4, ItersPerTask = 10;
Action work = () =>
{
for (int i = 0; i < ItersPerTask; i++)
{
Process p = CreateProcessLong();
p.Start();
p.Kill();
p.WaitForExit(WaitInMS);
}
};
Task.WaitAll(Enumerable.Range(0, Tasks).Select(_ => Task.Run(work)).ToArray());
}
[Theory]
[InlineData(0)] // poll
[InlineData(10)] // real timeout
public void CurrentProcess_WaitNeverCompletes(int milliseconds)
{
Assert.False(Process.GetCurrentProcess().WaitForExit(milliseconds));
}
[Fact]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public void SingleProcess_TryWaitMultipleTimesBeforeCompleting()
{
Process p = CreateProcessLong();
p.Start();
// Verify we can try to wait for the process to exit multiple times
Assert.False(p.WaitForExit(0));
Assert.False(p.WaitForExit(0));
// Then wait until it exits and concurrently kill it.
// There's a race condition here, in that we really want to test
// killing it while we're waiting, but we could end up killing it
// beforehand, in which case we're simply not testing exactly
// what we wanted to test, but everything should still work.
Task.Delay(10).ContinueWith(_ => p.Kill());
Assert.True(p.WaitForExit(WaitInMS));
Assert.True(p.WaitForExit(0));
}
[Theory]
[InlineData(false)]
[InlineData(true)]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public async Task SingleProcess_WaitAfterExited(bool addHandlerBeforeStart)
{
Process p = CreateProcessLong();
p.EnableRaisingEvents = true;
var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
if (addHandlerBeforeStart)
{
p.Exited += delegate { tcs.SetResult(true); };
}
p.Start();
if (!addHandlerBeforeStart)
{
p.Exited += delegate { tcs.SetResult(true); };
}
p.Kill();
Assert.True(await tcs.Task);
Assert.True(p.WaitForExit(0));
}
[Theory]
[InlineData(0)]
[InlineData(1)]
[InlineData(127)]
public async Task SingleProcess_EnableRaisingEvents_CorrectExitCode(int exitCode)
{
using (Process p = CreateProcessPortable(RemotelyInvokable.ExitWithCode, exitCode.ToString()))
{
var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
p.EnableRaisingEvents = true;
p.Exited += delegate
{ tcs.SetResult(true); };
p.Start();
Assert.True(await tcs.Task);
Assert.Equal(exitCode, p.ExitCode);
}
}
[Fact]
[ActiveIssue(31908, TargetFrameworkMonikers.Uap)]
public void SingleProcess_CopiesShareExitInformation()
{
Process p = CreateProcessLong();
p.Start();
Process[] copies = Enumerable.Range(0, 3).Select(_ => Process.GetProcessById(p.Id)).ToArray();
Assert.False(p.WaitForExit(0));
p.Kill();
Assert.True(p.WaitForExit(WaitInMS));
foreach (Process copy in copies)
{
Assert.True(copy.WaitForExit(0));
}
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Getting handle of child process running on UAP is not possible")]
public void WaitForPeerProcess()
{
Process child1 = CreateProcessLong();
child1.Start();
Process child2 = CreateProcess(peerId =>
{
Process peer = Process.GetProcessById(int.Parse(peerId));
Console.WriteLine("Signal");
Assert.True(peer.WaitForExit(WaitInMS));
return RemoteExecutor.SuccessExitCode;
}, child1.Id.ToString());
child2.StartInfo.RedirectStandardOutput = true;
child2.Start();
char[] output = new char[6];
child2.StandardOutput.Read(output, 0, output.Length);
Assert.Equal("Signal", new string(output)); // wait for the signal before killing the peer
child1.Kill();
Assert.True(child1.WaitForExit(WaitInMS));
Assert.True(child2.WaitForExit(WaitInMS));
Assert.Equal(RemoteExecutor.SuccessExitCode, child2.ExitCode);
}
[Fact]
public void WaitForSignal()
{
const string expectedSignal = "Signal";
const string successResponse = "Success";
const int timeout = 30 * 1000; // 30 seconds, to allow for very slow machines
Process p = CreateProcessPortable(RemotelyInvokable.WriteLineReadLine);
p.StartInfo.RedirectStandardInput = true;
p.StartInfo.RedirectStandardOutput = true;
var mre = new ManualResetEventSlim(false);
int linesReceived = 0;
p.OutputDataReceived += (s, e) =>
{
if (e.Data != null)
{
linesReceived++;
if (e.Data == expectedSignal)
{
mre.Set();
}
}
};
p.Start();
p.BeginOutputReadLine();
Assert.True(mre.Wait(timeout));
Assert.Equal(1, linesReceived);
// Wait a little bit to make sure the process didn't exit on its own
Thread.Sleep(100);
Assert.False(p.HasExited, "Process has prematurely exited");
using (StreamWriter writer = p.StandardInput)
{
writer.WriteLine(successResponse);
}
Assert.True(p.WaitForExit(timeout), "Process has not exited");
Assert.Equal(RemotelyInvokable.SuccessExitCode, p.ExitCode);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Not applicable on uap - RemoteInvoke does not give back process handle")]
[ActiveIssue(15844, TestPlatforms.AnyUnix)]
public void WaitChain()
{
Process root = CreateProcess(() =>
{
Process child1 = CreateProcess(() =>
{
Process child2 = CreateProcess(() =>
{
Process child3 = CreateProcess(() => RemoteExecutor.SuccessExitCode);
child3.Start();
Assert.True(child3.WaitForExit(WaitInMS));
return child3.ExitCode;
});
child2.Start();
Assert.True(child2.WaitForExit(WaitInMS));
return child2.ExitCode;
});
child1.Start();
Assert.True(child1.WaitForExit(WaitInMS));
return child1.ExitCode;
});
root.Start();
Assert.True(root.WaitForExit(WaitInMS));
Assert.Equal(RemoteExecutor.SuccessExitCode, root.ExitCode);
}
[Fact]
public void WaitForSelfTerminatingChild()
{
Process child = CreateProcessPortable(RemotelyInvokable.SelfTerminate);
child.Start();
Assert.True(child.WaitForExit(WaitInMS));
Assert.NotEqual(RemoteExecutor.SuccessExitCode, child.ExitCode);
}
[Fact]
public void WaitForInputIdle_NotDirected_ThrowsInvalidOperationException()
{
var process = new Process();
Assert.Throws<InvalidOperationException>(() => process.WaitForInputIdle());
}
[Fact]
public void WaitForExit_NotDirected_ThrowsInvalidOperationException()
{
var process = new Process();
Assert.Throws<InvalidOperationException>(() => process.WaitForExit());
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Razor.Language.Syntax;
using Xunit;
namespace Microsoft.AspNetCore.Razor.Language.Legacy
{
public class TagHelperParseTreeRewriterTest : TagHelperRewritingTestBase
{
public static TheoryData GetAttributeNameValuePairsData
{
get
{
Func<string, string, KeyValuePair<string, string>> kvp =
(key, value) => new KeyValuePair<string, string>(key, value);
var empty = Enumerable.Empty<KeyValuePair<string, string>>();
var csharp = TagHelperParseTreeRewriter.Rewriter.InvalidAttributeValueMarker.ToString();
// documentContent, expectedPairs
return new TheoryData<string, IEnumerable<KeyValuePair<string, string>>>
{
{ "<a>", empty },
{ "<a @{ } href='~/home'>", empty },
{ "<a href=\"@true\">", new[] { kvp("href", csharp) } },
{ "<a href=\"prefix @true suffix\">", new[] { kvp("href", $"prefix{csharp} suffix") } },
{ "<a href=~/home>", new[] { kvp("href", "~/home") } },
{ "<a href=~/home @{ } nothing='something'>", new[] { kvp("href", "~/home") } },
{
"<a href=\"@DateTime.Now::0\" class='btn btn-success' random>",
new[] { kvp("href", $"{csharp}::0"), kvp("class", "btn btn-success"), kvp("random", "") }
},
{ "<a href=>", new[] { kvp("href", "") } },
{ "<a href='\"> ", new[] { kvp("href", "\"> ") } },
{ "<a href'", new[] { kvp("href'", "") } },
};
}
}
[Theory]
[MemberData(nameof(GetAttributeNameValuePairsData))]
public void GetAttributeNameValuePairs_ParsesPairsCorrectly(
string documentContent,
IEnumerable<KeyValuePair<string, string>> expectedPairs)
{
// Arrange
var errorSink = new ErrorSink();
var parseResult = ParseDocument(documentContent);
var document = parseResult.Root;
var parseTreeRewriter = new TagHelperParseTreeRewriter.Rewriter(
parseResult.Source,
null,
Enumerable.Empty<TagHelperDescriptor>(),
parseResult.Options.FeatureFlags,
errorSink);
// Assert - Guard
var rootBlock = Assert.IsType<RazorDocumentSyntax>(document);
var rootMarkup = Assert.IsType<MarkupBlockSyntax>(rootBlock.Document);
var childBlock = Assert.Single(rootMarkup.Children);
var element = Assert.IsType<MarkupElementSyntax>(childBlock);
Assert.Empty(errorSink.Errors);
// Act
var pairs = parseTreeRewriter.GetAttributeNameValuePairs(element.StartTag);
// Assert
Assert.Equal(expectedPairs, pairs);
}
public static TagHelperDescriptor[] PartialRequiredParentTags_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong"))
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("div"))
.Build(),
TagHelperDescriptorBuilder.Create("CatchALlTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("*"))
.Build(),
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.Build(),
};
[Fact]
public void UnderstandsPartialRequiredParentTags1()
{
var document = "<p><strong>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
[Fact]
public void UnderstandsPartialRequiredParentTags2()
{
var document = "<p><strong></strong>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
[Fact]
public void UnderstandsPartialRequiredParentTags3()
{
var document = "<p><strong></p><strong>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
[Fact]
public void UnderstandsPartialRequiredParentTags4()
{
var document = "<<p><<strong></</strong</strong></p>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
[Fact]
public void UnderstandsPartialRequiredParentTags5()
{
var document = "<<p><<strong></</strong></strong></p>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
[Fact]
public void UnderstandsPartialRequiredParentTags6()
{
var document = "<<p><<custom></<</custom></custom></p>";
EvaluateData(PartialRequiredParentTags_Descriptors, document);
}
public static TagHelperDescriptor[] NestedVoidSelfClosingRequiredParent_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("InputTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("input")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("strong")
.RequireParentTag("p"))
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("strong")
.RequireParentTag("input"))
.Build(),
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.Build(),
};
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent1()
{
var document = "<input><strong></strong>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent2()
{
var document = "<p><input><strong></strong></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent3()
{
var document = "<p><br><strong></strong></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent4()
{
var document = "<p><p><br></p><strong></strong></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent5()
{
var document = "<input><strong></strong>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent6()
{
var document = "<p><input /><strong /></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent7()
{
var document = "<p><br /><strong /></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedVoidSelfClosingRequiredParent8()
{
var document = "<p><p><br /></p><strong /></p>";
EvaluateData(NestedVoidSelfClosingRequiredParent_Descriptors, document);
}
public static TagHelperDescriptor[] NestedRequiredParent_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("strong")
.RequireParentTag("p"))
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("strong")
.RequireParentTag("div"))
.Build(),
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.Build(),
};
[Fact]
public void UnderstandsNestedRequiredParent1()
{
var document = "<strong></strong>";
EvaluateData(NestedRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedRequiredParent2()
{
var document = "<p><strong></strong></p>";
EvaluateData(NestedRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedRequiredParent3()
{
var document = "<div><strong></strong></div>";
EvaluateData(NestedRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedRequiredParent4()
{
var document = "<strong><strong></strong></strong>";
EvaluateData(NestedRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsNestedRequiredParent5()
{
var document = "<p><strong><strong></strong></strong></p>";
EvaluateData(NestedRequiredParent_Descriptors, document);
}
[Fact]
public void UnderstandsTagHelperPrefixAndAllowedChildren()
{
// Arrange
var documentContent = "<th:p><th:strong></th:strong></th:p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong"))
.Build(),
};
// Act & Assert
EvaluateData(
descriptors,
documentContent,
tagHelperPrefix: "th:");
}
[Fact]
public void UnderstandsTagHelperPrefixAndAllowedChildrenAndRequireParent()
{
// Arrange
var documentContent = "<th:p><th:strong></th:strong></th:p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong").RequireParentTag("p"))
.Build(),
};
// Act & Assert
EvaluateData(
descriptors,
documentContent,
tagHelperPrefix: "th:");
}
[Fact]
public void InvalidStructure_UnderstandsTHPrefixAndAllowedChildrenAndRequireParent()
{
// Rewrite_InvalidStructure_UnderstandsTagHelperPrefixAndAllowedChildrenAndRequireParent
// Arrange
var documentContent = "<th:p></th:strong></th:p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong").RequireParentTag("p"))
.Build(),
};
// Act & Assert
EvaluateData(
descriptors,
documentContent,
tagHelperPrefix: "th:");
}
[Fact]
public void NonTagHelperChild_UnderstandsTagHelperPrefixAndAllowedChildren()
{
// Arrange
var documentContent = "<th:p><strong></strong></th:p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.Build(),
};
// Act & Assert
EvaluateData(
descriptors,
documentContent,
tagHelperPrefix: "th:");
}
[Fact]
public void DoesNotUnderstandTagHelpersInInvalidHtmlTypedScriptTags1()
{
var document = "<script type><input /></script>";
RunParseTreeRewriterTest(document, "input");
}
[Fact]
public void DoesNotUnderstandTagHelpersInInvalidHtmlTypedScriptTags2()
{
var document = "<script types='text/html'><input /></script>";
RunParseTreeRewriterTest(document, "input");
}
[Fact]
public void DoesNotUnderstandTagHelpersInInvalidHtmlTypedScriptTags3()
{
var document = "<script type='text/html invalid'><input /></script>";
RunParseTreeRewriterTest(document, "input");
}
[Fact]
public void DoesNotUnderstandTagHelpersInInvalidHtmlTypedScriptTags4()
{
var document = "<script type='text/ng-*' type='text/html'><input /></script>";
RunParseTreeRewriterTest(document, "input");
}
[Fact]
public void UnderstandsTagHelpersInHtmlTypedScriptTags1()
{
var document = "<script type='text/html'><input /></script>";
RunParseTreeRewriterTest(document, "p", "input");
}
[Fact]
public void UnderstandsTagHelpersInHtmlTypedScriptTags2()
{
var document = "<script id='scriptTag' type='text/html' class='something'><input /></script>";
RunParseTreeRewriterTest(document, "p", "input");
}
[Fact]
public void UnderstandsTagHelpersInHtmlTypedScriptTags3()
{
var document = "<script type='text/html'><p><script type='text/html'><input /></script></p></script>";
RunParseTreeRewriterTest(document, "p", "input");
}
[Fact]
public void UnderstandsTagHelpersInHtmlTypedScriptTags4()
{
var document = "<script type='text/html'><p><script type='text/ html'><input /></script></p></script>";
RunParseTreeRewriterTest(document, "p", "input");
}
[Fact]
public void CanHandleInvalidChildrenWithWhitespace()
{
// Arrange
var documentContent = $"<p>{Environment.NewLine} <strong>{Environment.NewLine} Hello" +
$"{Environment.NewLine} </strong>{Environment.NewLine}</p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("br")
.Build()
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void RecoversWhenRequiredAttributeMismatchAndRestrictedChildren()
{
// Arrange
var documentContent = "<strong required><strong></strong></strong>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("strong")
.RequireAttributeDescriptor(attribute => attribute.Name("required")))
.AllowChildTag("br")
.Build()
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void CanHandleMultipleTagHelpersWithAllowedChildren_OneNull()
{
// Arrange
var documentContent = "<p><strong>Hello World</strong><br></p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper1", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.AllowChildTag("br")
.Build(),
TagHelperDescriptorBuilder.Create("PTagHelper2", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong"))
.Build(),
TagHelperDescriptorBuilder.Create("BRTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("br")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build(),
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void CanHandleMultipleTagHelpersWithAllowedChildren()
{
// Arrange
var documentContent = "<p><strong>Hello World</strong><br></p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper1", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("strong")
.Build(),
TagHelperDescriptorBuilder.Create("PTagHelper2", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("br")
.Build(),
TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong"))
.Build(),
TagHelperDescriptorBuilder.Create("BRTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("br")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build(),
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren1()
{
// Arrange
var documentContent = "<p><br /></p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "br" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren2()
{
// Arrange
var documentContent = $"<p>{Environment.NewLine}<br />{Environment.NewLine}</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "br" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren3()
{
// Arrange
var documentContent = "<p><br></p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren4()
{
// Arrange
var documentContent = "<p>Hello</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren5()
{
// Arrange
var documentContent = "<p><hr /></p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "br", "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren6()
{
// Arrange
var documentContent = "<p><br>Hello</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren7()
{
// Arrange
var documentContent = "<p><strong>Title:</strong><br />Something</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren8()
{
// Arrange
var documentContent = "<p><strong>Title:</strong><br />Something</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong", "br" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren9()
{
// Arrange
var documentContent = "<p> <strong>Title:</strong> <br /> Something</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong", "br" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren10()
{
// Arrange
var documentContent = "<p><strong>Title:<br><em>A Very Cool</em></strong><br />Something</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren11()
{
// Arrange
var documentContent = "<p><custom>Title:<br><em>A Very Cool</em></custom><br />Something</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "custom" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren12()
{
// Arrange
var documentContent = "<p></</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "custom" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren13()
{
// Arrange
var documentContent = "<p><</p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "custom" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsAllowedChildren14()
{
// Arrange
var documentContent = "<p><custom><br>:<strong><strong>Hello</strong></strong>:<input></custom></p>";
var descriptors = GetAllowedChildrenTagHelperDescriptors(new[] { "custom", "strong" });
// Act & Assert
EvaluateData(descriptors, documentContent);
}
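// Builds p and strong tag helper descriptors that restrict children to the given tag names, plus a void br tag helper.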
private TagHelperDescriptor[] GetAllowedChildrenTagHelperDescriptors(string[] allowedChildren)
{
var pTagHelperBuilder = TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
var strongTagHelperBuilder = TagHelperDescriptorBuilder.Create("StrongTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("strong"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
strongTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build(),
strongTagHelperBuilder.Build(),
TagHelperDescriptorBuilder.Create("BRTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("br")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build(),
};
return descriptors;
}
[Fact]
public void AllowsSimpleHtmlCommentsAsChildren()
{
// Arrange
var allowedChildren = new List<string> { "b" };
var literal = "asdf";
var commentOutput = "Hello World";
var document = $"<p><b>{literal}</b><!--{commentOutput}--></p>";
var pTagHelperBuilder = TagHelperDescriptorBuilder
.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build()
};
// Act & Assert
EvaluateData(descriptors, document);
}
[Fact]
public void DoesntAllowSimpleHtmlCommentsAsChildrenWhenFeatureFlagIsOff()
{
// Arrange
var allowedChildren = new List<string> { "b" };
var comment1 = "Hello";
var document = $"<p><!--{comment1}--></p>";
var pTagHelperBuilder = TagHelperDescriptorBuilder
.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build()
};
// Act & Assert
EvaluateData(
descriptors,
document,
featureFlags: RazorParserFeatureFlags.Create(RazorLanguageVersion.Version_2_0, FileKinds.Legacy));
}
[Fact]
public void FailsForContentWithCommentsAsChildren()
{
// Arrange
var allowedChildren = new List<string> { "b" };
var comment1 = "Hello";
var literal = "asdf";
var comment2 = "World";
var document = $"<p><!--{comment1}-->{literal}<!--{comment2}--></p>";
var pTagHelperBuilder = TagHelperDescriptorBuilder
.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build()
};
// Act & Assert
EvaluateData(descriptors, document);
}
[Fact]
public void AllowsRazorCommentsAsChildren()
{
// Arrange
var allowedChildren = new List<string> { "b" };
var literal = "asdf";
var commentOutput = $"@*{literal}*@";
var document = $"<p><b>{literal}</b>{commentOutput}</p>";
var pTagHelperBuilder = TagHelperDescriptorBuilder
.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build()
};
// Act & Assert
EvaluateData(descriptors, document);
}
[Fact]
public void AllowsRazorMarkupInHtmlComment()
{
// Arrange
var allowedChildren = new List<string> { "b" };
var literal = "asdf";
var part1 = "Hello ";
var part2 = "World";
var commentStart = "<!--";
var commentEnd = "-->";
var document = $"<p><b>{literal}</b>{commentStart}{part1}@{part2}{commentEnd}</p>";
var pTagHelperBuilder = TagHelperDescriptorBuilder
.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"));
foreach (var childTag in allowedChildren)
{
pTagHelperBuilder.AllowChildTag(childTag);
}
var descriptors = new TagHelperDescriptor[]
{
pTagHelperBuilder.Build()
};
// Act & Assert
EvaluateData(descriptors, document);
}
[Fact]
public void UnderstandsNullTagNameWithAllowedChildrenForCatchAll()
{
// Arrange
var documentContent = "<p></</p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("custom")
.Build(),
TagHelperDescriptorBuilder.Create("CatchAllTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("*"))
.Build(),
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void UnderstandsNullTagNameWithAllowedChildrenForCatchAllWithPrefix()
{
// Arrange
var documentContent = "<th:p></</th:p>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("PTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("p"))
.AllowChildTag("custom")
.Build(),
TagHelperDescriptorBuilder.Create("CatchAllTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("*"))
.Build(),
};
// Act & Assert
EvaluateData(descriptors, documentContent, "th:");
}
[Fact]
public void CanHandleStartTagOnlyTagTagMode()
{
// Arrange
var documentContent = "<input>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("InputTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("input")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build()
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void CreatesErrorForWithoutEndTagTagStructureForEndTags()
{
// Arrange
var documentContent = "</input>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("InputTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("input")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build()
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
[Fact]
public void CreatesErrorForInconsistentTagStructures()
{
// Arrange
var documentContent = "<input>";
var descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("InputTagHelper1", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("input")
.RequireTagStructure(TagStructure.WithoutEndTag))
.Build(),
TagHelperDescriptorBuilder.Create("InputTagHelper2", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("input")
.RequireTagStructure(TagStructure.NormalOrSelfClosing))
.Build()
};
// Act & Assert
EvaluateData(descriptors, documentContent);
}
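// Shared descriptors for the RequiredAttribute tests: p requires a class attribute, div requires class and style, and a catch-all matcher requires catchAll.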
public static TagHelperDescriptor[] RequiredAttribute_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("pTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("p")
.RequireAttributeDescriptor(attribute => attribute.Name("class")))
.Build(),
TagHelperDescriptorBuilder.Create("divTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("div")
.RequireAttributeDescriptor(attribute => attribute.Name("class"))
.RequireAttributeDescriptor(attribute => attribute.Name("style")))
.Build(),
TagHelperDescriptorBuilder.Create("catchAllTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("*")
.RequireAttributeDescriptor(attribute => attribute.Name("catchAll")))
.Build()
};
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly1()
{
EvaluateData(RequiredAttribute_Descriptors, "<p />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly2()
{
EvaluateData(RequiredAttribute_Descriptors, "<p></p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly3()
{
EvaluateData(RequiredAttribute_Descriptors, "<div />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly4()
{
EvaluateData(RequiredAttribute_Descriptors, "<div></div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly5()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly6()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"@DateTime.Now\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly7()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"btn\">words and spaces</p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly8()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"@DateTime.Now\">words and spaces</p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly9()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"btn\">words<strong>and</strong>spaces</p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly10()
{
EvaluateData(RequiredAttribute_Descriptors, "<strong catchAll=\"hi\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly11()
{
EvaluateData(RequiredAttribute_Descriptors, "<strong catchAll=\"@DateTime.Now\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly12()
{
EvaluateData(RequiredAttribute_Descriptors, "<strong catchAll=\"hi\">words and spaces</strong>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly13()
{
EvaluateData(RequiredAttribute_Descriptors, "<strong catchAll=\"@DateTime.Now\">words and spaces</strong>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly14()
{
EvaluateData(RequiredAttribute_Descriptors, "<div class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly15()
{
EvaluateData(RequiredAttribute_Descriptors, "<div class=\"btn\"></div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly16()
{
EvaluateData(RequiredAttribute_Descriptors, "<p notRequired=\"a\" class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly17()
{
EvaluateData(RequiredAttribute_Descriptors, "<p notRequired=\"@DateTime.Now\" class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly18()
{
EvaluateData(RequiredAttribute_Descriptors, "<p notRequired=\"a\" class=\"btn\">words and spaces</p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly19()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly20()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"@DateTime.Now\" class=\"btn\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly21()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\">words and spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly22()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"@DateTime.Now\" class=\"@DateTime.Now\">words and spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly23()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\">words<strong>and</strong>spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly24()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"btn\" catchAll=\"hi\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly25()
{
EvaluateData(RequiredAttribute_Descriptors, "<p class=\"btn\" catchAll=\"hi\">words and spaces</p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly26()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\" catchAll=\"hi\" />");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly27()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\" catchAll=\"hi\" >words and spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly28()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\" catchAll=\"@@hi\" >words and spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly29()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"@DateTime.Now\" class=\"@DateTime.Now\" catchAll=\"@DateTime.Now\" >words and spaces</div>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly30()
{
EvaluateData(RequiredAttribute_Descriptors, "<div style=\"\" class=\"btn\" catchAll=\"hi\" >words<strong>and</strong>spaces</div>");
}
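// Shared descriptors for the nested RequiredAttribute tests: p requires a class attribute and a catch-all matcher requires catchAll.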
public static TagHelperDescriptor[] NestedRequiredAttribute_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("pTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("p")
.RequireAttributeDescriptor(attribute => attribute.Name("class")))
.Build(),
TagHelperDescriptorBuilder.Create("catchAllTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("*")
.RequireAttributeDescriptor(attribute => attribute.Name("catchAll")))
.Build(),
};
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly1()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<p class=\"btn\"><p></p></p>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly2()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<strong catchAll=\"hi\"><strong></strong></strong>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly3()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<p class=\"btn\"><strong><p></p></strong></p>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly4()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<strong catchAll=\"hi\"><p><strong></strong></p></strong>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly5()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<p class=\"btn\"><strong catchAll=\"hi\"><p></p></strong></p>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly6()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<strong catchAll=\"hi\"><p class=\"btn\"><strong></strong></p></strong>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly7()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<p class=\"btn\"><p class=\"btn\"><p></p></p></p>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly8()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<strong catchAll=\"hi\"><strong catchAll=\"hi\"><strong></strong></strong></strong>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly9()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<p class=\"btn\"><p><p><p class=\"btn\"><p></p></p></p></p></p>");
}
[Fact]
public void NestedRequiredAttributeDescriptorsCreateTagHelperBlocksCorrectly10()
{
EvaluateData(NestedRequiredAttribute_Descriptors, "<strong catchAll=\"hi\"><strong><strong><strong catchAll=\"hi\"><strong></strong></strong></strong></strong></strong>");
}
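// Descriptor for the malformed-markup tests: p is treated as a tag helper only when a class attribute is present.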
public static TagHelperDescriptor[] MalformedRequiredAttribute_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("pTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("p")
.RequireAttributeDescriptor(attribute => attribute.Name("class")))
.Build(),
};
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly1()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly2()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p class=\"btn\"");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly3()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p notRequired=\"hi\" class=\"btn\"");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly4()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p></p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly5()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p class=\"btn\"></p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly6()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p notRequired=\"hi\" class=\"btn\"></p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly7()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p class=\"btn\" <p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly8()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p notRequired=\"hi\" class=\"btn\" <p>");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly9()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p class=\"btn\" </p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly10()
{
EvaluateData(MalformedRequiredAttribute_Descriptors, "<p notRequired=\"hi\" class=\"btn\" </p");
}
[Fact]
public void RequiredAttributeDescriptorsCreateMalformedTagHelperBlocksCorrectly11()
{
var document = "<p class='foo'>@if(true){</p>}</p>";
EvaluateData(MalformedRequiredAttribute_Descriptors, document);
}
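// Descriptors for the prefixed tag helper tests: myth, and myth2 with a bound boolean attribute; a separate catch-all set follows.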
public static TagHelperDescriptor[] PrefixedTagHelperColon_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("mythTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("myth"))
.Build(),
TagHelperDescriptorBuilder.Create("mythTagHelper2", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("myth2"))
.BoundAttributeDescriptor(attribute =>
attribute
.Name("bound")
.PropertyName("Bound")
.TypeName(typeof(bool).FullName))
.Build()
};
public static TagHelperDescriptor[] PrefixedTagHelperCatchAll_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("mythTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule => rule.RequireTagName("*"))
.Build(),
};
[Fact]
public void AllowsPrefixedTagHelpers1()
{
EvaluateData(PrefixedTagHelperCatchAll_Descriptors, "<th: />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers2()
{
EvaluateData(PrefixedTagHelperCatchAll_Descriptors, "<th:>words and spaces</th:>", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers3()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers4()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth></th:myth>", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers5()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth><th:my2th></th:my2th></th:myth>", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers6()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<!th:myth />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers7()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<!th:myth></!th:myth>", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers8()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth class=\"btn\" />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers9()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth2 class=\"btn\" />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers10()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth class=\"btn\">words and spaces</th:myth>", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsPrefixedTagHelpers11()
{
EvaluateData(PrefixedTagHelperColon_Descriptors, "<th:myth2 bound=\"@DateTime.Now\" />", tagHelperPrefix: "th:");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithAttrTextTag1()
{
RunParseTreeRewriterTest("@{<!text class=\"btn\">}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithAttrTextTag2()
{
RunParseTreeRewriterTest("@{<!text class=\"btn\"></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithAttrTextTag3()
{
RunParseTreeRewriterTest("@{<!text class=\"btn\">words with spaces</!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithAttrTextTag4()
{
RunParseTreeRewriterTest("@{<!text class='btn1 btn2' class2=btn></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithAttrTextTag5()
{
RunParseTreeRewriterTest("@{<!text class='btn1 @DateTime.Now btn2'></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag1()
{
RunParseTreeRewriterTest("@{<!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag2()
{
RunParseTreeRewriterTest("@{</!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag3()
{
RunParseTreeRewriterTest("@{<!text></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag4()
{
RunParseTreeRewriterTest("@{<!text>words and spaces</!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag5()
{
RunParseTreeRewriterTest("@{<!text></text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag6()
{
RunParseTreeRewriterTest("@{<text></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag7()
{
RunParseTreeRewriterTest("@{<!text><text></text></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag8()
{
RunParseTreeRewriterTest("@{<text><!text></!text>}", "p", "text");
}
[Fact]
public void AllowsTHElementOptForCompleteTextTagInCSharpBlock_WithBlockTextTag9()
{
RunParseTreeRewriterTest("@{<!text></!text></text>}", "p", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock1()
{
RunParseTreeRewriterTest("@{<!text}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock2()
{
RunParseTreeRewriterTest("@{<!text /}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock3()
{
RunParseTreeRewriterTest("@{<!text class=}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock4()
{
RunParseTreeRewriterTest("@{<!text class=\"btn}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock5()
{
RunParseTreeRewriterTest("@{<!text class=\"btn\"}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteTextTagInCSharpBlock6()
{
RunParseTreeRewriterTest("@{<!text class=\"btn\" /}", "text");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock1()
{
RunParseTreeRewriterTest("@{<!}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock2()
{
RunParseTreeRewriterTest("@{<!p}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock3()
{
RunParseTreeRewriterTest("@{<!p /}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock4()
{
RunParseTreeRewriterTest("@{<!p class=}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock5()
{
RunParseTreeRewriterTest("@{<!p class=\"btn}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock6()
{
RunParseTreeRewriterTest("@{<!p class=\"btn@@}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock7()
{
RunParseTreeRewriterTest("@{<!p class=\"btn\"}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTMLInCSharpBlock8()
{
RunParseTreeRewriterTest("@{<!p class=\"btn\" /}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML1()
{
RunParseTreeRewriterTest("<!", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML2()
{
RunParseTreeRewriterTest("<!p", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML3()
{
RunParseTreeRewriterTest("<!p /", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML4()
{
RunParseTreeRewriterTest("<!p class=", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML5()
{
RunParseTreeRewriterTest("<!p class=\"btn", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML6()
{
RunParseTreeRewriterTest("<!p class=\"btn\"", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptForIncompleteHTML7()
{
RunParseTreeRewriterTest("<!p class=\"btn\" /", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData1()
{
RunParseTreeRewriterTest("@{<!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData2()
{
RunParseTreeRewriterTest("@{</!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData3()
{
RunParseTreeRewriterTest("@{<!p></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData4()
{
RunParseTreeRewriterTest("@{<!p>words and spaces</!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData5()
{
RunParseTreeRewriterTest("@{<!p></p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData6()
{
RunParseTreeRewriterTest("@{<p></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData7()
{
RunParseTreeRewriterTest("@{<p><!p></!p></p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData8()
{
RunParseTreeRewriterTest("@{<p><!p></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData9()
{
RunParseTreeRewriterTest("@{<!p></!p></p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData10()
{
RunParseTreeRewriterTest("@{<strong></!p></strong>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData11()
{
RunParseTreeRewriterTest("@{<strong></strong><!p></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithBlockData12()
{
RunParseTreeRewriterTest("@{<p><strong></!strong><!p></strong></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithAttributeData1()
{
RunParseTreeRewriterTest("@{<!p class=\"btn\">}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithAttributeData2()
{
RunParseTreeRewriterTest("@{<!p class=\"btn\"></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithAttributeData3()
{
RunParseTreeRewriterTest("@{<!p class=\"btn\">words with spaces</!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithAttributeData4()
{
RunParseTreeRewriterTest("@{<!p class='btn1 btn2' class2=btn></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutCSharp_WithAttributeData5()
{
RunParseTreeRewriterTest("@{<!p class='btn1 @DateTime.Now btn2'></!p>}", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData1()
{
RunParseTreeRewriterTest("<!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData2()
{
RunParseTreeRewriterTest("</!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData3()
{
RunParseTreeRewriterTest("<!p></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData4()
{
RunParseTreeRewriterTest("<!p>words and spaces</!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData5()
{
RunParseTreeRewriterTest("<!p></p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData6()
{
RunParseTreeRewriterTest("<p></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData7()
{
RunParseTreeRewriterTest("<p><!p></!p></p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData8()
{
RunParseTreeRewriterTest("<p><!p></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData9()
{
RunParseTreeRewriterTest("<!p></!p></p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData10()
{
RunParseTreeRewriterTest("<strong></!p></strong>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData11()
{
RunParseTreeRewriterTest("<strong></strong><!p></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithBlockData12()
{
RunParseTreeRewriterTest("<p><strong></!strong><!p></strong></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithAttributeData1()
{
RunParseTreeRewriterTest("<!p class=\"btn\">", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithAttributeData2()
{
RunParseTreeRewriterTest("<!p class=\"btn\"></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithAttributeData3()
{
RunParseTreeRewriterTest("<!p class=\"btn\">words and spaces</!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithAttributeData4()
{
RunParseTreeRewriterTest("<!p class='btn1 btn2' class2=btn></!p>", "strong", "p");
}
[Fact]
public void AllowsTagHelperElementOptOutHTML_WithAttributeData5()
{
RunParseTreeRewriterTest("<!p class='btn1 @DateTime.Now btn2'></!p>", "strong", "p");
}
[Fact]
public void DoesNotRewriteTextTagTransitionTagHelpers1()
{
RunParseTreeRewriterTest("<text>Hello World</text>", "p", "text");
}
[Fact]
public void DoesNotRewriteTextTagTransitionTagHelpers2()
{
RunParseTreeRewriterTest("@{<text>Hello World</text>}", "p", "text");
}
[Fact]
public void DoesNotRewriteTextTagTransitionTagHelpers3()
{
RunParseTreeRewriterTest("@{<text><p>Hello World</p></text>}", "p", "text");
}
[Fact]
public void DoesNotRewriteTextTagTransitionTagHelpers4()
{
RunParseTreeRewriterTest("@{<p><text>Hello World</text></p>}", "p", "text");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers1()
{
RunParseTreeRewriterTest("<foo><!-- Hello World --></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers2()
{
RunParseTreeRewriterTest("<foo><!-- @foo --></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers3()
{
RunParseTreeRewriterTest("<foo><?xml Hello World ?></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers4()
{
RunParseTreeRewriterTest("<foo><?xml @foo ?></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers5()
{
RunParseTreeRewriterTest("<foo><!DOCTYPE @foo ></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers6()
{
RunParseTreeRewriterTest("<foo><!DOCTYPE hello=\"world\" ></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers7()
{
RunParseTreeRewriterTest("<foo><![CDATA[ Hello World ]]></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void DoesNotRewriteSpecialTagTagHelpers8()
{
RunParseTreeRewriterTest("<foo><![CDATA[ @foo ]]></foo>", "!--", "?xml", "![CDATA[", "!DOCTYPE");
}
[Fact]
public void RewritesNestedTagHelperTagBlocks1()
{
RunParseTreeRewriterTest("<p><div></div></p>", "p", "div");
}
[Fact]
public void RewritesNestedTagHelperTagBlocks2()
{
RunParseTreeRewriterTest("<p>Hello World <div></div></p>", "p", "div");
}
[Fact]
public void RewritesNestedTagHelperTagBlocks3()
{
RunParseTreeRewriterTest("<p>Hel<p>lo</p></p> <p><div>World</div></p>", "p", "div");
}
[Fact]
public void RewritesNestedTagHelperTagBlocks4()
{
RunParseTreeRewriterTest("<p>Hel<strong>lo</strong></p> <p><span>World</span></p>", "p", "div");
}
[Fact]
public void HandlesMalformedNestedNonTagHelperTags_Correctly()
{
RunParseTreeRewriterTest("<div>@{</div>}");
}
[Fact]
public void HandlesNonTagHelperStartAndEndVoidTags_Correctly()
{
RunParseTreeRewriterTest("<input>Foo</input>");
}
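// Descriptors for the case-sensitivity tests: a case-sensitive p tag helper requiring a class attribute (with a bound boolean attribute), plus a catch-all requiring catchAll.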
public static TagHelperDescriptor[] CaseSensitive_Descriptors = new TagHelperDescriptor[]
{
TagHelperDescriptorBuilder.Create("pTagHelper", "SomeAssembly")
.SetCaseSensitive()
.BoundAttributeDescriptor(attribute =>
attribute
.Name("bound")
.PropertyName("Bound")
.TypeName(typeof(bool).FullName))
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("p")
.RequireAttributeDescriptor(attribute => attribute.Name("class")))
.Build(),
TagHelperDescriptorBuilder.Create("catchAllTagHelper", "SomeAssembly")
.TagMatchingRuleDescriptor(rule =>
rule
.RequireTagName("*")
.RequireAttributeDescriptor(attribute => attribute.Name("catchAll")))
.Build(),
};
[Fact]
public void HandlesCaseSensitiveTagHelpersCorrectly1()
{
EvaluateData(CaseSensitive_Descriptors, "<p class='foo' catchAll></p>");
}
[Fact]
public void HandlesCaseSensitiveTagHelpersCorrectly2()
{
EvaluateData(CaseSensitive_Descriptors, "<p CLASS='foo' CATCHAll></p>");
}
[Fact]
public void HandlesCaseSensitiveTagHelpersCorrectly3()
{
EvaluateData(CaseSensitive_Descriptors, "<P class='foo' CATCHAll></P>");
}
[Fact]
public void HandlesCaseSensitiveTagHelpersCorrectly4()
{
EvaluateData(CaseSensitive_Descriptors, "<P class='foo'></P>");
}
[Fact]
public void HandlesCaseSensitiveTagHelpersCorrectly5()
{
EvaluateData(CaseSensitive_Descriptors, "<p Class='foo'></p>");
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using System;
using System.IO;
using System.Collections.Generic;
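// Tests for MemoryStream.TryGetBuffer: the underlying buffer is only exposed when the stream owns it (capacity-based constructors) or when it was created with publiclyVisible set to true.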
public class MemoryStream_TryGetBufferTests
{
[Fact]
public static void TryGetBuffer_Constructor_AlwaysReturnsTrue()
{
var stream = new MemoryStream();
ArraySegment<byte> segment;
Assert.True(stream.TryGetBuffer(out segment));
Assert.NotNull(segment.Array);
Assert.Equal(0, segment.Offset);
Assert.Equal(0, segment.Count);
}
[Fact]
public static void TryGetBuffer_Constructor_Int32_AlwaysReturnsTrue()
{
var stream = new MemoryStream(512);
ArraySegment<byte> segment;
Assert.True(stream.TryGetBuffer(out segment));
Assert.Equal(512, segment.Array.Length);
Assert.Equal(0, segment.Offset);
Assert.Equal(0, segment.Count);
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_AlwaysReturnsFalse()
{
var stream = new MemoryStream(new byte[512]);
ArraySegment<byte> segment;
Assert.False(stream.TryGetBuffer(out segment));
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_Bool_AlwaysReturnsFalse()
{
var stream = new MemoryStream(new byte[512], writable: true);
ArraySegment<byte> segment;
Assert.False(stream.TryGetBuffer(out segment));
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_AlwaysReturnsFalse()
{
var stream = new MemoryStream(new byte[512], index: 0, count: 512);
ArraySegment<byte> segment;
Assert.False(stream.TryGetBuffer(out segment));
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_AlwaysReturnsFalse()
{
var stream = new MemoryStream(new byte[512], index: 0, count: 512, writable: true);
ArraySegment<byte> segment;
Assert.False(stream.TryGetBuffer(out segment));
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_FalseAsPubliclyVisible_ReturnsFalse()
{
var stream = new MemoryStream(new byte[512], index: 0, count: 512, writable: true, publiclyVisible: false);
ArraySegment<byte> segment;
Assert.False(stream.TryGetBuffer(out segment));
}
[Fact]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_TrueAsPubliclyVisible_ReturnsTrue()
{
var stream = new MemoryStream(new byte[512], index: 0, count: 512, writable: true, publiclyVisible: true);
ArraySegment<byte> segment;
Assert.True(stream.TryGetBuffer(out segment));
Assert.NotNull(segment.Array);
Assert.Equal(512, segment.Array.Length);
Assert.Equal(0, segment.Offset);
Assert.Equal(512, segment.Count);
}
[Theory]
[MemberData("GetArraysVariedBySize")]
public static void TryGetBuffer_Constructor_ByteArray_AlwaysReturnsEmptyArraySegment(byte[] array)
{
var stream = new MemoryStream(array);
ArraySegment<byte> result;
Assert.False(stream.TryGetBuffer(out result));
// The buffer is not publicly visible for this constructor, so the returned segment is left at its default (empty) value.
Assert.True(default(ArraySegment<byte>).Equals(result));
}
[Theory]
[MemberData("GetArraysVariedBySize")]
public static void TryGetBuffer_Constructor_ByteArray_Bool_AlwaysReturnsEmptyArraySegment(byte[] array)
{
var stream = new MemoryStream(array, writable: true);
ArraySegment<byte> result;
Assert.False(stream.TryGetBuffer(out result));
// The buffer is not publicly visible for this constructor, so the returned segment is left at its default (empty) value.
Assert.True(default(ArraySegment<byte>).Equals(result));
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_AlwaysReturnsEmptyArraySegment(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count);
ArraySegment<byte> result;
Assert.False(stream.TryGetBuffer(out result));
// The buffer is not publicly visible for this constructor, so the returned segment is left at its default (empty) value.
Assert.True(default(ArraySegment<byte>).Equals(result));
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_AlwaysReturnsEmptyArraySegment(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true);
ArraySegment<byte> result;
Assert.False(stream.TryGetBuffer(out result));
// The buffer is not publicly visible for this constructor, so the returned segment is left at its default (empty) value.
Assert.True(default(ArraySegment<byte>).Equals(result));
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_FalseAsPubliclyVisible_ReturnsEmptyArraySegment(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: false);
ArraySegment<byte> result;
Assert.False(stream.TryGetBuffer(out result));
// publiclyVisible was passed as false, so the returned segment is left at its default (empty) value.
Assert.True(default(ArraySegment<byte>).Equals(result));
}
[Fact]
public static void TryGetBuffer_Constructor_AlwaysReturnsOffsetSetToZero()
{
var stream = new MemoryStream();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(0, result.Offset);
}
[Fact]
public static void TryGetBuffer_Constructor_Int32_AlwaysReturnsOffsetSetToZero()
{
var stream = new MemoryStream(512);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(0, result.Offset);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_ValueAsIndexAndTrueAsPubliclyVisible_AlwaysReturnsOffsetSetToIndex(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Offset, result.Offset);
}
[Fact]
public static void TryGetBuffer_Constructor_ByDefaultReturnsCountSetToZero()
{
var stream = new MemoryStream();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(0, result.Count);
}
[Theory]
[MemberData("GetArraysVariedBySize")]
public static void TryGetBuffer_Constructor_ReturnsCountSetToWrittenLength(byte[] array)
{
var stream = new MemoryStream();
stream.Write(array, 0, array.Length);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Length, result.Count);
}
[Fact]
public static void TryGetBuffer_Constructor_Int32_ByDefaultReturnsCountSetToZero()
{
var stream = new MemoryStream(512);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(0, result.Count);
}
[Theory]
[MemberData("GetArraysVariedBySize")]
public static void TryGetBuffer_Constructor_Int32_ReturnsCountSetToWrittenLength(byte[] array)
{
var stream = new MemoryStream(512);
stream.Write(array, 0, array.Length);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Length, result.Count);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_ValueAsCountAndTrueAsPubliclyVisible_AlwaysReturnsCountSetToCount(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Count, result.Count);
}
[Fact]
public static void TryGetBuffer_Constructor_ReturnsArray()
{
var stream = new MemoryStream();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.NotNull(result.Array);
}
[Fact]
public static void TryGetBuffer_Constructor_MultipleCallsReturnsSameArray()
{
var stream = new MemoryStream();
ArraySegment<byte> result1;
ArraySegment<byte> result2;
Assert.True(stream.TryGetBuffer(out result1));
Assert.True(stream.TryGetBuffer(out result2));
Assert.Same(result1.Array, result2.Array);
}
[Fact]
public static void TryGetBuffer_Constructor_Int32_MultipleCallsReturnSameArray()
{
var stream = new MemoryStream(512);
ArraySegment<byte> result1;
ArraySegment<byte> result2;
Assert.True(stream.TryGetBuffer(out result1));
Assert.True(stream.TryGetBuffer(out result2));
Assert.Same(result1.Array, result2.Array);
}
[Fact]
public static void TryGetBuffer_Constructor_Int32_WhenWritingPastCapacity_ReturnsDifferentArrays()
{
var stream = new MemoryStream(512);
ArraySegment<byte> result1;
Assert.True(stream.TryGetBuffer(out result1));
// Force the stream to resize the underlying array
stream.Write(new byte[1024], 0, 1024);
ArraySegment<byte> result2;
Assert.True(stream.TryGetBuffer(out result2));
Assert.NotSame(result1.Array, result2.Array);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_Constructor_ByteArray_Int32_Int32_Bool_Bool_ValueAsBufferAndTrueAsPubliclyVisible_AlwaysReturnsArraySetToBuffer(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Same(array.Array, result.Array);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_WhenDisposed_ReturnsTrue(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
stream.Dispose();
ArraySegment<byte> segment;
Assert.True(stream.TryGetBuffer(out segment));
Assert.Same(array.Array, segment.Array);
Assert.Equal(array.Offset, segment.Offset);
Assert.Equal(array.Count, segment.Count);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_WhenDisposed_ReturnsOffsetSetToIndex(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
stream.Dispose();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Offset, result.Offset);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_WhenDisposed_ReturnsCountSetToCount(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
stream.Dispose();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Equal(array.Count, result.Count);
}
[Theory]
[MemberData("GetArraysVariedByOffsetAndLength")]
public static void TryGetBuffer_WhenDisposed_ReturnsArraySetToBuffer(ArraySegment<byte> array)
{
var stream = new MemoryStream(array.Array, index: array.Offset, count: array.Count, writable: true, publiclyVisible: true);
stream.Dispose();
ArraySegment<byte> result;
Assert.True(stream.TryGetBuffer(out result));
Assert.Same(array.Array, result.Array);
}
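// Test data: segments over a 512-byte array with varied offsets and counts, including empty and boundary segments.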
public static IEnumerable<object[]> GetArraysVariedByOffsetAndLength()
{
yield return new object[] { new ArraySegment<byte>(new byte[512], 0, 512) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 1, 511) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 2, 510) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 256, 256) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 512, 0) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 511, 1) };
yield return new object[] { new ArraySegment<byte>(new byte[512], 510, 2) };
}
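// Test data: byte arrays of assorted sizes around power-of-two boundaries, filled with sequential values.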
public static IEnumerable<object[]> GetArraysVariedBySize()
{
yield return new object[] { FillWithData(new byte[0]) };
yield return new object[] { FillWithData(new byte[1]) };
yield return new object[] { FillWithData(new byte[2]) };
yield return new object[] { FillWithData(new byte[254]) };
yield return new object[] { FillWithData(new byte[255]) };
yield return new object[] { FillWithData(new byte[256]) };
yield return new object[] { FillWithData(new byte[511]) };
yield return new object[] { FillWithData(new byte[512]) };
yield return new object[] { FillWithData(new byte[513]) };
yield return new object[] { FillWithData(new byte[1023]) };
yield return new object[] { FillWithData(new byte[1024]) };
yield return new object[] { FillWithData(new byte[1025]) };
yield return new object[] { FillWithData(new byte[2047]) };
yield return new object[] { FillWithData(new byte[2048]) };
yield return new object[] { FillWithData(new byte[2049]) };
}
private static byte[] FillWithData(byte[] buffer)
{
for (int i = 0; i < buffer.Length; i++)
{
buffer[i] = (byte)i;
}
return buffer;
}
}
| |
// The MIT License
//
// Copyright (c) 2012-2015 Jordan E. Terrell
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.Serialization;
using System.Security.Permissions;
using iSynaptic.Commons.Reflection;
using iSynaptic.Commons.Reflection.Emit;
namespace iSynaptic.Commons.Runtime.Serialization
{
public static class Cloneable
{
public static T Clone<T>(this T source)
{
return Cloneable<T>.Clone(source);
}
public static T ShallowClone<T>(this T source)
{
return Cloneable<T>.ShallowClone(source);
}
public static T CloneTo<T>(this T source, T destination)
{
return Cloneable<T>.CloneTo(source, destination);
}
public static T ShallowCloneTo<T>(this T source, T destination)
{
return Cloneable<T>.ShallowCloneTo(source, destination);
}
}
public static class Cloneable<T>
{
private static readonly Type[] CloneablePrimitives =
{
typeof(IntPtr),
typeof(UIntPtr),
typeof(string),
typeof(decimal),
typeof(DateTime),
typeof(TimeSpan),
typeof(Guid)
};
private static readonly Type TargetType = null;
private static readonly MethodInfo CloneContextGetShouldUseExistingObjectsMethod = null;
private static readonly MethodInfo CloneContextGetIsShallowCloneMethod = null;
private static readonly MethodInfo CloneContextGetCloneMapMethod = null;
private static readonly MethodInfo DictionaryContainsKeyMethod = null;
private static readonly MethodInfo DictionaryGetItemMethod = null;
private static readonly object SyncLock = new object();
private static bool? _CanClone = null;
private static bool? _CanShallowClone = null;
private static Func<T, T, CloneContext, T> _Strategy = null;
private static Func<T, T, CloneContext, T> _DynamicStrategy = null;
private static readonly Func<FieldInfo, bool> FieldIncludeFilter = f =>
(f.IsDefined(typeof(NonSerializedAttribute), true) != true);
static Cloneable()
{
TargetType = typeof(T);
CloneContextGetShouldUseExistingObjectsMethod = GetMethod(typeof(CloneContext), "get_ShouldUseExistingObjects");
CloneContextGetIsShallowCloneMethod = GetMethod(typeof(CloneContext), "get_IsShallowClone");
CloneContextGetCloneMapMethod = GetMethod(typeof (CloneContext), "get_CloneMap");
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(typeof(object), typeof(object));
DictionaryContainsKeyMethod = GetMethod(dictionaryType, "ContainsKey", typeof(object));
DictionaryGetItemMethod = GetMethod(dictionaryType, "get_Item", typeof(object));
}
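// Dispatches cloning based on the runtime type of the source so instances referenced through a base type are cloned with their concrete type's fields.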
private class ReferenceTypeStrategy<TConcrete>
{
private Func<T, T, CloneContext, T> _NextStrategy = null;
private Func<TConcrete, TConcrete, CloneContext, TConcrete> _ReferenceTypeDynamicStrategy = null;
public T Strategy(T source, T destination, CloneContext cloneContext)
{
Type sourceType = source.GetType();
if (sourceType == typeof(TConcrete))
{
var s = (TConcrete)(object)source;
var d = destination is TConcrete ? (TConcrete) (object) destination : default(TConcrete);
if (_ReferenceTypeDynamicStrategy == null)
_ReferenceTypeDynamicStrategy = Cloneable<TConcrete>.BuildDynamicStrategy();
return (T)(object)_ReferenceTypeDynamicStrategy(s, d, cloneContext);
}
if(_NextStrategy == null)
{
var interfaceStrategyType = typeof(ReferenceTypeStrategy<>).MakeGenericType(TargetType, sourceType);
var interfaceStrategy = Activator.CreateInstance(interfaceStrategyType);
_NextStrategy = interfaceStrategy.GetDelegate<Func<T, T, CloneContext, T>>("Strategy");
}
return _NextStrategy(source, destination, cloneContext);
}
}
#region Build Methods
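// Builds the top-level clone strategy: returns cloneable primitives as-is, honors the clone map for reference types, clones arrays when a deep clone is requested, and otherwise delegates field copying to the dynamically emitted strategy.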
private static Func<T, T, CloneContext, T> BuildStrategy()
{
bool canShallowClone = CanShallowClone();
bool canClone = CanClone();
bool isSealedType = TargetType.IsSealed;
bool isTargetTypeArray = TargetType.IsArray;
bool isReferenceType = !TargetType.IsValueType;
bool isNullableType = TargetType.IsGenericType &&
TargetType.GetGenericTypeDefinition() == typeof (Nullable<>);
bool canReturnSourceAsClone = IsRootTypeCloneablePrimitive(TargetType) && isTargetTypeArray != true;
Func<T, T, CloneContext, T> completionStrategy = null;
Func<Array, Array, CloneContext, T> arrayCloneStrategy = null;
return (s, d, c) =>
{
if (canShallowClone != true || (c.IsShallowClone != true && canClone != true))
throw new InvalidOperationException("This type cannot be cloned.");
if (canReturnSourceAsClone)
return s;
if (s == null)
return default(T);
var actualType = s.GetType();
if (isReferenceType && c.CloneMap.ContainsKey(s))
return (T) c.CloneMap[s];
if (actualType.IsArray)
{
Array destArray = (Array) (object) d;
Array sourceArray = (Array) (object) s;
if (destArray == null || destArray.LongLength != sourceArray.LongLength)
{
d = (T) sourceArray.Clone();
destArray = (Array)(object)d;
}
else if (c.IsShallowClone && sourceArray.LongLength > 0)
Array.Copy(sourceArray, destArray, sourceArray.LongLength);
if (c.IsShallowClone || sourceArray.LongLength <= 0)
{
c.CloneMap.Add(s, d);
return d;
}
if(arrayCloneStrategy == null)
{
var arrayCloneMethod = GetMethod(typeof(Cloneable<T>),
"ArrayClone",
typeof (Array),
typeof (Array),
typeof (CloneContext));
arrayCloneMethod = arrayCloneMethod.MakeGenericMethod(actualType.GetElementType());
arrayCloneStrategy = arrayCloneMethod.ToDelegate<Func<Array, Array, CloneContext, T>>();
}
d = arrayCloneStrategy(sourceArray, destArray, c);
c.CloneMap.Add(s, d);
return d;
}
if(d == null && (isReferenceType || isNullableType))
d = (T)FormatterServices.GetSafeUninitializedObject(s.GetType());
if (completionStrategy == null)
{
if (isReferenceType && isSealedType != true)
{
var referenceTypeCloneableType = typeof(ReferenceTypeStrategy<>).MakeGenericType(TargetType, s.GetType());
var referenceTypeCloneable = Activator.CreateInstance(referenceTypeCloneableType);
completionStrategy = referenceTypeCloneable.GetDelegate<Func<T, T, CloneContext, T>>("Strategy");
}
else
completionStrategy = BuildDynamicStrategy();
}
if(isReferenceType)
c.CloneMap.Add(s, d);
return completionStrategy(s, d, c);
};
}
private static Func<T, T, CloneContext, T> BuildDynamicStrategy()
{
if (_DynamicStrategy == null)
{
string dynamicMethodName = string.Format("Cloneable<{0}>_CloneDynamicStrategy", TargetType.Name);
var dynamicStrategyMethod = new DynamicMethod(dynamicMethodName,
TargetType,
new[]
{
TargetType,
TargetType,
typeof (CloneContext)
},
TargetType,
true);
var il = dynamicStrategyMethod.GetILGenerator();
il.DeclareLocal(TargetType);
il.Emit(OpCodes.Ldarg_1);
il.Emit(OpCodes.Stloc_0);
foreach (FieldInfo field in TargetType.GetFieldsDeeply(FieldIncludeFilter))
{
if (field.IsDefined(typeof (CloneReferenceOnlyAttribute), true) ||
field.FieldType.IsDefined(typeof (CloneReferenceOnlyAttribute), true) ||
IsTypeCloneablePrimitive(field.FieldType))
{
EmitCopyField(il, field);
}
else
{
EmitCloneFieldWithShallowCheck(il, field);
}
}
il.Emit(OpCodes.Ldloc_0);
il.Emit(OpCodes.Ret);
_DynamicStrategy = dynamicStrategyMethod.ToFunc<T, T, CloneContext, T>();
}
return _DynamicStrategy;
}
#endregion
#region Emit Methods
private static void EmitCopyField(ILGenerator gen, FieldInfo field)
{
var copyFieldLabel = gen.DefineLabel();
var storeFieldLabel = gen.DefineLabel();
// handles self-referencing fields
if (field.FieldType.IsValueType != true && field.FieldType.IsAssignableFrom(TargetType))
{
// check clone map
gen.Emit(OpCodes.Ldarg_0);
gen.Emit(OpCodes.Ldfld, field);
gen.Emit(OpCodes.Brfalse_S, copyFieldLabel);
gen.Emit(OpCodes.Ldarg_2);
gen.Emit(OpCodes.Call, CloneContextGetCloneMapMethod);
gen.Emit(OpCodes.Ldarg_0);
gen.Emit(OpCodes.Ldfld, field);
gen.Emit(OpCodes.Callvirt, DictionaryContainsKeyMethod);
gen.Emit(OpCodes.Brfalse_S, copyFieldLabel);
gen.Emit(OpCodes.Ldarg_2);
gen.Emit(OpCodes.Call, CloneContextGetCloneMapMethod);
gen.Emit(OpCodes.Ldarg_0);
gen.Emit(OpCodes.Ldfld, field);
gen.Emit(OpCodes.Call, DictionaryGetItemMethod);
gen.Emit(OpCodes.Ldloc_0);
gen.Emit(OpCodes.Br_S, storeFieldLabel);
}
// copy field
gen.MarkLabel(copyFieldLabel);
if (field.DeclaringType.IsValueType)
{
gen.Emit(OpCodes.Ldloca_S, (byte)0);
gen.Emit(OpCodes.Ldarga_S, (byte)0);
}
else
{
gen.Emit(OpCodes.Ldloc_0);
gen.Emit(OpCodes.Ldarg_0);
}
gen.Emit(OpCodes.Ldfld, field);
gen.MarkLabel(storeFieldLabel);
gen.Emit(OpCodes.Stfld, field);
}
private static void EmitCloneFieldWithShallowCheck(ILGenerator gen, FieldInfo field)
{
var ifNotShallowCloneLabel = gen.DefineLabel();
var afterDestinationCopied = gen.DefineLabel();
gen.Emit(OpCodes.Ldarg_2);
gen.Emit(OpCodes.Call, CloneContextGetIsShallowCloneMethod);
gen.Emit(OpCodes.Brfalse_S, ifNotShallowCloneLabel);
EmitCopyField(gen, field);
gen.Emit(OpCodes.Br_S, afterDestinationCopied);
gen.MarkLabel(ifNotShallowCloneLabel);
// used by final store field (Stfld) instruction
if (field.DeclaringType.IsValueType)
gen.Emit(OpCodes.Ldloca_S, (byte)0);
else
gen.Emit(OpCodes.Ldloc_0);
EmitCloneField(gen, field);
gen.Emit(OpCodes.Stfld, field);
gen.MarkLabel(afterDestinationCopied);
}
private static void EmitCloneField(ILGenerator gen, FieldInfo field)
{
Type fieldClonableType = typeof(Cloneable<>).MakeGenericType(field.FieldType);
MethodInfo getStrategyMethod = GetMethod(fieldClonableType, "get_Strategy");
Type strategyMethodType = typeof(Func<,,,>).MakeGenericType(field.FieldType, field.FieldType, typeof(CloneContext), field.FieldType);
MethodInfo getFuncMethod = GetMethod(strategyMethodType, "Invoke", field.FieldType, field.FieldType, typeof(CloneContext));
gen.Emit(OpCodes.Call, getStrategyMethod);
gen.Emit(OpCodes.Ldarg_0);
gen.Emit(OpCodes.Ldfld, field);
EmitLoadDestination(gen, field);
gen.Emit(OpCodes.Ldarg_2);
gen.Emit(OpCodes.Callvirt, getFuncMethod);
}
private static void EmitLoadDestination(ILGenerator gen, FieldInfo field)
{
if(field.FieldType.IsValueType)
{
gen.Emit(OpCodes.Ldarg_1);
gen.Emit(OpCodes.Ldfld, field);
}
else
{
var ifShouldUseExistingLabel = gen.DefineLabel();
var afterLoadingDestinationLabel = gen.DefineLabel();
gen.Emit(OpCodes.Ldarg_2);
gen.Emit(OpCodes.Call, CloneContextGetShouldUseExistingObjectsMethod);
gen.Emit(OpCodes.Brtrue_S, ifShouldUseExistingLabel);
gen.Emit(OpCodes.Ldnull);
gen.Emit(OpCodes.Br_S, afterLoadingDestinationLabel);
gen.MarkLabel(ifShouldUseExistingLabel);
gen.Emit(OpCodes.Ldloc_0);
gen.Emit(OpCodes.Ldfld, field);
gen.MarkLabel(afterLoadingDestinationLabel);
}
}
#endregion
#region Helper Methods
private static bool IsNotCloneable(FieldInfo field)
{
if (field.DeclaringType != null && field.DeclaringType.FullName == "System.Runtime.Serialization.SafeSerializationManager")
return false;
return IsNotCloneable(GetRootType(field.FieldType));
}
private static bool IsNotCloneable(Type inputType)
{
if (typeof(Delegate).IsAssignableFrom(inputType))
return true;
return false;
}
private static bool IsRootTypeCloneablePrimitive(Type inputType)
{
Type rootType = GetRootType(inputType);
return IsTypeCloneablePrimitive(rootType);
}
private static bool IsTypeCloneablePrimitive(Type inputType)
{
if (Array.Exists(CloneablePrimitives, x => x == inputType))
return true;
if (inputType.IsPrimitive)
return true;
if (typeof(Delegate).IsAssignableFrom(inputType))
return true;
return false;
}
private static Type GetRootType(Type type)
{
while (type.IsArray)
type = type.GetElementType();
if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>))
return type.GetGenericArguments()[0];
return type;
}
private static MethodInfo GetMethod(Type type, string methodName, params Type[] argumentTypes)
{
const BindingFlags bindingFlags = BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic;
return type.GetMethod
(
methodName,
bindingFlags,
null,
argumentTypes,
null
);
}
#endregion
#region Can Methods
private static bool CanClone(Type type, bool isShallow, Func<FieldInfo, bool> includeFilter)
{
Type typeToCheck = GetRootType(type);
if (typeToCheck.IsInterface)
return true;
if (IsNotCloneable(typeToCheck))
return false;
if (IsRootTypeCloneablePrimitive(typeToCheck))
return true;
if (includeFilter == null)
includeFilter = FieldIncludeFilter;
Func<FieldInfo, bool> fieldFilter = f => includeFilter(f) && f.FieldType != typeToCheck;
foreach (FieldInfo field in typeToCheck.GetFieldsDeeply(fieldFilter))
{
if (IsNotCloneable(field))
return false;
Type fieldType = GetRootType(field.FieldType);
if (IsRootTypeCloneablePrimitive(fieldType))
continue;
if (fieldType.IsInterface)
continue;
if (isShallow != true)
{
Type fieldClonableType = typeof(Cloneable<>).MakeGenericType(fieldType);
MethodInfo canCloneMethod = GetMethod(fieldClonableType, "CanClone", typeof(Type), typeof(bool), typeof(Func<FieldInfo, bool>));
var canClone = canCloneMethod.ToDelegate<Func<Type, bool, Func<FieldInfo, bool>, bool>>();
if (canClone(fieldType, false, fieldFilter) != true)
return false;
}
}
return true;
}
public static bool CanClone()
{
if (_CanClone.HasValue != true)
_CanClone = CanClone(TargetType, false, null);
return _CanClone.Value;
}
public static bool CanShallowClone()
{
if (_CanShallowClone.HasValue != true)
_CanShallowClone = CanClone(TargetType, true, null);
return _CanShallowClone.Value;
}
#endregion
#region Clone Methods
public static T Clone(T source)
{
var context = new CloneContext(false, false);
return Strategy(source, default(T), context);
}
public static T ShallowClone(T source)
{
var context = new CloneContext(true, false);
return Strategy(source, default(T), context);
}
public static T CloneTo(T source, T destination)
{
Guard.NotNull(source, "source");
Guard.NotNull(destination, "destination");
if (TargetType.IsValueType != true && ReferenceEquals(source, destination))
throw new InvalidOperationException("The destination object cannot be the same as the source.");
var sourceArray = source as Array;
var destArray = destination as Array;
if(sourceArray != null && destArray != null && destArray.LongLength != sourceArray.LongLength)
throw new InvalidOperationException("The destination array must be the same size (length) as the source array.");
var context = new CloneContext(false, true);
return Strategy(source, destination, context);
}
public static T ShallowCloneTo(T source, T destination)
{
Guard.NotNull(source, "source");
Guard.NotNull(destination, "destination");
if(TargetType.IsValueType != true && ReferenceEquals(source, destination))
throw new InvalidOperationException("The destination object cannot be the same as the source.");
var sourceArray = source as Array;
var destArray = destination as Array;
if (sourceArray != null && destArray != null && destArray.LongLength != sourceArray.LongLength)
throw new InvalidOperationException("The destination array must be the same size (length) as the source array.");
var context = new CloneContext(true, true);
return Strategy(source, destination, context);
}
private static T ArrayClone<TItem>(Array sourceArray, Array destArray, CloneContext context)
{
var i = new ArrayIndex(sourceArray);
while(true)
{
TItem sourceValue = (TItem)sourceArray.GetValue(i);
TItem currentValue = context.ShouldUseExistingObjects ? (TItem)destArray.GetValue(i) : default(TItem);
TItem value = Cloneable<TItem>.Strategy(sourceValue, currentValue, context);
destArray.SetValue(value, i);
if(i.CanIncrement())
i.Increment();
else
break;
}
return (T)(object)destArray;
}
#endregion
private static Func<T, T, CloneContext, T> Strategy
{
get
{
if (_Strategy == null)
{
lock (SyncLock)
{
if (_Strategy == null)
_Strategy = BuildStrategy();
}
}
return _Strategy;
}
}
}
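// Illustrative capability-check sketch (not part of the original library): CanClone and
// CanShallowClone report whether a type can be handled without throwing. The Widget type
// is a hypothetical example; per the rules above, a delegate-typed field would make both
// checks report false.
internal static class CloneableCapabilityExample
{
private class Widget
{
public string Name;
public int Count;
}
internal static void Run()
{
bool deep = Cloneable<Widget>.CanClone();           // true: string and int fields are cloneable primitives
bool shallow = Cloneable<Widget>.CanShallowClone(); // true for the same reason
}
}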
}
| |
using ICSharpCode.SharpZipLib.Core;
using ICSharpCode.SharpZipLib.Zip.Compression;
using System;
using System.IO;
using static ICSharpCode.SharpZipLib.Zip.Compression.Deflater;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// FastZipEvents supports all events applicable to <see cref="FastZip">FastZip</see> operations.
/// </summary>
public class FastZipEvents
{
/// <summary>
/// Delegate to invoke when processing directories.
/// </summary>
public event EventHandler<DirectoryEventArgs> ProcessDirectory;
/// <summary>
/// Delegate to invoke when processing files.
/// </summary>
public ProcessFileHandler ProcessFile;
/// <summary>
/// Delegate to invoke during processing of files.
/// </summary>
public ProgressHandler Progress;
/// <summary>
/// Delegate to invoke when processing for a file has been completed.
/// </summary>
public CompletedFileHandler CompletedFile;
/// <summary>
/// Delegate to invoke when processing directory failures.
/// </summary>
public DirectoryFailureHandler DirectoryFailure;
/// <summary>
/// Delegate to invoke when processing file failures.
/// </summary>
public FileFailureHandler FileFailure;
/// <summary>
/// Raise the <see cref="DirectoryFailure">directory failure</see> event.
/// </summary>
/// <param name="directory">The directory causing the failure.</param>
/// <param name="e">The exception for this event.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnDirectoryFailure(string directory, Exception e)
{
bool result = false;
DirectoryFailureHandler handler = DirectoryFailure;
if (handler != null)
{
var args = new ScanFailureEventArgs(directory, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="FileFailure"> file failure handler delegate</see>.
/// </summary>
/// <param name="file">The file causing the failure.</param>
/// <param name="e">The exception for this failure.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnFileFailure(string file, Exception e)
{
FileFailureHandler handler = FileFailure;
bool result = (handler != null);
if (result)
{
var args = new ScanFailureEventArgs(file, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessFile">ProcessFile delegate</see>.
/// </summary>
/// <param name="file">The file being processed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnProcessFile(string file)
{
bool result = true;
ProcessFileHandler handler = ProcessFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="CompletedFile"/> delegate
/// </summary>
/// <param name="file">The file whose processing has been completed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnCompletedFile(string file)
{
bool result = true;
CompletedFileHandler handler = CompletedFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessDirectory">process directory</see> delegate.
/// </summary>
/// <param name="directory">The directory being processed.</param>
/// <param name="hasMatchingFiles">Flag indicating if the directory has matching files as determined by the current filter.</param>
/// <returns>A <see cref="bool"/> of true if the operation should continue; false otherwise.</returns>
public bool OnProcessDirectory(string directory, bool hasMatchingFiles)
{
bool result = true;
EventHandler<DirectoryEventArgs> handler = ProcessDirectory;
if (handler != null)
{
var args = new DirectoryEventArgs(directory, hasMatchingFiles);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// The minimum timespan between <see cref="Progress"/> events.
/// </summary>
/// <value>The minimum period of time between <see cref="Progress"/> events.</value>
/// <seealso cref="Progress"/>
/// <remarks>The default interval is three seconds.</remarks>
public TimeSpan ProgressInterval
{
get { return progressInterval_; }
set { progressInterval_ = value; }
}
#region Instance Fields
private TimeSpan progressInterval_ = TimeSpan.FromSeconds(3);
#endregion Instance Fields
}
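// Illustrative sketch (not part of the library): one way FastZipEvents handlers might be
// wired up. The handler bodies and the interval below are hypothetical; setting
// ContinueRunning to false stops the enclosing FastZip operation.
internal static class FastZipEventsUsageExample
{
internal static FastZipEvents CreateEvents()
{
var events = new FastZipEvents();
events.ProcessDirectory += (sender, e) => Console.WriteLine("Directory: " + e.Name);
events.ProcessFile = (sender, e) => Console.WriteLine("File: " + e.Name);
events.FileFailure = (sender, e) =>
{
Console.WriteLine("Failed: " + e.Name + " - " + e.Exception.Message);
e.ContinueRunning = false; // stop after the first failure
};
events.ProgressInterval = TimeSpan.FromSeconds(1);
return events;
}
}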
/// <summary>
/// FastZip provides facilities for creating and extracting zip files.
/// </summary>
public class FastZip
{
#region Enumerations
/// <summary>
/// Defines the desired handling when overwriting files during extraction.
/// </summary>
public enum Overwrite
{
/// <summary>
/// Prompt the user to confirm overwriting
/// </summary>
Prompt,
/// <summary>
/// Never overwrite files.
/// </summary>
Never,
/// <summary>
/// Always overwrite files.
/// </summary>
Always
}
#endregion Enumerations
#region Constructors
/// <summary>
/// Initialise a default instance of <see cref="FastZip"/>.
/// </summary>
public FastZip()
{
}
/// <summary>
/// Initialise a new instance of <see cref="FastZip"/>
/// </summary>
/// <param name="events">The <see cref="FastZipEvents">events</see> to use during operations.</param>
public FastZip(FastZipEvents events)
{
events_ = events;
}
#endregion Constructors
#region Properties
/// <summary>
/// Get/set a value indicating whether empty directories should be created.
/// </summary>
public bool CreateEmptyDirectories
{
get { return createEmptyDirectories_; }
set { createEmptyDirectories_ = value; }
}
/// <summary>
/// Get / set the password value.
/// </summary>
public string Password
{
get { return password_; }
set { password_ = value; }
}
/// <summary>
/// Get or set the <see cref="INameTransform"></see> active when creating Zip files.
/// </summary>
/// <seealso cref="EntryFactory"></seealso>
public INameTransform NameTransform
{
get { return entryFactory_.NameTransform; }
set
{
entryFactory_.NameTransform = value;
}
}
/// <summary>
/// Get or set the <see cref="IEntryFactory"></see> active when creating Zip files.
/// </summary>
public IEntryFactory EntryFactory
{
get { return entryFactory_; }
set
{
if (value == null)
{
entryFactory_ = new ZipEntryFactory();
}
else
{
entryFactory_ = value;
}
}
}
/// <summary>
/// Gets or sets the setting for <see cref="UseZip64">Zip64 handling when writing.</see>
/// </summary>
/// <remarks>
/// The default value is dynamic, which is not backwards compatible with old
/// programs and can cause problems with XP's built-in compression, which cannot
/// read Zip64 archives. However, it does avoid the situation where a large file
/// is added and cannot be completed correctly.
/// NOTE: Setting the size for entries before they are added is the best solution!
/// By default the EntryFactory used by FastZip will set the file size.
/// </remarks>
public UseZip64 UseZip64
{
get { return useZip64_; }
set { useZip64_ = value; }
}
/// <summary>
/// Get/set a value indicating whether file dates and times should
/// be restored when extracting files from an archive.
/// </summary>
/// <remarks>The default value is false.</remarks>
public bool RestoreDateTimeOnExtract
{
get
{
return restoreDateTimeOnExtract_;
}
set
{
restoreDateTimeOnExtract_ = value;
}
}
/// <summary>
/// Get/set a value indicating whether file attributes should
/// be restored during extract operations
/// </summary>
public bool RestoreAttributesOnExtract
{
get { return restoreAttributesOnExtract_; }
set { restoreAttributesOnExtract_ = value; }
}
/// <summary>
/// Get/set the Compression Level that will be used
/// when creating the zip
/// </summary>
public Deflater.CompressionLevel CompressionLevel
{
get { return compressionLevel_; }
set { compressionLevel_ = value; }
}
#endregion Properties
#region Delegates
/// <summary>
/// Delegate called when confirming overwriting of files.
/// </summary>
public delegate bool ConfirmOverwriteDelegate(string fileName);
#endregion Delegates
#region CreateZip
/// <summary>
/// Create a zip file.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory,
bool recurse, string fileFilter, string directoryFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, directoryFilter);
}
/// <summary>
/// Create a zip file/archive.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to obtain files and directories from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The file filter to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory, bool recurse, string fileFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, null);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
/// <remarks>The <paramref name="outputStream"/> is closed after creation.</remarks>
public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, string fileFilter, string directoryFilter)
{
NameTransform = new ZipNameTransform(sourceDirectory);
sourceDirectory_ = sourceDirectory;
using (outputStream_ = new ZipOutputStream(outputStream))
{
outputStream_.SetLevel((int)CompressionLevel);
if (password_ != null)
{
outputStream_.Password = password_;
}
outputStream_.UseZip64 = UseZip64;
var scanner = new FileSystemScanner(fileFilter, directoryFilter);
scanner.ProcessFile += ProcessFile;
if (this.CreateEmptyDirectories)
{
scanner.ProcessDirectory += ProcessDirectory;
}
if (events_ != null)
{
if (events_.FileFailure != null)
{
scanner.FileFailure += events_.FileFailure;
}
if (events_.DirectoryFailure != null)
{
scanner.DirectoryFailure += events_.DirectoryFailure;
}
}
scanner.Scan(sourceDirectory, recurse);
}
}
#endregion CreateZip
#region ExtractZip
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
public void ExtractZip(string zipFileName, string targetDirectory, string fileFilter)
{
ExtractZip(zipFileName, targetDirectory, Overwrite.Always, null, fileFilter, null, restoreDateTimeOnExtract_);
}
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
/// <param name="allowParentTraversal">Allow parent directory traversal in file paths (e.g. ../file)</param>
public void ExtractZip(string zipFileName, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime, bool allowParentTraversal = false)
{
Stream inputStream = File.Open(zipFileName, FileMode.Open, FileAccess.Read, FileShare.Read);
ExtractZip(inputStream, targetDirectory, overwrite, confirmDelegate, fileFilter, directoryFilter, restoreDateTime, true, allowParentTraversal);
}
/// <summary>
/// Extract the contents of a zip file held in a stream.
/// </summary>
/// <param name="inputStream">The seekable input stream containing the zip to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
/// <param name="isStreamOwner">Flag indicating whether the inputStream will be closed by this method.</param>
/// <param name="allowParentTraversal">Allow parent directory traversal in file paths (e.g. ../file)</param>
public void ExtractZip(Stream inputStream, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime,
bool isStreamOwner, bool allowParentTraversal = false)
{
if ((overwrite == Overwrite.Prompt) && (confirmDelegate == null))
{
throw new ArgumentNullException(nameof(confirmDelegate));
}
continueRunning_ = true;
overwrite_ = overwrite;
confirmDelegate_ = confirmDelegate;
extractNameTransform_ = new WindowsNameTransform(targetDirectory, allowParentTraversal);
fileFilter_ = new NameFilter(fileFilter);
directoryFilter_ = new NameFilter(directoryFilter);
restoreDateTimeOnExtract_ = restoreDateTime;
using (zipFile_ = new ZipFile(inputStream, !isStreamOwner))
{
if (password_ != null)
{
zipFile_.Password = password_;
}
System.Collections.IEnumerator enumerator = zipFile_.GetEnumerator();
while (continueRunning_ && enumerator.MoveNext())
{
var entry = (ZipEntry)enumerator.Current;
if (entry.IsFile)
{
// TODO Path.GetDirectoryName can fail here on invalid characters.
if (directoryFilter_.IsMatch(Path.GetDirectoryName(entry.Name)) && fileFilter_.IsMatch(entry.Name))
{
ExtractEntry(entry);
}
}
else if (entry.IsDirectory)
{
if (directoryFilter_.IsMatch(entry.Name) && CreateEmptyDirectories)
{
ExtractEntry(entry);
}
}
else
{
// Do nothing for volume labels etc...
}
}
}
}
#endregion ExtractZip
#region Internal Processing
private void ProcessDirectory(object sender, DirectoryEventArgs e)
{
if (!e.HasMatchingFiles && CreateEmptyDirectories)
{
if (events_ != null)
{
events_.OnProcessDirectory(e.Name, e.HasMatchingFiles);
}
if (e.ContinueRunning)
{
if (e.Name != sourceDirectory_)
{
ZipEntry entry = entryFactory_.MakeDirectoryEntry(e.Name);
outputStream_.PutNextEntry(entry);
}
}
}
}
private void ProcessFile(object sender, ScanEventArgs e)
{
if ((events_ != null) && (events_.ProcessFile != null))
{
events_.ProcessFile(sender, e);
}
if (e.ContinueRunning)
{
try
{
// The open below is equivalent to OpenRead, which guarantees that, if opened, the
// file will not be changed by subsequent openers, but it precludes opening in some cases
// where it could succeed, i.e. the open may fail because the file is already open for writing and the share mode should reflect that.
using (FileStream stream = File.Open(e.Name, FileMode.Open, FileAccess.Read, FileShare.Read))
{
ZipEntry entry = entryFactory_.MakeFileEntry(e.Name);
outputStream_.PutNextEntry(entry);
AddFileContents(e.Name, stream);
}
}
catch (Exception ex)
{
if (events_ != null)
{
continueRunning_ = events_.OnFileFailure(e.Name, ex);
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
private void AddFileContents(string name, Stream stream)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (buffer_ == null)
{
buffer_ = new byte[4096];
}
if ((events_ != null) && (events_.Progress != null))
{
StreamUtils.Copy(stream, outputStream_, buffer_,
events_.Progress, events_.ProgressInterval, this, name);
}
else
{
StreamUtils.Copy(stream, outputStream_, buffer_);
}
if (events_ != null)
{
continueRunning_ = events_.OnCompletedFile(name);
}
}
private void ExtractFileEntry(ZipEntry entry, string targetName)
{
bool proceed = true;
if (overwrite_ != Overwrite.Always)
{
if (File.Exists(targetName))
{
if ((overwrite_ == Overwrite.Prompt) && (confirmDelegate_ != null))
{
proceed = confirmDelegate_(targetName);
}
else
{
proceed = false;
}
}
}
if (proceed)
{
if (events_ != null)
{
continueRunning_ = events_.OnProcessFile(entry.Name);
}
if (continueRunning_)
{
try
{
using (FileStream outputStream = File.Create(targetName))
{
if (buffer_ == null)
{
buffer_ = new byte[4096];
}
if ((events_ != null) && (events_.Progress != null))
{
StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_,
events_.Progress, events_.ProgressInterval, this, entry.Name, entry.Size);
}
else
{
StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_);
}
if (events_ != null)
{
continueRunning_ = events_.OnCompletedFile(entry.Name);
}
}
if (restoreDateTimeOnExtract_)
{
File.SetLastWriteTime(targetName, entry.DateTime);
}
if (RestoreAttributesOnExtract && entry.IsDOSEntry && (entry.ExternalFileAttributes != -1))
{
var fileAttributes = (FileAttributes)entry.ExternalFileAttributes;
// TODO: FastZip - Setting of other file attributes on extraction is a little trickier.
fileAttributes &= (FileAttributes.Archive | FileAttributes.Normal | FileAttributes.ReadOnly | FileAttributes.Hidden);
File.SetAttributes(targetName, fileAttributes);
}
}
catch (Exception ex)
{
if (events_ != null)
{
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
}
private void ExtractEntry(ZipEntry entry)
{
bool doExtraction = entry.IsCompressionMethodSupported();
string targetName = entry.Name;
if (doExtraction)
{
if (entry.IsFile)
{
targetName = extractNameTransform_.TransformFile(targetName);
}
else if (entry.IsDirectory)
{
targetName = extractNameTransform_.TransformDirectory(targetName);
}
doExtraction = !(string.IsNullOrEmpty(targetName));
}
// TODO: Fire delegate/throw exception where the compression method is not supported, or the name is invalid?
string dirName = null;
if (doExtraction)
{
if (entry.IsDirectory)
{
dirName = targetName;
}
else
{
dirName = Path.GetDirectoryName(Path.GetFullPath(targetName));
}
}
if (doExtraction && !Directory.Exists(dirName))
{
if (!entry.IsDirectory || CreateEmptyDirectories)
{
try
{
Directory.CreateDirectory(dirName);
}
catch (Exception ex)
{
doExtraction = false;
if (events_ != null)
{
if (entry.IsDirectory)
{
continueRunning_ = events_.OnDirectoryFailure(targetName, ex);
}
else
{
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
if (doExtraction && entry.IsFile)
{
ExtractFileEntry(entry, targetName);
}
}
private static int MakeExternalAttributes(FileInfo info)
{
return (int)info.Attributes;
}
private static bool NameIsValid(string name)
{
return !string.IsNullOrEmpty(name) &&
(name.IndexOfAny(Path.GetInvalidPathChars()) < 0);
}
#endregion Internal Processing
#region Instance Fields
private bool continueRunning_;
private byte[] buffer_;
private ZipOutputStream outputStream_;
private ZipFile zipFile_;
private string sourceDirectory_;
private NameFilter fileFilter_;
private NameFilter directoryFilter_;
private Overwrite overwrite_;
private ConfirmOverwriteDelegate confirmDelegate_;
private bool restoreDateTimeOnExtract_;
private bool restoreAttributesOnExtract_;
private bool createEmptyDirectories_;
private FastZipEvents events_;
private IEntryFactory entryFactory_ = new ZipEntryFactory();
private INameTransform extractNameTransform_;
private UseZip64 useZip64_ = UseZip64.Dynamic;
private CompressionLevel compressionLevel_ = CompressionLevel.DEFAULT_COMPRESSION;
private string password_;
#endregion Instance Fields
}
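// Illustrative sketch (not part of the library): creating and extracting an archive with
// the FastZip facade. The paths and the "\.txt$" file filter below are hypothetical values.
internal static class FastZipUsageExample
{
internal static void Run()
{
var fastZip = new FastZip
{
CreateEmptyDirectories = true,
RestoreDateTimeOnExtract = true
};
// Recurse the source directory, adding only files whose names match the filter.
fastZip.CreateZip(@"c:\temp\example.zip", @"c:\temp\source", true, @"\.txt$", null);
// Extract everything again, always overwriting existing files.
fastZip.ExtractZip(@"c:\temp\example.zip", @"c:\temp\extracted",
FastZip.Overwrite.Always, null, null, null, true);
}
}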
}
| |
/*
* Swaggy Jenkins
*
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
* Generated by: https://openapi-generator.tech
*/
using System;
using System.Linq;
using System.Text;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Org.OpenAPITools.Converters;
namespace Org.OpenAPITools.Models
{
/// <summary>
///
/// </summary>
[DataContract]
public partial class PipelineBranchesitem : IEquatable<PipelineBranchesitem>
{
/// <summary>
/// Gets or Sets DisplayName
/// </summary>
[DataMember(Name="displayName", EmitDefaultValue=false)]
public string DisplayName { get; set; }
/// <summary>
/// Gets or Sets EstimatedDurationInMillis
/// </summary>
[DataMember(Name="estimatedDurationInMillis", EmitDefaultValue=false)]
public int EstimatedDurationInMillis { get; set; }
/// <summary>
/// Gets or Sets Name
/// </summary>
[DataMember(Name="name", EmitDefaultValue=false)]
public string Name { get; set; }
/// <summary>
/// Gets or Sets WeatherScore
/// </summary>
[DataMember(Name="weatherScore", EmitDefaultValue=false)]
public int WeatherScore { get; set; }
/// <summary>
/// Gets or Sets LatestRun
/// </summary>
[DataMember(Name="latestRun", EmitDefaultValue=false)]
public PipelineBranchesitemlatestRun LatestRun { get; set; }
/// <summary>
/// Gets or Sets Organization
/// </summary>
[DataMember(Name="organization", EmitDefaultValue=false)]
public string Organization { get; set; }
/// <summary>
/// Gets or Sets PullRequest
/// </summary>
[DataMember(Name="pullRequest", EmitDefaultValue=false)]
public PipelineBranchesitempullRequest PullRequest { get; set; }
/// <summary>
/// Gets or Sets TotalNumberOfPullRequests
/// </summary>
[DataMember(Name="totalNumberOfPullRequests", EmitDefaultValue=false)]
public int TotalNumberOfPullRequests { get; set; }
/// <summary>
/// Gets or Sets Class
/// </summary>
[DataMember(Name="_class", EmitDefaultValue=false)]
public string Class { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class PipelineBranchesitem {\n");
sb.Append(" DisplayName: ").Append(DisplayName).Append("\n");
sb.Append(" EstimatedDurationInMillis: ").Append(EstimatedDurationInMillis).Append("\n");
sb.Append(" Name: ").Append(Name).Append("\n");
sb.Append(" WeatherScore: ").Append(WeatherScore).Append("\n");
sb.Append(" LatestRun: ").Append(LatestRun).Append("\n");
sb.Append(" Organization: ").Append(Organization).Append("\n");
sb.Append(" PullRequest: ").Append(PullRequest).Append("\n");
sb.Append(" TotalNumberOfPullRequests: ").Append(TotalNumberOfPullRequests).Append("\n");
sb.Append(" Class: ").Append(Class).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return Newtonsoft.Json.JsonConvert.SerializeObject(this, Newtonsoft.Json.Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
if (obj is null) return false;
if (ReferenceEquals(this, obj)) return true;
return obj.GetType() == GetType() && Equals((PipelineBranchesitem)obj);
}
/// <summary>
/// Returns true if PipelineBranchesitem instances are equal
/// </summary>
/// <param name="other">Instance of PipelineBranchesitem to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(PipelineBranchesitem other)
{
if (other is null) return false;
if (ReferenceEquals(this, other)) return true;
return
(
DisplayName == other.DisplayName ||
DisplayName != null &&
DisplayName.Equals(other.DisplayName)
) &&
(
EstimatedDurationInMillis == other.EstimatedDurationInMillis ||
EstimatedDurationInMillis.Equals(other.EstimatedDurationInMillis)
) &&
(
Name == other.Name ||
Name != null &&
Name.Equals(other.Name)
) &&
(
WeatherScore == other.WeatherScore ||
WeatherScore.Equals(other.WeatherScore)
) &&
(
LatestRun == other.LatestRun ||
LatestRun != null &&
LatestRun.Equals(other.LatestRun)
) &&
(
Organization == other.Organization ||
Organization != null &&
Organization.Equals(other.Organization)
) &&
(
PullRequest == other.PullRequest ||
PullRequest != null &&
PullRequest.Equals(other.PullRequest)
) &&
(
TotalNumberOfPullRequests == other.TotalNumberOfPullRequests ||
TotalNumberOfPullRequests.Equals(other.TotalNumberOfPullRequests)
) &&
(
Class == other.Class ||
Class != null &&
Class.Equals(other.Class)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
unchecked // Overflow is fine, just wrap
{
var hashCode = 41;
// Suitable nullity checks etc, of course :)
if (DisplayName != null)
hashCode = hashCode * 59 + DisplayName.GetHashCode();
hashCode = hashCode * 59 + EstimatedDurationInMillis.GetHashCode();
if (Name != null)
hashCode = hashCode * 59 + Name.GetHashCode();
hashCode = hashCode * 59 + WeatherScore.GetHashCode();
if (LatestRun != null)
hashCode = hashCode * 59 + LatestRun.GetHashCode();
if (Organization != null)
hashCode = hashCode * 59 + Organization.GetHashCode();
if (PullRequest != null)
hashCode = hashCode * 59 + PullRequest.GetHashCode();
hashCode = hashCode * 59 + TotalNumberOfPullRequests.GetHashCode();
if (Class != null)
hashCode = hashCode * 59 + Class.GetHashCode();
return hashCode;
}
}
#region Operators
#pragma warning disable 1591
public static bool operator ==(PipelineBranchesitem left, PipelineBranchesitem right)
{
return Equals(left, right);
}
public static bool operator !=(PipelineBranchesitem left, PipelineBranchesitem right)
{
return !Equals(left, right);
}
#pragma warning restore 1591
#endregion Operators
}
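// Illustrative sketch (not part of the generated client): building the model and using
// the ToJson helper. The property values below are hypothetical.
internal static class PipelineBranchesitemUsageExample
{
internal static string BuildSampleJson()
{
var branch = new PipelineBranchesitem
{
DisplayName = "master",
Name = "master",
WeatherScore = 100,
EstimatedDurationInMillis = 1500,
Organization = "jenkins",
Class = "io.jenkins.blueocean.rest.impl.pipeline.BranchImpl"
};
return branch.ToJson(); // indented JSON via Newtonsoft.Json
}
}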
}
| |
//-----------------------------------------------------------------------
// <copyright file="SharedAccessSignatureHelper.cs" company="Microsoft">
// Copyright 2013 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
//-----------------------------------------------------------------------
namespace Microsoft.Azure.Storage.Core.Auth
{
using Microsoft.Azure.Storage;
using Microsoft.Azure.Storage.Auth;
using Microsoft.Azure.Storage.Core.Util;
using Microsoft.Azure.Storage.Shared.Protocol;
using System;
using System.Collections.Generic;
using System.Globalization;
/// <summary>
/// Contains helper methods for implementing shared access signatures.
/// </summary>
internal static class SharedAccessSignatureHelper
{
internal static UriQueryBuilder GetSignature(
SharedAccessAccountPolicy policy,
string signature,
string accountKeyName,
string sasVersion)
{
CommonUtility.AssertNotNull("signature", signature);
CommonUtility.AssertNotNull("policy", policy);
UriQueryBuilder builder = new UriQueryBuilder();
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedVersion, sasVersion);
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedKey, accountKeyName);
AddEscapedIfNotNull(builder, Constants.QueryConstants.Signature, signature);
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedProtocols, policy.Protocols == null ? null : GetProtocolString(policy.Protocols.Value));
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedIP, policy.IPAddressOrRange == null ? null : policy.IPAddressOrRange.ToString());
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedStart, GetDateTimeOrNull(policy.SharedAccessStartTime));
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedExpiry, GetDateTimeOrNull(policy.SharedAccessExpiryTime));
string resourceTypes = SharedAccessAccountPolicy.ResourceTypesToString(policy.ResourceTypes);
if (!string.IsNullOrEmpty(resourceTypes))
{
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedResourceTypes, resourceTypes);
}
string services = SharedAccessAccountPolicy.ServicesToString(policy.Services);
if (!string.IsNullOrEmpty(services))
{
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedServices, services);
}
string permissions = SharedAccessAccountPolicy.PermissionsToString(policy.Permissions);
if (!string.IsNullOrEmpty(permissions))
{
AddEscapedIfNotNull(builder, Constants.QueryConstants.SignedPermissions, permissions);
}
return builder;
}
/// <summary>
/// Converts the specified value to either a string representation or <see cref="String.Empty"/>.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A string representing the specified value.</returns>
internal static string GetDateTimeOrEmpty(DateTimeOffset? value)
{
string result = GetDateTimeOrNull(value) ?? string.Empty;
return result;
}
/// <summary>
/// Converts the specified value to either a string representation or <c>null</c>.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A string representing the specified value.</returns>
internal static string GetDateTimeOrNull(DateTimeOffset? value)
{
string result = value != null ? value.Value.UtcDateTime.ToString("yyyy-MM-ddTHH:mm:ssZ", CultureInfo.InvariantCulture) : null;
return result;
}
/// <summary>
/// Converts the specified value to either a string representation or <c>null</c>.
/// </summary>
/// <param name="protocols">The protocols to convert</param>
/// <returns>A string representing the specified value.</returns>
internal static string GetProtocolString(SharedAccessProtocol? protocols)
{
if (!protocols.HasValue)
{
return null;
}
if ((protocols.Value != SharedAccessProtocol.HttpsOnly) && (protocols.Value != SharedAccessProtocol.HttpsOrHttp))
{
throw new ArgumentException(String.Format(CultureInfo.InvariantCulture, SR.InvalidProtocolsInSAS, protocols.Value));
}
return protocols.Value == SharedAccessProtocol.HttpsOnly ? "https" : "https,http";
}
/// <summary>
/// Escapes and adds the specified name/value pair to the query builder if it is not null.
/// </summary>
/// <param name="builder">The builder to add the value to.</param>
/// <param name="name">The name of the pair.</param>
/// <param name="value">The value to be escaped.</param>
internal static void AddEscapedIfNotNull(UriQueryBuilder builder, string name, string value)
{
if (value != null)
{
builder.Add(name, value);
}
}
/// <summary>
/// Parses the query.
/// </summary>
/// <param name="queryParameters">The query parameters.</param>
internal static StorageCredentials ParseQuery(IDictionary<string, string> queryParameters)
{
bool sasParameterFound = false;
List<string> removeList = new List<string>();
foreach (KeyValuePair<string, string> parameter in queryParameters)
{
switch (parameter.Key.ToLower())
{
case Constants.QueryConstants.Signature:
sasParameterFound = true;
break;
case Constants.QueryConstants.ResourceType:
case Constants.QueryConstants.Component:
case Constants.QueryConstants.Snapshot:
case Constants.QueryConstants.ApiVersion:
case Constants.QueryConstants.ShareSnapshot:
removeList.Add(parameter.Key);
break;
default:
break;
}
}
foreach (string removeParam in removeList)
{
queryParameters.Remove(removeParam);
}
if (sasParameterFound)
{
UriQueryBuilder builder = new UriQueryBuilder();
foreach (KeyValuePair<string, string> parameter in queryParameters)
{
AddEscapedIfNotNull(builder, parameter.Key.ToLower(), parameter.Value);
}
return new StorageCredentials(builder.ToString());
}
return null;
}
internal static string GetHash(
SharedAccessAccountPolicy policy,
string accountName,
string sasVersion,
byte[] keyValue)
{
string stringToSign = string.Format(
CultureInfo.InvariantCulture,
"{0}\n{1}\n{2}\n{3}\n{4}\n{5}\n{6}\n{7}\n{8}\n{9}",
accountName,
SharedAccessAccountPolicy.PermissionsToString(policy.Permissions),
SharedAccessAccountPolicy.ServicesToString(policy.Services),
SharedAccessAccountPolicy.ResourceTypesToString(policy.ResourceTypes),
GetDateTimeOrEmpty(policy.SharedAccessStartTime),
GetDateTimeOrEmpty(policy.SharedAccessExpiryTime),
policy.IPAddressOrRange == null ? string.Empty : policy.IPAddressOrRange.ToString(),
GetProtocolString(policy.Protocols),
sasVersion,
string.Empty);
Logger.LogVerbose(null /* operationContext */, SR.TraceStringToSign, stringToSign);
return CryptoUtility.ComputeHmac256(keyValue, stringToSign);
}
}
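// Illustrative sketch (not part of the SDK): how the helper formats individual SAS values.
// The date and protocol values below are hypothetical.
internal static class SharedAccessSignatureHelperUsageExample
{
internal static void Run()
{
// Signed start/expiry times use ISO-8601 UTC formatting.
string expiry = SharedAccessSignatureHelper.GetDateTimeOrNull(
new DateTimeOffset(2023, 1, 1, 0, 0, 0, TimeSpan.Zero)); // "2023-01-01T00:00:00Z"
// The signed protocols field is either "https" or "https,http".
string protocols = SharedAccessSignatureHelper.GetProtocolString(SharedAccessProtocol.HttpsOnly); // "https"
}
}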
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gcav = Google.Cloud.AssuredWorkloads.V1Beta1;
using sys = System;
namespace Google.Cloud.AssuredWorkloads.V1Beta1
{
/// <summary>Resource name for the <c>Workload</c> resource.</summary>
public sealed partial class WorkloadName : gax::IResourceName, sys::IEquatable<WorkloadName>
{
/// <summary>The possible contents of <see cref="WorkloadName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>
/// .
/// </summary>
OrganizationLocationWorkload = 1,
}
private static gax::PathTemplate s_organizationLocationWorkload = new gax::PathTemplate("organizations/{organization}/locations/{location}/workloads/{workload}");
/// <summary>Creates a <see cref="WorkloadName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="WorkloadName"/> containing the provided <paramref name="unparsedResourceName"/>
/// .
/// </returns>
public static WorkloadName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new WorkloadName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="WorkloadName"/> with the pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="workloadId">The <c>Workload</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="WorkloadName"/> constructed from the provided ids.</returns>
public static WorkloadName FromOrganizationLocationWorkload(string organizationId, string locationId, string workloadId) =>
new WorkloadName(ResourceNameType.OrganizationLocationWorkload, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), workloadId: gax::GaxPreconditions.CheckNotNullOrEmpty(workloadId, nameof(workloadId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="WorkloadName"/> with pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="workloadId">The <c>Workload</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="WorkloadName"/> with pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>.
/// </returns>
public static string Format(string organizationId, string locationId, string workloadId) =>
FormatOrganizationLocationWorkload(organizationId, locationId, workloadId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="WorkloadName"/> with pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="workloadId">The <c>Workload</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="WorkloadName"/> with pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>.
/// </returns>
public static string FormatOrganizationLocationWorkload(string organizationId, string locationId, string workloadId) =>
s_organizationLocationWorkload.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(workloadId, nameof(workloadId)));
/// <summary>Parses the given resource name string into a new <see cref="WorkloadName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>organizations/{organization}/locations/{location}/workloads/{workload}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="workloadName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="WorkloadName"/> if successful.</returns>
public static WorkloadName Parse(string workloadName) => Parse(workloadName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="WorkloadName"/> instance; optionally allowing an
/// unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>organizations/{organization}/locations/{location}/workloads/{workload}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="workloadName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="WorkloadName"/> if successful.</returns>
public static WorkloadName Parse(string workloadName, bool allowUnparsed) =>
TryParse(workloadName, allowUnparsed, out WorkloadName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="WorkloadName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>organizations/{organization}/locations/{location}/workloads/{workload}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="workloadName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="WorkloadName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string workloadName, out WorkloadName result) => TryParse(workloadName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="WorkloadName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>organizations/{organization}/locations/{location}/workloads/{workload}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="workloadName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="WorkloadName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string workloadName, bool allowUnparsed, out WorkloadName result)
{
gax::GaxPreconditions.CheckNotNull(workloadName, nameof(workloadName));
gax::TemplatedResourceName resourceName;
if (s_organizationLocationWorkload.TryParseName(workloadName, out resourceName))
{
result = FromOrganizationLocationWorkload(resourceName[0], resourceName[1], resourceName[2]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(workloadName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private WorkloadName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string locationId = null, string organizationId = null, string workloadId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
LocationId = locationId;
OrganizationId = organizationId;
WorkloadId = workloadId;
}
/// <summary>
/// Constructs a new instance of a <see cref="WorkloadName"/> class from the component parts of pattern
/// <c>organizations/{organization}/locations/{location}/workloads/{workload}</c>
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="workloadId">The <c>Workload</c> ID. Must not be <c>null</c> or empty.</param>
public WorkloadName(string organizationId, string locationId, string workloadId) : this(ResourceNameType.OrganizationLocationWorkload, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), workloadId: gax::GaxPreconditions.CheckNotNullOrEmpty(workloadId, nameof(workloadId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string LocationId { get; }
/// <summary>
/// The <c>Organization</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource
/// name.
/// </summary>
public string OrganizationId { get; }
/// <summary>
/// The <c>Workload</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string WorkloadId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.OrganizationLocationWorkload: return s_organizationLocationWorkload.Expand(OrganizationId, LocationId, WorkloadId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as WorkloadName);
/// <inheritdoc/>
public bool Equals(WorkloadName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(WorkloadName a, WorkloadName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(WorkloadName a, WorkloadName b) => !(a == b);
}
/// <summary>Resource name for the <c>Location</c> resource.</summary>
public sealed partial class LocationName : gax::IResourceName, sys::IEquatable<LocationName>
{
/// <summary>The possible contents of <see cref="LocationName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>organizations/{organization}/locations/{location}</c>.
/// </summary>
OrganizationLocation = 1,
}
private static gax::PathTemplate s_organizationLocation = new gax::PathTemplate("organizations/{organization}/locations/{location}");
/// <summary>Creates a <see cref="LocationName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="LocationName"/> containing the provided
        /// <paramref name="unparsedResourceName"/>.
        /// </returns>
public static LocationName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new LocationName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
        /// <summary>
        /// Creates a <see cref="LocationName"/> with the pattern
        /// <c>organizations/{organization}/locations/{location}</c>.
        /// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="LocationName"/> constructed from the provided ids.</returns>
public static LocationName FromOrganizationLocation(string organizationId, string locationId) =>
new LocationName(ResourceNameType.OrganizationLocation, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="LocationName"/> with pattern
/// <c>organizations/{organization}/locations/{location}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="LocationName"/> with pattern
/// <c>organizations/{organization}/locations/{location}</c>.
/// </returns>
public static string Format(string organizationId, string locationId) =>
FormatOrganizationLocation(organizationId, locationId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="LocationName"/> with pattern
/// <c>organizations/{organization}/locations/{location}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="LocationName"/> with pattern
/// <c>organizations/{organization}/locations/{location}</c>.
/// </returns>
public static string FormatOrganizationLocation(string organizationId, string locationId) =>
s_organizationLocation.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)));
/// <summary>Parses the given resource name string into a new <see cref="LocationName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/locations/{location}</c></description></item>
/// </list>
/// </remarks>
/// <param name="locationName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="LocationName"/> if successful.</returns>
public static LocationName Parse(string locationName) => Parse(locationName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="LocationName"/> instance; optionally allowing an
/// unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/locations/{location}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="locationName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="LocationName"/> if successful.</returns>
public static LocationName Parse(string locationName, bool allowUnparsed) =>
TryParse(locationName, allowUnparsed, out LocationName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="LocationName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/locations/{location}</c></description></item>
/// </list>
/// </remarks>
/// <param name="locationName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="LocationName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string locationName, out LocationName result) => TryParse(locationName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="LocationName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/locations/{location}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="locationName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="LocationName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string locationName, bool allowUnparsed, out LocationName result)
{
gax::GaxPreconditions.CheckNotNull(locationName, nameof(locationName));
gax::TemplatedResourceName resourceName;
if (s_organizationLocation.TryParseName(locationName, out resourceName))
{
result = FromOrganizationLocation(resourceName[0], resourceName[1]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(locationName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private LocationName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string locationId = null, string organizationId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
LocationId = locationId;
OrganizationId = organizationId;
}
/// <summary>
        /// Constructs a new instance of the <see cref="LocationName"/> class from the component parts of the pattern
        /// <c>organizations/{organization}/locations/{location}</c>.
        /// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
public LocationName(string organizationId, string locationId) : this(ResourceNameType.OrganizationLocation, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string LocationId { get; }
/// <summary>
/// The <c>Organization</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource
/// name.
/// </summary>
public string OrganizationId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.OrganizationLocation: return s_organizationLocation.Expand(OrganizationId, LocationId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as LocationName);
/// <inheritdoc/>
public bool Equals(LocationName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(LocationName a, LocationName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(LocationName a, LocationName b) => !(a == b);
}
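    /// <summary>
    /// Illustrative usage sketch (editor-added, not part of the generated code; the class and method
    /// names below are hypothetical). It shows how the <see cref="WorkloadName"/> and
    /// <see cref="LocationName"/> members defined above are typically combined to format, parse, and
    /// round-trip resource name strings.
    /// </summary>
    internal static class ResourceNameUsageExample
    {
        internal static void Demonstrate()
        {
            // Build resource names from their component IDs.
            LocationName parent = LocationName.FromOrganizationLocation("my-org", "us-central1");
            WorkloadName workload = new WorkloadName("my-org", "us-central1", "my-workload");
            // parent.ToString() == "organizations/my-org/locations/us-central1"
            // workload.ToString() == "organizations/my-org/locations/us-central1/workloads/my-workload"

            // Round-trip the string form; Parse throws on an unknown pattern, while TryParse
            // reports failure via its return value.
            LocationName parsedParent = LocationName.Parse(parent.ToString());
            if (WorkloadName.TryParse(workload.ToString(), allowUnparsed: false, out WorkloadName parsedWorkload))
            {
                // parsedWorkload.WorkloadId == "my-workload" and parsedWorkload.IsKnownPattern is true.
            }
        }
    }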
public partial class CreateWorkloadRequest
{
/// <summary>
/// <see cref="LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteWorkloadRequest
{
/// <summary>
/// <see cref="gcav::WorkloadName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::WorkloadName WorkloadName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::WorkloadName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class GetWorkloadRequest
{
/// <summary>
/// <see cref="gcav::WorkloadName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::WorkloadName WorkloadName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::WorkloadName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListWorkloadsRequest
{
/// <summary>
/// <see cref="LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class Workload
{
/// <summary>
/// <see cref="gcav::WorkloadName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::WorkloadName WorkloadName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::WorkloadName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
}
| |
using Amazon.DynamoDBv2;
using Amazon.DynamoDBv2.Model;
using Amazon.Runtime;
using Microsoft.Extensions.Logging;
using Orleans;
using Orleans.Runtime;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
#if CLUSTERING_DYNAMODB
namespace Orleans.Clustering.DynamoDB
#elif PERSISTENCE_DYNAMODB
namespace Orleans.Persistence.DynamoDB
#elif REMINDERS_DYNAMODB
namespace Orleans.Reminders.DynamoDB
#elif AWSUTILS_TESTS
namespace Orleans.AWSUtils.Tests
#elif TRANSACTIONS_DYNAMODB
namespace Orleans.Transactions.DynamoDB
#else
// No default namespace intentionally to cause compile errors if something is not defined
#endif
{
/// <summary>
/// Wrapper around AWS DynamoDB SDK.
/// </summary>
internal class DynamoDBStorage
{
private string accessKey;
        /// <summary>Secret key for this DynamoDB table</summary>
protected string secretKey;
private string service;
public const int DefaultReadCapacityUnits = 10;
public const int DefaultWriteCapacityUnits = 5;
private int readCapacityUnits = DefaultReadCapacityUnits;
private int writeCapacityUnits = DefaultWriteCapacityUnits;
private readonly bool useProvisionedThroughput;
private AmazonDynamoDBClient ddbClient;
private ILogger Logger;
/// <summary>
/// Create a DynamoDBStorage instance
/// </summary>
/// <param name="logger"></param>
/// <param name="accessKey"></param>
/// <param name="secretKey"></param>
/// <param name="service"></param>
/// <param name="readCapacityUnits"></param>
/// <param name="writeCapacityUnits"></param>
/// <param name="useProvisionedThroughput"></param>
public DynamoDBStorage(
ILogger logger,
string service,
string accessKey = "",
string secretKey = "",
int readCapacityUnits = DefaultReadCapacityUnits,
int writeCapacityUnits = DefaultWriteCapacityUnits,
bool useProvisionedThroughput = true)
{
if (service == null) throw new ArgumentNullException(nameof(service));
this.accessKey = accessKey;
this.secretKey = secretKey;
this.service = service;
this.readCapacityUnits = readCapacityUnits;
this.writeCapacityUnits = writeCapacityUnits;
this.useProvisionedThroughput = useProvisionedThroughput;
Logger = logger;
CreateClient();
}
/// <summary>
/// Create a DynamoDB table if it doesn't exist
/// </summary>
/// <param name="tableName">The name of the table</param>
/// <param name="keys">The keys definitions</param>
/// <param name="attributes">The attributes used on the key definition</param>
/// <param name="secondaryIndexes">(optional) The secondary index definitions</param>
/// <param name="ttlAttributeName">(optional) The name of the item attribute that indicates the item TTL (if null, ttl won't be enabled)</param>
        /// <returns>A task representing the asynchronous table initialization.</returns>
public async Task InitializeTable(string tableName, List<KeySchemaElement> keys, List<AttributeDefinition> attributes, List<GlobalSecondaryIndex> secondaryIndexes = null, string ttlAttributeName = null)
{
try
{
if (await GetTableDescription(tableName) == null)
await CreateTable(tableName, keys, attributes, secondaryIndexes, ttlAttributeName);
}
catch (Exception exc)
{
Logger.Error(ErrorCode.StorageProviderBase, $"Could not initialize connection to storage table {tableName}", exc);
throw;
}
}
private void CreateClient()
{
if (this.service.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
this.service.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
{
// Local DynamoDB instance (for testing)
var credentials = new BasicAWSCredentials("dummy", "dummyKey");
this.ddbClient = new AmazonDynamoDBClient(credentials, new AmazonDynamoDBConfig { ServiceURL = this.service });
}
else if (!string.IsNullOrEmpty(this.accessKey) && !string.IsNullOrEmpty(this.secretKey))
{
// AWS DynamoDB instance (auth via explicit credentials)
var credentials = new BasicAWSCredentials(this.accessKey, this.secretKey);
this.ddbClient = new AmazonDynamoDBClient(credentials, new AmazonDynamoDBConfig {RegionEndpoint = AWSUtils.GetRegionEndpoint(this.service)});
}
else
{
// AWS DynamoDB instance (implicit auth - EC2 IAM Roles etc)
this.ddbClient = new AmazonDynamoDBClient(new AmazonDynamoDBConfig {RegionEndpoint = AWSUtils.GetRegionEndpoint(this.service)});
}
}
private async Task<TableDescription> GetTableDescription(string tableName)
{
try
{
var description = await ddbClient.DescribeTableAsync(tableName);
if (description.Table != null)
return description.Table;
}
catch (ResourceNotFoundException)
{
return null;
}
return null;
}
private async Task CreateTable(string tableName, List<KeySchemaElement> keys, List<AttributeDefinition> attributes, List<GlobalSecondaryIndex> secondaryIndexes = null, string ttlAttributeName = null)
{
var request = new CreateTableRequest
{
TableName = tableName,
AttributeDefinitions = attributes,
KeySchema = keys,
BillingMode = this.useProvisionedThroughput ? BillingMode.PROVISIONED : BillingMode.PAY_PER_REQUEST,
ProvisionedThroughput = this.useProvisionedThroughput ? new ProvisionedThroughput
{
ReadCapacityUnits = readCapacityUnits,
WriteCapacityUnits = writeCapacityUnits
} : null
};
if (secondaryIndexes != null && secondaryIndexes.Count > 0)
{
if (this.useProvisionedThroughput)
{
var indexThroughput = new ProvisionedThroughput {ReadCapacityUnits = readCapacityUnits, WriteCapacityUnits = writeCapacityUnits};
secondaryIndexes.ForEach(i =>
{
i.ProvisionedThroughput = indexThroughput;
});
}
request.GlobalSecondaryIndexes = secondaryIndexes;
}
try
{
var response = await ddbClient.CreateTableAsync(request);
TableDescription description = null;
do
{
description = await GetTableDescription(tableName);
await Task.Delay(2000);
} while (description.TableStatus == TableStatus.CREATING);
if (!string.IsNullOrEmpty(ttlAttributeName))
{
await ddbClient.UpdateTimeToLiveAsync(new UpdateTimeToLiveRequest
{
TableName = tableName,
TimeToLiveSpecification = new TimeToLiveSpecification { AttributeName = ttlAttributeName, Enabled = true }
});
}
if (description.TableStatus != TableStatus.ACTIVE)
throw new InvalidOperationException($"Failure creating table {tableName}");
}
catch (Exception exc)
{
Logger.Error(ErrorCode.StorageProviderBase, $"Could not create table {tableName}", exc);
throw;
}
}
/// <summary>
/// Delete a table from DynamoDB
/// </summary>
/// <param name="tableName">The name of the table to delete</param>
        /// <returns>A task representing the asynchronous delete operation.</returns>
        public Task DeleteTableAsync(string tableName)
{
try
{
return ddbClient.DeleteTableAsync(new DeleteTableRequest { TableName = tableName });
}
catch (Exception exc)
{
Logger.Error(ErrorCode.StorageProviderBase, $"Could not delete table {tableName}", exc);
throw;
}
}
/// <summary>
/// Create or Replace an entry in a DynamoDB Table
/// </summary>
/// <param name="tableName">The name of the table to put an entry</param>
/// <param name="fields">The fields/attributes to add or replace in the table</param>
/// <param name="conditionExpression">Optional conditional expression</param>
/// <param name="conditionValues">Optional field/attribute values used in the conditional expression</param>
        /// <returns>A task representing the asynchronous put operation.</returns>
public Task PutEntryAsync(string tableName, Dictionary<string, AttributeValue> fields, string conditionExpression = "", Dictionary<string, AttributeValue> conditionValues = null)
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Creating {0} table entry: {1}", tableName, Utils.DictionaryToString(fields));
try
{
var request = new PutItemRequest(tableName, fields, ReturnValue.NONE);
if (!string.IsNullOrWhiteSpace(conditionExpression))
request.ConditionExpression = conditionExpression;
if (conditionValues != null && conditionValues.Keys.Count > 0)
request.ExpressionAttributeValues = conditionValues;
return ddbClient.PutItemAsync(request);
}
catch (Exception exc)
{
Logger.Error(ErrorCode.StorageProviderBase, $"Unable to create item to table '{tableName}'", exc);
throw;
}
}
/// <summary>
/// Create or update an entry in a DynamoDB Table
/// </summary>
/// <param name="tableName">The name of the table to upsert an entry</param>
/// <param name="keys">The table entry keys for the entry</param>
/// <param name="fields">The fields/attributes to add or updated in the table</param>
/// <param name="conditionExpression">Optional conditional expression</param>
/// <param name="conditionValues">Optional field/attribute values used in the conditional expression</param>
/// <param name="extraExpression">Additional expression that will be added in the end of the upsert expression</param>
/// <param name="extraExpressionValues">Additional field/attribute that will be used in the extraExpression</param>
/// <remarks>The fields dictionary item values will be updated with the values returned from DynamoDB</remarks>
        /// <returns>A task representing the asynchronous upsert operation.</returns>
public async Task UpsertEntryAsync(string tableName, Dictionary<string, AttributeValue> keys, Dictionary<string, AttributeValue> fields,
string conditionExpression = "", Dictionary<string, AttributeValue> conditionValues = null, string extraExpression = "",
Dictionary<string, AttributeValue> extraExpressionValues = null)
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Upserting entry {0} with key(s) {1} into table {2}", Utils.DictionaryToString(fields), Utils.DictionaryToString(keys), tableName);
try
{
var request = new UpdateItemRequest
{
TableName = tableName,
Key = keys,
ReturnValues = ReturnValue.UPDATED_NEW
};
(request.UpdateExpression, request.ExpressionAttributeValues) = ConvertUpdate(fields, conditionValues,
extraExpression, extraExpressionValues);
if (!string.IsNullOrWhiteSpace(conditionExpression))
request.ConditionExpression = conditionExpression;
var result = await ddbClient.UpdateItemAsync(request);
foreach (var key in result.Attributes.Keys)
{
if (fields.ContainsKey(key))
{
fields[key] = result.Attributes[key];
}
else
{
fields.Add(key, result.Attributes[key]);
}
}
}
catch (Exception exc)
{
Logger.Warn(ErrorCode.StorageProviderBase,
$"Intermediate error upserting to the table {tableName}", exc);
throw;
}
}
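        // Example (illustrative; the attribute names are hypothetical): passing
        // conditionExpression = "ETag = :currentEtag" with conditionValues = { ":currentEtag": ... } turns
        // the upsert into an optimistic-concurrency write; DynamoDB rejects it with a
        // ConditionalCheckFailedException when the stored ETag differs.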
public (string updateExpression, Dictionary<string, AttributeValue> expressionAttributeValues)
ConvertUpdate(Dictionary<string, AttributeValue> fields,
Dictionary<string, AttributeValue> conditionValues = null,
string extraExpression = "", Dictionary<string, AttributeValue> extraExpressionValues = null)
{
var expressionAttributeValues = new Dictionary<string, AttributeValue>();
var updateExpression = new StringBuilder();
foreach (var field in fields.Keys)
{
var valueKey = ":" + field;
expressionAttributeValues.Add(valueKey, fields[field]);
updateExpression.Append($" {field} = {valueKey},");
}
updateExpression.Insert(0, "SET");
if (string.IsNullOrWhiteSpace(extraExpression))
{
updateExpression.Remove(updateExpression.Length - 1, 1);
}
else
{
updateExpression.Append($" {extraExpression}");
if (extraExpressionValues != null && extraExpressionValues.Count > 0)
{
foreach (var key in extraExpressionValues.Keys)
{
expressionAttributeValues.Add(key, extraExpressionValues[key]);
}
}
}
if (conditionValues != null && conditionValues.Keys.Count > 0)
{
foreach (var item in conditionValues)
{
expressionAttributeValues.Add(item.Key, item.Value);
}
}
return (updateExpression.ToString(), expressionAttributeValues);
}
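        // Example (derived from the logic above): for fields { "Name": v1, "Age": v2 } with no extra
        // expression or condition values, ConvertUpdate returns the update expression
        // "SET Name = :Name, Age = :Age" together with the attribute values { ":Name": v1, ":Age": v2 }.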
/// <summary>
/// Delete an entry from a DynamoDB table
/// </summary>
/// <param name="tableName">The name of the table to delete an entry</param>
/// <param name="keys">The table entry keys for the entry to be deleted</param>
/// <param name="conditionExpression">Optional conditional expression</param>
/// <param name="conditionValues">Optional field/attribute values used in the conditional expression</param>
        /// <returns>A task representing the asynchronous delete operation.</returns>
public Task DeleteEntryAsync(string tableName, Dictionary<string, AttributeValue> keys, string conditionExpression = "", Dictionary<string, AttributeValue> conditionValues = null)
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Deleting table {0} entry with key(s) {1}", tableName, Utils.DictionaryToString(keys));
try
{
var request = new DeleteItemRequest
{
TableName = tableName,
Key = keys
};
if (!string.IsNullOrWhiteSpace(conditionExpression))
request.ConditionExpression = conditionExpression;
if (conditionValues != null && conditionValues.Keys.Count > 0)
request.ExpressionAttributeValues = conditionValues;
return ddbClient.DeleteItemAsync(request);
}
catch (Exception exc)
{
Logger.Warn(ErrorCode.StorageProviderBase,
$"Intermediate error deleting entry from the table {tableName}.", exc);
throw;
}
}
/// <summary>
/// Delete multiple entries from a DynamoDB table (Batch delete)
/// </summary>
/// <param name="tableName">The name of the table to delete entries</param>
/// <param name="toDelete">List of key values for each entry that must be deleted in the batch</param>
        /// <returns>A task representing the asynchronous batch delete operation.</returns>
public Task DeleteEntriesAsync(string tableName, IReadOnlyCollection<Dictionary<string, AttributeValue>> toDelete)
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Deleting {0} table entries", tableName);
            if (toDelete == null) throw new ArgumentNullException(nameof(toDelete));
if (toDelete.Count == 0)
return Task.CompletedTask;
try
{
var request = new BatchWriteItemRequest();
request.RequestItems = new Dictionary<string, List<WriteRequest>>();
var batch = new List<WriteRequest>();
foreach (var keys in toDelete)
{
var writeRequest = new WriteRequest();
writeRequest.DeleteRequest = new DeleteRequest();
writeRequest.DeleteRequest.Key = keys;
batch.Add(writeRequest);
}
request.RequestItems.Add(tableName, batch);
return ddbClient.BatchWriteItemAsync(request);
}
catch (Exception exc)
{
Logger.Warn(ErrorCode.StorageProviderBase,
$"Intermediate error deleting entries from the table {tableName}.", exc);
throw;
}
}
/// <summary>
/// Read an entry from a DynamoDB table
/// </summary>
/// <typeparam name="TResult">The result type</typeparam>
/// <param name="tableName">The name of the table to search for the entry</param>
/// <param name="keys">The table entry keys to search for</param>
/// <param name="resolver">Function that will be called to translate the returned fields into a concrete type. This Function is only called if the result is != null</param>
/// <returns>The object translated by the resolver function</returns>
public async Task<TResult> ReadSingleEntryAsync<TResult>(string tableName, Dictionary<string, AttributeValue> keys, Func<Dictionary<string, AttributeValue>, TResult> resolver) where TResult : class
{
try
{
var request = new GetItemRequest
{
TableName = tableName,
Key = keys,
ConsistentRead = true
};
var response = await ddbClient.GetItemAsync(request);
if (response.IsItemSet)
{
return resolver(response.Item);
}
else
{
return null;
}
}
catch (Exception)
{
if (Logger.IsEnabled(LogLevel.Debug)) Logger.Debug("Unable to find table entry for Keys = {0}", Utils.DictionaryToString(keys));
throw;
}
}
/// <summary>
/// Query for multiple entries in a DynamoDB table by filtering its keys
/// </summary>
/// <typeparam name="TResult">The result type</typeparam>
/// <param name="tableName">The name of the table to search for the entries</param>
/// <param name="keys">The table entry keys to search for</param>
/// <param name="keyConditionExpression">the expression that will filter the keys</param>
/// <param name="resolver">Function that will be called to translate the returned fields into a concrete type. This Function is only called if the result is != null and will be called for each entry that match the query and added to the results list</param>
/// <param name="indexName">In case a secondary index is used in the keyConditionExpression</param>
/// <param name="scanIndexForward">In case an index is used, show if the seek order is ascending (true) or descending (false)</param>
/// <param name="lastEvaluatedKey">The primary key of the first item that this operation will evaluate. Use the value that was returned for LastEvaluatedKey in the previous operation</param>
/// <returns>The collection containing a list of objects translated by the resolver function and the LastEvaluatedKey for paged results</returns>
public async Task<(List<TResult> results, Dictionary<string, AttributeValue> lastEvaluatedKey)> QueryAsync<TResult>(string tableName, Dictionary<string, AttributeValue> keys, string keyConditionExpression, Func<Dictionary<string, AttributeValue>, TResult> resolver, string indexName = "", bool scanIndexForward = true, Dictionary<string, AttributeValue> lastEvaluatedKey = null) where TResult : class
{
try
{
var request = new QueryRequest
{
TableName = tableName,
ExpressionAttributeValues = keys,
ConsistentRead = true,
KeyConditionExpression = keyConditionExpression,
Select = Select.ALL_ATTRIBUTES,
ExclusiveStartKey = lastEvaluatedKey
};
if (!string.IsNullOrWhiteSpace(indexName))
{
request.ScanIndexForward = scanIndexForward;
request.IndexName = indexName;
}
var response = await ddbClient.QueryAsync(request);
var resultList = new List<TResult>();
foreach (var item in response.Items)
{
resultList.Add(resolver(item));
}
return (resultList, response.LastEvaluatedKey);
}
catch (Exception)
{
if (Logger.IsEnabled(LogLevel.Debug)) Logger.Debug("Unable to find table entry for Keys = {0}", Utils.DictionaryToString(keys));
throw;
}
}
/// <summary>
/// Query for multiple entries in a DynamoDB table by filtering its keys
/// </summary>
/// <typeparam name="TResult">The result type</typeparam>
/// <param name="tableName">The name of the table to search for the entries</param>
/// <param name="keys">The table entry keys to search for</param>
/// <param name="keyConditionExpression">the expression that will filter the keys</param>
/// <param name="resolver">Function that will be called to translate the returned fields into a concrete type. This Function is only called if the result is != null and will be called for each entry that match the query and added to the results list</param>
/// <param name="indexName">In case a secondary index is used in the keyConditionExpression</param>
/// <param name="scanIndexForward">In case an index is used, show if the seek order is ascending (true) or descending (false)</param>
/// <returns>The collection containing a list of objects translated by the resolver function</returns>
public async Task<List<TResult>> QueryAllAsync<TResult>(string tableName, Dictionary<string, AttributeValue> keys,
string keyConditionExpression, Func<Dictionary<string, AttributeValue>, TResult> resolver,
string indexName = "", bool scanIndexForward = true) where TResult : class
{
List<TResult> resultList = null;
Dictionary<string, AttributeValue> lastEvaluatedKey = null;
do
{
List<TResult> results;
(results, lastEvaluatedKey) = await QueryAsync(tableName, keys, keyConditionExpression, resolver,
indexName, scanIndexForward, lastEvaluatedKey);
if (resultList == null)
{
resultList = results;
}
else
{
resultList.AddRange(results);
}
} while (lastEvaluatedKey.Count != 0);
return resultList;
}
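        // Example (illustrative; the key and attribute names are hypothetical): passing
        // keys = { ":pk": new AttributeValue("grain-1") } with keyConditionExpression = "PartitionKey = :pk"
        // pages through every matching item, following LastEvaluatedKey until DynamoDB returns an empty key.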
/// <summary>
/// Scan a DynamoDB table by querying the entry fields.
/// </summary>
/// <typeparam name="TResult">The result type</typeparam>
/// <param name="tableName">The name of the table to search for the entries</param>
/// <param name="attributes">The attributes used on the expression</param>
/// <param name="expression">The filter expression</param>
/// <param name="resolver">Function that will be called to translate the returned fields into a concrete type. This Function is only called if the result is != null and will be called for each entry that match the query and added to the results list</param>
/// <returns>The collection containing a list of objects translated by the resolver function</returns>
public async Task<List<TResult>> ScanAsync<TResult>(string tableName, Dictionary<string, AttributeValue> attributes, string expression, Func<Dictionary<string, AttributeValue>, TResult> resolver) where TResult : class
{
// From the Amazon documentation:
// "A single Scan operation will read up to the maximum number of items set
// (if using the Limit parameter) or a maximum of 1 MB of data and then apply
// any filtering to the results using FilterExpression."
// https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/DynamoDBv2/MDynamoDBScanAsyncStringDictionary!String,%20Condition!CancellationToken.html
try
{
var resultList = new List<TResult>();
var exclusiveStartKey = new Dictionary<string, AttributeValue>();
while (true)
{
var request = new ScanRequest
{
TableName = tableName,
ConsistentRead = true,
FilterExpression = expression,
ExpressionAttributeValues = attributes,
Select = Select.ALL_ATTRIBUTES,
ExclusiveStartKey = exclusiveStartKey
};
var response = await ddbClient.ScanAsync(request);
foreach (var item in response.Items)
{
resultList.Add(resolver(item));
}
if (response.LastEvaluatedKey.Count == 0)
{
break;
}
else
{
exclusiveStartKey = response.LastEvaluatedKey;
}
}
return resultList;
}
catch (Exception exc)
{
var errorMsg = $"Failed to read table {tableName}: {exc.Message}";
Logger.Warn(ErrorCode.StorageProviderBase, errorMsg, exc);
throw new OrleansException(errorMsg, exc);
}
}
/// <summary>
        /// Create or replace multiple entries in a DynamoDB table (Batch put)
        /// </summary>
        /// <param name="tableName">The name of the table to put the entries into</param>
        /// <param name="toCreate">List of attribute values for each entry that must be created or replaced in the batch</param>
        /// <returns>A task representing the asynchronous batch put operation.</returns>
public Task PutEntriesAsync(string tableName, IReadOnlyCollection<Dictionary<string, AttributeValue>> toCreate)
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Put entries {0} table", tableName);
            if (toCreate == null) throw new ArgumentNullException(nameof(toCreate));
if (toCreate.Count == 0)
return Task.CompletedTask;
try
{
var request = new BatchWriteItemRequest();
request.RequestItems = new Dictionary<string, List<WriteRequest>>();
var batch = new List<WriteRequest>();
foreach (var item in toCreate)
{
var writeRequest = new WriteRequest();
writeRequest.PutRequest = new PutRequest();
writeRequest.PutRequest.Item = item;
batch.Add(writeRequest);
}
request.RequestItems.Add(tableName, batch);
return ddbClient.BatchWriteItemAsync(request);
}
catch (Exception exc)
{
Logger.Warn(ErrorCode.StorageProviderBase,
$"Intermediate error bulk inserting entries to table {tableName}.", exc);
throw;
}
}
/// <summary>
/// Transactionally reads entries from a DynamoDB table
/// </summary>
/// <typeparam name="TResult">The result type</typeparam>
/// <param name="tableName">The name of the table to search for the entry</param>
/// <param name="keys">The table entry keys to search for</param>
/// <param name="resolver">Function that will be called to translate the returned fields into a concrete type. This Function is only called if the result is != null</param>
/// <returns>The object translated by the resolver function</returns>
public async Task<IEnumerable<TResult>> GetEntriesTxAsync<TResult>(string tableName, IEnumerable<Dictionary<string, AttributeValue>> keys, Func<Dictionary<string, AttributeValue>, TResult> resolver) where TResult : class
{
try
{
var request = new TransactGetItemsRequest
{
TransactItems = keys.Select(key => new TransactGetItem
{
Get = new Get
{
TableName = tableName,
Key = key
}
}).ToList()
};
var response = await ddbClient.TransactGetItemsAsync(request);
return response.Responses.Where(r => r?.Item?.Count > 0).Select(r => resolver(r.Item));
}
catch (Exception)
{
if (Logger.IsEnabled(LogLevel.Debug)) Logger.Debug("Unable to find table entry for Keys = {0}", Utils.EnumerableToString(keys, d => Utils.DictionaryToString(d)));
throw;
}
}
/// <summary>
/// Transactionally performs write requests
/// </summary>
/// <param name="puts">Any puts to be performed</param>
/// <param name="updates">Any updated to be performed</param>
/// <param name="deletes">Any deletes to be performed</param>
/// <param name="conditionChecks">Any condition checks to be performed</param>
        /// <returns>A task representing the asynchronous transactional write.</returns>
public Task WriteTxAsync(IEnumerable<Put> puts = null, IEnumerable<Update> updates = null, IEnumerable<Delete> deletes = null, IEnumerable<ConditionCheck> conditionChecks = null)
{
try
{
var transactItems = new List<TransactWriteItem>();
if (puts != null)
{
transactItems.AddRange(puts.Select(p => new TransactWriteItem{Put = p}));
}
if (updates != null)
{
transactItems.AddRange(updates.Select(u => new TransactWriteItem{Update = u}));
}
if (deletes != null)
{
transactItems.AddRange(deletes.Select(d => new TransactWriteItem{Delete = d}));
}
if (conditionChecks != null)
{
transactItems.AddRange(conditionChecks.Select(c => new TransactWriteItem{ConditionCheck = c}));
}
var request = new TransactWriteItemsRequest
{
TransactItems = transactItems
};
return ddbClient.TransactWriteItemsAsync(request);
}
catch (Exception)
{
if (Logger.IsEnabled(LogLevel.Debug)) Logger.Debug("Unable to write tx");
throw;
}
}
}
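    /// <summary>
    /// Illustrative usage sketch (editor-added; the class, table, and attribute names below are
    /// hypothetical and not part of Orleans). It shows how <see cref="DynamoDBStorage"/> is typically
    /// driven: define the key schema, create the table if it is missing, then write and read an entry.
    /// </summary>
    internal static class DynamoDBStorageUsageExample
    {
        internal static async Task DemonstrateAsync(ILogger logger)
        {
            var storage = new DynamoDBStorage(logger, service: "us-east-1");

            // Create the table on first use; "PartitionKey" is the single string hash key.
            await storage.InitializeTable(
                tableName: "ExampleTable",
                keys: new List<KeySchemaElement> { new KeySchemaElement("PartitionKey", KeyType.HASH) },
                attributes: new List<AttributeDefinition> { new AttributeDefinition("PartitionKey", ScalarAttributeType.S) });

            // Write one entry and read it back through a resolver that projects the stored payload.
            var fields = new Dictionary<string, AttributeValue>
            {
                ["PartitionKey"] = new AttributeValue("example-id"),
                ["Payload"] = new AttributeValue("hello")
            };
            await storage.PutEntryAsync("ExampleTable", fields);

            var keys = new Dictionary<string, AttributeValue> { ["PartitionKey"] = new AttributeValue("example-id") };
            string payload = await storage.ReadSingleEntryAsync("ExampleTable", keys, item => item["Payload"].S);
        }
    }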
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
namespace System.Runtime.Serialization.Formatters.Binary
{
internal sealed class BinaryParser
{
private const int ChunkSize = 4096;
private static readonly Encoding s_encoding = new UTF8Encoding(false, true);
internal ObjectReader _objectReader;
internal Stream _input;
internal long _topId;
internal long _headerId;
internal SizedArray _objectMapIdTable;
internal SizedArray _assemIdToAssemblyTable; // Used to hold assembly information
internal SerStack _stack = new SerStack("ObjectProgressStack");
internal BinaryTypeEnum _expectedType = BinaryTypeEnum.ObjectUrt;
internal object _expectedTypeInformation;
internal ParseRecord _prs;
private BinaryAssemblyInfo _systemAssemblyInfo;
private BinaryReader _dataReader;
private SerStack _opPool;
private BinaryObject _binaryObject;
private BinaryObjectWithMap _bowm;
private BinaryObjectWithMapTyped _bowmt;
internal BinaryObjectString _objectString;
internal BinaryCrossAppDomainString _crossAppDomainString;
internal MemberPrimitiveTyped _memberPrimitiveTyped;
private byte[] _byteBuffer;
internal MemberPrimitiveUnTyped memberPrimitiveUnTyped;
internal MemberReference _memberReference;
internal ObjectNull _objectNull;
internal static volatile MessageEnd _messageEnd;
internal BinaryParser(Stream stream, ObjectReader objectReader)
{
_input = stream;
_objectReader = objectReader;
_dataReader = new BinaryReader(_input, s_encoding);
}
internal BinaryAssemblyInfo SystemAssemblyInfo =>
_systemAssemblyInfo ?? (_systemAssemblyInfo = new BinaryAssemblyInfo(Converter.s_urtAssemblyString, Converter.s_urtAssembly));
internal SizedArray ObjectMapIdTable =>
_objectMapIdTable ?? (_objectMapIdTable = new SizedArray());
internal SizedArray AssemIdToAssemblyTable =>
_assemIdToAssemblyTable ?? (_assemIdToAssemblyTable = new SizedArray(2));
internal ParseRecord PRs =>
_prs ?? (_prs = new ParseRecord());
        // Parse the input.
        // Reads each record from the input stream. If the record is a primitive type (a number),
        // it doesn't have a BinaryHeaderEnum byte; in that case the expected type
        // has been previously set to Primitive.
internal void Run()
{
try
{
bool isLoop = true;
ReadBegin();
ReadSerializationHeaderRecord();
while (isLoop)
{
BinaryHeaderEnum binaryHeaderEnum = BinaryHeaderEnum.Object;
switch (_expectedType)
{
case BinaryTypeEnum.ObjectUrt:
case BinaryTypeEnum.ObjectUser:
case BinaryTypeEnum.String:
case BinaryTypeEnum.Object:
case BinaryTypeEnum.ObjectArray:
case BinaryTypeEnum.StringArray:
case BinaryTypeEnum.PrimitiveArray:
byte inByte = _dataReader.ReadByte();
binaryHeaderEnum = (BinaryHeaderEnum)inByte;
switch (binaryHeaderEnum)
{
case BinaryHeaderEnum.Assembly:
case BinaryHeaderEnum.CrossAppDomainAssembly:
ReadAssembly(binaryHeaderEnum);
break;
case BinaryHeaderEnum.Object:
ReadObject();
break;
case BinaryHeaderEnum.CrossAppDomainMap:
ReadCrossAppDomainMap();
break;
case BinaryHeaderEnum.ObjectWithMap:
case BinaryHeaderEnum.ObjectWithMapAssemId:
ReadObjectWithMap(binaryHeaderEnum);
break;
case BinaryHeaderEnum.ObjectWithMapTyped:
case BinaryHeaderEnum.ObjectWithMapTypedAssemId:
ReadObjectWithMapTyped(binaryHeaderEnum);
break;
case BinaryHeaderEnum.ObjectString:
case BinaryHeaderEnum.CrossAppDomainString:
ReadObjectString(binaryHeaderEnum);
break;
case BinaryHeaderEnum.Array:
case BinaryHeaderEnum.ArraySinglePrimitive:
case BinaryHeaderEnum.ArraySingleObject:
case BinaryHeaderEnum.ArraySingleString:
ReadArray(binaryHeaderEnum);
break;
case BinaryHeaderEnum.MemberPrimitiveTyped:
ReadMemberPrimitiveTyped();
break;
case BinaryHeaderEnum.MemberReference:
ReadMemberReference();
break;
case BinaryHeaderEnum.ObjectNull:
case BinaryHeaderEnum.ObjectNullMultiple256:
case BinaryHeaderEnum.ObjectNullMultiple:
ReadObjectNull(binaryHeaderEnum);
break;
case BinaryHeaderEnum.MessageEnd:
isLoop = false;
ReadMessageEnd();
ReadEnd();
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_BinaryHeader, inByte));
}
break;
case BinaryTypeEnum.Primitive:
ReadMemberPrimitiveUnTyped();
break;
default:
throw new SerializationException(SR.Serialization_TypeExpected);
}
                    // If an assembly record was encountered, don't advance
                    // the object progress.
if (binaryHeaderEnum != BinaryHeaderEnum.Assembly)
{
// End of parse loop.
bool isData = false;
// Set up loop for next iteration.
// If this is an object, and the end of object has been reached, then parse object end.
while (!isData)
{
ObjectProgress op = (ObjectProgress)_stack.Peek();
if (op == null)
{
// No more object on stack, then the next record is a top level object
_expectedType = BinaryTypeEnum.ObjectUrt;
_expectedTypeInformation = null;
isData = true;
}
else
{
// Find out what record is expected next
isData = op.GetNext(out op._expectedType, out op._expectedTypeInformation);
_expectedType = op._expectedType;
_expectedTypeInformation = op._expectedTypeInformation;
if (!isData)
{
// No record is expected next, this is the end of an object or array
PRs.Init();
if (op._memberValueEnum == InternalMemberValueE.Nested)
{
// Nested object
PRs._parseTypeEnum = InternalParseTypeE.MemberEnd;
PRs._memberTypeEnum = op._memberTypeEnum;
PRs._memberValueEnum = op._memberValueEnum;
_objectReader.Parse(PRs);
}
else
{
// Top level object
PRs._parseTypeEnum = InternalParseTypeE.ObjectEnd;
PRs._memberTypeEnum = op._memberTypeEnum;
PRs._memberValueEnum = op._memberValueEnum;
_objectReader.Parse(PRs);
}
_stack.Pop();
PutOp(op);
}
}
}
}
}
}
catch (EndOfStreamException)
{
// EOF should never be thrown since there is a MessageEnd record to stop parsing
throw new SerializationException(SR.Serialization_StreamEnd);
}
}
internal void ReadBegin() { }
internal void ReadEnd() { }
// Primitive Reads from Stream
internal bool ReadBoolean() => _dataReader.ReadBoolean();
internal byte ReadByte() => _dataReader.ReadByte();
internal byte[] ReadBytes(int length) => _dataReader.ReadBytes(length);
internal void ReadBytes(byte[] byteA, int offset, int size)
{
while (size > 0)
{
int n = _dataReader.Read(byteA, offset, size);
if (n == 0)
{
throw new EndOfStreamException(SR.IO_EOF_ReadBeyondEOF);
}
offset += n;
size -= n;
}
}
internal char ReadChar() => _dataReader.ReadChar();
internal char[] ReadChars(int length) => _dataReader.ReadChars(length);
internal decimal ReadDecimal() => decimal.Parse(_dataReader.ReadString(), CultureInfo.InvariantCulture);
internal float ReadSingle() => _dataReader.ReadSingle();
internal double ReadDouble() => _dataReader.ReadDouble();
internal short ReadInt16() => _dataReader.ReadInt16();
internal int ReadInt32() => _dataReader.ReadInt32();
internal long ReadInt64() => _dataReader.ReadInt64();
internal sbyte ReadSByte() => unchecked((sbyte)ReadByte());
internal string ReadString() => _dataReader.ReadString();
internal TimeSpan ReadTimeSpan() => new TimeSpan(ReadInt64());
internal DateTime ReadDateTime() => FromBinaryRaw(ReadInt64());
private static DateTime FromBinaryRaw(long dateData)
{
// Use DateTime's public constructor to validate the input, but we
// can't return that result as it strips off the kind. To address
// that, store the value directly into a DateTime via an unsafe cast.
// See BinaryFormatterWriter.WriteDateTime for details.
const long TicksMask = 0x3FFFFFFFFFFFFFFF;
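            // The low 62 bits kept by the mask are the tick count, which the DateTime constructor
            // range-checks; the top two bits encode the DateTimeKind and are preserved by the cast below.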
new DateTime(dateData & TicksMask);
return MemoryMarshal.Cast<long, DateTime>(MemoryMarshal.CreateReadOnlySpan(ref dateData, 1))[0];
}
internal ushort ReadUInt16() => _dataReader.ReadUInt16();
internal uint ReadUInt32() => _dataReader.ReadUInt32();
internal ulong ReadUInt64() => _dataReader.ReadUInt64();
// Binary Stream Record Reads
internal void ReadSerializationHeaderRecord()
{
var record = new SerializationHeaderRecord();
record.Read(this);
_topId = (record._topId > 0 ? _objectReader.GetId(record._topId) : record._topId);
_headerId = (record._headerId > 0 ? _objectReader.GetId(record._headerId) : record._headerId);
}
internal void ReadAssembly(BinaryHeaderEnum binaryHeaderEnum)
{
var record = new BinaryAssembly();
if (binaryHeaderEnum == BinaryHeaderEnum.CrossAppDomainAssembly)
{
var crossAppDomainAssembly = new BinaryCrossAppDomainAssembly();
crossAppDomainAssembly.Read(this);
record._assemId = crossAppDomainAssembly._assemId;
record._assemblyString = _objectReader.CrossAppDomainArray(crossAppDomainAssembly._assemblyIndex) as string;
if (record._assemblyString == null)
{
throw new SerializationException(SR.Format(SR.Serialization_CrossAppDomainError, "String", crossAppDomainAssembly._assemblyIndex));
}
}
else
{
record.Read(this);
}
AssemIdToAssemblyTable[record._assemId] = new BinaryAssemblyInfo(record._assemblyString);
}
private void ReadObject()
{
if (_binaryObject == null)
{
_binaryObject = new BinaryObject();
}
_binaryObject.Read(this);
ObjectMap objectMap = (ObjectMap)ObjectMapIdTable[_binaryObject._mapId];
if (objectMap == null)
{
throw new SerializationException(SR.Format(SR.Serialization_Map, _binaryObject._mapId));
}
ObjectProgress op = GetOp();
ParseRecord pr = op._pr;
_stack.Push(op);
op._objectTypeEnum = InternalObjectTypeE.Object;
op._binaryTypeEnumA = objectMap._binaryTypeEnumA;
op._memberNames = objectMap._memberNames;
op._memberTypes = objectMap._memberTypes;
op._typeInformationA = objectMap._typeInformationA;
op._memberLength = op._binaryTypeEnumA.Length;
ObjectProgress objectOp = (ObjectProgress)_stack.PeekPeek();
if ((objectOp == null) || (objectOp._isInitial))
{
// Non-Nested Object
op._name = objectMap._objectName;
pr._parseTypeEnum = InternalParseTypeE.Object;
op._memberValueEnum = InternalMemberValueE.Empty;
}
else
{
// Nested Object
pr._parseTypeEnum = InternalParseTypeE.Member;
pr._memberValueEnum = InternalMemberValueE.Nested;
op._memberValueEnum = InternalMemberValueE.Nested;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
pr._name = objectOp._name;
pr._memberTypeEnum = InternalMemberTypeE.Field;
op._memberTypeEnum = InternalMemberTypeE.Field;
break;
case InternalObjectTypeE.Array:
pr._memberTypeEnum = InternalMemberTypeE.Item;
op._memberTypeEnum = InternalMemberTypeE.Item;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_Map, objectOp._objectTypeEnum.ToString()));
}
}
pr._objectId = _objectReader.GetId(_binaryObject._objectId);
pr._objectInfo = objectMap.CreateObjectInfo(ref pr._si, ref pr._memberData);
if (pr._objectId == _topId)
{
pr._objectPositionEnum = InternalObjectPositionE.Top;
}
pr._objectTypeEnum = InternalObjectTypeE.Object;
pr._keyDt = objectMap._objectName;
pr._dtType = objectMap._objectType;
pr._dtTypeCode = InternalPrimitiveTypeE.Invalid;
_objectReader.Parse(pr);
}
internal void ReadCrossAppDomainMap()
{
BinaryCrossAppDomainMap record = new BinaryCrossAppDomainMap();
record.Read(this);
object mapObject = _objectReader.CrossAppDomainArray(record._crossAppDomainArrayIndex);
BinaryObjectWithMap binaryObjectWithMap = mapObject as BinaryObjectWithMap;
if (binaryObjectWithMap != null)
{
ReadObjectWithMap(binaryObjectWithMap);
}
else
{
BinaryObjectWithMapTyped binaryObjectWithMapTyped = mapObject as BinaryObjectWithMapTyped;
if (binaryObjectWithMapTyped != null)
{
ReadObjectWithMapTyped(binaryObjectWithMapTyped);
}
else
{
throw new SerializationException(SR.Format(SR.Serialization_CrossAppDomainError, "BinaryObjectMap", mapObject));
}
}
}
internal void ReadObjectWithMap(BinaryHeaderEnum binaryHeaderEnum)
{
if (_bowm == null)
{
_bowm = new BinaryObjectWithMap(binaryHeaderEnum);
}
else
{
_bowm._binaryHeaderEnum = binaryHeaderEnum;
}
_bowm.Read(this);
ReadObjectWithMap(_bowm);
}
private void ReadObjectWithMap(BinaryObjectWithMap record)
{
BinaryAssemblyInfo assemblyInfo = null;
ObjectProgress op = GetOp();
ParseRecord pr = op._pr;
_stack.Push(op);
if (record._binaryHeaderEnum == BinaryHeaderEnum.ObjectWithMapAssemId)
{
if (record._assemId < 1)
{
throw new SerializationException(SR.Format(SR.Serialization_Assembly, record._name));
}
assemblyInfo = ((BinaryAssemblyInfo)AssemIdToAssemblyTable[record._assemId]);
if (assemblyInfo == null)
{
throw new SerializationException(SR.Format(SR.Serialization_Assembly, record._assemId + " " + record._name));
}
}
else if (record._binaryHeaderEnum == BinaryHeaderEnum.ObjectWithMap)
{
assemblyInfo = SystemAssemblyInfo; //Urt assembly
}
Type objectType = _objectReader.GetType(assemblyInfo, record._name);
ObjectMap objectMap = ObjectMap.Create(record._name, objectType, record._memberNames, _objectReader, record._objectId, assemblyInfo);
ObjectMapIdTable[record._objectId] = objectMap;
op._objectTypeEnum = InternalObjectTypeE.Object;
op._binaryTypeEnumA = objectMap._binaryTypeEnumA;
op._typeInformationA = objectMap._typeInformationA;
op._memberLength = op._binaryTypeEnumA.Length;
op._memberNames = objectMap._memberNames;
op._memberTypes = objectMap._memberTypes;
ObjectProgress objectOp = (ObjectProgress)_stack.PeekPeek();
if ((objectOp == null) || (objectOp._isInitial))
{
// Non-Nested Object
op._name = record._name;
pr._parseTypeEnum = InternalParseTypeE.Object;
op._memberValueEnum = InternalMemberValueE.Empty;
}
else
{
// Nested Object
pr._parseTypeEnum = InternalParseTypeE.Member;
pr._memberValueEnum = InternalMemberValueE.Nested;
op._memberValueEnum = InternalMemberValueE.Nested;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
pr._name = objectOp._name;
pr._memberTypeEnum = InternalMemberTypeE.Field;
op._memberTypeEnum = InternalMemberTypeE.Field;
break;
case InternalObjectTypeE.Array:
pr._memberTypeEnum = InternalMemberTypeE.Item;
op._memberTypeEnum = InternalMemberTypeE.Field;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ObjectTypeEnum, objectOp._objectTypeEnum.ToString()));
}
}
pr._objectTypeEnum = InternalObjectTypeE.Object;
pr._objectId = _objectReader.GetId(record._objectId);
pr._objectInfo = objectMap.CreateObjectInfo(ref pr._si, ref pr._memberData);
if (pr._objectId == _topId)
{
pr._objectPositionEnum = InternalObjectPositionE.Top;
}
pr._keyDt = record._name;
pr._dtType = objectMap._objectType;
pr._dtTypeCode = InternalPrimitiveTypeE.Invalid;
_objectReader.Parse(pr);
}
internal void ReadObjectWithMapTyped(BinaryHeaderEnum binaryHeaderEnum)
{
if (_bowmt == null)
{
_bowmt = new BinaryObjectWithMapTyped(binaryHeaderEnum);
}
else
{
_bowmt._binaryHeaderEnum = binaryHeaderEnum;
}
_bowmt.Read(this);
ReadObjectWithMapTyped(_bowmt);
}
private void ReadObjectWithMapTyped(BinaryObjectWithMapTyped record)
{
BinaryAssemblyInfo assemblyInfo = null;
ObjectProgress op = GetOp();
ParseRecord pr = op._pr;
_stack.Push(op);
if (record._binaryHeaderEnum == BinaryHeaderEnum.ObjectWithMapTypedAssemId)
{
if (record._assemId < 1)
{
throw new SerializationException(SR.Format(SR.Serialization_AssemblyId, record._name));
}
assemblyInfo = (BinaryAssemblyInfo)AssemIdToAssemblyTable[record._assemId];
if (assemblyInfo == null)
{
throw new SerializationException(SR.Format(SR.Serialization_AssemblyId, record._assemId + " " + record._name));
}
}
else if (record._binaryHeaderEnum == BinaryHeaderEnum.ObjectWithMapTyped)
{
assemblyInfo = SystemAssemblyInfo; // Urt assembly
}
ObjectMap objectMap = ObjectMap.Create(record._name, record._memberNames, record._binaryTypeEnumA, record._typeInformationA, record._memberAssemIds, _objectReader, record._objectId, assemblyInfo, AssemIdToAssemblyTable);
ObjectMapIdTable[record._objectId] = objectMap;
op._objectTypeEnum = InternalObjectTypeE.Object;
op._binaryTypeEnumA = objectMap._binaryTypeEnumA;
op._typeInformationA = objectMap._typeInformationA;
op._memberLength = op._binaryTypeEnumA.Length;
op._memberNames = objectMap._memberNames;
op._memberTypes = objectMap._memberTypes;
ObjectProgress objectOp = (ObjectProgress)_stack.PeekPeek();
if ((objectOp == null) || (objectOp._isInitial))
{
// Non-Nested Object
op._name = record._name;
pr._parseTypeEnum = InternalParseTypeE.Object;
op._memberValueEnum = InternalMemberValueE.Empty;
}
else
{
// Nested Object
pr._parseTypeEnum = InternalParseTypeE.Member;
pr._memberValueEnum = InternalMemberValueE.Nested;
op._memberValueEnum = InternalMemberValueE.Nested;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
pr._name = objectOp._name;
pr._memberTypeEnum = InternalMemberTypeE.Field;
op._memberTypeEnum = InternalMemberTypeE.Field;
break;
case InternalObjectTypeE.Array:
pr._memberTypeEnum = InternalMemberTypeE.Item;
op._memberTypeEnum = InternalMemberTypeE.Item;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ObjectTypeEnum, objectOp._objectTypeEnum.ToString()));
}
}
pr._objectTypeEnum = InternalObjectTypeE.Object;
pr._objectInfo = objectMap.CreateObjectInfo(ref pr._si, ref pr._memberData);
pr._objectId = _objectReader.GetId(record._objectId);
if (pr._objectId == _topId)
{
pr._objectPositionEnum = InternalObjectPositionE.Top;
}
pr._keyDt = record._name;
pr._dtType = objectMap._objectType;
pr._dtTypeCode = InternalPrimitiveTypeE.Invalid;
_objectReader.Parse(pr);
}
private void ReadObjectString(BinaryHeaderEnum binaryHeaderEnum)
{
if (_objectString == null)
{
_objectString = new BinaryObjectString();
}
if (binaryHeaderEnum == BinaryHeaderEnum.ObjectString)
{
_objectString.Read(this);
}
else
{
if (_crossAppDomainString == null)
{
_crossAppDomainString = new BinaryCrossAppDomainString();
}
_crossAppDomainString.Read(this);
_objectString._value = _objectReader.CrossAppDomainArray(_crossAppDomainString._value) as string;
if (_objectString._value == null)
{
throw new SerializationException(SR.Format(SR.Serialization_CrossAppDomainError, "String", _crossAppDomainString._value));
}
_objectString._objectId = _crossAppDomainString._objectId;
}
PRs.Init();
PRs._parseTypeEnum = InternalParseTypeE.Object;
PRs._objectId = _objectReader.GetId(_objectString._objectId);
if (PRs._objectId == _topId)
{
PRs._objectPositionEnum = InternalObjectPositionE.Top;
}
PRs._objectTypeEnum = InternalObjectTypeE.Object;
ObjectProgress objectOp = (ObjectProgress)_stack.Peek();
PRs._value = _objectString._value;
PRs._keyDt = "System.String";
PRs._dtType = Converter.s_typeofString;
PRs._dtTypeCode = InternalPrimitiveTypeE.Invalid;
PRs._varValue = _objectString._value; //Need to set it because ObjectReader is picking up value from variant, not pr.PRvalue
if (objectOp == null)
{
// Top level String
PRs._parseTypeEnum = InternalParseTypeE.Object;
PRs._name = "System.String";
}
else
{
// Nested in an Object
PRs._parseTypeEnum = InternalParseTypeE.Member;
PRs._memberValueEnum = InternalMemberValueE.InlineValue;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
PRs._name = objectOp._name;
PRs._memberTypeEnum = InternalMemberTypeE.Field;
break;
case InternalObjectTypeE.Array:
PRs._memberTypeEnum = InternalMemberTypeE.Item;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ObjectTypeEnum, objectOp._objectTypeEnum.ToString()));
}
}
_objectReader.Parse(PRs);
}
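// Reads a MemberPrimitiveTyped record (a primitive value carrying its own type code) and
// forwards it to the ObjectReader as a top-level boxed primitive or an inline member/item.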
private void ReadMemberPrimitiveTyped()
{
if (_memberPrimitiveTyped == null)
{
_memberPrimitiveTyped = new MemberPrimitiveTyped();
}
_memberPrimitiveTyped.Read(this);
PRs._objectTypeEnum = InternalObjectTypeE.Object; // NOTE: redundant - PRs.Init() below resets this value
ObjectProgress objectOp = (ObjectProgress)_stack.Peek();
PRs.Init();
PRs._varValue = _memberPrimitiveTyped._value;
PRs._keyDt = Converter.ToComType(_memberPrimitiveTyped._primitiveTypeEnum);
PRs._dtType = Converter.ToType(_memberPrimitiveTyped._primitiveTypeEnum);
PRs._dtTypeCode = _memberPrimitiveTyped._primitiveTypeEnum;
if (objectOp == null)
{
// Top level boxed primitive
PRs._parseTypeEnum = InternalParseTypeE.Object;
PRs._name = "System.Variant";
}
else
{
// Nested in an Object
PRs._parseTypeEnum = InternalParseTypeE.Member;
PRs._memberValueEnum = InternalMemberValueE.InlineValue;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
PRs._name = objectOp._name;
PRs._memberTypeEnum = InternalMemberTypeE.Field;
break;
case InternalObjectTypeE.Array:
PRs._memberTypeEnum = InternalMemberTypeE.Item;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ObjectTypeEnum, objectOp._objectTypeEnum.ToString()));
}
}
_objectReader.Parse(PRs);
}
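// Reads a BinaryArray record: resolves the element type, classifies the array as single,
// jagged, or rectangular, and either reads a primitive array inline as raw bytes or pushes
// an ObjectProgress frame so the elements can be parsed one record at a time.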
private void ReadArray(BinaryHeaderEnum binaryHeaderEnum)
{
BinaryAssemblyInfo assemblyInfo = null;
BinaryArray record = new BinaryArray(binaryHeaderEnum);
record.Read(this);
if (record._binaryTypeEnum == BinaryTypeEnum.ObjectUser)
{
if (record._assemId < 1)
{
throw new SerializationException(SR.Format(SR.Serialization_AssemblyId, record._typeInformation));
}
assemblyInfo = (BinaryAssemblyInfo)AssemIdToAssemblyTable[record._assemId];
}
else
{
assemblyInfo = SystemAssemblyInfo; // system (runtime) assembly
}
ObjectProgress op = GetOp();
ParseRecord pr = op._pr;
op._objectTypeEnum = InternalObjectTypeE.Array;
op._binaryTypeEnum = record._binaryTypeEnum;
op._typeInformation = record._typeInformation;
ObjectProgress objectOp = (ObjectProgress)_stack.PeekPeek();
if ((objectOp == null) || (record._objectId > 0))
{
// Non-Nested Object
op._name = "System.Array";
pr._parseTypeEnum = InternalParseTypeE.Object;
op._memberValueEnum = InternalMemberValueE.Empty;
}
else
{
// Nested Object
pr._parseTypeEnum = InternalParseTypeE.Member;
pr._memberValueEnum = InternalMemberValueE.Nested;
op._memberValueEnum = InternalMemberValueE.Nested;
switch (objectOp._objectTypeEnum)
{
case InternalObjectTypeE.Object:
pr._name = objectOp._name;
pr._memberTypeEnum = InternalMemberTypeE.Field;
op._memberTypeEnum = InternalMemberTypeE.Field;
pr._keyDt = objectOp._name;
pr._dtType = objectOp._dtType;
break;
case InternalObjectTypeE.Array:
pr._memberTypeEnum = InternalMemberTypeE.Item;
op._memberTypeEnum = InternalMemberTypeE.Item;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ObjectTypeEnum, objectOp._objectTypeEnum.ToString()));
}
}
pr._objectId = _objectReader.GetId(record._objectId);
if (pr._objectId == _topId)
{
pr._objectPositionEnum = InternalObjectPositionE.Top;
}
else if ((_headerId > 0) && (pr._objectId == _headerId))
{
pr._objectPositionEnum = InternalObjectPositionE.Headers; // Headers are an array of header objects
}
else
{
pr._objectPositionEnum = InternalObjectPositionE.Child;
}
pr._objectTypeEnum = InternalObjectTypeE.Array;
BinaryTypeConverter.TypeFromInfo(record._binaryTypeEnum, record._typeInformation, _objectReader, assemblyInfo,
out pr._arrayElementTypeCode, out pr._arrayElementTypeString,
out pr._arrayElementType, out pr._isArrayVariant);
pr._dtTypeCode = InternalPrimitiveTypeE.Invalid;
pr._rank = record._rank;
pr._lengthA = record._lengthA;
pr._lowerBoundA = record._lowerBoundA;
bool isPrimitiveArray = false;
switch (record._binaryArrayTypeEnum)
{
case BinaryArrayTypeEnum.Single:
case BinaryArrayTypeEnum.SingleOffset:
op._numItems = record._lengthA[0];
pr._arrayTypeEnum = InternalArrayTypeE.Single;
if (Converter.IsWriteAsByteArray(pr._arrayElementTypeCode) &&
(record._lowerBoundA[0] == 0))
{
isPrimitiveArray = true;
ReadArrayAsBytes(pr);
}
break;
case BinaryArrayTypeEnum.Jagged:
case BinaryArrayTypeEnum.JaggedOffset:
op._numItems = record._lengthA[0];
pr._arrayTypeEnum = InternalArrayTypeE.Jagged;
break;
case BinaryArrayTypeEnum.Rectangular:
case BinaryArrayTypeEnum.RectangularOffset:
int arrayLength = 1;
for (int i = 0; i < record._rank; i++)
arrayLength = arrayLength * record._lengthA[i];
op._numItems = arrayLength;
pr._arrayTypeEnum = InternalArrayTypeE.Rectangular;
break;
default:
throw new SerializationException(SR.Format(SR.Serialization_ArrayType, record._binaryArrayTypeEnum.ToString()));
}
if (!isPrimitiveArray)
{
_stack.Push(op);
}
else
{
PutOp(op);
}
_objectReader.Parse(pr);
if (isPrimitiveArray)
{
pr._parseTypeEnum = InternalParseTypeE.ObjectEnd;
_objectReader.Parse(pr);
}
}
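// Reads a single-dimension primitive array straight from the stream in ChunkSize pieces,
// byte-swapping each element when running on a big-endian platform.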
private void ReadArrayAsBytes(ParseRecord pr)
{
if (pr._arrayElementTypeCode == InternalPrimitiveTypeE.Byte)
{
pr._newObj = ReadBytes(pr._lengthA[0]);
}
else if (pr._arrayElementTypeCode == InternalPrimitiveTypeE.Char)
{
pr._newObj = ReadChars(pr._lengthA[0]);
}
else
{
int typeLength = Converter.TypeLength(pr._arrayElementTypeCode);
pr._newObj = Converter.CreatePrimitiveArray(pr._arrayElementTypeCode, pr._lengthA[0]);
Debug.Assert((pr._newObj != null), "[BinaryParser expected a Primitive Array]");
Array array = (Array)pr._newObj;
int arrayOffset = 0;
if (_byteBuffer == null)
{
_byteBuffer = new byte[ChunkSize];
}
while (arrayOffset < array.Length)
{
int numArrayItems = Math.Min(ChunkSize / typeLength, array.Length - arrayOffset);
int bufferUsed = numArrayItems * typeLength;
ReadBytes(_byteBuffer, 0, bufferUsed);
if (!BitConverter.IsLittleEndian)
{
// we know that we are reading a primitive type, so just do a simple swap
Debug.Fail("Re-review this code if/when we start running on big endian systems");
for (int i = 0; i < bufferUsed; i += typeLength)
{
for (int j = 0; j < typeLength / 2; j++)
{
byte tmp = _byteBuffer[i + j];
_byteBuffer[i + j] = _byteBuffer[i + typeLength - 1 - j];
_byteBuffer[i + typeLength - 1 - j] = tmp;
}
}
}
Buffer.BlockCopy(_byteBuffer, 0, array, arrayOffset * typeLength, bufferUsed);
arrayOffset += numArrayItems;
}
}
}
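// Reads an untyped primitive member whose type code comes from the enclosing object's
// metadata (_expectedTypeInformation) and forwards it to the ObjectReader.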
private void ReadMemberPrimitiveUnTyped()
{
ObjectProgress objectOp = (ObjectProgress)_stack.Peek();
if (_memberPrimitiveUnTyped == null)
{
_memberPrimitiveUnTyped = new MemberPrimitiveUnTyped();
}
_memberPrimitiveUnTyped.Set((InternalPrimitiveTypeE)_expectedTypeInformation);
_memberPrimitiveUnTyped.Read(this);
PRs.Init();
PRs._varValue = _memberPrimitiveUnTyped._value;
PRs._dtTypeCode = (InternalPrimitiveTypeE)_expectedTypeInformation;
PRs._dtType = Converter.ToType(PRs._dtTypeCode);
PRs._parseTypeEnum = InternalParseTypeE.Member;
PRs._memberValueEnum = InternalMemberValueE.InlineValue;
if (objectOp._objectTypeEnum == InternalObjectTypeE.Object)
{
PRs._memberTypeEnum = InternalMemberTypeE.Field;
PRs._name = objectOp._name;
}
else
{
PRs._memberTypeEnum = InternalMemberTypeE.Item;
}
_objectReader.Parse(PRs);
}
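// Reads a MemberReference record (a back-reference to a previously serialized object id)
// and forwards it to the ObjectReader as a Reference-valued field or array item.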
private void ReadMemberReference()
{
if (_memberReference == null)
{
_memberReference = new MemberReference();
}
_memberReference.Read(this);
ObjectProgress objectOp = (ObjectProgress)_stack.Peek();
PRs.Init();
PRs._idRef = _objectReader.GetId(_memberReference._idRef);
PRs._parseTypeEnum = InternalParseTypeE.Member;
PRs._memberValueEnum = InternalMemberValueE.Reference;
if (objectOp._objectTypeEnum == InternalObjectTypeE.Object)
{
PRs._memberTypeEnum = InternalMemberTypeE.Field;
PRs._name = objectOp._name;
PRs._dtType = objectOp._dtType;
}
else
{
PRs._memberTypeEnum = InternalMemberTypeE.Item;
}
_objectReader.Parse(PRs);
}
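// Reads an ObjectNull record; for array items the record can represent a run of
// consecutive nulls, so the enclosing array position is advanced by the extra count.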
private void ReadObjectNull(BinaryHeaderEnum binaryHeaderEnum)
{
if (_objectNull == null)
{
_objectNull = new ObjectNull();
}
_objectNull.Read(this, binaryHeaderEnum);
ObjectProgress objectOp = (ObjectProgress)_stack.Peek();
PRs.Init();
PRs._parseTypeEnum = InternalParseTypeE.Member;
PRs._memberValueEnum = InternalMemberValueE.Null;
if (objectOp._objectTypeEnum == InternalObjectTypeE.Object)
{
PRs._memberTypeEnum = InternalMemberTypeE.Field;
PRs._name = objectOp._name;
PRs._dtType = objectOp._dtType;
}
else
{
PRs._memberTypeEnum = InternalMemberTypeE.Item;
PRs._consecutiveNullArrayEntryCount = _objectNull._nullCount;
// GetNext has already advanced the array position for one null entry;
// advance it here for the remaining nulls in the run.
objectOp.ArrayCountIncrement(_objectNull._nullCount - 1);
}
_objectReader.Parse(PRs);
}
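// Reads the MessageEnd record and verifies that no unfinished objects remain on the stack.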
private void ReadMessageEnd()
{
if (_messageEnd == null)
{
_messageEnd = new MessageEnd();
}
_messageEnd.Read(this);
if (!_stack.IsEmpty())
{
throw new SerializationException(SR.Serialization_StreamEnd);
}
}
// ReadValue from stream using InternalPrimitiveTypeE code
internal object ReadValue(InternalPrimitiveTypeE code)
{
object var = null;
switch (code)
{
case InternalPrimitiveTypeE.Boolean: var = ReadBoolean(); break;
case InternalPrimitiveTypeE.Byte: var = ReadByte(); break;
case InternalPrimitiveTypeE.Char: var = ReadChar(); break;
case InternalPrimitiveTypeE.Double: var = ReadDouble(); break;
case InternalPrimitiveTypeE.Int16: var = ReadInt16(); break;
case InternalPrimitiveTypeE.Int32: var = ReadInt32(); break;
case InternalPrimitiveTypeE.Int64: var = ReadInt64(); break;
case InternalPrimitiveTypeE.SByte: var = ReadSByte(); break;
case InternalPrimitiveTypeE.Single: var = ReadSingle(); break;
case InternalPrimitiveTypeE.UInt16: var = ReadUInt16(); break;
case InternalPrimitiveTypeE.UInt32: var = ReadUInt32(); break;
case InternalPrimitiveTypeE.UInt64: var = ReadUInt64(); break;
case InternalPrimitiveTypeE.Decimal: var = ReadDecimal(); break;
case InternalPrimitiveTypeE.TimeSpan: var = ReadTimeSpan(); break;
case InternalPrimitiveTypeE.DateTime: var = ReadDateTime(); break;
default: throw new SerializationException(SR.Format(SR.Serialization_TypeCode, code.ToString()));
}
return var;
}
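// Gets an ObjectProgress frame, reusing one from the pool when available.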
private ObjectProgress GetOp()
{
ObjectProgress op = null;
if (_opPool != null && !_opPool.IsEmpty())
{
op = (ObjectProgress)_opPool.Pop();
op.Init();
}
else
{
op = new ObjectProgress();
}
return op;
}
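// Returns an ObjectProgress frame to the pool for later reuse.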
private void PutOp(ObjectProgress op)
{
if (_opPool == null)
{
_opPool = new SerStack("opPool");
}
_opPool.Push(op);
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Data.Common;
using System.Data.SqlClient;
using System.Runtime.InteropServices;
namespace System.Data.SqlClient
{
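// P/Invoke declarations and managed helpers for the native SQL Server Network Interface (sni.dll).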
internal static partial class SNINativeMethodWrapper
{
private const string SNI = "sni.dll";
private static int s_sniMaxComposedSpnLength = -1;
private const int SniOpenTimeOut = -1; // infinite
[UnmanagedFunctionPointer(CallingConvention.StdCall)]
internal delegate void SqlAsyncCallbackDelegate(IntPtr m_ConsKey, IntPtr pPacket, uint dwError);
internal static int SniMaxComposedSpnLength
{
get
{
if (s_sniMaxComposedSpnLength == -1)
{
s_sniMaxComposedSpnLength = checked((int)GetSniMaxComposedSpnLength());
}
return s_sniMaxComposedSpnLength;
}
}
#region Structs\Enums
[StructLayout(LayoutKind.Sequential)]
internal struct ConsumerInfo
{
internal int defaultBufferSize;
internal SqlAsyncCallbackDelegate readDelegate;
internal SqlAsyncCallbackDelegate writeDelegate;
internal IntPtr key;
}
internal enum ConsumerNumber
{
SNI_Consumer_SNI,
SNI_Consumer_SSB,
SNI_Consumer_PacketIsReleased,
SNI_Consumer_Invalid,
}
internal enum IOType
{
READ,
WRITE,
}
internal enum PrefixEnum
{
UNKNOWN_PREFIX,
SM_PREFIX,
TCP_PREFIX,
NP_PREFIX,
VIA_PREFIX,
INVALID_PREFIX,
}
internal enum ProviderEnum
{
HTTP_PROV,
NP_PROV,
SESSION_PROV,
SIGN_PROV,
SM_PROV,
SMUX_PROV,
SSL_PROV,
TCP_PROV,
VIA_PROV,
MAX_PROVS,
INVALID_PROV,
}
internal enum QTypes
{
SNI_QUERY_CONN_INFO,
SNI_QUERY_CONN_BUFSIZE,
SNI_QUERY_CONN_KEY,
SNI_QUERY_CLIENT_ENCRYPT_POSSIBLE,
SNI_QUERY_SERVER_ENCRYPT_POSSIBLE,
SNI_QUERY_CERTIFICATE,
SNI_QUERY_LOCALDB_HMODULE,
SNI_QUERY_CONN_ENCRYPT,
SNI_QUERY_CONN_PROVIDERNUM,
SNI_QUERY_CONN_CONNID,
SNI_QUERY_CONN_PARENTCONNID,
SNI_QUERY_CONN_SECPKG,
SNI_QUERY_CONN_NETPACKETSIZE,
SNI_QUERY_CONN_NODENUM,
SNI_QUERY_CONN_PACKETSRECD,
SNI_QUERY_CONN_PACKETSSENT,
SNI_QUERY_CONN_PEERADDR,
SNI_QUERY_CONN_PEERPORT,
SNI_QUERY_CONN_LASTREADTIME,
SNI_QUERY_CONN_LASTWRITETIME,
SNI_QUERY_CONN_CONSUMER_ID,
SNI_QUERY_CONN_CONNECTTIME,
SNI_QUERY_CONN_HTTPENDPOINT,
SNI_QUERY_CONN_LOCALADDR,
SNI_QUERY_CONN_LOCALPORT,
SNI_QUERY_CONN_SSLHANDSHAKESTATE,
SNI_QUERY_CONN_SOBUFAUTOTUNING,
SNI_QUERY_CONN_SECPKGNAME,
SNI_QUERY_CONN_SECPKGMUTUALAUTH,
SNI_QUERY_CONN_CONSUMERCONNID,
SNI_QUERY_CONN_SNIUCI,
SNI_QUERY_CONN_SUPPORTS_EXTENDED_PROTECTION,
SNI_QUERY_CONN_CHANNEL_PROVIDES_AUTHENTICATION_CONTEXT,
SNI_QUERY_CONN_PEERID,
SNI_QUERY_CONN_SUPPORTS_SYNC_OVER_ASYNC,
}
internal enum TransparentNetworkResolutionMode : byte
{
DisabledMode = 0,
SequentialMode,
ParallelMode
};
[StructLayout(LayoutKind.Sequential)]
private struct Sni_Consumer_Info
{
public int DefaultUserDataLength;
public IntPtr ConsumerKey;
public IntPtr fnReadComp;
public IntPtr fnWriteComp;
public IntPtr fnTrace;
public IntPtr fnAcceptComp;
public uint dwNumProts;
public IntPtr rgListenInfo;
public IntPtr NodeAffinity;
}
[StructLayout(LayoutKind.Sequential)]
private unsafe struct SNI_CLIENT_CONSUMER_INFO
{
public Sni_Consumer_Info ConsumerInfo;
[MarshalAs(UnmanagedType.LPWStr)]
public string wszConnectionString;
public PrefixEnum networkLibrary;
public byte* szSPN;
public uint cchSPN;
public byte* szInstanceName;
public uint cchInstanceName;
[MarshalAs(UnmanagedType.Bool)]
public bool fOverrideLastConnectCache;
[MarshalAs(UnmanagedType.Bool)]
public bool fSynchronousConnection;
public int timeout;
[MarshalAs(UnmanagedType.Bool)]
public bool fParallel;
public TransparentNetworkResolutionMode transparentNetworkResolution;
public int totalTimeout;
public bool isAzureSqlServerEndpoint;
}
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
internal struct SNI_Error
{
internal ProviderEnum provider;
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 261)]
internal string errorMessage;
internal uint nativeError;
internal uint sniError;
[MarshalAs(UnmanagedType.LPWStr)]
internal string fileName;
[MarshalAs(UnmanagedType.LPWStr)]
internal string function;
internal uint lineNumber;
}
#endregion
#region DLL Imports
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNIAddProviderWrapper")]
internal static extern uint SNIAddProvider(SNIHandle pConn, ProviderEnum ProvNum, [In] ref uint pInfo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNICheckConnectionWrapper")]
internal static extern uint SNICheckConnection([In] SNIHandle pConn);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNICloseWrapper")]
internal static extern uint SNIClose(IntPtr pConn);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern void SNIGetLastError(out SNI_Error pErrorStruct);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern void SNIPacketRelease(IntPtr pPacket);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNIPacketResetWrapper")]
internal static extern void SNIPacketReset([In] SNIHandle pConn, IOType IOType, SNIPacket pPacket, ConsumerNumber ConsNum);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint SNIQueryInfo(QTypes QType, ref uint pbQInfo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint SNIQueryInfo(QTypes QType, ref IntPtr pbQInfo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNIReadAsyncWrapper")]
internal static extern uint SNIReadAsync(SNIHandle pConn, ref IntPtr ppNewPacket);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint SNIReadSyncOverAsync(SNIHandle pConn, ref IntPtr ppNewPacket, int timeout);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNIRemoveProviderWrapper")]
internal static extern uint SNIRemoveProvider(SNIHandle pConn, ProviderEnum ProvNum);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint SNISecInitPackage(ref uint pcbMaxToken);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNISetInfoWrapper")]
internal static extern uint SNISetInfo(SNIHandle pConn, QTypes QType, [In] ref uint pbQInfo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint SNITerminate();
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl, EntryPoint = "SNIWaitForSSLHandshakeToCompleteWrapper")]
internal static extern uint SNIWaitForSSLHandshakeToComplete([In] SNIHandle pConn, int dwMilliseconds);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
internal static extern uint UnmanagedIsTokenRestricted([In] IntPtr token, [MarshalAs(UnmanagedType.Bool)] out bool isRestricted);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint GetSniMaxComposedSpnLength();
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIGetInfoWrapper([In] SNIHandle pConn, SNINativeMethodWrapper.QTypes QType, out Guid pbQInfo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIInitialize([In] IntPtr pmo);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIOpenSyncExWrapper(ref SNI_CLIENT_CONSUMER_INFO pClientConsumerInfo, out IntPtr ppConn);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIOpenWrapper(
[In] ref Sni_Consumer_Info pConsumerInfo,
[MarshalAs(UnmanagedType.LPStr)] string szConnect,
[In] SNIHandle pConn,
out IntPtr ppConn,
[MarshalAs(UnmanagedType.Bool)] bool fSync);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern IntPtr SNIPacketAllocateWrapper([In] SafeHandle pConn, IOType IOType);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIPacketGetDataWrapper([In] IntPtr packet, [In, Out] byte[] readBuffer, uint readBufferLength, out uint dataSize);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern unsafe void SNIPacketSetData(SNIPacket pPacket, [In] byte* pbBuf, uint cbBuf);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern unsafe uint SNISecGenClientContextWrapper(
[In] SNIHandle pConn,
[In, Out] byte[] pIn,
uint cbIn,
[In, Out] byte[] pOut,
[In] ref uint pcbOut,
[MarshalAsAttribute(UnmanagedType.Bool)] out bool pfDone,
byte* szServerInfo,
uint cbServerInfo,
[MarshalAsAttribute(UnmanagedType.LPWStr)] string pwszUserName,
[MarshalAsAttribute(UnmanagedType.LPWStr)] string pwszPassword);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIWriteAsyncWrapper(SNIHandle pConn, [In] SNIPacket pPacket);
[DllImport(SNI, CallingConvention = CallingConvention.Cdecl)]
private static extern uint SNIWriteSyncOverAsync(SNIHandle pConn, [In] SNIPacket pPacket);
#endregion
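// Managed convenience wrappers over the raw P/Invoke entry points declared above.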
internal static uint SniGetConnectionId(SNIHandle pConn, ref Guid connId)
{
return SNIGetInfoWrapper(pConn, QTypes.SNI_QUERY_CONN_CONNID, out connId);
}
internal static uint SNIInitialize()
{
return SNIInitialize(IntPtr.Zero);
}
internal static unsafe uint SNIOpenMarsSession(ConsumerInfo consumerInfo, SNIHandle parent, ref IntPtr pConn, bool fSync)
{
// initialize consumer info for MARS
Sni_Consumer_Info native_consumerInfo = new Sni_Consumer_Info();
MarshalConsumerInfo(consumerInfo, ref native_consumerInfo);
return SNIOpenWrapper(ref native_consumerInfo, "session:", parent, out pConn, fSync);
}
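// Opens a physical connection via SNIOpenSyncExWrapper, pinning the instance-name buffer
// and the optional SPN buffer (integrated auth) for the duration of the native call.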
internal static unsafe uint SNIOpenSyncEx(ConsumerInfo consumerInfo, string constring, ref IntPtr pConn, byte[] spnBuffer, byte[] instanceName, bool fOverrideCache, bool fSync, int timeout, bool fParallel)
{
fixed (byte* pin_instanceName = &instanceName[0])
{
SNI_CLIENT_CONSUMER_INFO clientConsumerInfo = new SNI_CLIENT_CONSUMER_INFO();
// initialize client ConsumerInfo part first
MarshalConsumerInfo(consumerInfo, ref clientConsumerInfo.ConsumerInfo);
clientConsumerInfo.wszConnectionString = constring;
clientConsumerInfo.networkLibrary = PrefixEnum.UNKNOWN_PREFIX;
clientConsumerInfo.szInstanceName = pin_instanceName;
clientConsumerInfo.cchInstanceName = (uint)instanceName.Length;
clientConsumerInfo.fOverrideLastConnectCache = fOverrideCache;
clientConsumerInfo.fSynchronousConnection = fSync;
clientConsumerInfo.timeout = timeout;
clientConsumerInfo.fParallel = fParallel;
clientConsumerInfo.transparentNetworkResolution = TransparentNetworkResolutionMode.DisabledMode;
clientConsumerInfo.totalTimeout = SniOpenTimeOut;
clientConsumerInfo.isAzureSqlServerEndpoint = ADP.IsAzureSqlServerEndpoint(constring);
if (spnBuffer != null)
{
fixed (byte* pin_spnBuffer = &spnBuffer[0])
{
clientConsumerInfo.szSPN = pin_spnBuffer;
clientConsumerInfo.cchSPN = (uint)spnBuffer.Length;
return SNIOpenSyncExWrapper(ref clientConsumerInfo, out pConn);
}
}
else
{
// else leave szSPN null (SQL Auth)
return SNIOpenSyncExWrapper(ref clientConsumerInfo, out pConn);
}
}
}
internal static void SNIPacketAllocate(SafeHandle pConn, IOType IOType, ref IntPtr pPacket)
{
pPacket = SNIPacketAllocateWrapper(pConn, IOType);
}
internal static unsafe uint SNIPacketGetData(IntPtr packet, byte[] readBuffer, ref uint dataSize)
{
return SNIPacketGetDataWrapper(packet, readBuffer, (uint)readBuffer.Length, out dataSize);
}
internal static unsafe void SNIPacketSetData(SNIPacket packet, byte[] data, int length)
{
fixed (byte* pin_data = &data[0])
{
SNIPacketSetData(packet, pin_data, (uint)length);
}
}
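// Generates the next SSPI client security context token used for integrated authentication.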
internal static unsafe uint SNISecGenClientContext(SNIHandle pConnectionObject, byte[] inBuff, uint receivedLength, byte[] OutBuff, ref uint sendLength, byte[] serverUserName)
{
fixed (byte* pin_serverUserName = &serverUserName[0])
{
bool local_fDone;
return SNISecGenClientContextWrapper(
pConnectionObject,
inBuff,
receivedLength,
OutBuff,
ref sendLength,
out local_fDone,
pin_serverUserName,
(uint)serverUserName.Length,
null,
null);
}
}
internal static uint SNIWritePacket(SNIHandle pConn, SNIPacket packet, bool sync)
{
if (sync)
{
return SNIWriteSyncOverAsync(pConn, packet);
}
else
{
return SNIWriteAsyncWrapper(pConn, packet);
}
}
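// Copies the managed ConsumerInfo into the native Sni_Consumer_Info layout, converting the
// read/write completion delegates to function pointers.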
private static void MarshalConsumerInfo(ConsumerInfo consumerInfo, ref Sni_Consumer_Info native_consumerInfo)
{
native_consumerInfo.DefaultUserDataLength = consumerInfo.defaultBufferSize;
native_consumerInfo.fnReadComp = null != consumerInfo.readDelegate
? Marshal.GetFunctionPointerForDelegate(consumerInfo.readDelegate)
: IntPtr.Zero;
native_consumerInfo.fnWriteComp = null != consumerInfo.writeDelegate
? Marshal.GetFunctionPointerForDelegate(consumerInfo.writeDelegate)
: IntPtr.Zero;
native_consumerInfo.ConsumerKey = consumerInfo.key;
}
}
}
namespace System.Data
{
internal static partial class SafeNativeMethods
{
[DllImport("kernel32.dll", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, SetLastError = true)]
internal static extern IntPtr GetProcAddress(IntPtr HModule, [MarshalAs(UnmanagedType.LPStr), In] string funcName);
}
}
namespace System.Data
{
internal static class Win32NativeMethods
{
internal static bool IsTokenRestrictedWrapper(IntPtr token)
{
bool isRestricted;
uint result = SNINativeMethodWrapper.UnmanagedIsTokenRestricted(token, out isRestricted);
if (result != 0)
{
Marshal.ThrowExceptionForHR(unchecked((int)result));
}
return isRestricted;
}
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Microsoft.Build.Framework
{
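// Reference-assembly surface for Microsoft.Build.Framework: members are declared for their
// shape only, so bodies simply throw null.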
public delegate void AnyEventHandler(object sender, Microsoft.Build.Framework.BuildEventArgs e);
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct BuildEngineResult
{
public BuildEngineResult(bool result, System.Collections.Generic.List<System.Collections.Generic.IDictionary<string, Microsoft.Build.Framework.ITaskItem[]>> targetOutputsPerProject) { throw null;}
public bool Result { get { throw null; } }
public System.Collections.Generic.IList<System.Collections.Generic.IDictionary<string, Microsoft.Build.Framework.ITaskItem[]>> TargetOutputsPerProject { get { throw null; } }
}
public partial class BuildErrorEventArgs : Microsoft.Build.Framework.LazyFormattedBuildEventArgs
{
protected BuildErrorEventArgs() { }
public BuildErrorEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName) { }
public BuildErrorEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
public BuildErrorEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
public string Code { get { throw null; } }
public int ColumnNumber { get { throw null; } }
public int EndColumnNumber { get { throw null; } }
public int EndLineNumber { get { throw null; } }
public string File { get { throw null; } }
public int LineNumber { get { throw null; } }
public string ProjectFile { get { throw null; } set { } }
public string Subcategory { get { throw null; } }
}
public delegate void BuildErrorEventHandler(object sender, Microsoft.Build.Framework.BuildErrorEventArgs e);
public abstract partial class BuildEventArgs : System.EventArgs
{
protected BuildEventArgs() { }
protected BuildEventArgs(string message, string helpKeyword, string senderName) { }
protected BuildEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
public Microsoft.Build.Framework.BuildEventContext BuildEventContext { get { throw null; } set { } }
public string HelpKeyword { get { throw null; } }
public virtual string Message { get { throw null; } protected set { } }
public string SenderName { get { throw null; } }
public int ThreadId { get { throw null; } }
public System.DateTime Timestamp { get { throw null; } }
}
public partial class BuildEventContext
{
public const int InvalidEvaluationId = -1;
public const int InvalidNodeId = -2;
public const int InvalidProjectContextId = -2;
public const int InvalidProjectInstanceId = -1;
public const int InvalidSubmissionId = -1;
public const int InvalidTargetId = -1;
public const int InvalidTaskId = -1;
public BuildEventContext(int nodeId, int targetId, int projectContextId, int taskId) { }
public BuildEventContext(int nodeId, int projectInstanceId, int projectContextId, int targetId, int taskId) { }
public BuildEventContext(int submissionId, int nodeId, int projectInstanceId, int projectContextId, int targetId, int taskId) { }
public BuildEventContext(int submissionId, int nodeId, int evaluationId, int projectInstanceId, int projectContextId, int targetId, int taskId) { }
public long BuildRequestId { get { throw null; } }
public int EvaluationId { get { throw null; } }
public static Microsoft.Build.Framework.BuildEventContext Invalid { get { throw null; } }
public int NodeId { get { throw null; } }
public int ProjectContextId { get { throw null; } }
public int ProjectInstanceId { get { throw null; } }
public int SubmissionId { get { throw null; } }
public int TargetId { get { throw null; } }
public int TaskId { get { throw null; } }
public override bool Equals(object obj) { throw null; }
public override int GetHashCode() { throw null; }
public static bool operator ==(Microsoft.Build.Framework.BuildEventContext left, Microsoft.Build.Framework.BuildEventContext right) { throw null; }
public static bool operator !=(Microsoft.Build.Framework.BuildEventContext left, Microsoft.Build.Framework.BuildEventContext right) { throw null; }
}
public partial class BuildFinishedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected BuildFinishedEventArgs() { }
public BuildFinishedEventArgs(string message, string helpKeyword, bool succeeded) { }
public BuildFinishedEventArgs(string message, string helpKeyword, bool succeeded, System.DateTime eventTimestamp) { }
public BuildFinishedEventArgs(string message, string helpKeyword, bool succeeded, System.DateTime eventTimestamp, params object[] messageArgs) { }
public bool Succeeded { get { throw null; } }
}
public delegate void BuildFinishedEventHandler(object sender, Microsoft.Build.Framework.BuildFinishedEventArgs e);
public partial class BuildMessageEventArgs : Microsoft.Build.Framework.LazyFormattedBuildEventArgs
{
protected BuildMessageEventArgs() { }
public BuildMessageEventArgs(string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance) { }
public BuildMessageEventArgs(string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance, System.DateTime eventTimestamp) { }
public BuildMessageEventArgs(string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance, System.DateTime eventTimestamp, params object[] messageArgs) { }
public BuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance) { }
public BuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance, System.DateTime eventTimestamp) { }
public BuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, Microsoft.Build.Framework.MessageImportance importance, System.DateTime eventTimestamp, params object[] messageArgs) { }
public string Code { get { throw null; } }
public int ColumnNumber { get { throw null; } }
public int EndColumnNumber { get { throw null; } }
public int EndLineNumber { get { throw null; } }
public string File { get { throw null; } }
public Microsoft.Build.Framework.MessageImportance Importance { get { throw null; } }
public int LineNumber { get { throw null; } }
public string ProjectFile { get { throw null; } set { } }
public string Subcategory { get { throw null; } }
}
public delegate void BuildMessageEventHandler(object sender, Microsoft.Build.Framework.BuildMessageEventArgs e);
public partial class BuildStartedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected BuildStartedEventArgs() { }
public BuildStartedEventArgs(string message, string helpKeyword) { }
public BuildStartedEventArgs(string message, string helpKeyword, System.Collections.Generic.IDictionary<string, string> environmentOfBuild) { }
public BuildStartedEventArgs(string message, string helpKeyword, System.DateTime eventTimestamp) { }
public BuildStartedEventArgs(string message, string helpKeyword, System.DateTime eventTimestamp, params object[] messageArgs) { }
public System.Collections.Generic.IDictionary<string, string> BuildEnvironment { get { throw null; } }
}
public delegate void BuildStartedEventHandler(object sender, Microsoft.Build.Framework.BuildStartedEventArgs e);
public abstract partial class BuildStatusEventArgs : Microsoft.Build.Framework.LazyFormattedBuildEventArgs
{
protected BuildStatusEventArgs() { }
protected BuildStatusEventArgs(string message, string helpKeyword, string senderName) { }
protected BuildStatusEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
protected BuildStatusEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
}
public delegate void BuildStatusEventHandler(object sender, Microsoft.Build.Framework.BuildStatusEventArgs e);
public partial class BuildWarningEventArgs : Microsoft.Build.Framework.LazyFormattedBuildEventArgs
{
protected BuildWarningEventArgs() { }
public BuildWarningEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName) { }
public BuildWarningEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
public BuildWarningEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
public string Code { get { throw null; } }
public int ColumnNumber { get { throw null; } }
public int EndColumnNumber { get { throw null; } }
public int EndLineNumber { get { throw null; } }
public string File { get { throw null; } }
public int LineNumber { get { throw null; } }
public string ProjectFile { get { throw null; } set { } }
public string Subcategory { get { throw null; } }
}
public delegate void BuildWarningEventHandler(object sender, Microsoft.Build.Framework.BuildWarningEventArgs e);
public partial class CriticalBuildMessageEventArgs : Microsoft.Build.Framework.BuildMessageEventArgs
{
protected CriticalBuildMessageEventArgs() { }
public CriticalBuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName) { }
public CriticalBuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
public CriticalBuildMessageEventArgs(string subcategory, string code, string file, int lineNumber, int columnNumber, int endLineNumber, int endColumnNumber, string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
}
public abstract partial class CustomBuildEventArgs : Microsoft.Build.Framework.LazyFormattedBuildEventArgs
{
protected CustomBuildEventArgs() { }
protected CustomBuildEventArgs(string message, string helpKeyword, string senderName) { }
protected CustomBuildEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp) { }
protected CustomBuildEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
}
public delegate void CustomBuildEventHandler(object sender, Microsoft.Build.Framework.CustomBuildEventArgs e);
public partial class ExternalProjectFinishedEventArgs : Microsoft.Build.Framework.CustomBuildEventArgs
{
protected ExternalProjectFinishedEventArgs() { }
public ExternalProjectFinishedEventArgs(string message, string helpKeyword, string senderName, string projectFile, bool succeeded) { }
public ExternalProjectFinishedEventArgs(string message, string helpKeyword, string senderName, string projectFile, bool succeeded, System.DateTime eventTimestamp) { }
public string ProjectFile { get { throw null; } }
public bool Succeeded { get { throw null; } }
}
public partial class ExternalProjectStartedEventArgs : Microsoft.Build.Framework.CustomBuildEventArgs
{
protected ExternalProjectStartedEventArgs() { }
public ExternalProjectStartedEventArgs(string message, string helpKeyword, string senderName, string projectFile, string targetNames) { }
public ExternalProjectStartedEventArgs(string message, string helpKeyword, string senderName, string projectFile, string targetNames, System.DateTime eventTimestamp) { }
public string ProjectFile { get { throw null; } }
public string TargetNames { get { throw null; } }
}
public partial interface IBuildEngine
{
int ColumnNumberOfTaskNode { get; }
bool ContinueOnError { get; }
int LineNumberOfTaskNode { get; }
string ProjectFileOfTaskNode { get; }
bool BuildProjectFile(string projectFileName, string[] targetNames, System.Collections.IDictionary globalProperties, System.Collections.IDictionary targetOutputs);
void LogCustomEvent(Microsoft.Build.Framework.CustomBuildEventArgs e);
void LogErrorEvent(Microsoft.Build.Framework.BuildErrorEventArgs e);
void LogMessageEvent(Microsoft.Build.Framework.BuildMessageEventArgs e);
void LogWarningEvent(Microsoft.Build.Framework.BuildWarningEventArgs e);
}
public partial interface IBuildEngine2 : Microsoft.Build.Framework.IBuildEngine
{
bool IsRunningMultipleNodes { get; }
bool BuildProjectFile(string projectFileName, string[] targetNames, System.Collections.IDictionary globalProperties, System.Collections.IDictionary targetOutputs, string toolsVersion);
bool BuildProjectFilesInParallel(string[] projectFileNames, string[] targetNames, System.Collections.IDictionary[] globalProperties, System.Collections.IDictionary[] targetOutputsPerProject, string[] toolsVersion, bool useResultsCache, bool unloadProjectsOnCompletion);
}
public partial interface IBuildEngine3 : Microsoft.Build.Framework.IBuildEngine, Microsoft.Build.Framework.IBuildEngine2
{
Microsoft.Build.Framework.BuildEngineResult BuildProjectFilesInParallel(string[] projectFileNames, string[] targetNames, System.Collections.IDictionary[] globalProperties, System.Collections.Generic.IList<string>[] removeGlobalProperties, string[] toolsVersion, bool returnTargetOutputs);
void Reacquire();
void Yield();
}
public partial interface IBuildEngine4 : Microsoft.Build.Framework.IBuildEngine, Microsoft.Build.Framework.IBuildEngine2, Microsoft.Build.Framework.IBuildEngine3
{
object GetRegisteredTaskObject(object key, Microsoft.Build.Framework.RegisteredTaskObjectLifetime lifetime);
void RegisterTaskObject(object key, object obj, Microsoft.Build.Framework.RegisteredTaskObjectLifetime lifetime, bool allowEarlyCollection);
object UnregisterTaskObject(object key, Microsoft.Build.Framework.RegisteredTaskObjectLifetime lifetime);
}
public partial interface IBuildEngine5 : Microsoft.Build.Framework.IBuildEngine, Microsoft.Build.Framework.IBuildEngine2, Microsoft.Build.Framework.IBuildEngine3, Microsoft.Build.Framework.IBuildEngine4
{
void LogTelemetry(string eventName, System.Collections.Generic.IDictionary<string, string> properties);
}
public partial interface ICancelableTask : Microsoft.Build.Framework.ITask
{
void Cancel();
}
public partial interface IEventRedirector
{
void ForwardEvent(Microsoft.Build.Framework.BuildEventArgs buildEvent);
}
public partial interface IEventSource
{
event Microsoft.Build.Framework.AnyEventHandler AnyEventRaised;
event Microsoft.Build.Framework.BuildFinishedEventHandler BuildFinished;
event Microsoft.Build.Framework.BuildStartedEventHandler BuildStarted;
event Microsoft.Build.Framework.CustomBuildEventHandler CustomEventRaised;
event Microsoft.Build.Framework.BuildErrorEventHandler ErrorRaised;
event Microsoft.Build.Framework.BuildMessageEventHandler MessageRaised;
event Microsoft.Build.Framework.ProjectFinishedEventHandler ProjectFinished;
event Microsoft.Build.Framework.ProjectStartedEventHandler ProjectStarted;
event Microsoft.Build.Framework.BuildStatusEventHandler StatusEventRaised;
event Microsoft.Build.Framework.TargetFinishedEventHandler TargetFinished;
event Microsoft.Build.Framework.TargetStartedEventHandler TargetStarted;
event Microsoft.Build.Framework.TaskFinishedEventHandler TaskFinished;
event Microsoft.Build.Framework.TaskStartedEventHandler TaskStarted;
event Microsoft.Build.Framework.BuildWarningEventHandler WarningRaised;
}
public partial interface IEventSource2 : Microsoft.Build.Framework.IEventSource
{
event Microsoft.Build.Framework.TelemetryEventHandler TelemetryLogged;
}
public partial interface IEventSource3 : Microsoft.Build.Framework.IEventSource, Microsoft.Build.Framework.IEventSource2
{
void IncludeEvaluationMetaprojects();
void IncludeEvaluationProfiles();
void IncludeTaskInputs();
}
public partial interface IForwardingLogger : Microsoft.Build.Framework.ILogger, Microsoft.Build.Framework.INodeLogger
{
Microsoft.Build.Framework.IEventRedirector BuildEventRedirector { get; set; }
int NodeId { get; set; }
}
public partial interface IGeneratedTask : Microsoft.Build.Framework.ITask
{
object GetPropertyValue(Microsoft.Build.Framework.TaskPropertyInfo property);
void SetPropertyValue(Microsoft.Build.Framework.TaskPropertyInfo property, object value);
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
public partial interface ILogger
{
string Parameters { get; set; }
Microsoft.Build.Framework.LoggerVerbosity Verbosity { get; set; }
void Initialize(Microsoft.Build.Framework.IEventSource eventSource);
void Shutdown();
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
public partial interface INodeLogger : Microsoft.Build.Framework.ILogger
{
void Initialize(Microsoft.Build.Framework.IEventSource eventSource, int nodeCount);
}
public partial interface IProjectElement
{
string ElementName { get; }
string OuterElement { get; }
}
public partial interface ITask
{
Microsoft.Build.Framework.IBuildEngine BuildEngine { get; set; }
Microsoft.Build.Framework.ITaskHost HostObject { get; set; }
bool Execute();
}
public partial interface ITaskFactory
{
string FactoryName { get; }
System.Type TaskType { get; }
void CleanupTask(Microsoft.Build.Framework.ITask task);
Microsoft.Build.Framework.ITask CreateTask(Microsoft.Build.Framework.IBuildEngine taskFactoryLoggingHost);
Microsoft.Build.Framework.TaskPropertyInfo[] GetTaskParameters();
bool Initialize(string taskName, System.Collections.Generic.IDictionary<string, Microsoft.Build.Framework.TaskPropertyInfo> parameterGroup, string taskBody, Microsoft.Build.Framework.IBuildEngine taskFactoryLoggingHost);
}
public partial interface ITaskFactory2 : Microsoft.Build.Framework.ITaskFactory
{
Microsoft.Build.Framework.ITask CreateTask(Microsoft.Build.Framework.IBuildEngine taskFactoryLoggingHost, System.Collections.Generic.IDictionary<string, string> taskIdentityParameters);
bool Initialize(string taskName, System.Collections.Generic.IDictionary<string, string> factoryIdentityParameters, System.Collections.Generic.IDictionary<string, Microsoft.Build.Framework.TaskPropertyInfo> parameterGroup, string taskBody, Microsoft.Build.Framework.IBuildEngine taskFactoryLoggingHost);
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
[System.Runtime.InteropServices.GuidAttribute("9049A481-D0E9-414f-8F92-D4F67A0359A6")]
[System.Runtime.InteropServices.InterfaceTypeAttribute((System.Runtime.InteropServices.ComInterfaceType)(1))]
public partial interface ITaskHost
{
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
[System.Runtime.InteropServices.GuidAttribute("8661674F-2148-4F71-A92A-49875511C528")]
public partial interface ITaskItem
{
string ItemSpec { get; set; }
int MetadataCount { get; }
System.Collections.ICollection MetadataNames { get; }
System.Collections.IDictionary CloneCustomMetadata();
void CopyMetadataTo(Microsoft.Build.Framework.ITaskItem destinationItem);
string GetMetadata(string metadataName);
void RemoveMetadata(string metadataName);
void SetMetadata(string metadataName, string metadataValue);
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
[System.Runtime.InteropServices.GuidAttribute("ac6d5a59-f877-461b-88e3-b2f06fce0cb9")]
public partial interface ITaskItem2 : Microsoft.Build.Framework.ITaskItem
{
string EvaluatedIncludeEscaped { get; set; }
System.Collections.IDictionary CloneCustomMetadataEscaped();
string GetMetadataValueEscaped(string metadataName);
void SetMetadataValueLiteral(string metadataName, string metadataValue);
}
public partial class LazyFormattedBuildEventArgs : Microsoft.Build.Framework.BuildEventArgs
{
protected LazyFormattedBuildEventArgs() { }
public LazyFormattedBuildEventArgs(string message, string helpKeyword, string senderName) { }
public LazyFormattedBuildEventArgs(string message, string helpKeyword, string senderName, System.DateTime eventTimestamp, params object[] messageArgs) { }
public override string Message { get { throw null; } }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=true)]
public sealed partial class LoadInSeparateAppDomainAttribute : System.Attribute
{
public LoadInSeparateAppDomainAttribute() { }
}
public partial class LoggerException : System.Exception
{
public LoggerException() { }
protected LoggerException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
public LoggerException(string message) { }
public LoggerException(string message, System.Exception innerException) { }
public LoggerException(string message, System.Exception innerException, string errorCode, string helpKeyword) { }
public string ErrorCode { get { throw null; } }
public string HelpKeyword { get { throw null; } }
public override void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
public enum LoggerVerbosity
{
Detailed = 3,
Diagnostic = 4,
Minimal = 1,
Normal = 2,
Quiet = 0,
}
public enum MessageImportance
{
High = 0,
Low = 2,
Normal = 1,
}
public partial class MetaprojectGeneratedEventArgs : Microsoft.Build.Framework.BuildMessageEventArgs
{
public string metaprojectXml;
public MetaprojectGeneratedEventArgs(string metaprojectXml, string metaprojectPath, string message) { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(128), AllowMultiple=false, Inherited=false)]
public sealed partial class OutputAttribute : System.Attribute
{
public OutputAttribute() { }
}
public sealed partial class ProjectEvaluationFinishedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
public ProjectEvaluationFinishedEventArgs() { }
public ProjectEvaluationFinishedEventArgs(string message, params object[] messageArgs) { }
public System.Nullable<Microsoft.Build.Framework.Profiler.ProfilerResult> ProfilerResult { get { throw null; } set { } }
public string ProjectFile { get { throw null; } set { } }
}
public partial class ProjectEvaluationStartedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
public ProjectEvaluationStartedEventArgs() { }
public ProjectEvaluationStartedEventArgs(string message, params object[] messageArgs) { }
public string ProjectFile { get { throw null; } set { } }
}
public partial class ProjectFinishedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected ProjectFinishedEventArgs() { }
public ProjectFinishedEventArgs(string message, string helpKeyword, string projectFile, bool succeeded) { }
public ProjectFinishedEventArgs(string message, string helpKeyword, string projectFile, bool succeeded, System.DateTime eventTimestamp) { }
public string ProjectFile { get { throw null; } }
public bool Succeeded { get { throw null; } }
}
public delegate void ProjectFinishedEventHandler(object sender, Microsoft.Build.Framework.ProjectFinishedEventArgs e);
public partial class ProjectImportedEventArgs : Microsoft.Build.Framework.BuildMessageEventArgs
{
public ProjectImportedEventArgs() { }
public ProjectImportedEventArgs(int lineNumber, int columnNumber, string message, params object[] messageArgs) { }
public string ImportedProjectFile { get { throw null; } set { } }
public bool ImportIgnored { get { throw null; } set { } }
public string UnexpandedProject { get { throw null; } set { } }
}
public partial class ProjectStartedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
public const int InvalidProjectId = -1;
protected ProjectStartedEventArgs() { }
public ProjectStartedEventArgs(int projectId, string message, string helpKeyword, string projectFile, string targetNames, System.Collections.IEnumerable properties, System.Collections.IEnumerable items, Microsoft.Build.Framework.BuildEventContext parentBuildEventContext) { }
public ProjectStartedEventArgs(int projectId, string message, string helpKeyword, string projectFile, string targetNames, System.Collections.IEnumerable properties, System.Collections.IEnumerable items, Microsoft.Build.Framework.BuildEventContext parentBuildEventContext, System.Collections.Generic.IDictionary<string, string> globalProperties, string toolsVersion) { }
public ProjectStartedEventArgs(int projectId, string message, string helpKeyword, string projectFile, string targetNames, System.Collections.IEnumerable properties, System.Collections.IEnumerable items, Microsoft.Build.Framework.BuildEventContext parentBuildEventContext, System.DateTime eventTimestamp) { }
public ProjectStartedEventArgs(string message, string helpKeyword, string projectFile, string targetNames, System.Collections.IEnumerable properties, System.Collections.IEnumerable items) { }
public ProjectStartedEventArgs(string message, string helpKeyword, string projectFile, string targetNames, System.Collections.IEnumerable properties, System.Collections.IEnumerable items, System.DateTime eventTimestamp) { }
public System.Collections.Generic.IDictionary<string, string> GlobalProperties { get { throw null; } }
public System.Collections.IEnumerable Items { get { throw null; } }
public Microsoft.Build.Framework.BuildEventContext ParentProjectBuildEventContext { get { throw null; } }
public string ProjectFile { get { throw null; } }
public int ProjectId { get { throw null; } }
public System.Collections.IEnumerable Properties { get { throw null; } }
public string TargetNames { get { throw null; } }
public string ToolsVersion { get { throw null; } }
}
public delegate void ProjectStartedEventHandler(object sender, Microsoft.Build.Framework.ProjectStartedEventArgs e);
public enum RegisteredTaskObjectLifetime
{
AppDomain = 1,
Build = 0,
}
[System.AttributeUsageAttribute((System.AttributeTargets)(128), AllowMultiple=false, Inherited=false)]
public sealed partial class RequiredAttribute : System.Attribute
{
public RequiredAttribute() { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=false)]
public sealed partial class RequiredRuntimeAttribute : System.Attribute
{
public RequiredRuntimeAttribute(string runtimeVersion) { }
public string RuntimeVersion { get { throw null; } }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=false)]
public sealed partial class RunInMTAAttribute : System.Attribute
{
public RunInMTAAttribute() { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=false)]
public sealed partial class RunInSTAAttribute : System.Attribute
{
public RunInSTAAttribute() { }
}
public abstract partial class SdkLogger
{
protected SdkLogger() { }
public abstract void LogMessage(string message, Microsoft.Build.Framework.MessageImportance messageImportance=(Microsoft.Build.Framework.MessageImportance)(2));
}
public sealed partial class SdkReference : System.IEquatable<Microsoft.Build.Framework.SdkReference>
{
public SdkReference(string name, string version, string minimumVersion) { }
public string MinimumVersion { get { throw null; } }
public string Name { get { throw null; } }
public string Version { get { throw null; } }
public bool Equals(Microsoft.Build.Framework.SdkReference other) { throw null; }
public override bool Equals(object obj) { throw null; }
public override int GetHashCode() { throw null; }
public override string ToString() { throw null; }
public static bool TryParse(string sdk, out Microsoft.Build.Framework.SdkReference sdkReference) { sdkReference = default(Microsoft.Build.Framework.SdkReference); throw null; }
}
public abstract partial class SdkResolver
{
protected SdkResolver() { }
public abstract string Name { get; }
public abstract int Priority { get; }
public abstract Microsoft.Build.Framework.SdkResult Resolve(Microsoft.Build.Framework.SdkReference sdkReference, Microsoft.Build.Framework.SdkResolverContext resolverContext, Microsoft.Build.Framework.SdkResultFactory factory);
}
public abstract partial class SdkResolverContext
{
protected SdkResolverContext() { }
public virtual bool Interactive { get { throw null; } protected set { } }
public virtual Microsoft.Build.Framework.SdkLogger Logger { get { throw null; } protected set { } }
public virtual System.Version MSBuildVersion { get { throw null; } protected set { } }
public virtual string ProjectFilePath { get { throw null; } protected set { } }
public virtual string SolutionFilePath { get { throw null; } protected set { } }
public virtual object State { get { throw null; } set { } }
}
public abstract partial class SdkResult
{
protected SdkResult() { }
public virtual string Path { get { throw null; } protected set { } }
public virtual Microsoft.Build.Framework.SdkReference SdkReference { get { throw null; } protected set { } }
public virtual bool Success { get { throw null; } protected set { } }
public virtual string Version { get { throw null; } protected set { } }
}
public abstract partial class SdkResultFactory
{
protected SdkResultFactory() { }
public abstract Microsoft.Build.Framework.SdkResult IndicateFailure(System.Collections.Generic.IEnumerable<string> errors, System.Collections.Generic.IEnumerable<string> warnings=null);
public abstract Microsoft.Build.Framework.SdkResult IndicateSuccess(string path, string version, System.Collections.Generic.IEnumerable<string> warnings=null);
}
public enum TargetBuiltReason
{
AfterTargets = 3,
BeforeTargets = 1,
DependsOn = 2,
None = 0,
}
public partial class TargetFinishedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected TargetFinishedEventArgs() { }
public TargetFinishedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile, bool succeeded) { }
public TargetFinishedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile, bool succeeded, System.Collections.IEnumerable targetOutputs) { }
public TargetFinishedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile, bool succeeded, System.DateTime eventTimestamp, System.Collections.IEnumerable targetOutputs) { }
public string ProjectFile { get { throw null; } }
public bool Succeeded { get { throw null; } }
public string TargetFile { get { throw null; } }
public string TargetName { get { throw null; } }
public System.Collections.IEnumerable TargetOutputs { get { throw null; } set { } }
}
public delegate void TargetFinishedEventHandler(object sender, Microsoft.Build.Framework.TargetFinishedEventArgs e);
public partial class TargetSkippedEventArgs : Microsoft.Build.Framework.BuildMessageEventArgs
{
public TargetSkippedEventArgs() { }
public TargetSkippedEventArgs(string message, params object[] messageArgs) { }
public Microsoft.Build.Framework.TargetBuiltReason BuildReason { get { throw null; } set { } }
public string ParentTarget { get { throw null; } set { } }
public string TargetFile { get { throw null; } set { } }
public string TargetName { get { throw null; } set { } }
}
public partial class TargetStartedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected TargetStartedEventArgs() { }
public TargetStartedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile) { }
public TargetStartedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile, string parentTarget, Microsoft.Build.Framework.TargetBuiltReason buildReason, System.DateTime eventTimestamp) { }
public TargetStartedEventArgs(string message, string helpKeyword, string targetName, string projectFile, string targetFile, string parentTarget, System.DateTime eventTimestamp) { }
public Microsoft.Build.Framework.TargetBuiltReason BuildReason { get { throw null; } }
public string ParentTarget { get { throw null; } }
public string ProjectFile { get { throw null; } }
public string TargetFile { get { throw null; } }
public string TargetName { get { throw null; } }
}
public delegate void TargetStartedEventHandler(object sender, Microsoft.Build.Framework.TargetStartedEventArgs e);
public partial class TaskCommandLineEventArgs : Microsoft.Build.Framework.BuildMessageEventArgs
{
protected TaskCommandLineEventArgs() { }
public TaskCommandLineEventArgs(string commandLine, string taskName, Microsoft.Build.Framework.MessageImportance importance) { }
public TaskCommandLineEventArgs(string commandLine, string taskName, Microsoft.Build.Framework.MessageImportance importance, System.DateTime eventTimestamp) { }
public string CommandLine { get { throw null; } }
public string TaskName { get { throw null; } }
}
public partial class TaskFinishedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected TaskFinishedEventArgs() { }
public TaskFinishedEventArgs(string message, string helpKeyword, string projectFile, string taskFile, string taskName, bool succeeded) { }
public TaskFinishedEventArgs(string message, string helpKeyword, string projectFile, string taskFile, string taskName, bool succeeded, System.DateTime eventTimestamp) { }
public string ProjectFile { get { throw null; } }
public bool Succeeded { get { throw null; } }
public string TaskFile { get { throw null; } }
public string TaskName { get { throw null; } }
}
public delegate void TaskFinishedEventHandler(object sender, Microsoft.Build.Framework.TaskFinishedEventArgs e);
public partial class TaskPropertyInfo
{
public TaskPropertyInfo(string name, System.Type typeOfParameter, bool output, bool required) { }
public string Name { get { throw null; } }
public bool Output { get { throw null; } }
public System.Type PropertyType { get { throw null; } }
public bool Required { get { throw null; } }
}
public partial class TaskStartedEventArgs : Microsoft.Build.Framework.BuildStatusEventArgs
{
protected TaskStartedEventArgs() { }
public TaskStartedEventArgs(string message, string helpKeyword, string projectFile, string taskFile, string taskName) { }
public TaskStartedEventArgs(string message, string helpKeyword, string projectFile, string taskFile, string taskName, System.DateTime eventTimestamp) { }
public string ProjectFile { get { throw null; } }
public string TaskFile { get { throw null; } }
public string TaskName { get { throw null; } }
}
public delegate void TaskStartedEventHandler(object sender, Microsoft.Build.Framework.TaskStartedEventArgs e);
public sealed partial class TelemetryEventArgs : Microsoft.Build.Framework.BuildEventArgs
{
public TelemetryEventArgs() { }
public string EventName { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, string> Properties { get { throw null; } set { } }
}
public delegate void TelemetryEventHandler(object sender, Microsoft.Build.Framework.TelemetryEventArgs e);
}
namespace Microsoft.Build.Framework.Profiler
{
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct EvaluationLocation
{
public EvaluationLocation(Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationPassDescription, string file, System.Nullable<int> line, string elementName, string elementDescription, Microsoft.Build.Framework.Profiler.EvaluationLocationKind kind) { throw null;}
public EvaluationLocation(long id, System.Nullable<long> parentId, Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationPassDescription, string file, System.Nullable<int> line, string elementName, string elementDescription, Microsoft.Build.Framework.Profiler.EvaluationLocationKind kind) { throw null;}
public EvaluationLocation(System.Nullable<long> parentId, Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationPassDescription, string file, System.Nullable<int> line, string elementName, string elementDescription, Microsoft.Build.Framework.Profiler.EvaluationLocationKind kind) { throw null;}
public string ElementDescription { get { throw null; } }
public string ElementName { get { throw null; } }
public static Microsoft.Build.Framework.Profiler.EvaluationLocation EmptyLocation { get { throw null; } }
public Microsoft.Build.Framework.Profiler.EvaluationPass EvaluationPass { get { throw null; } }
public string EvaluationPassDescription { get { throw null; } }
public string File { get { throw null; } }
public long Id { get { throw null; } }
public bool IsEvaluationPass { get { throw null; } }
public Microsoft.Build.Framework.Profiler.EvaluationLocationKind Kind { get { throw null; } }
public System.Nullable<int> Line { get { throw null; } }
public System.Nullable<long> ParentId { get { throw null; } }
public static Microsoft.Build.Framework.Profiler.EvaluationLocation CreateLocationForAggregatedGlob() { throw null; }
public static Microsoft.Build.Framework.Profiler.EvaluationLocation CreateLocationForCondition(System.Nullable<long> parentId, Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationDescription, string file, System.Nullable<int> line, string condition) { throw null; }
public static Microsoft.Build.Framework.Profiler.EvaluationLocation CreateLocationForGlob(System.Nullable<long> parentId, Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationDescription, string file, System.Nullable<int> line, string globDescription) { throw null; }
public static Microsoft.Build.Framework.Profiler.EvaluationLocation CreateLocationForProject(System.Nullable<long> parentId, Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string evaluationDescription, string file, System.Nullable<int> line, Microsoft.Build.Framework.IProjectElement element) { throw null; }
public override bool Equals(object obj) { throw null; }
public override int GetHashCode() { throw null; }
public override string ToString() { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithEvaluationPass(Microsoft.Build.Framework.Profiler.EvaluationPass evaluationPass, string passDescription=null) { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithFile(string file) { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithFileLineAndCondition(string file, System.Nullable<int> line, string condition) { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithFileLineAndElement(string file, System.Nullable<int> line, Microsoft.Build.Framework.IProjectElement element) { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithGlob(string globDescription) { throw null; }
public Microsoft.Build.Framework.Profiler.EvaluationLocation WithParentId(System.Nullable<long> parentId) { throw null; }
}
public enum EvaluationLocationKind : byte
{
Condition = (byte)1,
Element = (byte)0,
Glob = (byte)2,
}
public enum EvaluationPass : byte
{
InitialProperties = (byte)2,
ItemDefinitionGroups = (byte)4,
Items = (byte)5,
LazyItems = (byte)6,
Properties = (byte)3,
Targets = (byte)8,
TotalEvaluation = (byte)0,
TotalGlobbing = (byte)1,
UsingTasks = (byte)7,
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct ProfiledLocation
{
public ProfiledLocation(System.TimeSpan inclusiveTime, System.TimeSpan exclusiveTime, int numberOfHits) { throw null;}
public System.TimeSpan ExclusiveTime { get { throw null; } }
public System.TimeSpan InclusiveTime { get { throw null; } }
public int NumberOfHits { get { throw null; } }
public override bool Equals(object obj) { throw null; }
public override int GetHashCode() { throw null; }
public override string ToString() { throw null; }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct ProfilerResult
{
public ProfilerResult(System.Collections.Generic.IDictionary<Microsoft.Build.Framework.Profiler.EvaluationLocation, Microsoft.Build.Framework.Profiler.ProfiledLocation> profiledLocations) { throw null;}
public System.Collections.Generic.IReadOnlyDictionary<Microsoft.Build.Framework.Profiler.EvaluationLocation, Microsoft.Build.Framework.Profiler.ProfiledLocation> ProfiledLocations { get { throw null; } }
public override bool Equals(object obj) { throw null; }
public override int GetHashCode() { throw null; }
}
}
| |
//https://github.com/Themaister/RetroArch/wiki/GLSL-shaders
//https://github.com/Themaister/Emulator-Shader-Pack/blob/master/Cg/README
//https://github.com/libretro/common-shaders/
using System;
using System.Linq;
using System.Diagnostics;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using System.Drawing;
using BizHawk.Common;
using BizHawk.Client.Common;
using BizHawk.Client.EmuHawk;
using BizHawk.Client.EmuHawk.FilterManager;
using BizHawk.Bizware.BizwareGL;
using BizHawk.Bizware.BizwareGL.Drivers.OpenTK;
using OpenTK;
using OpenTK.Graphics;
namespace BizHawk.Client.EmuHawk.Filters
{
public class RetroShaderChain : IDisposable
{
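//ResolveIncludes (below) textually expands shader include directives of the form
//  #include "file.inc"   or   #include <file.inc>
//(group 4 of rxInclude captures the file name). Includes are resolved recursively,
//relative to the directory of the file containing the directive.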
static System.Text.RegularExpressions.Regex rxInclude = new Regex(@"^(\s)?\#include(\s)+(""|<)(.*)?(""|>)", RegexOptions.Multiline | RegexOptions.IgnoreCase);
static string ResolveIncludes(string content, string baseDirectory)
{
for (; ; )
{
var match = rxInclude.Match(content);
if(match.Value == "") break;
string fname = match.Groups[4].Value;
fname = Path.Combine(baseDirectory,fname);
string includedContent = ResolveIncludes(File.ReadAllText(fname),Path.GetDirectoryName(fname));
content = content.Substring(0, match.Index) + includedContent + content.Substring(match.Index + match.Length);
}
return content;
}
public RetroShaderChain(IGL owner, RetroShaderPreset preset, string baseDirectory, bool debug = false)
{
Owner = owner;
this.Preset = preset;
Passes = preset.Passes.ToArray();
bool ok = true;
Errors = "";
//load up the shaders
Shaders = new RetroShader[preset.Passes.Count];
for (int i = 0; i < preset.Passes.Count; i++)
{
RetroShaderPreset.ShaderPass pass = preset.Passes[i];
//acquire content
string path = Path.Combine(baseDirectory, pass.ShaderPath);
if (!File.Exists(path))
{
ok = false;
break;
}
string content = ResolveIncludes(File.ReadAllText(path), Path.GetDirectoryName(path));
var shader = new RetroShader(Owner, content, debug);
Shaders[i] = shader;
if (!shader.Available)
{
Errors += string.Format("===================\r\nPass {0}:\r\n{1}",i,shader.Errors);
ok = false;
}
}
Available = ok;
}
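//Illustrative usage sketch (hypothetical preset path; the IGL instance would come from the host):
//  using (var stream = File.OpenRead(presetPath))
//  {
//      var preset = new RetroShaderPreset(stream);
//      var chain = new RetroShaderChain(gl, preset, Path.GetDirectoryName(presetPath));
//      if (!chain.Available) Console.WriteLine(chain.Errors);
//  }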
public void Dispose()
{
//todo
}
/// <summary>
/// Whether this shader chain is available (it won't be available if some resources failed to load or compile)
/// </summary>
public bool Available { get; private set; }
public string Errors { get; private set; }
public readonly IGL Owner;
public readonly RetroShaderPreset Preset;
public readonly RetroShader[] Shaders;
public readonly RetroShaderPreset.ShaderPass[] Passes;
}
public class RetroShaderPreset
{
/// <summary>
/// Parses an instance from a stream to a CGP file
/// </summary>
public RetroShaderPreset(Stream stream)
{
var content = new StreamReader(stream).ReadToEnd();
Dictionary<string, string> dict = new Dictionary<string, string>();
//parse the key-value-pair format of the file
content = content.Replace("\r", "");
foreach (var _line in content.Split('\n'))
{
var line = _line.Trim();
if (line.StartsWith("#")) continue; //lines that are solely comments
if (line == "") continue; //empty line
int eq = line.IndexOf('=');
var key = line.Substring(0, eq).Trim();
var value = line.Substring(eq + 1).Trim();
int quote = value.IndexOf('\"');
if (quote != -1)
value = value.Substring(quote + 1, value.IndexOf('\"', quote + 1) - (quote + 1));
else
{
//remove comments from the end of the value. This case is exclusive with the quoted case above, since a comment after a quoted string is already snipped off by the quoted-string extraction
int hash = value.IndexOf('#');
if (hash != -1)
value = value.Substring(0, hash);
value = value.Trim();
}
dict[key.ToLower()] = value;
}
//process the keys
int nShaders = FetchInt(dict, "shaders", 0);
for (int i = 0; i < nShaders; i++)
{
ShaderPass sp = new ShaderPass();
sp.Index = i;
Passes.Add(sp);
sp.InputFilterLinear = FetchBool(dict, "filter_linear" + i, false); //Should this value not be defined, the filtering option is implementation defined.
sp.OuputFloat = FetchBool(dict, "float_framebuffer" + i, false);
sp.FrameCountMod = FetchInt(dict, "frame_count_mod" + i, 1);
sp.ShaderPath = FetchString(dict, "shader" + i, "?"); //todo - change extension to .cg for better compatibility? just change .cg to .glsl transparently at last second?
//If no scale type is specified, it is assumed to be set to "source" with scaleN set to 1.0.
//It is possible to set scale_type_xN and scale_type_yN to specialize the scaling type in either direction. scale_typeN however overrides both of these.
sp.ScaleTypeX = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, "scale_type_x" + i, "Source"), true);
sp.ScaleTypeY = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, "scale_type_y" + i, "Source"), true);
ScaleType st = (ScaleType)Enum.Parse(typeof(ScaleType), FetchString(dict, "scale_type" + i, "NotSet"), true);
if (st != ScaleType.NotSet)
sp.ScaleTypeX = sp.ScaleTypeY = st;
//scaleN controls both scaling type in horizontal and vertical directions. If scaleN is defined, scale_xN and scale_yN have no effect.
sp.Scale.X = FetchFloat(dict, "scale_x" + i, 1);
sp.Scale.Y = FetchFloat(dict, "scale_y" + i, 1);
float scale = FetchFloat(dict, "scale" + i, -999);
if (scale != -999)
sp.Scale.X = sp.Scale.Y = FetchFloat(dict, "scale" + i, 1);
//TODO - LUTs
}
}
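//Illustrative (hypothetical) .cgp snippet in the key=value format parsed above:
//  shaders = 2
//  shader0 = crt/pass1.cg
//  filter_linear0 = true
//  scale_type0 = source
//  scale0 = "2.0"            # quoted values are allowed; trailing comments are stripped
//  shader1 = crt/pass2.cg
//  scale_type_x1 = viewport
//  scale_type_y1 = absolute
//  scale_y1 = 720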
public List<ShaderPass> Passes = new List<ShaderPass>();
/// <summary>
/// Indicates whether any of the passes contain GLSL filenames (these are invalid now)
/// </summary>
public bool ContainsGLSL
{
get
{
foreach (var pass in Passes)
if (Path.GetExtension(pass.ShaderPath).ToLowerInvariant() == ".glsl")
return true;
return false;
}
}
public enum ScaleType
{
NotSet, Source, Viewport, Absolute
}
public class ShaderPass
{
public int Index;
public string ShaderPath;
public bool InputFilterLinear;
public bool OuputFloat;
public int FrameCountMod;
public ScaleType ScaleTypeX;
public ScaleType ScaleTypeY;
public Vector2 Scale;
}
string FetchString(Dictionary<string, string> dict, string key, string @default)
{
string str;
if (dict.TryGetValue(key, out str))
return str;
else return @default;
}
int FetchInt(Dictionary<string, string> dict, string key, int @default)
{
string str;
if (dict.TryGetValue(key, out str))
return int.Parse(str);
else return @default;
}
float FetchFloat(Dictionary<string, string> dict, string key, float @default)
{
string str;
if (dict.TryGetValue(key, out str))
return float.Parse(str);
else return @default;
}
bool FetchBool(Dictionary<string, string> dict, string key, bool @default)
{
string str;
if (dict.TryGetValue(key, out str))
return ParseBool(str);
else return @default;
}
bool ParseBool(string value)
{
if (value == "1") return true;
if (value == "0") return false;
value = value.ToLower();
if (value == "true") return true;
if (value == "false") return false;
throw new InvalidOperationException("Unparseable bool in CGP file content");
}
}
public class RetroShaderPass : BaseFilter
{
RetroShaderChain RSC;
RetroShaderPreset.ShaderPass SP;
int RSI;
Size OutputSize;
public override string ToString()
{
return string.Format("RetroShaderPass[#{0}]", RSI);
}
public RetroShaderPass(RetroShaderChain RSC, int index)
{
this.RSC = RSC;
this.RSI = index;
this.SP = RSC.Passes[index];
}
public override void Initialize()
{
DeclareInput(SurfaceDisposition.Texture);
}
public override void SetInputFormat(string channel, SurfaceState state)
{
Size insize = state.SurfaceFormat.Size;
if (SP.ScaleTypeX == RetroShaderPreset.ScaleType.Absolute) OutputSize.Width = (int)SP.Scale.X;
if (SP.ScaleTypeY == RetroShaderPreset.ScaleType.Absolute) OutputSize.Height = (int)SP.Scale.Y;
if (SP.ScaleTypeX == RetroShaderPreset.ScaleType.Source) OutputSize.Width = (int)(insize.Width * SP.Scale.X);
if (SP.ScaleTypeY == RetroShaderPreset.ScaleType.Source) OutputSize.Height = (int)(insize.Height * SP.Scale.Y);
var outState = new SurfaceState();
outState.SurfaceFormat = new SurfaceFormat(OutputSize);
outState.SurfaceDisposition = SurfaceDisposition.RenderTarget;
DeclareOutput(outState);
}
public override Size PresizeOutput(string channel, Size size)
{
OutputSize = size;
return size;
}
public override Size PresizeInput(string channel, Size insize)
{
Size outsize = insize;
if (SP.ScaleTypeX == RetroShaderPreset.ScaleType.Absolute) outsize.Width = (int)SP.Scale.X;
if (SP.ScaleTypeY == RetroShaderPreset.ScaleType.Absolute) outsize.Height = (int)SP.Scale.Y;
if (SP.ScaleTypeX == RetroShaderPreset.ScaleType.Source) outsize.Width = (int)(insize.Width * SP.Scale.X);
if (SP.ScaleTypeY == RetroShaderPreset.ScaleType.Source) outsize.Height = (int)(insize.Height * SP.Scale.Y);
return outsize;
}
public override void Run()
{
var shader = RSC.Shaders[RSI];
shader.Bind();
//apply all parameters to this shader.. even if it was meant for other shaders. kind of lame.
if(Parameters != null)
foreach (var kvp in Parameters)
{
if (kvp.Value is float)
shader.Pipeline[kvp.Key].Set((float)kvp.Value);
}
var input = InputTexture;
if (SP.InputFilterLinear)
InputTexture.SetFilterLinear();
else
InputTexture.SetFilterNearest();
RSC.Shaders[RSI].Run(input, input.Size, OutputSize, InputTexture.IsUpsideDown);
//maintain invariant.. i think.
InputTexture.SetFilterNearest();
}
}
}
| |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace DocuSign.eSign.Model
{
/// <summary>
/// RadioGroup
/// </summary>
[DataContract]
public partial class RadioGroup : IEquatable<RadioGroup>, IValidatableObject
{
public RadioGroup()
{
// Empty Constructor
}
/// <summary>
/// Initializes a new instance of the <see cref="RadioGroup" /> class.
/// </summary>
/// <param name="ConditionalParentLabel">For conditional fields this is the TabLabel of the parent tab that controls this tab's visibility..</param>
/// <param name="ConditionalParentValue">For conditional fields, this is the value of the parent tab that controls the tab's visibility. If the parent tab is a Checkbox, Radio button, Optional Signature, or Optional Initial use \"on\" as the value to show that the parent tab is active. .</param>
/// <param name="DocumentId">Specifies the document ID number that the tab is placed on. This must refer to an existing Document's ID attribute..</param>
/// <param name="GroupName">The name of the group..</param>
/// <param name="Radios">Specifies the locations and status for radio buttons that are grouped together..</param>
/// <param name="RecipientId">Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document..</param>
/// <param name="RequireAll">When set to **true** and shared is true, information must be entered in this field to complete the envelope. .</param>
/// <param name="RequireInitialOnSharedChange">Optional element for field markup. When set to **true**, the signer is required to initial when they modify a shared field..</param>
/// <param name="Shared">When set to **true**, this custom tab is shared..</param>
public RadioGroup(string ConditionalParentLabel = default(string), string ConditionalParentValue = default(string), string DocumentId = default(string), string GroupName = default(string), List<Radio> Radios = default(List<Radio>), string RecipientId = default(string), string RequireAll = default(string), string RequireInitialOnSharedChange = default(string), string Shared = default(string))
{
this.ConditionalParentLabel = ConditionalParentLabel;
this.ConditionalParentValue = ConditionalParentValue;
this.DocumentId = DocumentId;
this.GroupName = GroupName;
this.Radios = Radios;
this.RecipientId = RecipientId;
this.RequireAll = RequireAll;
this.RequireInitialOnSharedChange = RequireInitialOnSharedChange;
this.Shared = Shared;
}
/// <summary>
/// For conditional fields this is the TabLabel of the parent tab that controls this tab's visibility.
/// </summary>
/// <value>For conditional fields this is the TabLabel of the parent tab that controls this tab's visibility.</value>
[DataMember(Name="conditionalParentLabel", EmitDefaultValue=false)]
public string ConditionalParentLabel { get; set; }
/// <summary>
/// For conditional fields, this is the value of the parent tab that controls the tab's visibility. If the parent tab is a Checkbox, Radio button, Optional Signature, or Optional Initial use \"on\" as the value to show that the parent tab is active.
/// </summary>
/// <value>For conditional fields, this is the value of the parent tab that controls the tab's visibility. If the parent tab is a Checkbox, Radio button, Optional Signature, or Optional Initial use \"on\" as the value to show that the parent tab is active. </value>
[DataMember(Name="conditionalParentValue", EmitDefaultValue=false)]
public string ConditionalParentValue { get; set; }
/// <summary>
/// Specifies the document ID number that the tab is placed on. This must refer to an existing Document's ID attribute.
/// </summary>
/// <value>Specifies the document ID number that the tab is placed on. This must refer to an existing Document's ID attribute.</value>
[DataMember(Name="documentId", EmitDefaultValue=false)]
public string DocumentId { get; set; }
/// <summary>
/// The name of the group.
/// </summary>
/// <value>The name of the group.</value>
[DataMember(Name="groupName", EmitDefaultValue=false)]
public string GroupName { get; set; }
/// <summary>
/// Specifies the locations and status for radio buttons that are grouped together.
/// </summary>
/// <value>Specifies the locations and status for radio buttons that are grouped together.</value>
[DataMember(Name="radios", EmitDefaultValue=false)]
public List<Radio> Radios { get; set; }
/// <summary>
/// Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document.
/// </summary>
/// <value>Unique for the recipient. It is used by the tab element to indicate which recipient is to sign the Document.</value>
[DataMember(Name="recipientId", EmitDefaultValue=false)]
public string RecipientId { get; set; }
/// <summary>
/// When set to **true** and shared is true, information must be entered in this field to complete the envelope.
/// </summary>
/// <value>When set to **true** and shared is true, information must be entered in this field to complete the envelope. </value>
[DataMember(Name="requireAll", EmitDefaultValue=false)]
public string RequireAll { get; set; }
/// <summary>
/// Optional element for field markup. When set to **true**, the signer is required to initial when they modify a shared field.
/// </summary>
/// <value>Optional element for field markup. When set to **true**, the signer is required to initial when they modify a shared field.</value>
[DataMember(Name="requireInitialOnSharedChange", EmitDefaultValue=false)]
public string RequireInitialOnSharedChange { get; set; }
/// <summary>
/// When set to **true**, this custom tab is shared.
/// </summary>
/// <value>When set to **true**, this custom tab is shared.</value>
[DataMember(Name="shared", EmitDefaultValue=false)]
public string Shared { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class RadioGroup {\n");
sb.Append(" ConditionalParentLabel: ").Append(ConditionalParentLabel).Append("\n");
sb.Append(" ConditionalParentValue: ").Append(ConditionalParentValue).Append("\n");
sb.Append(" DocumentId: ").Append(DocumentId).Append("\n");
sb.Append(" GroupName: ").Append(GroupName).Append("\n");
sb.Append(" Radios: ").Append(Radios).Append("\n");
sb.Append(" RecipientId: ").Append(RecipientId).Append("\n");
sb.Append(" RequireAll: ").Append(RequireAll).Append("\n");
sb.Append(" RequireInitialOnSharedChange: ").Append(RequireInitialOnSharedChange).Append("\n");
sb.Append(" Shared: ").Append(Shared).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as RadioGroup);
}
/// <summary>
/// Returns true if RadioGroup instances are equal
/// </summary>
/// <param name="other">Instance of RadioGroup to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(RadioGroup other)
{
// credit: http://stackoverflow.com/a/10454552/677735
if (other == null)
return false;
return
(
this.ConditionalParentLabel == other.ConditionalParentLabel ||
this.ConditionalParentLabel != null &&
this.ConditionalParentLabel.Equals(other.ConditionalParentLabel)
) &&
(
this.ConditionalParentValue == other.ConditionalParentValue ||
this.ConditionalParentValue != null &&
this.ConditionalParentValue.Equals(other.ConditionalParentValue)
) &&
(
this.DocumentId == other.DocumentId ||
this.DocumentId != null &&
this.DocumentId.Equals(other.DocumentId)
) &&
(
this.GroupName == other.GroupName ||
this.GroupName != null &&
this.GroupName.Equals(other.GroupName)
) &&
(
this.Radios == other.Radios ||
this.Radios != null &&
this.Radios.SequenceEqual(other.Radios)
) &&
(
this.RecipientId == other.RecipientId ||
this.RecipientId != null &&
this.RecipientId.Equals(other.RecipientId)
) &&
(
this.RequireAll == other.RequireAll ||
this.RequireAll != null &&
this.RequireAll.Equals(other.RequireAll)
) &&
(
this.RequireInitialOnSharedChange == other.RequireInitialOnSharedChange ||
this.RequireInitialOnSharedChange != null &&
this.RequireInitialOnSharedChange.Equals(other.RequireInitialOnSharedChange)
) &&
(
this.Shared == other.Shared ||
this.Shared != null &&
this.Shared.Equals(other.Shared)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
// credit: http://stackoverflow.com/a/263416/677735
unchecked // Overflow is fine, just wrap
{
int hash = 41;
// Suitable nullity checks etc, of course :)
if (this.ConditionalParentLabel != null)
hash = hash * 59 + this.ConditionalParentLabel.GetHashCode();
if (this.ConditionalParentValue != null)
hash = hash * 59 + this.ConditionalParentValue.GetHashCode();
if (this.DocumentId != null)
hash = hash * 59 + this.DocumentId.GetHashCode();
if (this.GroupName != null)
hash = hash * 59 + this.GroupName.GetHashCode();
if (this.Radios != null)
hash = hash * 59 + this.Radios.GetHashCode();
if (this.RecipientId != null)
hash = hash * 59 + this.RecipientId.GetHashCode();
if (this.RequireAll != null)
hash = hash * 59 + this.RequireAll.GetHashCode();
if (this.RequireInitialOnSharedChange != null)
hash = hash * 59 + this.RequireInitialOnSharedChange.GetHashCode();
if (this.Shared != null)
hash = hash * 59 + this.Shared.GetHashCode();
return hash;
}
}
public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Lucene.Net.Analysis.Tokenattributes;
using Lucene.Net.Documents;
using Lucene.Net.Util;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
using TokenFilter = Lucene.Net.Analysis.TokenFilter;
using TokenStream = Lucene.Net.Analysis.TokenStream;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Index = Lucene.Net.Documents.Field.Index;
using Store = Lucene.Net.Documents.Field.Store;
using TermVector = Lucene.Net.Documents.Field.TermVector;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using AttributeSource = Lucene.Net.Util.AttributeSource;
using BaseTokenStreamTestCase = Lucene.Net.Test.Analysis.BaseTokenStreamTestCase;
using _TestUtil = Lucene.Net.Util._TestUtil;
namespace Lucene.Net.Index
{
[TestFixture]
public class TestDocumentWriter : LuceneTestCase
{
private class AnonymousClassAnalyzer:Analyzer
{
public AnonymousClassAnalyzer(TestDocumentWriter enclosingInstance)
{
InitBlock(enclosingInstance);
}
private void InitBlock(TestDocumentWriter enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
}
private TestDocumentWriter enclosingInstance;
public TestDocumentWriter Enclosing_Instance
{
get
{
return enclosingInstance;
}
}
public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
return new WhitespaceTokenizer(reader);
}
public override int GetPositionIncrementGap(System.String fieldName)
{
return 500;
}
}
private class AnonymousClassAnalyzer1:Analyzer
{
public AnonymousClassAnalyzer1(TestDocumentWriter enclosingInstance)
{
InitBlock(enclosingInstance);
}
private class AnonymousClassTokenFilter:TokenFilter
{
private void InitBlock(AnonymousClassAnalyzer1 enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
termAtt = AddAttribute<ITermAttribute>();
payloadAtt = AddAttribute<IPayloadAttribute>();
posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
}
private AnonymousClassAnalyzer1 enclosingInstance;
public AnonymousClassAnalyzer1 Enclosing_Instance
{
get
{
return enclosingInstance;
}
}
internal AnonymousClassTokenFilter(AnonymousClassAnalyzer1 enclosingInstance, Lucene.Net.Analysis.TokenStream Param1):base(Param1)
{
InitBlock(enclosingInstance);
}
internal bool first = true;
internal AttributeSource.State state;
public override bool IncrementToken()
{
if (state != null)
{
RestoreState(state);
payloadAtt.Payload = null;
posIncrAtt.PositionIncrement = 0;
termAtt.SetTermBuffer(new char[]{'b'}, 0, 1);
state = null;
return true;
}
bool hasNext = input.IncrementToken();
if (!hasNext)
return false;
if (System.Char.IsDigit(termAtt.TermBuffer()[0]))
{
posIncrAtt.PositionIncrement = termAtt.TermBuffer()[0] - '0';
}
if (first)
{
// set payload on first position only
payloadAtt.Payload = new Payload(new byte[]{100});
first = false;
}
// index a "synonym" for every token
state = CaptureState();
return true;
}
internal ITermAttribute termAtt;
internal IPayloadAttribute payloadAtt;
internal IPositionIncrementAttribute posIncrAtt;
}
private void InitBlock(TestDocumentWriter enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
}
private TestDocumentWriter enclosingInstance;
public TestDocumentWriter Enclosing_Instance
{
get
{
return enclosingInstance;
}
}
public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
return new AnonymousClassTokenFilter(this, new WhitespaceTokenizer(reader));
}
}
private class AnonymousClassTokenStream:TokenStream
{
public AnonymousClassTokenStream(TestDocumentWriter enclosingInstance)
{
InitBlock(enclosingInstance);
}
private void InitBlock(TestDocumentWriter enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
termAtt = AddAttribute<ITermAttribute>();
}
private TestDocumentWriter enclosingInstance;
public TestDocumentWriter Enclosing_Instance
{
get
{
return enclosingInstance;
}
}
private System.String[] tokens = new System.String[]{"term1", "term2", "term3", "term2"};
private int index = 0;
private ITermAttribute termAtt;
public override bool IncrementToken()
{
if (index == tokens.Length)
{
return false;
}
else
{
ClearAttributes();
termAtt.SetTermBuffer(tokens[index++]);
return true;
}
}
protected override void Dispose(bool disposing)
{
// Do Nothing
}
}
private RAMDirectory dir;
public TestDocumentWriter(System.String s):base(s)
{
}
public TestDocumentWriter() : base("")
{
}
[SetUp]
public override void SetUp()
{
base.SetUp();
dir = new RAMDirectory();
}
[Test]
public virtual void Test()
{
Assert.IsTrue(dir != null);
}
[Test]
public virtual void TestAddDocument()
{
Document testDoc = new Document();
DocHelper.SetupDoc(testDoc);
Analyzer analyzer = new WhitespaceAnalyzer();
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(testDoc);
writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
//After adding the document, we should be able to read it back in
SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
Assert.IsTrue(reader != null);
Document doc = reader.Document(0);
Assert.IsTrue(doc != null);
//System.out.println("Document: " + doc);
IFieldable[] fields = doc.GetFields("textField2");
Assert.IsTrue(fields != null && fields.Length == 1);
Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_2_TEXT));
Assert.IsTrue(fields[0].IsTermVectorStored);
fields = doc.GetFields("textField1");
Assert.IsTrue(fields != null && fields.Length == 1);
Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_1_TEXT));
Assert.IsFalse(fields[0].IsTermVectorStored);
fields = doc.GetFields("keyField");
Assert.IsTrue(fields != null && fields.Length == 1);
Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.KEYWORD_TEXT));
fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
Assert.IsTrue(fields != null && fields.Length == 1);
Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.NO_NORMS_TEXT));
fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
Assert.IsTrue(fields != null && fields.Length == 1);
Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_3_TEXT));
// test that the norms are not present in the segment if
// omitNorms is true
for (int i = 0; i < reader.core_ForNUnit.fieldInfos_ForNUnit.Size(); i++)
{
FieldInfo fi = reader.core_ForNUnit.fieldInfos_ForNUnit.FieldInfo(i);
if (fi.isIndexed_ForNUnit)
{
Assert.IsTrue(fi.omitNorms_ForNUnit == !reader.HasNorms(fi.name_ForNUnit));
}
}
}
[Test]
public virtual void TestPositionIncrementGap()
{
Analyzer analyzer = new AnonymousClassAnalyzer(this);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("repeated", "repeated"));
Assert.IsTrue(termPositions.Next());
int freq = termPositions.Freq;
Assert.AreEqual(2, freq);
Assert.AreEqual(0, termPositions.NextPosition());
Assert.AreEqual(502, termPositions.NextPosition());
}
[Test]
public virtual void TestTokenReuse()
{
Analyzer analyzer = new AnonymousClassAnalyzer1(this);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
writer.AddDocument(doc);
writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("f1", "a"));
Assert.IsTrue(termPositions.Next());
int freq = termPositions.Freq;
Assert.AreEqual(3, freq);
Assert.AreEqual(0, termPositions.NextPosition());
Assert.AreEqual(true, termPositions.IsPayloadAvailable);
Assert.AreEqual(6, termPositions.NextPosition());
Assert.AreEqual(false, termPositions.IsPayloadAvailable);
Assert.AreEqual(7, termPositions.NextPosition());
Assert.AreEqual(false, termPositions.IsPayloadAvailable);
}
[Test]
public virtual void TestPreAnalyzedField()
{
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.Add(new Field("preanalyzed", new AnonymousClassTokenStream(this), TermVector.NO));
writer.AddDocument(doc);
writer.Commit();
SegmentInfo info = writer.NewestSegment();
writer.Close();
SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
TermPositions termPositions = reader.TermPositions(new Term("preanalyzed", "term1"));
Assert.IsTrue(termPositions.Next());
Assert.AreEqual(1, termPositions.Freq);
Assert.AreEqual(0, termPositions.NextPosition());
termPositions.Seek(new Term("preanalyzed", "term2"));
Assert.IsTrue(termPositions.Next());
Assert.AreEqual(2, termPositions.Freq);
Assert.AreEqual(1, termPositions.NextPosition());
Assert.AreEqual(3, termPositions.NextPosition());
termPositions.Seek(new Term("preanalyzed", "term3"));
Assert.IsTrue(termPositions.Next());
Assert.AreEqual(1, termPositions.Freq);
Assert.AreEqual(2, termPositions.NextPosition());
}
/// <summary> Test adding two fields with the same name, but
/// with different term vector setting (LUCENE-766).
/// </summary>
[Test]
public virtual void TestMixedTermVectorSettingsSameField()
{
Document doc = new Document();
// f1 first without tv then with tv
doc.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
// f2 first with tv then without tv
doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Close();
_TestUtil.CheckIndex(dir);
IndexReader reader = IndexReader.Open(dir, true);
// f1
ITermFreqVector tfv1 = reader.GetTermFreqVector(0, "f1");
Assert.IsNotNull(tfv1);
Assert.AreEqual(2, tfv1.GetTerms().Length, "the 'with_tv' setting should rule!");
// f2
ITermFreqVector tfv2 = reader.GetTermFreqVector(0, "f2");
Assert.IsNotNull(tfv2);
Assert.AreEqual(2, tfv2.GetTerms().Length, "the 'with_tv' setting should rule!");
}
/// <summary> Test adding two fields with the same name, one indexed
/// the other stored only. The omitNorms and omitTermFreqAndPositions setting
/// of the stored field should not affect the indexed one (LUCENE-1590)
/// </summary>
[Test]
public virtual void TestLUCENE_1590()
{
Document doc = new Document();
// f1 has no norms
doc.Add(new Field("f1", "v1", Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.NO));
// f2 has no TF
Field f = new Field("f2", "v1", Field.Store.NO, Field.Index.ANALYZED);
f.OmitTermFreqAndPositions = true;
doc.Add(f);
doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NO));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.AddDocument(doc);
writer.Optimize(); // be sure to have a single segment
writer.Close();
_TestUtil.CheckIndex(dir);
SegmentReader reader = SegmentReader.GetOnlySegmentReader(dir);
FieldInfos fi = reader.FieldInfos();
// f1
Assert.IsFalse(reader.HasNorms("f1"), "f1 should have no norms");
Assert.IsFalse(fi.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "omitTermFreqAndPositions field bit should not be set for f1");
// f2
Assert.IsTrue(reader.HasNorms("f2"), "f2 should have norms");
Assert.IsTrue(fi.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "omitTermFreqAndPositions field bit should be set for f2");
}
}
}
| |
//
// XmlDsigXPathTransform.cs -
// XmlDsigXPathTransform implementation for XML Signature
// http://www.w3.org/TR/1999/REC-xpath-19991116
//
// Author:
// Sebastien Pouliot <[email protected]>
// Atsushi Enomoto <[email protected]>
//
// (C) 2002, 2003 Motus Technologies Inc. (http://www.motus.com)
// Copyright (C) 2004-2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Collections;
using System.IO;
using System.Text;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Xsl;
namespace System.Security.Cryptography.Xml
{
// www.w3.org/TR/xmldsig-core/
// see Section 6.6.3 of the XMLDSIG specification
public class XmlDsigXPathTransform : Transform
{
private Type [] input;
private Type [] output;
private XmlNodeList xpath;
private XmlDocument doc;
private XsltContext ctx;
public XmlDsigXPathTransform ()
{
Algorithm = XmlSignature.AlgorithmNamespaces.XmlDsigXPathTransform;
}
public override Type [] InputTypes {
get {
if (input == null) {
input = new Type [3];
input [0] = typeof (System.IO.Stream);
input [1] = typeof (System.Xml.XmlDocument);
input [2] = typeof (System.Xml.XmlNodeList);
}
return input;
}
}
public override Type[] OutputTypes {
get {
if (output == null) {
// this way the result is cached if called multiple times
output = new Type [1];
output [0] = typeof (System.Xml.XmlNodeList);
}
return output;
}
}
protected override XmlNodeList GetInnerXml ()
{
if (xpath == null) {
// default value
XmlDocument xpdoc = new XmlDocument ();
xpdoc.LoadXml ("<XPath xmlns=\"" + XmlSignature.NamespaceURI + "\"></XPath>");
xpath = xpdoc.ChildNodes;
}
return xpath;
}
[MonoTODO ("Evaluation of extension function here() results in different from MS.NET (is MS.NET really correct??).")]
public override object GetOutput ()
{
#if NET_2_0
if ((xpath == null) || (doc == null))
return new XmlDsigNodeList (new ArrayList ());
#else
if (xpath == null)
return new XmlDsigNodeList (new ArrayList ());
#endif
// evaluate every time since input or xpath might have changed.
string x = null;
for (int i = 0; i < xpath.Count; i++) {
switch (xpath [i].NodeType) {
case XmlNodeType.Text:
case XmlNodeType.CDATA:
case XmlNodeType.Element:
x += xpath [i].InnerText;
break;
}
}
ctx = new XmlDsigXPathContext (doc);
foreach (XmlNode n in xpath) {
XPathNavigator nav = n.CreateNavigator ();
XPathNodeIterator iter = nav.Select ("namespace::*");
while (iter.MoveNext ())
if (iter.Current.LocalName != "xml")
ctx.AddNamespace (iter.Current.LocalName, iter.Current.Value);
}
return EvaluateMatch (doc, x);
}
public override object GetOutput (Type type)
{
if (type != typeof (XmlNodeList))
throw new ArgumentException ("type");
return GetOutput ();
}
private XmlDsigNodeList EvaluateMatch (XmlNode n, string xpath)
{
ArrayList al = new ArrayList ();
// Strictly speaking, the document node is explicitly
// excluded by the W3C spec (the context node is initialized
// to the document root and the XPath expression is
// "//. | //@* | //namespace::*").
XPathNavigator nav = n.CreateNavigator ();
XPathExpression exp = nav.Compile (xpath);
exp.SetContext (ctx);
EvaluateMatch (n, exp, al);
return new XmlDsigNodeList (al);
}
private void EvaluateMatch (XmlNode n, XPathExpression exp, ArrayList al)
{
if (NodeMatches (n, exp))
al.Add (n);
if (n.Attributes != null)
for (int i = 0; i < n.Attributes.Count; i++)
if (NodeMatches (n.Attributes [i], exp))
al.Add (n.Attributes [i]);
for (int i = 0; i < n.ChildNodes.Count; i++)
EvaluateMatch (n.ChildNodes [i], exp, al);
}
private bool NodeMatches (XmlNode n, XPathExpression exp)
{
// This looks like a waste of memory since it creates an
// XPathNavigator every time, but even if we used
// XPathNodeIterator.Current, it would also clone every time.
object ret = n.CreateNavigator ().Evaluate (exp);
if (ret is bool)
return (bool) ret;
if (ret is double) {
double d = (double) ret;
return !(d == 0.0 || Double.IsNaN (d));
}
if (ret is string)
return ((string) ret).Length > 0;
if (ret is XPathNodeIterator) {
XPathNodeIterator retiter = (XPathNodeIterator) ret;
return retiter.Count > 0;
}
return false;
}
public override void LoadInnerXml (XmlNodeList nodeList)
{
if (nodeList == null)
throw new CryptographicException ("nodeList");
xpath = nodeList;
}
public override void LoadInput (object obj)
{
// possible input: Stream, XmlDocument, and XmlNodeList
if (obj is Stream) {
doc = new XmlDocument ();
doc.PreserveWhitespace = true;
doc.XmlResolver = GetResolver ();
doc.Load (new XmlSignatureStreamReader (
new StreamReader ((Stream) obj)));
}
else if (obj is XmlDocument) {
doc = (obj as XmlDocument);
}
else if (obj is XmlNodeList) {
doc = new XmlDocument ();
doc.XmlResolver = GetResolver ();
foreach (XmlNode xn in (obj as XmlNodeList)) {
XmlNode importedNode = doc.ImportNode (xn, true);
doc.AppendChild (importedNode);
}
}
}
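// Illustrative usage sketch (hypothetical document and XPath expression):
//   XmlDsigXPathTransform t = new XmlDsigXPathTransform ();
//   XmlDocument xp = new XmlDocument ();
//   xp.LoadXml ("<XPath xmlns=\"" + XmlSignature.NamespaceURI + "\">not(ancestor-or-self::Signature)</XPath>");
//   t.LoadInnerXml (xp.ChildNodes);
//   t.LoadInput (signedDocument);                     // a Stream, XmlDocument or XmlNodeList
//   XmlNodeList nodes = (XmlNodeList) t.GetOutput (); // nodes selected by the XPath filter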
// Internal classes to support XPath extension function here()
internal class XmlDsigXPathContext : XsltContext
{
XmlDsigXPathFunctionHere here;
public XmlDsigXPathContext (XmlNode node)
{
here = new XmlDsigXPathFunctionHere (node);
}
public override IXsltContextFunction ResolveFunction (
string prefix, string name, XPathResultType [] argType)
{
// Here MS.NET incorrectly allows an arbitrary
// name, e.g. "heretic()".
if (name == "here" &&
prefix == String.Empty &&
argType.Length == 0)
return here;
else
return null; // ????
}
public override bool Whitespace {
get { return true; }
}
public override bool PreserveWhitespace (XPathNavigator node)
{
return true;
}
public override int CompareDocument (string s1, string s2)
{
return String.Compare (s1, s2);
}
public override IXsltContextVariable ResolveVariable (string prefix, string name)
{
throw new InvalidOperationException ();
}
}
internal class XmlDsigXPathFunctionHere : IXsltContextFunction
{
// Static
static XPathResultType [] types;
static XmlDsigXPathFunctionHere ()
{
types = new XPathResultType [0];
}
// Instance
XPathNodeIterator xpathNode;
public XmlDsigXPathFunctionHere (XmlNode node)
{
xpathNode = node.CreateNavigator ().Select (".");
}
public XPathResultType [] ArgTypes {
get { return types; }
}
public int Maxargs { get { return 0; } }
public int Minargs { get { return 0; } }
public XPathResultType ReturnType {
get { return XPathResultType.NodeSet; }
}
public object Invoke (XsltContext ctx, object [] args, XPathNavigator docContext)
{
if (args.Length != 0)
throw new ArgumentException ("Not allowed arguments for function here().", "args");
return xpathNode.Clone ();
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Xml;
using System.Diagnostics;
namespace DPStressHarness
{
public class Logger
{
private const string _resultDocumentName = "perfout.xml";
private XmlDocument _doc;
private XmlElement _runElem;
private XmlElement _testElem;
public Logger(string runLabel, bool isOfficial, string milestone, string branch)
{
_doc = GetTestResultDocument();
_runElem = GetRunElement(_doc, runLabel, DateTime.Now.ToString(), isOfficial, milestone, branch);
Process currentProcess = Process.GetCurrentProcess();
AddRunMetric(Constants.RUN_PROCESS_MACHINE_NAME, currentProcess.MachineName);
AddRunMetric(Constants.RUN_DNS_HOST_NAME, System.Net.Dns.GetHostName());
AddRunMetric(Constants.RUN_IDENTITY_NAME, System.Security.Principal.WindowsIdentity.GetCurrent().Name);
AddRunMetric(Constants.RUN_METRIC_PROCESSOR_COUNT, Environment.ProcessorCount.ToString());
}
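// Illustrative usage sketch (hypothetical run label and metric names):
//   var logger = new Logger("nightly-run", false, null, null);
//   logger.AddTest("OpenConnectionStress");
//   logger.AddTestMetric("Throughput", "1234.5", "requests/sec", true);
//   logger.Save();   // writes the accumulated results to perfout.xml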
public void AddRunMetric(string metricName, string metricValue)
{
Debug.Assert(_runElem != null);
if (metricValue.Equals(String.Empty))
return;
AddRunMetricElement(_runElem, metricName, metricValue);
}
public void AddTest(string testName)
{
Debug.Assert(_runElem != null);
_testElem = AddTestElement(_runElem, testName);
}
public void AddTestMetric(string metricName, string metricValue, string metricUnits)
{
AddTestMetric(metricName, metricValue, metricUnits, null);
}
public void AddTestMetric(string metricName, string metricValue, string metricUnits, bool? isHigherBetter)
{
Debug.Assert(_runElem != null);
Debug.Assert(_testElem != null);
if (metricValue.Equals(String.Empty))
return;
AddTestMetricElement(_testElem, metricName, metricValue, metricUnits, isHigherBetter);
}
public void AddTestException(string exceptionData)
{
Debug.Assert(_runElem != null);
Debug.Assert(_testElem != null);
AddTestExceptionElement(_testElem, exceptionData);
}
public void Save()
{
FileStream resultDocumentStream = new FileStream(_resultDocumentName, FileMode.Create);
_doc.Save(resultDocumentStream);
resultDocumentStream.Dispose();
}
private static XmlDocument GetTestResultDocument()
{
if (File.Exists(_resultDocumentName))
{
XmlDocument doc = new XmlDocument();
FileStream resultDocumentStream = new FileStream(_resultDocumentName, FileMode.Open, FileAccess.Read);
doc.Load(resultDocumentStream);
resultDocumentStream.Dispose();
return doc;
}
else
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<?xml version=\"1.0\" encoding=\"utf-8\" ?><PerfResults></PerfResults>");
FileStream resultDocumentStream = new FileStream(_resultDocumentName, FileMode.CreateNew);
doc.Save(resultDocumentStream);
resultDocumentStream.Dispose();
return doc;
}
}
private static XmlElement GetRunElement(XmlDocument doc, string label, string startTime, bool isOfficial, string milestone, string branch)
{
foreach (XmlNode node in doc.DocumentElement.ChildNodes)
{
if (node.NodeType == XmlNodeType.Element &&
node.Name.Equals(Constants.XML_ELEM_RUN) &&
((XmlElement)node).GetAttribute(Constants.XML_ATTR_RUN_LABEL).Equals(label))
{
return (XmlElement)node;
}
}
XmlElement runElement = doc.CreateElement(Constants.XML_ELEM_RUN);
XmlAttribute attrLabel = doc.CreateAttribute(Constants.XML_ATTR_RUN_LABEL);
attrLabel.Value = label;
runElement.Attributes.Append(attrLabel);
XmlAttribute attrStartTime = doc.CreateAttribute(Constants.XML_ATTR_RUN_START_TIME);
attrStartTime.Value = startTime;
runElement.Attributes.Append(attrStartTime);
XmlAttribute attrOfficial = doc.CreateAttribute(Constants.XML_ATTR_RUN_OFFICIAL);
attrOfficial.Value = isOfficial.ToString();
runElement.Attributes.Append(attrOfficial);
if (milestone != null)
{
XmlAttribute attrMilestone = doc.CreateAttribute(Constants.XML_ATTR_RUN_MILESTONE);
attrMilestone.Value = milestone;
runElement.Attributes.Append(attrMilestone);
}
if (branch != null)
{
XmlAttribute attrBranch = doc.CreateAttribute(Constants.XML_ATTR_RUN_BRANCH);
attrBranch.Value = branch;
runElement.Attributes.Append(attrBranch);
}
doc.DocumentElement.AppendChild(runElement);
return runElement;
}
private static void AddRunMetricElement(XmlElement runElement, string name, string value)
{
// First check and make sure the metric hasn't already been added.
// If it has, it's from a previous test in the same run, so just return.
foreach (XmlNode node in runElement.ChildNodes)
{
if (node.NodeType == XmlNodeType.Element && node.Name.Equals(Constants.XML_ELEM_RUN_METRIC))
{
if (node.Attributes[Constants.XML_ATTR_RUN_METRIC_NAME].Value.Equals(name))
return;
}
}
XmlElement runMetricElement = runElement.OwnerDocument.CreateElement(Constants.XML_ELEM_RUN_METRIC);
XmlAttribute attrName = runElement.OwnerDocument.CreateAttribute(Constants.XML_ATTR_RUN_METRIC_NAME);
attrName.Value = name;
runMetricElement.Attributes.Append(attrName);
XmlText nodeValue = runElement.OwnerDocument.CreateTextNode(value);
runMetricElement.AppendChild(nodeValue);
runElement.AppendChild(runMetricElement);
}
private static XmlElement AddTestElement(XmlElement runElement, string name)
{
XmlElement testElement = runElement.OwnerDocument.CreateElement(Constants.XML_ELEM_TEST);
XmlAttribute attrName = runElement.OwnerDocument.CreateAttribute(Constants.XML_ATTR_TEST_NAME);
attrName.Value = name;
testElement.Attributes.Append(attrName);
runElement.AppendChild(testElement);
return testElement;
}
private static void AddTestMetricElement(XmlElement testElement, string name, string value, string units, bool? isHigherBetter)
{
XmlElement testMetricElement = testElement.OwnerDocument.CreateElement(Constants.XML_ELEM_TEST_METRIC);
XmlAttribute attrName = testElement.OwnerDocument.CreateAttribute(Constants.XML_ATTR_TEST_METRIC_NAME);
attrName.Value = name;
testMetricElement.Attributes.Append(attrName);
if (units != null)
{
XmlAttribute attrUnits = testElement.OwnerDocument.CreateAttribute(Constants.XML_ATTR_TEST_METRIC_UNITS);
attrUnits.Value = units;
testMetricElement.Attributes.Append(attrUnits);
}
if (isHigherBetter.HasValue)
{
XmlAttribute attrIsHigherBetter = testElement.OwnerDocument.CreateAttribute(Constants.XML_ATTR_TEST_METRIC_ISHIGHERBETTER);
attrIsHigherBetter.Value = isHigherBetter.ToString();
testMetricElement.Attributes.Append(attrIsHigherBetter);
}
XmlText nodeValue = testElement.OwnerDocument.CreateTextNode(value);
testMetricElement.AppendChild(nodeValue);
testElement.AppendChild(testMetricElement);
}
private static void AddTestExceptionElement(XmlElement testElement, string exceptionData)
{
XmlElement testFailureElement = testElement.OwnerDocument.CreateElement(Constants.XML_ELEM_EXCEPTION);
XmlText txtNode = testFailureElement.OwnerDocument.CreateTextNode(exceptionData);
testFailureElement.AppendChild(txtNode);
testElement.AppendChild(testFailureElement);
}
}
}
| |
// MIT License
//
// Copyright(c) 2022 ICARUS Consulting GmbH
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Yaapii.Atoms.Bytes;
using Yaapii.Atoms.Scalar;
namespace Yaapii.Atoms.IO
{
#pragma warning disable MaxClassLength // Class length max
/// <summary>
/// Input out of other things.
/// </summary>
public sealed class InputOf : IInput, IDisposable //@TODO IDisposable interface needs to be replaced with a better approach.
{
/// <summary>
/// the input
/// </summary>
private readonly IScalar<Stream> _origin;
/// <summary>
/// Input out of a file Uri.
/// </summary>
/// <param name="file">uri of a file, get with Path.GetFullPath(relativePath) or prefix with file://...</param>
public InputOf(Uri file) : this(
() =>
{
if (file.HostNameType == UriHostNameType.Dns)
{
return WebRequest.Create(file).GetResponse().GetResponseStream();
}
else
{
return new FileStream(Uri.UnescapeDataString(file.LocalPath), FileMode.Open, FileAccess.Read);
}
})
{ }
/// <summary>
/// Input out of a file.
/// </summary>
/// <param name="file">the file to read</param>
public InputOf(FileInfo file) : this(new Live<FileInfo>(file))
{ }
/// <summary>
/// Input out of a scalar of a file.
/// </summary>
/// <param name="file">scalar of the file to read</param>
public InputOf(IScalar<FileInfo> file) : this(
() => new FileStream(Uri.UnescapeDataString(file.Value().FullName), FileMode.Open, FileAccess.Read))
{ }
/// <summary>
/// Input out of a Url.
/// </summary>
/// <param name="url">a url starting with http:// or https://</param>
public InputOf(Url url) : this(new Live<Url>(url))
{ }
/// <summary>
/// Input out of a Url scalar.
/// </summary>
/// <param name="url">a url starting with http:// or https://</param>
public InputOf(IScalar<Url> url) : this(() =>
{
var stream = Task.Run(async () =>
{
using (HttpClient client = new HttpClient())
{
HttpResponseMessage response = await client.GetAsync(url.Value().Value());
HttpContent content = response.Content;
{
return await content.ReadAsStreamAsync();
}
}
});
return stream.Result;
})
{ }
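// Note: the Url-based ctor above downloads synchronously by blocking on Task.Result,
// so the calling thread waits until the HTTP response content has been read into the stream.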
/// <summary>
/// ctor
/// </summary>
/// <param name="rdr">a stringreader</param>
public InputOf(StringReader rdr) : this(new BytesOf(rdr))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="rdr">a streamreader</param>
public InputOf(StreamReader rdr) : this(new BytesOf(rdr))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="rdr">a streamreader</param>
/// <param name="enc">encoding of the reader</param>
public InputOf(StreamReader rdr, Encoding enc) : this(new BytesOf(rdr, enc))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="str">a stream</param>
/// <param name="enc">encoding of the stream</param>
public InputOf(Stream str, Encoding enc) : this(new BytesOf(new StreamReader(str), enc))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="rdr">a streamreader</param>
/// <param name="enc">encoding of the reader</param>
/// <param name="max">maximum buffer size</param>
public InputOf(StreamReader rdr, Encoding enc, int max = 16 << 10) : this(new BytesOf(rdr, enc, max))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="builder">a stringbuilder</param>
public InputOf(StringBuilder builder) : this(builder, Encoding.UTF8)
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="builder">a stringbuilder</param>
/// <param name="enc">encoding of the stringbuilder</param>
public InputOf(StringBuilder builder, Encoding enc) : this(
new Live<Stream>(
() => new MemoryStream(
new BytesOf(builder, enc).AsBytes())))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="chars">some chars</param>
public InputOf(params char[] chars) : this(new BytesOf(chars))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="chars">some chars</param>
/// <param name="enc">encoding of the chars</param>
public InputOf(char[] chars, Encoding enc) : this(new BytesOf(chars, enc))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="text">some text</param>
public InputOf(String text) : this(new BytesOf(text))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="text">some <see cref="string"/></param>
/// <param name="enc"><see cref="Encoding"/> of the string</param>
public InputOf(String text, Encoding enc) : this(new BytesOf(text, enc))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="text">some <see cref="IText"/></param>
public InputOf(IText text) : this(new BytesOf(text))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="text">some <see cref="IText"/></param>
/// <param name="encoding"><see cref="Encoding"/> of the text</param>
public InputOf(IText text, Encoding encoding) : this(new BytesOf(text, encoding))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="error"><see cref="Exception"/> to serialize</param>
public InputOf(Exception error) : this(new BytesOf(error))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="bytes">a <see cref="byte"/> array</param>
public InputOf(byte[] bytes) : this(new BytesOf(bytes))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="src">a <see cref="IBytes"/> object which will be copied to memory</param>
public InputOf(IBytes src) : this(new Live<Stream>(
() =>
{
var b = src.AsBytes();
var m = new MemoryStream();
m.Write(b, 0, b.Length);
m.Position = 0;
return m;
}))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="stream">a <see cref="Stream"/> as input</param>
public InputOf(Stream stream) : this(new Live<Stream>(stream))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="fnc">a function retrieving a <see cref="Stream"/> as input</param>
public InputOf(Func<Stream> fnc) : this(new Live<Stream>(fnc))
{ }
/// <summary>
/// ctor
/// </summary>
/// <param name="stream">the input <see cref="Stream"/></param>
private InputOf(IScalar<Stream> stream)
{
this._origin = new ScalarOf<Stream>(stream, streamObj => !streamObj.CanRead);
}
/// <summary>
/// Get the stream.
/// </summary>
/// <returns>the stream</returns>
public Stream Stream()
{
return this._origin.Value();
}
/// <summary>
/// Clean up.
/// </summary>
public void Dispose()
{
Stream().Dispose();
}
}
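/// <summary>
/// Minimal usage sketch, not part of the original library: shows how InputOf wraps a
/// source into a lazily created stream. The literal text used here is arbitrary.
/// </summary>
internal static class InputOfUsageSketch
{
internal static string ReadAll()
{
// Wrap a plain string; the underlying MemoryStream is created on first access of Stream().
using (var input = new InputOf("hello world"))
using (var reader = new StreamReader(input.Stream()))
{
return reader.ReadToEnd();
}
}
}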
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using Avalonia.Collections;
using Avalonia.Controls;
using Avalonia.Controls.Presenters;
using Avalonia.Markup.Xaml.Data;
using Avalonia.Markup.Xaml.Styling;
using Avalonia.Markup.Xaml.Templates;
using Avalonia.Media;
using Avalonia.Media.Immutable;
using Avalonia.Styling;
using Avalonia.UnitTests;
using Portable.Xaml;
using System.Collections;
using System.ComponentModel;
using System.Linq;
using Xunit;
namespace Avalonia.Markup.Xaml.UnitTests.Xaml
{
public class BasicTests
{
[Fact]
public void Simple_Property_Is_Set()
{
var xaml = @"<ContentControl xmlns='https://github.com/avaloniaui' Content='Foo'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.NotNull(target);
Assert.Equal("Foo", target.Content);
}
[Fact]
public void Default_Content_Property_Is_Set()
{
var xaml = @"<ContentControl xmlns='https://github.com/avaloniaui'>Foo</ContentControl>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.NotNull(target);
Assert.Equal("Foo", target.Content);
}
[Fact]
public void AvaloniaProperty_Without_Getter_And_Setter_Is_Set()
{
var xaml =
@"<local:NonControl xmlns='https://github.com/avaloniaui'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'
Foo='55' />";
var target = AvaloniaXamlLoader.Parse<NonControl>(xaml);
Assert.Equal(55, target.GetValue(NonControl.FooProperty));
}
[Fact]
public void AvaloniaProperty_With_Getter_And_No_Setter_Is_Set()
{
var xaml =
@"<local:NonControl xmlns='https://github.com/avaloniaui'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'
Bar='bar' />";
var target = AvaloniaXamlLoader.Parse<NonControl>(xaml);
Assert.Equal("bar", target.Bar);
}
[Fact]
public void Attached_Property_Is_Set()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui' TextBlock.FontSize='21'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.NotNull(target);
Assert.Equal(21.0, TextBlock.GetFontSize(target));
}
[Fact]
public void Attached_Property_Is_Set_On_Control_Outside_Avalonia_Namespace()
{
// Test for issue #1548
var xaml =
@"<UserControl xmlns='https://github.com/avaloniaui'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<local:TestControl Grid.Column='2' />
</UserControl>";
var target = AvaloniaXamlLoader.Parse<UserControl>(xaml);
Assert.Equal(2, Grid.GetColumn((TestControl)target.Content));
}
[Fact]
public void Attached_Property_With_Namespace_Is_Set()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui'
xmlns:test='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'
test:BasicTestsAttachedPropertyHolder.Foo='Bar'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.NotNull(target);
Assert.Equal("Bar", BasicTestsAttachedPropertyHolder.GetFoo(target));
}
[Fact]
public void Attached_Property_Supports_Binding()
{
using (UnitTestApplication.Start(TestServices.MockWindowingPlatform))
{
var xaml =
@"<Window xmlns='https://github.com/avaloniaui' TextBlock.FontSize='{Binding}'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
target.DataContext = 21.0;
Assert.Equal(21.0, TextBlock.GetFontSize(target));
}
}
[Fact]
public void Attached_Property_In_Panel_Is_Set()
{
var xaml = @"
<Panel xmlns='https://github.com/avaloniaui'>
<ToolTip.Tip>Foo</ToolTip.Tip>
</Panel>";
var target = AvaloniaXamlLoader.Parse<Panel>(xaml);
Assert.Empty(target.Children);
Assert.Equal("Foo", ToolTip.GetTip(target));
}
[Fact]
public void NonExistent_Property_Throws()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui' DoesntExist='foo'/>";
Assert.Throws<XamlObjectWriterException>(() => AvaloniaXamlLoader.Parse<ContentControl>(xaml));
}
[Fact]
public void Non_Attached_Property_With_Attached_Property_Syntax_Throws()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui' TextBlock.Text='foo'/>";
Assert.Throws<XamlObjectWriterException>(() => AvaloniaXamlLoader.Parse<ContentControl>(xaml));
}
[Fact]
public void ContentControl_ContentTemplate_Is_Functional()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui'>
<ContentControl.ContentTemplate>
<DataTemplate>
<TextBlock Text='Foo' />
</DataTemplate>
</ContentControl.ContentTemplate>
</ContentControl>";
var contentControl = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
var target = contentControl.ContentTemplate;
Assert.NotNull(target);
var txt = (TextBlock)target.Build(null);
Assert.Equal("Foo", txt.Text);
}
[Fact]
public void Named_Control_Is_Added_To_NameScope_Simple()
{
var xaml = @"
<UserControl xmlns='https://github.com/avaloniaui'>
<Button Name='button'>Foo</Button>
</UserControl>";
var control = AvaloniaXamlLoader.Parse<UserControl>(xaml);
var button = control.FindControl<Button>("button");
Assert.Equal("Foo", button.Content);
}
[Fact]
public void Direct_Content_In_ItemsControl_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'>
<ItemsControl Name='items'>
<ContentControl>Foo</ContentControl>
<ContentControl>Bar</ContentControl>
</ItemsControl>
</Window>";
var control = AvaloniaXamlLoader.Parse<Window>(xaml);
var itemsControl = control.FindControl<ItemsControl>("items");
Assert.NotNull(itemsControl);
var items = itemsControl.Items.Cast<ContentControl>().ToArray();
Assert.Equal("Foo", items[0].Content);
Assert.Equal("Bar", items[1].Content);
}
}
[Fact]
public void Panel_Children_Are_Added()
{
var xaml = @"
<UserControl xmlns='https://github.com/avaloniaui'>
<Panel Name='panel'>
<ContentControl Name='Foo' />
<ContentControl Name='Bar' />
</Panel>
</UserControl>";
var control = AvaloniaXamlLoader.Parse<UserControl>(xaml);
var panel = control.FindControl<Panel>("panel");
Assert.Equal(2, panel.Children.Count);
var foo = control.FindControl<ContentControl>("Foo");
var bar = control.FindControl<ContentControl>("Bar");
Assert.Contains(foo, panel.Children);
Assert.Contains(bar, panel.Children);
}
[Fact]
public void Grid_Row_Col_Definitions_Are_Built()
{
var xaml = @"
<Grid xmlns='https://github.com/avaloniaui'>
<Grid.ColumnDefinitions>
<ColumnDefinition Width='100' />
<ColumnDefinition Width='Auto' />
<ColumnDefinition Width='*' />
<ColumnDefinition Width='100*' />
</Grid.ColumnDefinitions>
<Grid.RowDefinitions>
<RowDefinition Height='100' />
<RowDefinition Height='Auto' />
<RowDefinition Height='*' />
<RowDefinition Height='100*' />
</Grid.RowDefinitions>
</Grid>";
var grid = AvaloniaXamlLoader.Parse<Grid>(xaml);
Assert.Equal(4, grid.ColumnDefinitions.Count);
Assert.Equal(4, grid.RowDefinitions.Count);
var expected1 = new GridLength(100);
var expected2 = GridLength.Auto;
var expected3 = new GridLength(1, GridUnitType.Star);
var expected4 = new GridLength(100, GridUnitType.Star);
Assert.Equal(expected1, grid.ColumnDefinitions[0].Width);
Assert.Equal(expected2, grid.ColumnDefinitions[1].Width);
Assert.Equal(expected3, grid.ColumnDefinitions[2].Width);
Assert.Equal(expected4, grid.ColumnDefinitions[3].Width);
Assert.Equal(expected1, grid.RowDefinitions[0].Height);
Assert.Equal(expected2, grid.RowDefinitions[1].Height);
Assert.Equal(expected3, grid.RowDefinitions[2].Height);
Assert.Equal(expected4, grid.RowDefinitions[3].Height);
}
[Fact]
public void Grid_Row_Col_Definitions_Are_Parsed()
{
var xaml = @"
<Grid xmlns='https://github.com/avaloniaui'
ColumnDefinitions='100,Auto,*,100*'
RowDefinitions='100,Auto,*,100*'>
</Grid>";
var grid = AvaloniaXamlLoader.Parse<Grid>(xaml);
Assert.Equal(4, grid.ColumnDefinitions.Count);
Assert.Equal(4, grid.RowDefinitions.Count);
var expected1 = new GridLength(100);
var expected2 = GridLength.Auto;
var expected3 = new GridLength(1, GridUnitType.Star);
var expected4 = new GridLength(100, GridUnitType.Star);
Assert.Equal(expected1, grid.ColumnDefinitions[0].Width);
Assert.Equal(expected2, grid.ColumnDefinitions[1].Width);
Assert.Equal(expected3, grid.ColumnDefinitions[2].Width);
Assert.Equal(expected4, grid.ColumnDefinitions[3].Width);
Assert.Equal(expected1, grid.RowDefinitions[0].Height);
Assert.Equal(expected2, grid.RowDefinitions[1].Height);
Assert.Equal(expected3, grid.RowDefinitions[2].Height);
Assert.Equal(expected4, grid.RowDefinitions[3].Height);
}
[Fact]
public void ControlTemplate_With_Nested_Child_Is_Operational()
{
var xaml = @"
<ControlTemplate xmlns='https://github.com/avaloniaui'>
<ContentControl Name='parent'>
<ContentControl Name='child' />
</ContentControl>
</ControlTemplate>
";
var template = AvaloniaXamlLoader.Parse<ControlTemplate>(xaml);
var parent = (ContentControl)template.Build(new ContentControl());
Assert.Equal("parent", parent.Name);
var child = parent.Content as ContentControl;
Assert.NotNull(child);
Assert.Equal("child", child.Name);
}
[Fact]
public void ControlTemplate_With_Panel_Children_Are_Added()
{
var xaml = @"
<ControlTemplate xmlns='https://github.com/avaloniaui'>
<Panel Name='panel'>
<ContentControl Name='Foo' />
<ContentControl Name='Bar' />
</Panel>
</ControlTemplate>
";
var template = AvaloniaXamlLoader.Parse<ControlTemplate>(xaml);
var panel = (Panel)template.Build(new ContentControl());
Assert.Equal(2, panel.Children.Count);
var foo = panel.Children[0];
var bar = panel.Children[1];
Assert.Equal("Foo", foo.Name);
Assert.Equal("Bar", bar.Name);
}
[Fact]
public void Named_x_Control_Is_Added_To_NameScope_Simple()
{
var xaml = @"
<UserControl xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Button x:Name='button'>Foo</Button>
</UserControl>";
var control = AvaloniaXamlLoader.Parse<UserControl>(xaml);
var button = control.FindControl<Button>("button");
Assert.Equal("Foo", button.Content);
}
[Fact]
public void Standard_TypeConverter_Is_Used()
{
var xaml = @"<UserControl xmlns='https://github.com/avaloniaui' Width='200.5' />";
var control = AvaloniaXamlLoader.Parse<UserControl>(xaml);
Assert.Equal(200.5, control.Width);
}
[Fact]
public void Avalonia_TypeConverter_Is_Used()
{
var xaml = @"<UserControl xmlns='https://github.com/avaloniaui' Background='White' />";
var control = AvaloniaXamlLoader.Parse<UserControl>(xaml);
var bk = control.Background;
Assert.IsType<ImmutableSolidColorBrush>(bk);
Assert.Equal(Colors.White, (bk as ISolidColorBrush).Color);
}
[Fact]
public void Simple_Style_Is_Parsed()
{
var xaml = @"
<Styles xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Style Selector='TextBlock'>
<Setter Property='Background' Value='White'/>
<Setter Property='Width' Value='100'/>
</Style>
</Styles>";
var styles = AvaloniaXamlLoader.Parse<Styles>(xaml);
Assert.Single(styles);
var style = (Style)styles[0];
var setters = style.Setters.Cast<Setter>().ToArray();
Assert.Equal(2, setters.Length);
Assert.Equal(TextBlock.BackgroundProperty, setters[0].Property);
Assert.Equal(Brushes.White.Color, ((ISolidColorBrush)setters[0].Value).Color);
Assert.Equal(TextBlock.WidthProperty, setters[1].Property);
Assert.Equal(100.0, setters[1].Value);
}
[Fact]
public void Style_Setter_With_AttachedProperty_Is_Parsed()
{
var xaml = @"
<Styles xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Style Selector='ContentControl'>
<Setter Property='TextBlock.FontSize' Value='21'/>
</Style>
</Styles>";
var styles = AvaloniaXamlLoader.Parse<Styles>(xaml);
Assert.Single(styles);
var style = (Style)styles[0];
var setters = style.Setters.Cast<Setter>().ToArray();
Assert.Single(setters);
Assert.Equal(TextBlock.FontSizeProperty, setters[0].Property);
Assert.Equal(21.0, setters[0].Value);
}
[Fact]
public void Complex_Style_Is_Parsed()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Styles xmlns='https://github.com/avaloniaui'>
<Style Selector='CheckBox'>
<Setter Property='BorderBrush' Value='{DynamicResource ThemeBorderMidBrush}'/>
<Setter Property='BorderThickness' Value='{DynamicResource ThemeBorderThickness}'/>
<Setter Property='Template'>
<ControlTemplate>
<Grid ColumnDefinitions='Auto,*'>
<Border Name='border'
BorderBrush='{TemplateBinding BorderBrush}'
BorderThickness='{TemplateBinding BorderThickness}'
Width='18'
Height='18'
VerticalAlignment='Center'>
<Path Name='checkMark'
Fill='{StaticResource HighlightBrush}'
Width='11'
Height='10'
Stretch='Uniform'
HorizontalAlignment='Center'
VerticalAlignment='Center'
Data='M 1145.607177734375,430 C1145.607177734375,430 1141.449951171875,435.0772705078125 1141.449951171875,435.0772705078125 1141.449951171875,435.0772705078125 1139.232177734375,433.0999755859375 1139.232177734375,433.0999755859375 1139.232177734375,433.0999755859375 1138,434.5538330078125 1138,434.5538330078125 1138,434.5538330078125 1141.482177734375,438 1141.482177734375,438 1141.482177734375,438 1141.96875,437.9375 1141.96875,437.9375 1141.96875,437.9375 1147,431.34619140625 1147,431.34619140625 1147,431.34619140625 1145.607177734375,430 1145.607177734375,430 z'/>
</Border>
<ContentPresenter Name='PART_ContentPresenter'
Content='{TemplateBinding Content}'
ContentTemplate='{TemplateBinding ContentTemplate}'
Margin='4,0,0,0'
VerticalAlignment='Center'
Grid.Column='1'/>
</Grid>
</ControlTemplate>
</Setter>
</Style>
</Styles>
";
var styles = AvaloniaXamlLoader.Parse<Styles>(xaml);
Assert.Single(styles);
var style = (Style)styles[0];
var setters = style.Setters.Cast<Setter>().ToArray();
Assert.Equal(3, setters.Length);
Assert.Equal(CheckBox.BorderBrushProperty, setters[0].Property);
Assert.Equal(CheckBox.BorderThicknessProperty, setters[1].Property);
Assert.Equal(CheckBox.TemplateProperty, setters[2].Property);
Assert.IsType<ControlTemplate>(setters[2].Value);
}
}
[Fact]
public void Style_Resources_Are_Built()
{
var xaml = @"
<Style xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:sys='clr-namespace:System;assembly=mscorlib'>
<Style.Resources>
<SolidColorBrush x:Key='Brush'>White</SolidColorBrush>
<sys:Double x:Key='Double'>10</sys:Double>
</Style.Resources>
</Style>";
var style = AvaloniaXamlLoader.Parse<Style>(xaml);
Assert.True(style.Resources.Count > 0);
style.TryGetResource("Brush", out var brush);
Assert.NotNull(brush);
Assert.Equal(Colors.White, ((ISolidColorBrush)brush).Color);
style.TryGetResource("Double", out var d);
Assert.Equal(10.0, d);
}
[Fact]
public void StyleInclude_Is_Built()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Styles xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<StyleInclude Source='resm:Avalonia.Themes.Default.ContextMenu.xaml?assembly=Avalonia.Themes.Default'/>
</Styles>";
var styles = AvaloniaXamlLoader.Parse<Styles>(xaml);
Assert.True(styles.Count == 1);
var styleInclude = styles.First() as StyleInclude;
Assert.NotNull(styleInclude);
var style = styleInclude.Loaded;
Assert.NotNull(style);
}
}
[Fact]
public void Simple_Xaml_Binding_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.MockPlatformWrapper
.With(windowingPlatform: new MockWindowingPlatform())))
{
var xaml =
@"<Window xmlns='https://github.com/avaloniaui' Content='{Binding}'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.Null(target.Content);
target.DataContext = "Foo";
Assert.Equal("Foo", target.Content);
}
}
[Fact]
public void Xaml_Binding_Is_Delayed()
{
using (UnitTestApplication.Start(TestServices.MockWindowingPlatform))
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui' Content='{Binding}'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.Null(target.Content);
target.DataContext = "Foo";
Assert.Null(target.Content);
DelayedBinding.ApplyBindings(target);
Assert.Equal("Foo", target.Content);
}
}
[Fact]
public void Double_Xaml_Binding_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.MockPlatformWrapper
.With(windowingPlatform: new MockWindowingPlatform())))
{
var xaml =
@"<Window xmlns='https://github.com/avaloniaui' Width='{Binding}'/>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.Null(target.Content);
target.DataContext = 55.0;
Assert.Equal(55.0, target.Width);
}
}
[Fact]
public void Collection_Xaml_Binding_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.MockPlatformWrapper
.With(windowingPlatform: new MockWindowingPlatform())))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'>
<ItemsControl Name='itemsControl' Items='{Binding}'>
</ItemsControl>
</Window>
";
var target = AvaloniaXamlLoader.Parse<Window>(xaml);
Assert.NotNull(target.Content);
var itemsControl = target.FindControl<ItemsControl>("itemsControl");
var items = new string[] { "Foo", "Bar" };
//DelayedBinding.ApplyBindings(itemsControl);
target.DataContext = items;
Assert.Equal(items, itemsControl.Items);
}
}
[Fact]
public void Multi_Xaml_Binding_Is_Parsed()
{
var xaml =
@"<MultiBinding xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
Converter ='{x:Static BoolConverters.And}'>
<Binding Path='Foo' />
<Binding Path='Bar' />
</MultiBinding>";
var target = AvaloniaXamlLoader.Parse<MultiBinding>(xaml);
Assert.Equal(2, target.Bindings.Count);
Assert.Equal(BoolConverters.And, target.Converter);
var bindings = target.Bindings.Cast<Binding>().ToArray();
Assert.Equal("Foo", bindings[0].Path);
Assert.Equal("Bar", bindings[1].Path);
}
[Fact]
public void Control_Template_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.MockPlatformWrapper
.With(windowingPlatform: new MockWindowingPlatform())))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Window.Template>
<ControlTemplate>
<ContentPresenter Name='PART_ContentPresenter'
Content='{TemplateBinding Content}'/>
</ControlTemplate>
</Window.Template>
</Window>";
var target = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
Assert.NotNull(target.Template);
Assert.Null(target.Presenter);
target.ApplyTemplate();
Assert.NotNull(target.Presenter);
target.Content = "Foo";
Assert.Equal("Foo", target.Presenter.Content);
}
}
[Fact]
public void Style_ControlTemplate_Is_Built()
{
var xaml = @"
<Style xmlns='https://github.com/avaloniaui' Selector='ContentControl'>
<Setter Property='Template'>
<ControlTemplate>
<ContentPresenter Name='PART_ContentPresenter'
Content='{TemplateBinding Content}'
ContentTemplate='{TemplateBinding ContentTemplate}' />
</ControlTemplate>
</Setter>
</Style> ";
var style = AvaloniaXamlLoader.Parse<Style>(xaml);
Assert.Single(style.Setters);
var setter = (Setter)style.Setters.First();
Assert.Equal(ContentControl.TemplateProperty, setter.Property);
Assert.IsType<ControlTemplate>(setter.Value);
var template = (ControlTemplate)setter.Value;
var control = new ContentControl();
var result = (ContentPresenter)template.Build(control);
Assert.NotNull(result);
}
[Fact]
public void Named_Control_Is_Added_To_NameScope()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Button Name='button'>Foo</Button>
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var button = window.FindControl<Button>("button");
Assert.Equal("Foo", button.Content);
}
}
[Fact(Skip =
@"Doesn't work with Portable.xaml, it's working in different creation order -
Handled in test 'Control_Is_Added_To_Parent_Before_Final_EndInit'
do we need it?")]
public void Control_Is_Added_To_Parent_Before_Properties_Are_Set()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<local:InitializationOrderTracker Width='100'/>
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var tracker = (InitializationOrderTracker)window.Content;
var attached = tracker.Order.IndexOf("AttachedToLogicalTree");
var widthChanged = tracker.Order.IndexOf("Property Width Changed");
Assert.NotEqual(-1, attached);
Assert.NotEqual(-1, widthChanged);
Assert.True(attached < widthChanged);
}
}
[Fact]
public void Control_Is_Added_To_Parent_Before_Final_EndInit()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<local:InitializationOrderTracker Width='100'/>
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var tracker = (InitializationOrderTracker)window.Content;
var attached = tracker.Order.IndexOf("AttachedToLogicalTree");
var endInit = tracker.Order.IndexOf("EndInit 0");
Assert.NotEqual(-1, attached);
Assert.NotEqual(-1, endInit);
Assert.True(attached < endInit);
}
}
[Fact]
public void All_Properties_Are_Set_Before_Final_EndInit()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<local:InitializationOrderTracker Width='100' Height='100'
Tag='{Binding Height, RelativeSource={RelativeSource Self}}' />
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var tracker = (InitializationOrderTracker)window.Content;
//ensure binding is set and operational first
Assert.Equal(100.0, tracker.Tag);
Assert.Equal("EndInit 0", tracker.Order.Last());
}
}
[Fact]
public void BeginInit_Matches_EndInit()
{
using (UnitTestApplication.Start(TestServices.StyledWindow))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<local:InitializationOrderTracker />
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var tracker = (InitializationOrderTracker)window.Content;
Assert.Equal(0, tracker.InitState);
}
}
[Fact]
public void DeferredXamlLoader_Should_Preserve_NamespacesContext()
{
var xaml =
@"<ContentControl xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'
xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'>
<ContentControl.ContentTemplate>
<DataTemplate>
<TextBlock Tag='{x:Static local:NonControl.StringProperty}'/>
</DataTemplate>
</ContentControl.ContentTemplate>
</ContentControl>";
var contentControl = AvaloniaXamlLoader.Parse<ContentControl>(xaml);
var template = contentControl.ContentTemplate;
Assert.NotNull(template);
var txt = (TextBlock)template.Build(null);
Assert.Equal((object)NonControl.StringProperty, txt.Tag);
}
[Fact]
public void Binding_To_List_AvaloniaProperty_Is_Operational()
{
using (UnitTestApplication.Start(TestServices.MockWindowingPlatform))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'>
<ListBox Items='{Binding Items}' SelectedItems='{Binding SelectedItems}'/>
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var listBox = (ListBox)window.Content;
var vm = new SelectedItemsViewModel()
{
Items = new string[] { "foo", "bar", "baz" }
};
window.DataContext = vm;
Assert.Equal(vm.Items, listBox.Items);
Assert.Equal(vm.SelectedItems, listBox.SelectedItems);
}
}
[Fact]
public void Element_Whitespace_Should_Be_Trimmed()
{
using (UnitTestApplication.Start(TestServices.MockWindowingPlatform))
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'>
<TextBlock>
Hello World!
</TextBlock>
</Window>";
var window = AvaloniaXamlLoader.Parse<Window>(xaml);
var textBlock = (TextBlock)window.Content;
Assert.Equal("Hello World!", textBlock.Text);
}
}
private class SelectedItemsViewModel : INotifyPropertyChanged
{
public string[] Items { get; set; }
public event PropertyChangedEventHandler PropertyChanged;
private IList _selectedItems = new AvaloniaList<string>();
public IList SelectedItems
{
get { return _selectedItems; }
set
{
_selectedItems = value;
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(SelectedItems)));
}
}
}
}
public class BasicTestsAttachedPropertyHolder
{
public static AvaloniaProperty<string> FooProperty =
AvaloniaProperty.RegisterAttached<BasicTestsAttachedPropertyHolder, AvaloniaObject, string>("Foo");
public static void SetFoo(AvaloniaObject target, string value) => target.SetValue(FooProperty, value);
public static string GetFoo(AvaloniaObject target) => (string)target.GetValue(FooProperty);
}
}
| |
//
// Encog(tm) Core v3.3 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Encog.Util.CSV;
namespace Encog.App.Analyst.Util
{
/// <summary>
/// Utility class to help deal with CSV headers.
/// </summary>
public class CSVHeaders
{
/// <summary>
/// The column mapping, maps column name to column index.
/// </summary>
private readonly IDictionary<String, Int32> _columnMapping;
/// <summary>
/// The header list.
/// </summary>
private readonly IList<String> _headerList;
/// <summary>
/// Construct the object.
/// </summary>
/// <param name="filename">The filename.</param>
/// <param name="headers">False if headers are not extended.</param>
/// <param name="format">The CSV format.</param>
public CSVHeaders(FileInfo filename, bool headers,
CSVFormat format)
{
_headerList = new List<String>();
_columnMapping = new Dictionary<String, Int32>();
ReadCSV csv = null;
try
{
csv = new ReadCSV(filename.ToString(), headers, format);
if (csv.Next())
{
if (headers)
{
foreach (String str in csv.ColumnNames)
{
_headerList.Add(str);
}
}
else
{
for (int i = 0; i < csv.ColumnCount; i++)
{
_headerList.Add("field:" + (i + 1));
}
}
}
Init();
}
finally
{
if (csv != null)
{
csv.Close();
}
}
}
/// <summary>
/// Construct the object.
/// </summary>
/// <param name="inputHeadings">The input headings.</param>
public CSVHeaders(IEnumerable<string> inputHeadings)
{
_headerList = new List<String>();
_columnMapping = new Dictionary<String, Int32>();
foreach (String header in inputHeadings)
{
_headerList.Add(header);
}
Init();
}
/// <value>The headers.</value>
public IList<String> Headers
{
get { return _headerList; }
}
/// <summary>
/// Parse a timeslice from a header such as (t-1).
/// </summary>
/// <param name="name">The column name.</param>
/// <returns>The timeslice.</returns>
public static int ParseTimeSlice(String name)
{
int index1 = name.IndexOf('(');
if (index1 == -1)
{
return 0;
}
int index2 = name.IndexOf(')');
if (index2 == -1)
{
return 0;
}
if (index2 < index1)
{
return 0;
}
String list = name.Substring(index1 + 1, (index2) - (index1 + 1));
String[] values = list.Split(',');
return (from v in values
select v.Trim()
into str where str.ToLower().StartsWith("t") select Int32.Parse(str.Substring(1))).FirstOrDefault();
}
/// <summary>
/// Tag a column with part # and timeslice.
/// </summary>
/// <param name="name">The name of the column.</param>
/// <param name="part">The part #.</param>
/// <param name="timeSlice">The timeslice.</param>
/// <param name="multiPart">True if this is a multipart column.</param>
/// <returns>The new tagged column.</returns>
public static String TagColumn(String name, int part,
int timeSlice, bool multiPart)
{
var result = new StringBuilder();
result.Append(name);
// is there any suffix?
if (multiPart || (timeSlice != 0))
{
result.Append('(');
// is there a part?
if (multiPart)
{
result.Append('p');
result.Append(part);
}
// is there a timeslice?
if (timeSlice != 0)
{
if (multiPart)
{
result.Append(',');
}
result.Append('t');
if (timeSlice > 0)
{
result.Append('+');
}
result.Append(timeSlice);
}
result.Append(')');
}
return result.ToString();
}
/// <summary>
/// Find the specified column.
/// </summary>
/// <param name="name">The column name.</param>
/// <returns>The index of the column.</returns>
public int Find(String name)
{
String key = name.ToLower();
if (!_columnMapping.ContainsKey(key))
{
throw new AnalystError("Can't find column: " + name.ToLower());
}
return _columnMapping[key];
}
/// <summary>
/// Get the base header, strip any (...).
/// </summary>
/// <param name="index">The index of the header.</param>
/// <returns>The base header.</returns>
public String GetBaseHeader(int index)
{
String result = _headerList[index];
int loc = result.IndexOf('(');
if (loc != -1)
{
result = result.Substring(0, (loc) - (0));
}
return result.Trim();
}
/// <summary>
/// Get the specified header.
/// </summary>
/// <param name="index">The index of the header to get.</param>
/// <returns>The header value.</returns>
public String GetHeader(int index)
{
return _headerList[index];
}
/// <summary>
/// Get the timeslice for the specified index.
/// </summary>
/// <param name="currentIndex">The index to get the time slice for.</param>
/// <returns>The timeslice.</returns>
public int GetSlice(int currentIndex)
{
String name = _headerList[currentIndex];
int index1 = name.IndexOf('(');
if (index1 == -1)
{
return 0;
}
int index2 = name.IndexOf(')');
if (index2 == -1)
{
return 0;
}
if (index2 < index1)
{
return 0;
}
String list = name.Substring(index1 + 1, (index2) - (index1 + 1));
String[] values = list.Split(',');
foreach (String v in values)
{
String str = v.Trim();
if (str.ToLower().StartsWith("t"))
{
str = v.Trim().Substring(1).Trim();
if (str[0] == '+')
{
// strip an explicit leading '+' sign before parsing
str = str.Substring(1);
}
int slice = Int32.Parse(str);
return slice;
}
}
return 0;
}
/// <summary>
/// Setup the column mapping and validate.
/// </summary>
private void Init()
{
int index = 0;
foreach (String str in _headerList)
{
_columnMapping[str.ToLower()] = index++;
}
ValidateSameName();
}
/// <returns>The number of headers.</returns>
public int Size()
{
return _headerList.Count;
}
/// <summary>
/// Validate that no two columns share the same name; duplicate names are an error.
/// </summary>
private void ValidateSameName()
{
for (int i = 0; i < _headerList.Count; i++)
{
int i1 = i;
int i2 = i;
if (_headerList.Count > i2)
if (
_headerList.Where((t, j) => i1 != j)
.Any(t => _headerList[i2].Equals(t, StringComparison.InvariantCultureIgnoreCase)))
{
throw new AnalystError("Multiple fields named: "
+ _headerList[i]);
}
}
}
}
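/// <summary>
/// Minimal usage sketch, not part of Encog: demonstrates the round trip between
/// TagColumn and ParseTimeSlice and the case-insensitive column lookup. The column
/// names used here are made up.
/// </summary>
internal static class CSVHeadersUsageSketch
{
internal static int Demo()
{
// "price" tagged as part 1 with timeslice +2 becomes "price(p1,t+2)".
String tagged = CSVHeaders.TagColumn("price", 1, 2, true);
// ParseTimeSlice reads the "t" entry back out of the suffix.
int slice = CSVHeaders.ParseTimeSlice(tagged); // 2
// Find() is case-insensitive because headers are mapped in lower case.
var headers = new CSVHeaders(new[] { "price", "volume" });
return headers.Find("VOLUME") + slice; // 1 + 2 = 3
}
}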
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Text;
using System.Text.RegularExpressions;
namespace System.Data.Common
{
public abstract class DbCommandBuilder : Component
{
private class ParameterNames
{
private const string DefaultOriginalPrefix = "Original_";
private const string DefaultIsNullPrefix = "IsNull_";
// use an alternative prefix if the default prefix fails parameter name validation
private const string AlternativeOriginalPrefix = "original";
private const string AlternativeIsNullPrefix = "isnull";
private const string AlternativeOriginalPrefix2 = "ORIGINAL";
private const string AlternativeIsNullPrefix2 = "ISNULL";
private string _originalPrefix;
private string _isNullPrefix;
private Regex _parameterNameParser;
private DbCommandBuilder _dbCommandBuilder;
private string[] _baseParameterNames;
private string[] _originalParameterNames;
private string[] _nullParameterNames;
private bool[] _isMutatedName;
private int _count;
private int _genericParameterCount;
private int _adjustedParameterNameMaxLength;
internal ParameterNames(DbCommandBuilder dbCommandBuilder, DbSchemaRow[] schemaRows)
{
_dbCommandBuilder = dbCommandBuilder;
_baseParameterNames = new string[schemaRows.Length];
_originalParameterNames = new string[schemaRows.Length];
_nullParameterNames = new string[schemaRows.Length];
_isMutatedName = new bool[schemaRows.Length];
_count = schemaRows.Length;
_parameterNameParser = new Regex(_dbCommandBuilder.ParameterNamePattern, RegexOptions.ExplicitCapture | RegexOptions.Singleline);
SetAndValidateNamePrefixes();
_adjustedParameterNameMaxLength = GetAdjustedParameterNameMaxLength();
// Generate the base parameter names and remove conflicting names.
// No name is generated for a column that is rejected because of an invalid prefix,
// a regex violation, or a name conflict after mutation.
// Any remaining null slots are later filled with generic parameter names.
//
for (int i = 0; i < schemaRows.Length; i++)
{
if (null == schemaRows[i])
{
continue;
}
bool isMutatedName = false;
string columnName = schemaRows[i].ColumnName;
// all names that start with the original or isNull prefix are invalid
if (null != _originalPrefix)
{
if (columnName.StartsWith(_originalPrefix, StringComparison.OrdinalIgnoreCase))
{
continue;
}
}
if (null != _isNullPrefix)
{
if (columnName.StartsWith(_isNullPrefix, StringComparison.OrdinalIgnoreCase))
{
continue;
}
}
// Mutate name if it contains space(s)
if (columnName.Contains(' '))
{
columnName = columnName.Replace(' ', '_');
isMutatedName = true;
}
// Validate name against regular expression
if (!_parameterNameParser.IsMatch(columnName))
{
continue;
}
// Validate name against adjusted max parametername length
if (columnName.Length > _adjustedParameterNameMaxLength)
{
continue;
}
_baseParameterNames[i] = columnName;
_isMutatedName[i] = isMutatedName;
}
EliminateConflictingNames();
// Generate names for the original and isNull parameters;
// no names are generated if the prefix failed parameter name validation.
for (int i = 0; i < schemaRows.Length; i++)
{
if (null != _baseParameterNames[i])
{
if (null != _originalPrefix)
{
_originalParameterNames[i] = _originalPrefix + _baseParameterNames[i];
}
if (null != _isNullPrefix)
{
// don't bother generating an 'IsNull' name if it's not used
if (schemaRows[i].AllowDBNull)
{
_nullParameterNames[i] = _isNullPrefix + _baseParameterNames[i];
}
}
}
}
ApplyProviderSpecificFormat();
GenerateMissingNames(schemaRows);
}
private void SetAndValidateNamePrefixes()
{
if (_parameterNameParser.IsMatch(DefaultIsNullPrefix))
{
_isNullPrefix = DefaultIsNullPrefix;
}
else if (_parameterNameParser.IsMatch(AlternativeIsNullPrefix))
{
_isNullPrefix = AlternativeIsNullPrefix;
}
else if (_parameterNameParser.IsMatch(AlternativeIsNullPrefix2))
{
_isNullPrefix = AlternativeIsNullPrefix2;
}
else
{
_isNullPrefix = null;
}
if (_parameterNameParser.IsMatch(DefaultOriginalPrefix))
{
_originalPrefix = DefaultOriginalPrefix;
}
else if (_parameterNameParser.IsMatch(AlternativeOriginalPrefix))
{
_originalPrefix = AlternativeOriginalPrefix;
}
else if (_parameterNameParser.IsMatch(AlternativeOriginalPrefix2))
{
_originalPrefix = AlternativeOriginalPrefix2;
}
else
{
_originalPrefix = null;
}
}
private void ApplyProviderSpecificFormat()
{
for (int i = 0; i < _baseParameterNames.Length; i++)
{
if (null != _baseParameterNames[i])
{
_baseParameterNames[i] = _dbCommandBuilder.GetParameterName(_baseParameterNames[i]);
}
if (null != _originalParameterNames[i])
{
_originalParameterNames[i] = _dbCommandBuilder.GetParameterName(_originalParameterNames[i]);
}
if (null != _nullParameterNames[i])
{
_nullParameterNames[i] = _dbCommandBuilder.GetParameterName(_nullParameterNames[i]);
}
}
}
private void EliminateConflictingNames()
{
for (int i = 0; i < _count - 1; i++)
{
string name = _baseParameterNames[i];
if (null != name)
{
for (int j = i + 1; j < _count; j++)
{
if (ADP.CompareInsensitiveInvariant(name, _baseParameterNames[j]))
{
// found a duplicate name; the unchanged name wins
int iMutatedName = _isMutatedName[j] ? j : i;
Debug.Assert(_isMutatedName[iMutatedName], string.Format(CultureInfo.InvariantCulture, "{0} expected to be a mutated name", _baseParameterNames[iMutatedName]));
_baseParameterNames[iMutatedName] = null; // null out the culprit
}
}
}
}
}
// Generates parameter names for columns whose names could not be used directly
internal void GenerateMissingNames(DbSchemaRow[] schemaRows)
{
// foreach name in base names
// if base name is null
// for base, original and nullnames (null names only if nullable)
// do
// generate name based on current index
// increment index
// search name in base names
// loop while name occurs in base names
// end for
// end foreach
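// Hypothetical example: if a column name was rejected above (too long, bad prefix,
// regex mismatch or conflict), its slot is still null here and gets filled with the
// next free generic name from GetParameterName(n); the exact shape (e.g. "@p1") is
// provider-specific and only assumed for illustration.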
string name;
for (int i = 0; i < _baseParameterNames.Length; i++)
{
name = _baseParameterNames[i];
if (null == name)
{
_baseParameterNames[i] = GetNextGenericParameterName();
_originalParameterNames[i] = GetNextGenericParameterName();
// don't bother generating an 'IsNull' name if it's not used
if ((null != schemaRows[i]) && schemaRows[i].AllowDBNull)
{
_nullParameterNames[i] = GetNextGenericParameterName();
}
}
}
}
private int GetAdjustedParameterNameMaxLength()
{
int maxPrefixLength = Math.Max(
(null != _isNullPrefix ? _isNullPrefix.Length : 0),
(null != _originalPrefix ? _originalPrefix.Length : 0)
) + _dbCommandBuilder.GetParameterName("").Length;
return _dbCommandBuilder.ParameterNameMaxLength - maxPrefixLength;
}
private string GetNextGenericParameterName()
{
string name;
bool nameExist;
do
{
nameExist = false;
_genericParameterCount++;
name = _dbCommandBuilder.GetParameterName(_genericParameterCount);
for (int i = 0; i < _baseParameterNames.Length; i++)
{
if (ADP.CompareInsensitiveInvariant(_baseParameterNames[i], name))
{
nameExist = true;
break;
}
}
} while (nameExist);
return name;
}
internal string GetBaseParameterName(int index)
{
return (_baseParameterNames[index]);
}
internal string GetOriginalParameterName(int index)
{
return (_originalParameterNames[index]);
}
internal string GetNullParameterName(int index)
{
return (_nullParameterNames[index]);
}
}
private const string DeleteFrom = "DELETE FROM ";
private const string InsertInto = "INSERT INTO ";
private const string DefaultValues = " DEFAULT VALUES";
private const string Values = " VALUES ";
private const string Update = "UPDATE ";
private const string Set = " SET ";
private const string Where = " WHERE ";
private const string SpaceLeftParenthesis = " (";
private const string Comma = ", ";
private const string Equal = " = ";
private const string LeftParenthesis = "(";
private const string RightParenthesis = ")";
private const string NameSeparator = ".";
private const string IsNull = " IS NULL";
private const string EqualOne = " = 1";
private const string And = " AND ";
private const string Or = " OR ";
private DbDataAdapter _dataAdapter;
private DbCommand _insertCommand;
private DbCommand _updateCommand;
private DbCommand _deleteCommand;
private MissingMappingAction _missingMappingAction;
private ConflictOption _conflictDetection = ConflictOption.CompareAllSearchableValues;
private bool _setAllValues = false;
private bool _hasPartialPrimaryKey = false;
private DataTable _dbSchemaTable;
private DbSchemaRow[] _dbSchemaRows;
private string[] _sourceColumnNames;
private ParameterNames _parameterNames = null;
private string _quotedBaseTableName;
// quote strings to use around SQL object names
private CatalogLocation _catalogLocation = CatalogLocation.Start;
private string _catalogSeparator = NameSeparator;
private string _schemaSeparator = NameSeparator;
private string _quotePrefix = string.Empty;
private string _quoteSuffix = string.Empty;
private string _parameterNamePattern = null;
private string _parameterMarkerFormat = null;
private int _parameterNameMaxLength = 0;
protected DbCommandBuilder() : base()
{
}
[DefaultValueAttribute(ConflictOption.CompareAllSearchableValues)]
public virtual ConflictOption ConflictOption
{
get
{
return _conflictDetection;
}
set
{
switch (value)
{
case ConflictOption.CompareAllSearchableValues:
case ConflictOption.CompareRowVersion:
case ConflictOption.OverwriteChanges:
_conflictDetection = value;
break;
default:
throw ADP.InvalidConflictOptions(value);
}
}
}
[DefaultValueAttribute(CatalogLocation.Start)]
public virtual CatalogLocation CatalogLocation
{
get
{
return _catalogLocation;
}
set
{
if (null != _dbSchemaTable)
{
throw ADP.NoQuoteChange();
}
switch (value)
{
case CatalogLocation.Start:
case CatalogLocation.End:
_catalogLocation = value;
break;
default:
throw ADP.InvalidCatalogLocation(value);
}
}
}
[DefaultValueAttribute(DbCommandBuilder.NameSeparator)]
public virtual string CatalogSeparator
{
get
{
string catalogSeparator = _catalogSeparator;
return (((null != catalogSeparator) && (0 < catalogSeparator.Length)) ? catalogSeparator : NameSeparator);
}
set
{
if (null != _dbSchemaTable)
{
throw ADP.NoQuoteChange();
}
_catalogSeparator = value;
}
}
[Browsable(false)]
[DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public DbDataAdapter DataAdapter
{
get
{
return _dataAdapter;
}
set
{
if (_dataAdapter != value)
{
RefreshSchema();
if (null != _dataAdapter)
{
// derived should remove event handler from old adapter
SetRowUpdatingHandler(_dataAdapter);
_dataAdapter = null;
}
if (null != value)
{
// derived should add event handler to new adapter
SetRowUpdatingHandler(value);
_dataAdapter = value;
}
}
}
}
internal int ParameterNameMaxLength
{
get
{
return _parameterNameMaxLength;
}
}
internal string ParameterNamePattern
{
get
{
return _parameterNamePattern;
}
}
private string QuotedBaseTableName
{
get
{
return _quotedBaseTableName;
}
}
[DefaultValueAttribute("")]
public virtual string QuotePrefix
{
get { return _quotePrefix ?? string.Empty; }
set
{
if (null != _dbSchemaTable)
{
throw ADP.NoQuoteChange();
}
_quotePrefix = value;
}
}
[DefaultValueAttribute("")]
public virtual string QuoteSuffix
{
get
{
string quoteSuffix = _quoteSuffix;
return ((null != quoteSuffix) ? quoteSuffix : string.Empty);
}
set
{
if (null != _dbSchemaTable)
{
throw ADP.NoQuoteChange();
}
_quoteSuffix = value;
}
}
[DefaultValueAttribute(DbCommandBuilder.NameSeparator)]
public virtual string SchemaSeparator
{
get
{
string schemaSeparator = _schemaSeparator;
return (((null != schemaSeparator) && (0 < schemaSeparator.Length)) ? schemaSeparator : NameSeparator);
}
set
{
if (null != _dbSchemaTable)
{
throw ADP.NoQuoteChange();
}
_schemaSeparator = value;
}
}
[DefaultValueAttribute(false)]
public bool SetAllValues
{
get
{
return _setAllValues;
}
set
{
_setAllValues = value;
}
}
private DbCommand InsertCommand
{
get
{
return _insertCommand;
}
set
{
_insertCommand = value;
}
}
private DbCommand UpdateCommand
{
get
{
return _updateCommand;
}
set
{
_updateCommand = value;
}
}
private DbCommand DeleteCommand
{
get
{
return _deleteCommand;
}
set
{
_deleteCommand = value;
}
}
private void BuildCache(bool closeConnection, DataRow dataRow, bool useColumnsForParameterNames)
{
// Don't bother building the cache if it's done already; wait for
// the user to call RefreshSchema first.
if ((null != _dbSchemaTable) && (!useColumnsForParameterNames || (null != _parameterNames)))
{
return;
}
DataTable schemaTable = null;
DbCommand srcCommand = GetSelectCommand();
DbConnection connection = srcCommand.Connection;
if (null == connection)
{
throw ADP.MissingSourceCommandConnection();
}
try
{
if (0 == (ConnectionState.Open & connection.State))
{
connection.Open();
}
else
{
closeConnection = false;
}
if (useColumnsForParameterNames)
{
DataTable dataTable = connection.GetSchema(DbMetaDataCollectionNames.DataSourceInformation);
if (dataTable.Rows.Count == 1)
{
_parameterNamePattern = dataTable.Rows[0][DbMetaDataColumnNames.ParameterNamePattern] as string;
_parameterMarkerFormat = dataTable.Rows[0][DbMetaDataColumnNames.ParameterMarkerFormat] as string;
object oParameterNameMaxLength = dataTable.Rows[0][DbMetaDataColumnNames.ParameterNameMaxLength];
_parameterNameMaxLength = (oParameterNameMaxLength is int) ? (int)oParameterNameMaxLength : 0;
// note that we protect against errors in the xml file!
if (0 == _parameterNameMaxLength || null == _parameterNamePattern || null == _parameterMarkerFormat)
{
useColumnsForParameterNames = false;
}
}
else
{
Debug.Assert(false, "Rowcount expected to be 1");
useColumnsForParameterNames = false;
}
}
schemaTable = GetSchemaTable(srcCommand);
}
finally
{
if (closeConnection)
{
connection.Close();
}
}
if (null == schemaTable)
{
throw ADP.DynamicSQLNoTableInfo();
}
BuildInformation(schemaTable);
_dbSchemaTable = schemaTable;
DbSchemaRow[] schemaRows = _dbSchemaRows;
string[] srcColumnNames = new string[schemaRows.Length];
for (int i = 0; i < schemaRows.Length; ++i)
{
if (null != schemaRows[i])
{
srcColumnNames[i] = schemaRows[i].ColumnName;
}
}
_sourceColumnNames = srcColumnNames;
if (useColumnsForParameterNames)
{
_parameterNames = new ParameterNames(this, schemaRows);
}
ADP.BuildSchemaTableInfoTableNames(srcColumnNames);
}
protected virtual DataTable GetSchemaTable(DbCommand sourceCommand)
{
using (IDataReader dataReader = sourceCommand.ExecuteReader(CommandBehavior.SchemaOnly | CommandBehavior.KeyInfo))
{
return dataReader.GetSchemaTable();
}
}
private void BuildInformation(DataTable schemaTable)
{
DbSchemaRow[] rows = DbSchemaRow.GetSortedSchemaRows(schemaTable, false);
if ((null == rows) || (0 == rows.Length))
{
throw ADP.DynamicSQLNoTableInfo();
}
string baseServerName = string.Empty;
string baseCatalogName = string.Empty;
string baseSchemaName = string.Empty;
string baseTableName = null;
for (int i = 0; i < rows.Length; ++i)
{
DbSchemaRow row = rows[i];
string tableName = row.BaseTableName;
if ((null == tableName) || (0 == tableName.Length))
{
rows[i] = null;
continue;
}
string serverName = row.BaseServerName;
string catalogName = row.BaseCatalogName;
string schemaName = row.BaseSchemaName;
if (null == serverName)
{
serverName = string.Empty;
}
if (null == catalogName)
{
catalogName = string.Empty;
}
if (null == schemaName)
{
schemaName = string.Empty;
}
if (null == baseTableName)
{
baseServerName = serverName;
baseCatalogName = catalogName;
baseSchemaName = schemaName;
baseTableName = tableName;
}
else if ((0 != ADP.SrcCompare(baseTableName, tableName))
|| (0 != ADP.SrcCompare(baseSchemaName, schemaName))
|| (0 != ADP.SrcCompare(baseCatalogName, catalogName))
|| (0 != ADP.SrcCompare(baseServerName, serverName)))
{
throw ADP.DynamicSQLJoinUnsupported();
}
}
if (0 == baseServerName.Length)
{
baseServerName = null;
}
if (0 == baseCatalogName.Length)
{
baseServerName = null;
baseCatalogName = null;
}
if (0 == baseSchemaName.Length)
{
baseServerName = null;
baseCatalogName = null;
baseSchemaName = null;
}
if ((null == baseTableName) || (0 == baseTableName.Length))
{
throw ADP.DynamicSQLNoTableInfo();
}
CatalogLocation location = CatalogLocation;
string catalogSeparator = CatalogSeparator;
string schemaSeparator = SchemaSeparator;
string quotePrefix = QuotePrefix;
string quoteSuffix = QuoteSuffix;
if (!string.IsNullOrEmpty(quotePrefix) && (-1 != baseTableName.IndexOf(quotePrefix, StringComparison.Ordinal)))
{
throw ADP.DynamicSQLNestedQuote(baseTableName, quotePrefix);
}
if (!string.IsNullOrEmpty(quoteSuffix) && (-1 != baseTableName.IndexOf(quoteSuffix, StringComparison.Ordinal)))
{
throw ADP.DynamicSQLNestedQuote(baseTableName, quoteSuffix);
}
System.Text.StringBuilder builder = new System.Text.StringBuilder();
if (CatalogLocation.Start == location)
{
if (null != baseServerName)
{
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseServerName));
builder.Append(catalogSeparator);
}
if (null != baseCatalogName)
{
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseCatalogName));
builder.Append(catalogSeparator);
}
}
if (null != baseSchemaName)
{
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseSchemaName));
builder.Append(schemaSeparator);
}
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseTableName));
if (CatalogLocation.End == location)
{
if (null != baseServerName)
{
builder.Append(catalogSeparator);
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseServerName));
}
if (null != baseCatalogName)
{
builder.Append(catalogSeparator);
builder.Append(ADP.BuildQuotedString(quotePrefix, quoteSuffix, baseCatalogName));
}
}
_quotedBaseTableName = builder.ToString();
_hasPartialPrimaryKey = false;
foreach (DbSchemaRow row in rows)
{
if ((null != row) && (row.IsKey || row.IsUnique) && !row.IsLong && !row.IsRowVersion && row.IsHidden)
{
_hasPartialPrimaryKey = true;
break;
}
}
_dbSchemaRows = rows;
}
private DbCommand BuildDeleteCommand(DataTableMapping mappings, DataRow dataRow)
{
DbCommand command = InitializeCommand(DeleteCommand);
StringBuilder builder = new StringBuilder();
int parameterCount = 0;
Debug.Assert(!string.IsNullOrEmpty(_quotedBaseTableName), "no table name");
builder.Append(DeleteFrom);
builder.Append(QuotedBaseTableName);
parameterCount = BuildWhereClause(mappings, dataRow, builder, command, parameterCount, false);
command.CommandText = builder.ToString();
RemoveExtraParameters(command, parameterCount);
DeleteCommand = command;
return command;
}
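// Illustrative only: for a hypothetical table "Customers" keyed on "Id", the command
// built above has the shape
//   DELETE FROM [Customers] WHERE (([Id] = @p1))
// with the exact quoting, parameter markers and extra null/row-version comparisons
// depending on the provider and the ConflictOption in effect.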
private DbCommand BuildInsertCommand(DataTableMapping mappings, DataRow dataRow)
{
DbCommand command = InitializeCommand(InsertCommand);
StringBuilder builder = new StringBuilder();
int parameterCount = 0;
string nextSeparator = SpaceLeftParenthesis;
Debug.Assert(!string.IsNullOrEmpty(_quotedBaseTableName), "no table name");
builder.Append(InsertInto);
builder.Append(QuotedBaseTableName);
// search for the columns in the base table to build the column list
DbSchemaRow[] schemaRows = _dbSchemaRows;
string[] parameterName = new string[schemaRows.Length];
for (int i = 0; i < schemaRows.Length; ++i)
{
DbSchemaRow row = schemaRows[i];
if ((null == row) || (0 == row.BaseColumnName.Length) || !IncludeInInsertValues(row))
continue;
object currentValue = null;
string sourceColumn = _sourceColumnNames[i];
// If we're building a statement for a specific row, then check the
// values to see whether the column should be included in the insert
// statement or not
if ((null != mappings) && (null != dataRow))
{
DataColumn dataColumn = GetDataColumn(sourceColumn, mappings, dataRow);
if (null == dataColumn)
continue;
// Don't bother inserting if the column is readonly in both the data
// set and the back end.
if (row.IsReadOnly && dataColumn.ReadOnly)
continue;
currentValue = GetColumnValue(dataRow, dataColumn, DataRowVersion.Current);
// If the value is null, and the column doesn't support nulls, then
// the user is requesting the server-specified default value, so don't
// include it in the set-list.
if (!row.AllowDBNull && (null == currentValue || Convert.IsDBNull(currentValue)))
continue;
}
builder.Append(nextSeparator);
nextSeparator = Comma;
builder.Append(QuotedColumn(row.BaseColumnName));
parameterName[parameterCount] = CreateParameterForValue(
command,
GetBaseParameterName(i),
sourceColumn,
DataRowVersion.Current,
parameterCount,
currentValue,
row, StatementType.Insert, false
);
parameterCount++;
}
if (0 == parameterCount)
builder.Append(DefaultValues);
else
{
builder.Append(RightParenthesis);
builder.Append(Values);
builder.Append(LeftParenthesis);
builder.Append(parameterName[0]);
for (int i = 1; i < parameterCount; ++i)
{
builder.Append(Comma);
builder.Append(parameterName[i]);
}
builder.Append(RightParenthesis);
}
command.CommandText = builder.ToString();
RemoveExtraParameters(command, parameterCount);
InsertCommand = command;
return command;
}
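// Builds a statement of the form: UPDATE <quoted table> SET <col1> = ?, ... WHERE (...). Returns null
// when no column actually needs to be set, so the caller can skip the row.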
private DbCommand BuildUpdateCommand(DataTableMapping mappings, DataRow dataRow)
{
DbCommand command = InitializeCommand(UpdateCommand);
StringBuilder builder = new StringBuilder();
string nextSeparator = Set;
int parameterCount = 0;
Debug.Assert(!string.IsNullOrEmpty(_quotedBaseTableName), "no table name");
builder.Append(Update);
builder.Append(QuotedBaseTableName);
// search for the columns in that base table, to build the set clause
DbSchemaRow[] schemaRows = _dbSchemaRows;
for (int i = 0; i < schemaRows.Length; ++i)
{
DbSchemaRow row = schemaRows[i];
if ((null == row) || (0 == row.BaseColumnName.Length) || !IncludeInUpdateSet(row))
continue;
object currentValue = null;
string sourceColumn = _sourceColumnNames[i];
// If we're building a statement for a specific row, then check the
// values to see whether the column should be included in the update
// statement or not
if ((null != mappings) && (null != dataRow))
{
DataColumn dataColumn = GetDataColumn(sourceColumn, mappings, dataRow);
if (null == dataColumn)
continue;
// Don't bother updating if the column is readonly in both the data
// set and the back end.
if (row.IsReadOnly && dataColumn.ReadOnly)
continue;
// Unless specifically directed to do so, we will not automatically update
// a column with its original value, which means that we must determine
// whether the value has changed locally, before we send it up.
currentValue = GetColumnValue(dataRow, dataColumn, DataRowVersion.Current);
if (!SetAllValues)
{
object originalValue = GetColumnValue(dataRow, dataColumn, DataRowVersion.Original);
if ((originalValue == currentValue)
|| ((null != originalValue) && originalValue.Equals(currentValue)))
{
continue;
}
}
}
builder.Append(nextSeparator);
nextSeparator = Comma;
builder.Append(QuotedColumn(row.BaseColumnName));
builder.Append(Equal);
builder.Append(
CreateParameterForValue(
command,
GetBaseParameterName(i),
sourceColumn,
DataRowVersion.Current,
parameterCount,
currentValue,
row, StatementType.Update, false
)
);
parameterCount++;
}
// It is an error to attempt an update when there's nothing to update.
bool skipRow = (0 == parameterCount);
parameterCount = BuildWhereClause(mappings, dataRow, builder, command, parameterCount, true);
command.CommandText = builder.ToString();
RemoveExtraParameters(command, parameterCount);
UpdateCommand = command;
return (skipRow) ? null : command;
}
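// Appends the optimistic-concurrency WHERE clause. For non-nullable columns it emits (<col> = ?);
// for nullable columns it emits ((? = 1 AND <col> IS NULL) OR (<col> = ?)) so stored NULLs can be matched.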
private int BuildWhereClause(
DataTableMapping mappings,
DataRow dataRow,
StringBuilder builder,
DbCommand command,
int parameterCount,
bool isUpdate
)
{
string beginNewCondition = string.Empty;
int whereCount = 0;
builder.Append(Where);
builder.Append(LeftParenthesis);
DbSchemaRow[] schemaRows = _dbSchemaRows;
for (int i = 0; i < schemaRows.Length; ++i)
{
DbSchemaRow row = schemaRows[i];
if ((null == row) || (0 == row.BaseColumnName.Length) || !IncludeInWhereClause(row, isUpdate))
{
continue;
}
builder.Append(beginNewCondition);
beginNewCondition = And;
object value = null;
string sourceColumn = _sourceColumnNames[i];
string baseColumnName = QuotedColumn(row.BaseColumnName);
if ((null != mappings) && (null != dataRow))
value = GetColumnValue(dataRow, sourceColumn, mappings, DataRowVersion.Original);
if (!row.AllowDBNull)
{
// (<baseColumnName> = ?)
builder.Append(LeftParenthesis);
builder.Append(baseColumnName);
builder.Append(Equal);
builder.Append(
CreateParameterForValue(
command,
GetOriginalParameterName(i),
sourceColumn,
DataRowVersion.Original,
parameterCount,
value,
row, (isUpdate ? StatementType.Update : StatementType.Delete), true
)
);
parameterCount++;
builder.Append(RightParenthesis);
}
else
{
// ((? = 1 AND <baseColumnName> IS NULL) OR (<baseColumnName> = ?))
builder.Append(LeftParenthesis);
builder.Append(LeftParenthesis);
builder.Append(
CreateParameterForNullTest(
command,
GetNullParameterName(i),
sourceColumn,
DataRowVersion.Original,
parameterCount,
value,
row, (isUpdate ? StatementType.Update : StatementType.Delete), true
)
);
parameterCount++;
builder.Append(EqualOne);
builder.Append(And);
builder.Append(baseColumnName);
builder.Append(IsNull);
builder.Append(RightParenthesis);
builder.Append(Or);
builder.Append(LeftParenthesis);
builder.Append(baseColumnName);
builder.Append(Equal);
builder.Append(
CreateParameterForValue(
command,
GetOriginalParameterName(i),
sourceColumn,
DataRowVersion.Original,
parameterCount,
value,
row, (isUpdate ? StatementType.Update : StatementType.Delete), true
)
);
parameterCount++;
builder.Append(RightParenthesis);
builder.Append(RightParenthesis);
}
if (IncrementWhereCount(row))
{
whereCount++;
}
}
builder.Append(RightParenthesis);
if (0 == whereCount)
{
if (isUpdate)
{
if (ConflictOption.CompareRowVersion == ConflictOption)
{
throw ADP.DynamicSQLNoKeyInfoRowVersionUpdate();
}
throw ADP.DynamicSQLNoKeyInfoUpdate();
}
else
{
if (ConflictOption.CompareRowVersion == ConflictOption)
{
throw ADP.DynamicSQLNoKeyInfoRowVersionDelete();
}
throw ADP.DynamicSQLNoKeyInfoDelete();
}
}
return parameterCount;
}
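// Creates the sentinel parameter used by the null-test pattern above: its value is 1 when the original
// column value is null and 0 otherwise, so "(? = 1 AND <col> IS NULL)" only matches stored NULLs.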
private string CreateParameterForNullTest(
DbCommand command,
string parameterName,
string sourceColumn,
DataRowVersion version,
int parameterCount,
object value,
DbSchemaRow row,
StatementType statementType,
bool whereClause
)
{
DbParameter p = GetNextParameter(command, parameterCount);
Debug.Assert(!string.IsNullOrEmpty(sourceColumn), "empty source column");
if (null == parameterName)
{
p.ParameterName = GetParameterName(1 + parameterCount);
}
else
{
p.ParameterName = parameterName;
}
p.Direction = ParameterDirection.Input;
p.SourceColumn = sourceColumn;
p.SourceVersion = version;
p.SourceColumnNullMapping = true;
p.Value = value;
p.Size = 0; // don't specify parameter.Size so that we don't silently truncate to the metadata size
ApplyParameterInfo(p, row.DataRow, statementType, whereClause);
p.DbType = DbType.Int32;
p.Value = ADP.IsNull(value) ? DbDataAdapter.s_parameterValueNullValue : DbDataAdapter.s_parameterValueNonNullValue;
if (!command.Parameters.Contains(p))
{
command.Parameters.Add(p);
}
if (null == parameterName)
{
return GetParameterPlaceholder(1 + parameterCount);
}
else
{
Debug.Assert(null != _parameterNames, "How can we have a parameterName without a _parameterNames collection?");
Debug.Assert(null != _parameterMarkerFormat, "How can we have a _parameterNames collection but no _parameterMarkerFormat?");
return string.Format(CultureInfo.InvariantCulture, _parameterMarkerFormat, parameterName);
}
}
private string CreateParameterForValue(
DbCommand command,
string parameterName,
string sourceColumn,
DataRowVersion version,
int parameterCount,
object value,
DbSchemaRow row,
StatementType statementType,
bool whereClause
)
{
DbParameter p = GetNextParameter(command, parameterCount);
if (null == parameterName)
{
p.ParameterName = GetParameterName(1 + parameterCount);
}
else
{
p.ParameterName = parameterName;
}
p.Direction = ParameterDirection.Input;
p.SourceColumn = sourceColumn;
p.SourceVersion = version;
p.SourceColumnNullMapping = false;
p.Value = value;
p.Size = 0; // don't specify parameter.Size so that we don't silently truncate to the metadata size
ApplyParameterInfo(p, row.DataRow, statementType, whereClause);
if (!command.Parameters.Contains(p))
{
command.Parameters.Add(p);
}
if (null == parameterName)
{
return GetParameterPlaceholder(1 + parameterCount);
}
else
{
Debug.Assert(null != _parameterNames, "How can we have a parameterName without a _parameterNames collection?");
Debug.Assert(null != _parameterMarkerFormat, "How can we have a _parameterNames collection but no _parameterMarkerFormat?");
return string.Format(CultureInfo.InvariantCulture, _parameterMarkerFormat, parameterName);
}
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
// release managed objects
DataAdapter = null;
}
//release unmanaged objects
base.Dispose(disposing); // notify base classes
}
private DataTableMapping GetTableMapping(DataRow dataRow)
{
DataTableMapping tableMapping = null;
if (null != dataRow)
{
DataTable dataTable = dataRow.Table;
if (null != dataTable)
{
DbDataAdapter adapter = DataAdapter;
if (null != adapter)
{
tableMapping = adapter.GetTableMapping(dataTable);
}
else
{
string tableName = dataTable.TableName;
tableMapping = new DataTableMapping(tableName, tableName);
}
}
}
return tableMapping;
}
private string GetBaseParameterName(int index)
{
if (null != _parameterNames)
{
return (_parameterNames.GetBaseParameterName(index));
}
else
{
return null;
}
}
private string GetOriginalParameterName(int index)
{
if (null != _parameterNames)
{
return (_parameterNames.GetOriginalParameterName(index));
}
else
{
return null;
}
}
private string GetNullParameterName(int index)
{
if (null != _parameterNames)
{
return (_parameterNames.GetNullParameterName(index));
}
else
{
return null;
}
}
private DbCommand GetSelectCommand()
{
DbCommand select = null;
DbDataAdapter adapter = DataAdapter;
if (null != adapter)
{
if (0 == _missingMappingAction)
{
_missingMappingAction = adapter.MissingMappingAction;
}
select = adapter.SelectCommand;
}
if (null == select)
{
throw ADP.MissingSourceCommand();
}
return select;
}
public DbCommand GetInsertCommand()
{
return GetInsertCommand(null, false);
}
public DbCommand GetInsertCommand(bool useColumnsForParameterNames)
{
return GetInsertCommand(null, useColumnsForParameterNames);
}
internal DbCommand GetInsertCommand(DataRow dataRow, bool useColumnsForParameterNames)
{
BuildCache(true, dataRow, useColumnsForParameterNames);
BuildInsertCommand(GetTableMapping(dataRow), dataRow);
return InsertCommand;
}
public DbCommand GetUpdateCommand()
{
return GetUpdateCommand(null, false);
}
public DbCommand GetUpdateCommand(bool useColumnsForParameterNames)
{
return GetUpdateCommand(null, useColumnsForParameterNames);
}
internal DbCommand GetUpdateCommand(DataRow dataRow, bool useColumnsForParameterNames)
{
BuildCache(true, dataRow, useColumnsForParameterNames);
BuildUpdateCommand(GetTableMapping(dataRow), dataRow);
return UpdateCommand;
}
public DbCommand GetDeleteCommand()
{
return GetDeleteCommand(null, false);
}
public DbCommand GetDeleteCommand(bool useColumnsForParameterNames)
{
return GetDeleteCommand(null, useColumnsForParameterNames);
}
internal DbCommand GetDeleteCommand(DataRow dataRow, bool useColumnsForParameterNames)
{
BuildCache(true, dataRow, useColumnsForParameterNames);
BuildDeleteCommand(GetTableMapping(dataRow), dataRow);
return DeleteCommand;
}
private object GetColumnValue(DataRow row, string columnName, DataTableMapping mappings, DataRowVersion version)
{
return GetColumnValue(row, GetDataColumn(columnName, mappings, row), version);
}
private object GetColumnValue(DataRow row, DataColumn column, DataRowVersion version)
{
object value = null;
if (null != column)
{
value = row[column, version];
}
return value;
}
private DataColumn GetDataColumn(string columnName, DataTableMapping tablemapping, DataRow row)
{
DataColumn column = null;
if (!string.IsNullOrEmpty(columnName))
{
column = tablemapping.GetDataColumn(columnName, null, row.Table, _missingMappingAction, MissingSchemaAction.Error);
}
return column;
}
private static DbParameter GetNextParameter(DbCommand command, int pcount)
{
DbParameter p;
if (pcount < command.Parameters.Count)
{
p = command.Parameters[pcount];
}
else
{
p = command.CreateParameter();
/*if (null == p) {
// CONSIDER: throw exception
}*/
}
Debug.Assert(null != p, "null CreateParameter");
return p;
}
private bool IncludeInInsertValues(DbSchemaRow row)
{
// NOTE: exclude auto-increment, hidden, expression, row-version and read-only columns; include everything else
return (!row.IsAutoIncrement && !row.IsHidden && !row.IsExpression && !row.IsRowVersion && !row.IsReadOnly);
}
private bool IncludeInUpdateSet(DbSchemaRow row)
{
// NOTE: exclude auto-increment, row-version, hidden and read-only columns; include everything else
return (!row.IsAutoIncrement && !row.IsRowVersion && !row.IsHidden && !row.IsReadOnly);
}
private bool IncludeInWhereClause(DbSchemaRow row, bool isUpdate)
{
bool flag = IncrementWhereCount(row);
if (flag && row.IsHidden)
{
if (ConflictOption.CompareRowVersion == ConflictOption)
{
throw ADP.DynamicSQLNoKeyInfoRowVersionUpdate();
}
throw ADP.DynamicSQLNoKeyInfoUpdate();
}
if (!flag && (ConflictOption.CompareAllSearchableValues == ConflictOption))
{
// include other searchable values
flag = !row.IsLong && !row.IsRowVersion && !row.IsHidden;
}
return flag;
}
private bool IncrementWhereCount(DbSchemaRow row)
{
ConflictOption value = ConflictOption;
switch (value)
{
case ConflictOption.CompareAllSearchableValues:
case ConflictOption.OverwriteChanges:
// find the primary key
return (row.IsKey || row.IsUnique) && !row.IsLong && !row.IsRowVersion;
case ConflictOption.CompareRowVersion:
// or the row version
return (((row.IsKey || row.IsUnique) && !_hasPartialPrimaryKey) || row.IsRowVersion) && !row.IsLong;
default:
throw ADP.InvalidConflictOptions(value);
}
}
protected virtual DbCommand InitializeCommand(DbCommand command)
{
if (null == command)
{
DbCommand select = GetSelectCommand();
command = select.Connection.CreateCommand();
/*if (null == command) {
// CONSIDER: throw exception
}*/
// the following properties are only initialized when the object is created
// all other properties are reinitialized on every row
/*command.Connection = select.Connection;*/ // initialized by CreateCommand
command.CommandTimeout = select.CommandTimeout;
command.Transaction = select.Transaction;
}
command.CommandType = CommandType.Text;
command.UpdatedRowSource = UpdateRowSource.None; // no select or output parameters expected
return command;
}
private string QuotedColumn(string column)
{
return ADP.BuildQuotedString(QuotePrefix, QuoteSuffix, column);
}
public virtual string QuoteIdentifier(string unquotedIdentifier)
{
throw ADP.NotSupported();
}
public virtual void RefreshSchema()
{
_dbSchemaTable = null;
_dbSchemaRows = null;
_sourceColumnNames = null;
_quotedBaseTableName = null;
DbDataAdapter adapter = DataAdapter;
if (null != adapter)
{
if (InsertCommand == adapter.InsertCommand)
{
adapter.InsertCommand = null;
}
if (UpdateCommand == adapter.UpdateCommand)
{
adapter.UpdateCommand = null;
}
if (DeleteCommand == adapter.DeleteCommand)
{
adapter.DeleteCommand = null;
}
}
DbCommand command;
if (null != (command = InsertCommand))
{
command.Dispose();
}
if (null != (command = UpdateCommand))
{
command.Dispose();
}
if (null != (command = DeleteCommand))
{
command.Dispose();
}
InsertCommand = null;
UpdateCommand = null;
DeleteCommand = null;
}
private static void RemoveExtraParameters(DbCommand command, int usedParameterCount)
{
for (int i = command.Parameters.Count - 1; i >= usedParameterCount; --i)
{
command.Parameters.RemoveAt(i);
}
}
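// Called from the adapter's RowUpdating event: when the event carries no command, or carries the
// builder's own cached command for that statement type, a fresh command is generated for the row via
// RowUpdatingHandlerBuilder; a user-supplied command is left alone (only its connection is fixed up).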
protected void RowUpdatingHandler(RowUpdatingEventArgs rowUpdatingEvent)
{
if (null == rowUpdatingEvent)
{
throw ADP.ArgumentNull(nameof(rowUpdatingEvent));
}
try
{
if (UpdateStatus.Continue == rowUpdatingEvent.Status)
{
StatementType stmtType = rowUpdatingEvent.StatementType;
DbCommand command = (DbCommand)rowUpdatingEvent.Command;
if (null != command)
{
switch (stmtType)
{
case StatementType.Select:
Debug.Assert(false, "how did we get here?");
return; // don't mess with it
case StatementType.Insert:
command = InsertCommand;
break;
case StatementType.Update:
command = UpdateCommand;
break;
case StatementType.Delete:
command = DeleteCommand;
break;
default:
throw ADP.InvalidStatementType(stmtType);
}
if (command != rowUpdatingEvent.Command)
{
command = (DbCommand)rowUpdatingEvent.Command;
if ((null != command) && (null == command.Connection))
{
DbDataAdapter adapter = DataAdapter;
DbCommand select = ((null != adapter) ? adapter.SelectCommand : null);
if (null != select)
{
command.Connection = select.Connection;
}
}
// user command, not a command builder command
}
else command = null;
}
if (null == command)
{
RowUpdatingHandlerBuilder(rowUpdatingEvent);
}
}
}
catch (Exception e) when (ADP.IsCatchableExceptionType(e))
{
ADP.TraceExceptionForCapture(e);
rowUpdatingEvent.Status = UpdateStatus.ErrorsOccurred;
rowUpdatingEvent.Errors = e;
}
}
private void RowUpdatingHandlerBuilder(RowUpdatingEventArgs rowUpdatingEvent)
{
// The adapter's Update method will close the connection if the event's command was null and the returned command's Connection is the same as SelectCommand.Connection.
DataRow datarow = rowUpdatingEvent.Row;
BuildCache(false, datarow, false);
DbCommand command;
switch (rowUpdatingEvent.StatementType)
{
case StatementType.Insert:
command = BuildInsertCommand(rowUpdatingEvent.TableMapping, datarow);
break;
case StatementType.Update:
command = BuildUpdateCommand(rowUpdatingEvent.TableMapping, datarow);
break;
case StatementType.Delete:
command = BuildDeleteCommand(rowUpdatingEvent.TableMapping, datarow);
break;
#if DEBUG
case StatementType.Select:
Debug.Assert(false, "how did we get here?");
goto default;
#endif
default:
throw ADP.InvalidStatementType(rowUpdatingEvent.StatementType);
}
if (null == command)
{
if (null != datarow)
{
datarow.AcceptChanges();
}
rowUpdatingEvent.Status = UpdateStatus.SkipCurrentRow;
}
rowUpdatingEvent.Command = command;
}
public virtual string UnquoteIdentifier(string quotedIdentifier)
{
throw ADP.NotSupported();
}
protected abstract void ApplyParameterInfo(DbParameter parameter, DataRow row, StatementType statementType, bool whereClause);
protected abstract string GetParameterName(int parameterOrdinal);
protected abstract string GetParameterName(string parameterName);
protected abstract string GetParameterPlaceholder(int parameterOrdinal);
protected abstract void SetRowUpdatingHandler(DbDataAdapter adapter);
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void AlignRightSByte0()
{
var test = new ImmBinaryOpTest__AlignRightSByte0();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ImmBinaryOpTest__AlignRightSByte0
{
private struct TestStruct
{
public Vector128<SByte> _fld1;
public Vector128<SByte> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref testStruct._fld1), ref Unsafe.As<SByte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref testStruct._fld2), ref Unsafe.As<SByte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
return testStruct;
}
public void RunStructFldScenario(ImmBinaryOpTest__AlignRightSByte0 testClass)
{
var result = Ssse3.AlignRight(_fld1, _fld2, 0);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<SByte>>() / sizeof(SByte);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<SByte>>() / sizeof(SByte);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<SByte>>() / sizeof(SByte);
private static SByte[] _data1 = new SByte[Op1ElementCount];
private static SByte[] _data2 = new SByte[Op2ElementCount];
private static Vector128<SByte> _clsVar1;
private static Vector128<SByte> _clsVar2;
private Vector128<SByte> _fld1;
private Vector128<SByte> _fld2;
private SimpleBinaryOpTest__DataTable<SByte, SByte, SByte> _dataTable;
static ImmBinaryOpTest__AlignRightSByte0()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar1), ref Unsafe.As<SByte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar2), ref Unsafe.As<SByte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
}
public ImmBinaryOpTest__AlignRightSByte0()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld1), ref Unsafe.As<SByte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSByte(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld2), ref Unsafe.As<SByte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<SByte>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSByte(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSByte(); }
_dataTable = new SimpleBinaryOpTest__DataTable<SByte, SByte, SByte>(_data1, _data2, new SByte[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Ssse3.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Ssse3.AlignRight(
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr),
0
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Ssse3.AlignRight(
Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr)),
0
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Ssse3.AlignRight(
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr)),
0
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.AlignRight), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr),
(byte)0
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.AlignRight), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr)),
(byte)0
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.AlignRight), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr)),
(byte)0
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Ssse3.AlignRight(
_clsVar1,
_clsVar2,
0
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var left = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr);
var result = Ssse3.AlignRight(left, right, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var left = Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr));
var right = Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr));
var result = Ssse3.AlignRight(left, right, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var left = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr));
var right = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr));
var result = Ssse3.AlignRight(left, right, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmBinaryOpTest__AlignRightSByte0();
var result = Ssse3.AlignRight(test._fld1, test._fld2, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Ssse3.AlignRight(_fld1, _fld2, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Ssse3.AlignRight(test._fld1, test._fld2, 0);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
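// With an immediate of 0, AlignRight concatenates the two vectors and shifts right by 0 bytes, so the
// result is expected to equal the second operand; the validation below compares the result against 'right'.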
private void ValidateResult(Vector128<SByte> left, Vector128<SByte> right, void* result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
SByte[] outArray = new SByte[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<SByte, byte>(ref inArray1[0]), left);
Unsafe.WriteUnaligned(ref Unsafe.As<SByte, byte>(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<SByte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
SByte[] outArray = new SByte[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector128<SByte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector128<SByte>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<SByte>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(SByte[] left, SByte[] right, SByte[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != right[0])
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != right[i])
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Ssse3)}.{nameof(Ssse3.AlignRight)}<SByte>(Vector128<SByte>.0, Vector128<SByte>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
// Amplify Shader Editor - Visual Shader Editing Tool
// Copyright (c) Amplify Creations, Lda <[email protected]>
using UnityEngine;
using UnityEditor;
using System;
using System.Collections.Generic;
namespace AmplifyShaderEditor
{
public enum OutlineMode
{
VertexOffset,
VertexScale
}
[Serializable]
public sealed class OutlineOpHelper
{
private readonly string[] OutlineBodyBegin = { "Tags{ }",
"Cull Front",
"CGPROGRAM",
"#pragma target 3.0",
"#pragma surface outlineSurf Standard keepalpha noshadow noambient novertexlights nolightmap nodynlightmap nodirlightmap nofog nometa noforwardadd vertex:outlineVertexDataFunc"
};
private readonly string[] OutlineBodyStruct = {"struct Input",
"{",
"\tfixed filler;",
"};",
};
private readonly string[] OutlineBodyDefaultBegin = {
"uniform fixed4 _ASEOutlineColor;",
"uniform fixed _ASEOutlineWidth;",
"void outlineVertexDataFunc( inout appdata_full v, out Input o )",
"{",
"\tUNITY_INITIALIZE_OUTPUT( Input, o );" };
private readonly string[] OutlineBodyDefaultEnd = {
"}",
"void outlineSurf( Input i, inout SurfaceOutputStandard o ) { o.Emission = _ASEOutlineColor.rgb; o.Alpha = 1; }",
"ENDCG",
"\n"};
private const string OutlineInstancedHeader = "#pragma multi_compile_instancing";
private readonly string[] OutlineBodyInstancedBegin = {
"UNITY_INSTANCING_CBUFFER_START({0})",
"\tUNITY_DEFINE_INSTANCED_PROP( fixed4, _ASEOutlineColor )",
"\tUNITY_DEFINE_INSTANCED_PROP(fixed, _ASEOutlineWidth)",
"UNITY_INSTANCING_CBUFFER_END",
"void outlineVertexDataFunc( inout appdata_full v, out Input o )",
"{",
"\tUNITY_INITIALIZE_OUTPUT( Input, o );" };
private readonly string[] OutlineBodyInstancedEnd = {
"}",
"void outlineSurf( Input i, inout SurfaceOutputStandard o ) { o.Emission = UNITY_ACCESS_INSTANCED_PROP( _ASEOutlineColor ).rgb; o.Alpha = 1; }",
"ENDCG",
"\n"};
private const string WidthVariableAccessInstanced = "UNITY_ACCESS_INSTANCED_PROP( _ASEOutlineWidth )";
private const string OutlineVertexOffsetMode = "\tv.vertex.xyz += ( v.normal * {0} );";
private const string OutlineVertexScaleMode = "\tv.vertex.xyz *= ( 1 + {0});";
private const string OutlineColorLabel = "Color";
private const string OutlineWidthLabel = "Width";
private const string ColorPropertyName = "_ASEOutlineColor";
private const string WidthPropertyName = "_ASEOutlineWidth";
private const string ColorPropertyDec = "_ASEOutlineColor( \"Outline Color\", Color ) = ({0})";
private const string OutlinePropertyDec = "_ASEOutlineWidth( \"Outline Width\", Float ) = {0}";
private const string ModePropertyStr = "Mode";
private const string BillboardInstructionFormat = "\t{0};";
[SerializeField]
private Color m_outlineColor;
[SerializeField]
private float m_outlineWidth;
[SerializeField]
private bool m_enabled;
[SerializeField]
private OutlineMode m_mode = OutlineMode.VertexOffset;
public void Draw( UndoParentNode owner, GUIStyle toolbarstyle, Material mat )
{
Color cachedColor = GUI.color;
GUI.color = new Color( cachedColor.r, cachedColor.g, cachedColor.b, 0.5f );
EditorGUILayout.BeginHorizontal( toolbarstyle );
GUI.color = cachedColor;
EditorVariablesManager.OutlineActiveMode.Value = owner.GUILayoutToggle( EditorVariablesManager.OutlineActiveMode.Value, EditorVariablesManager.OutlineActiveMode.LabelName, UIUtils.MenuItemToggleStyle, GUILayout.ExpandWidth( true ) );
EditorGUI.BeginChangeCheck();
m_enabled = owner.EditorGUILayoutToggle( string.Empty, m_enabled, UIUtils.MenuItemEnableStyle, GUILayout.Width( 16 ) );
if ( EditorGUI.EndChangeCheck() )
{
if ( m_enabled )
UpdateToMaterial( mat );
UIUtils.RequestSave();
}
EditorGUILayout.EndHorizontal();
if ( EditorVariablesManager.OutlineActiveMode.Value )
{
cachedColor = GUI.color;
GUI.color = new Color( cachedColor.r, cachedColor.g, cachedColor.b, ( EditorGUIUtility.isProSkin ? 0.5f : 0.25f ) );
EditorGUILayout.BeginVertical( UIUtils.MenuItemBackgroundStyle );
GUI.color = cachedColor;
EditorGUILayout.Separator();
EditorGUI.BeginDisabledGroup( !m_enabled );
EditorGUI.indentLevel += 1;
{
m_mode = ( OutlineMode ) owner.EditorGUILayoutEnumPopup( ModePropertyStr, m_mode );
EditorGUI.BeginChangeCheck();
m_outlineColor = owner.EditorGUILayoutColorField( OutlineColorLabel, m_outlineColor );
if ( EditorGUI.EndChangeCheck() && mat != null )
{
if ( mat.HasProperty( ColorPropertyName ) )
{
mat.SetColor( ColorPropertyName, m_outlineColor );
}
}
EditorGUI.BeginChangeCheck();
m_outlineWidth = owner.EditorGUILayoutFloatField( OutlineWidthLabel, m_outlineWidth );
if ( EditorGUI.EndChangeCheck() && mat != null )
{
if ( mat.HasProperty( WidthPropertyName ) )
{
mat.SetFloat( WidthPropertyName, m_outlineWidth );
}
}
}
EditorGUI.indentLevel -= 1;
EditorGUI.EndDisabledGroup();
EditorGUILayout.Separator();
EditorGUILayout.EndVertical();
}
}
public void UpdateToMaterial( Material mat )
{
if ( mat == null )
return;
if ( mat.HasProperty( ColorPropertyName ) )
{
mat.SetColor( ColorPropertyName, m_outlineColor );
}
if ( mat.HasProperty( WidthPropertyName ) )
{
mat.SetFloat( WidthPropertyName, m_outlineWidth );
}
}
public void ReadFromString( ref uint index, ref string[] nodeParams )
{
m_enabled = Convert.ToBoolean( nodeParams[ index++ ] );
m_outlineWidth = Convert.ToSingle( nodeParams[ index++ ] );
m_outlineColor = IOUtils.StringToColor( nodeParams[ index++ ] );
if ( UIUtils.CurrentShaderVersion() > 5004 )
m_mode = ( OutlineMode ) Enum.Parse( typeof( OutlineMode ), nodeParams[ index++ ] );
}
public void WriteToString( ref string nodeInfo )
{
IOUtils.AddFieldValueToString( ref nodeInfo, m_enabled );
IOUtils.AddFieldValueToString( ref nodeInfo, m_outlineWidth );
IOUtils.AddFieldValueToString( ref nodeInfo, IOUtils.ColorToString( m_outlineColor ) );
IOUtils.AddFieldValueToString( ref nodeInfo, m_mode );
}
public void AddToDataCollector( ref MasterNodeDataCollector dataCollector )
{
dataCollector.AddToProperties( -1, string.Format( ColorPropertyDec, IOUtils.ColorToString( m_outlineColor ) ), -1 );
dataCollector.AddToProperties( -1, string.Format( OutlinePropertyDec, m_outlineWidth ), -1 );
}
public void UpdateFromMaterial( Material mat )
{
if ( mat.HasProperty( ColorPropertyName ) )
{
m_outlineColor = mat.GetColor( ColorPropertyName );
}
if ( mat.HasProperty( WidthPropertyName ) )
{
m_outlineWidth = mat.GetFloat( WidthPropertyName );
}
}
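// Assembles the extra surface-shader pass that draws the outline: Cull Front plus a vertex function that
// either pushes vertices out along the normal (VertexOffset) or scales them (VertexScale), emitting the
// instanced or non-instanced variant of the outline color/width properties as requested.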
public string[] OutlineFunctionBody( bool instanced, bool isShadowCaster, string shaderName, string[] billboardInfo )
{
List<string> body = new List<string>();
for ( int i = 0; i < OutlineBodyBegin.Length; i++ )
{
body.Add( OutlineBodyBegin[ i ] );
}
if ( instanced )
{
body.Add( OutlineInstancedHeader );
}
if ( !isShadowCaster )
{
for ( int i = 0; i < OutlineBodyStruct.Length; i++ )
{
body.Add( OutlineBodyStruct[ i ] );
}
}
if ( instanced )
{
for ( int i = 0; i < OutlineBodyInstancedBegin.Length; i++ )
{
body.Add( ( i == 0 ) ? string.Format( OutlineBodyInstancedBegin[ i ], shaderName ) : OutlineBodyInstancedBegin[ i ] );
}
if ( ( object ) billboardInfo != null )
{
for ( int j = 0; j < billboardInfo.Length; j++ )
{
body.Add( string.Format( BillboardInstructionFormat, billboardInfo[ j ] ) );
}
}
switch ( m_mode )
{
case OutlineMode.VertexOffset: body.Add( string.Format( OutlineVertexOffsetMode, WidthVariableAccessInstanced ) ); break;
case OutlineMode.VertexScale: body.Add( string.Format( OutlineVertexScaleMode, WidthVariableAccessInstanced ) ); break;
}
for ( int i = 0; i < OutlineBodyInstancedEnd.Length; i++ )
{
body.Add( OutlineBodyInstancedEnd[ i ] );
}
}
else
{
for ( int i = 0; i < OutlineBodyDefaultBegin.Length; i++ )
{
body.Add( OutlineBodyDefaultBegin[ i ] );
}
if ( ( object ) billboardInfo != null )
{
for ( int j = 0; j < billboardInfo.Length; j++ )
{
body.Add( string.Format( BillboardInstructionFormat, billboardInfo[ j ] ) );
}
}
switch ( m_mode )
{
case OutlineMode.VertexOffset: body.Add( string.Format( OutlineVertexOffsetMode, WidthPropertyName ) ); break;
case OutlineMode.VertexScale: body.Add( string.Format( OutlineVertexScaleMode, WidthPropertyName ) ); break;
}
for ( int i = 0; i < OutlineBodyDefaultEnd.Length; i++ )
{
body.Add( OutlineBodyDefaultEnd[ i ] );
}
}
string[] bodyArr = body.ToArray();
body.Clear();
body = null;
return bodyArr;
}
public bool EnableOutline { get { return m_enabled; } }
}
}
#if !BESTHTTP_DISABLE_ALTERNATE_SSL && (!UNITY_WEBGL || UNITY_EDITOR)
using System;
namespace Org.BouncyCastle.Math.EC.Custom.Sec
{
internal class SecT163R2Point
: AbstractF2mPoint
{
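// Point arithmetic for the sect163r2 binary curve using lambda-projective coordinates: the raw Y field
// stores Lambda = X + Y/X (see YCoord), with Z as the projective denominator.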
/**
* @deprecated Use ECCurve.createPoint to construct points
*/
public SecT163R2Point(ECCurve curve, ECFieldElement x, ECFieldElement y)
: this(curve, x, y, false)
{
}
/**
* @deprecated per-point compression property will be removed, refer {@link #getEncoded(bool)}
*/
public SecT163R2Point(ECCurve curve, ECFieldElement x, ECFieldElement y, bool withCompression)
: base(curve, x, y, withCompression)
{
if ((x == null) != (y == null))
throw new ArgumentException("Exactly one of the field elements is null");
}
internal SecT163R2Point(ECCurve curve, ECFieldElement x, ECFieldElement y, ECFieldElement[] zs, bool withCompression)
: base(curve, x, y, zs, withCompression)
{
}
protected override ECPoint Detach()
{
return new SecT163R2Point(null, AffineXCoord, AffineYCoord);
}
public override ECFieldElement YCoord
{
get
{
ECFieldElement X = RawXCoord, L = RawYCoord;
if (this.IsInfinity || X.IsZero)
return L;
// Y is actually Lambda (X + Y/X) here; convert to affine value on the fly
ECFieldElement Y = L.Add(X).Multiply(X);
ECFieldElement Z = RawZCoords[0];
if (!Z.IsOne)
{
Y = Y.Divide(Z);
}
return Y;
}
}
protected internal override bool CompressionYTilde
{
get
{
ECFieldElement X = this.RawXCoord;
if (X.IsZero)
return false;
ECFieldElement Y = this.RawYCoord;
// Y is actually Lambda (X + Y/X) here
return Y.TestBitZero() != X.TestBitZero();
}
}
public override ECPoint Add(ECPoint b)
{
if (this.IsInfinity)
return b;
if (b.IsInfinity)
return this;
ECCurve curve = this.Curve;
ECFieldElement X1 = this.RawXCoord;
ECFieldElement X2 = b.RawXCoord;
if (X1.IsZero)
{
if (X2.IsZero)
return curve.Infinity;
return b.Add(this);
}
ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];
ECFieldElement L2 = b.RawYCoord, Z2 = b.RawZCoords[0];
bool Z1IsOne = Z1.IsOne;
ECFieldElement U2 = X2, S2 = L2;
if (!Z1IsOne)
{
U2 = U2.Multiply(Z1);
S2 = S2.Multiply(Z1);
}
bool Z2IsOne = Z2.IsOne;
ECFieldElement U1 = X1, S1 = L1;
if (!Z2IsOne)
{
U1 = U1.Multiply(Z2);
S1 = S1.Multiply(Z2);
}
ECFieldElement A = S1.Add(S2);
ECFieldElement B = U1.Add(U2);
if (B.IsZero)
{
if (A.IsZero)
{
return Twice();
}
return curve.Infinity;
}
ECFieldElement X3, L3, Z3;
if (X2.IsZero)
{
// TODO This can probably be optimized quite a bit
ECPoint p = this.Normalize();
X1 = p.XCoord;
ECFieldElement Y1 = p.YCoord;
ECFieldElement Y2 = L2;
ECFieldElement L = Y1.Add(Y2).Divide(X1);
//X3 = L.Square().Add(L).Add(X1).Add(curve.A);
X3 = L.Square().Add(L).Add(X1).AddOne();
if (X3.IsZero)
{
return new SecT163R2Point(curve, X3, curve.B.Sqrt(), IsCompressed);
}
ECFieldElement Y3 = L.Multiply(X1.Add(X3)).Add(X3).Add(Y1);
L3 = Y3.Divide(X3).Add(X3);
Z3 = curve.FromBigInteger(BigInteger.One);
}
else
{
B = B.Square();
ECFieldElement AU1 = A.Multiply(U1);
ECFieldElement AU2 = A.Multiply(U2);
X3 = AU1.Multiply(AU2);
if (X3.IsZero)
{
return new SecT163R2Point(curve, X3, curve.B.Sqrt(), IsCompressed);
}
ECFieldElement ABZ2 = A.Multiply(B);
if (!Z2IsOne)
{
ABZ2 = ABZ2.Multiply(Z2);
}
L3 = AU2.Add(B).SquarePlusProduct(ABZ2, L1.Add(Z1));
Z3 = ABZ2;
if (!Z1IsOne)
{
Z3 = Z3.Multiply(Z1);
}
}
return new SecT163R2Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
}
public override ECPoint Twice()
{
if (this.IsInfinity)
{
return this;
}
ECCurve curve = this.Curve;
ECFieldElement X1 = this.RawXCoord;
if (X1.IsZero)
{
// A point with X == 0 is its own additive inverse
return curve.Infinity;
}
ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];
bool Z1IsOne = Z1.IsOne;
ECFieldElement L1Z1 = Z1IsOne ? L1 : L1.Multiply(Z1);
ECFieldElement Z1Sq = Z1IsOne ? Z1 : Z1.Square();
ECFieldElement T = L1.Square().Add(L1Z1).Add(Z1Sq);
if (T.IsZero)
{
return new SecT163R2Point(curve, T, curve.B.Sqrt(), IsCompressed);
}
ECFieldElement X3 = T.Square();
ECFieldElement Z3 = Z1IsOne ? T : T.Multiply(Z1Sq);
ECFieldElement X1Z1 = Z1IsOne ? X1 : X1.Multiply(Z1);
ECFieldElement L3 = X1Z1.SquarePlusProduct(T, L1Z1).Add(X3).Add(Z3);
return new SecT163R2Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
}
public override ECPoint TwicePlus(ECPoint b)
{
if (this.IsInfinity)
{
return b;
}
if (b.IsInfinity)
{
return Twice();
}
ECCurve curve = this.Curve;
ECFieldElement X1 = this.RawXCoord;
if (X1.IsZero)
{
// A point with X == 0 is its own additive inverse
return b;
}
ECFieldElement X2 = b.RawXCoord, Z2 = b.RawZCoords[0];
if (X2.IsZero || !Z2.IsOne)
{
return Twice().Add(b);
}
ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];
ECFieldElement L2 = b.RawYCoord;
ECFieldElement X1Sq = X1.Square();
ECFieldElement L1Sq = L1.Square();
ECFieldElement Z1Sq = Z1.Square();
ECFieldElement L1Z1 = L1.Multiply(Z1);
//ECFieldElement T = curve.A.Multiply(Z1Sq).Add(L1Sq).Add(L1Z1);
ECFieldElement T = Z1Sq.Add(L1Sq).Add(L1Z1);
ECFieldElement L2plus1 = L2.AddOne();
//ECFieldElement A = curve.A.Add(L2plus1).Multiply(Z1Sq).Add(L1Sq).MultiplyPlusProduct(T, X1Sq, Z1Sq);
ECFieldElement A = L2.Multiply(Z1Sq).Add(L1Sq).MultiplyPlusProduct(T, X1Sq, Z1Sq);
ECFieldElement X2Z1Sq = X2.Multiply(Z1Sq);
ECFieldElement B = X2Z1Sq.Add(T).Square();
if (B.IsZero)
{
if (A.IsZero)
return b.Twice();
return curve.Infinity;
}
if (A.IsZero)
{
return new SecT163R2Point(curve, A, curve.B.Sqrt(), IsCompressed);
}
ECFieldElement X3 = A.Square().Multiply(X2Z1Sq);
ECFieldElement Z3 = A.Multiply(B).Multiply(Z1Sq);
ECFieldElement L3 = A.Add(B).Square().MultiplyPlusProduct(T, L2plus1, Z3);
return new SecT163R2Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
}
public override ECPoint Negate()
{
if (this.IsInfinity)
return this;
ECFieldElement X = this.RawXCoord;
if (X.IsZero)
return this;
// L is actually Lambda (X + Y/X) here
ECFieldElement L = this.RawYCoord, Z = this.RawZCoords[0];
return new SecT163R2Point(Curve, X, L.Add(Z), new ECFieldElement[] { Z }, IsCompressed);
}
}
}
#endif
using Amazon.DynamoDBv2;
using Amazon.DynamoDBv2.Model;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Internal;
using Orleans.Runtime;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
namespace Orleans.Reminders.DynamoDB
{
/// <summary>
/// Implementation for IReminderTable using DynamoDB as underlying storage.
/// </summary>
internal class DynamoDBReminderTable : IReminderTable
{
private const string GRAIN_REFERENCE_PROPERTY_NAME = "GrainReference";
private const string REMINDER_NAME_PROPERTY_NAME = "ReminderName";
private const string SERVICE_ID_PROPERTY_NAME = "ServiceId";
private const string START_TIME_PROPERTY_NAME = "StartTime";
private const string PERIOD_PROPERTY_NAME = "Period";
private const string GRAIN_HASH_PROPERTY_NAME = "GrainHash";
private const string REMINDER_ID_PROPERTY_NAME = "ReminderId";
private const string ETAG_PROPERTY_NAME = "ETag";
private const string CURRENT_ETAG_ALIAS = ":currentETag";
private const string SERVICE_ID_INDEX = "ServiceIdIndex";
private readonly ILogger logger;
private readonly GrainReferenceKeyStringConverter grainReferenceConverter;
private readonly DynamoDBReminderStorageOptions options;
private readonly string serviceId;
private DynamoDBStorage storage;
/// <summary>Initializes a new instance of the <see cref="DynamoDBReminderTable"/> class.</summary>
/// <param name="grainReferenceConverter">The grain reference key string converter.</param>
/// <param name="loggerFactory">logger factory to use</param>
/// <param name="clusterOptions"></param>
/// <param name="storageOptions"></param>
public DynamoDBReminderTable(
GrainReferenceKeyStringConverter grainReferenceConverter,
ILoggerFactory loggerFactory,
IOptions<ClusterOptions> clusterOptions,
IOptions<DynamoDBReminderStorageOptions> storageOptions)
{
this.grainReferenceConverter = grainReferenceConverter;
this.logger = loggerFactory.CreateLogger<DynamoDBReminderTable>();
this.serviceId = clusterOptions.Value.ServiceId;
this.options = storageOptions.Value;
}
/// <summary>Initializes the underlying DynamoDB storage and ensures the reminder table and its service-id index exist.</summary>
public Task Init()
{
this.storage = new DynamoDBStorage(this.logger, this.options.Service, this.options.AccessKey, this.options.SecretKey,
this.options.Token, this.options.ProfileName, this.options.ReadCapacityUnits, this.options.WriteCapacityUnits);
this.logger.Info(ErrorCode.ReminderServiceBase, "Initializing AWS DynamoDB Reminders Table");
var secondaryIndex = new GlobalSecondaryIndex
{
IndexName = SERVICE_ID_INDEX,
Projection = new Projection { ProjectionType = ProjectionType.ALL },
KeySchema = new List<KeySchemaElement>
{
new KeySchemaElement { AttributeName = SERVICE_ID_PROPERTY_NAME, KeyType = KeyType.HASH},
new KeySchemaElement { AttributeName = GRAIN_HASH_PROPERTY_NAME, KeyType = KeyType.RANGE }
}
};
return this.storage.InitializeTable(this.options.TableName,
new List<KeySchemaElement>
{
new KeySchemaElement { AttributeName = REMINDER_ID_PROPERTY_NAME, KeyType = KeyType.HASH },
new KeySchemaElement { AttributeName = GRAIN_HASH_PROPERTY_NAME, KeyType = KeyType.RANGE }
},
new List<AttributeDefinition>
{
new AttributeDefinition { AttributeName = REMINDER_ID_PROPERTY_NAME, AttributeType = ScalarAttributeType.S },
new AttributeDefinition { AttributeName = GRAIN_HASH_PROPERTY_NAME, AttributeType = ScalarAttributeType.N },
new AttributeDefinition { AttributeName = SERVICE_ID_PROPERTY_NAME, AttributeType = ScalarAttributeType.S }
},
new List<GlobalSecondaryIndex> { secondaryIndex });
}
/// <summary>
/// Reads a single reminder row for a grain reference, identified by reminder name.
/// </summary>
/// <param name="grainRef"> grain ref to locate the row </param>
/// <param name="reminderName"> reminder name to locate the row </param>
/// <returns> Return the ReminderEntry if the row was read successfully </returns>
public async Task<ReminderEntry> ReadRow(GrainReference grainRef, string reminderName)
{
var reminderId = ConstructReminderId(this.serviceId, grainRef, reminderName);
var keys = new Dictionary<string, AttributeValue>
{
{ $"{REMINDER_ID_PROPERTY_NAME}", new AttributeValue(reminderId) },
{ $"{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = grainRef.GetUniformHashCode().ToString() } }
};
try
{
return await this.storage.ReadSingleEntryAsync(this.options.TableName, keys, this.Resolve).ConfigureAwait(false);
}
catch (Exception exc)
{
this.logger.Warn(ErrorCode.ReminderServiceBase,
$"Intermediate error reading reminder entry {Utils.DictionaryToString(keys)} from table {this.options.TableName}.", exc);
throw;
}
}
/// <summary>
/// Reads all reminder rows for the given grain reference.
/// </summary>
/// <param name="grainRef">grain ref to locate the row </param>
/// <returns> Return the ReminderTableData if the rows were read successfully </returns>
public async Task<ReminderTableData> ReadRows(GrainReference grainRef)
{
var expressionValues = new Dictionary<string, AttributeValue>
{
{ $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) },
{ $":{GRAIN_REFERENCE_PROPERTY_NAME}", new AttributeValue(grainRef.ToKeyString()) }
};
try
{
var expression = $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND {GRAIN_REFERENCE_PROPERTY_NAME} = :{GRAIN_REFERENCE_PROPERTY_NAME}";
var records = await this.storage.ScanAsync(this.options.TableName, expressionValues, expression, this.Resolve).ConfigureAwait(false);
return new ReminderTableData(records);
}
catch (Exception exc)
{
this.logger.Warn(ErrorCode.ReminderServiceBase,
$"Intermediate error reading reminder entry {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
throw;
}
}
/// <summary>
/// Reads reminder table data for a given hash range.
/// </summary>
/// <param name="beginHash"></param>
/// <param name="endHash"></param>
/// <returns> Return the ReminderTableData if the rows were read successfully </returns>
public async Task<ReminderTableData> ReadRows(uint beginHash, uint endHash)
{
var expressionValues = new Dictionary<string, AttributeValue>
{
{ $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) },
{ $":Begin{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = beginHash.ToString() } },
{ $":End{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = endHash.ToString() } }
};
try
{
string expression = string.Empty;
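// The grain hash space is a ring: when beginHash >= endHash the requested range wraps around,
// so the filter uses OR instead of a simple between.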
if (beginHash < endHash)
{
expression = $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND {GRAIN_HASH_PROPERTY_NAME} > :Begin{GRAIN_HASH_PROPERTY_NAME} AND {GRAIN_HASH_PROPERTY_NAME} <= :End{GRAIN_HASH_PROPERTY_NAME}";
}
else
{
expression = $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME} AND ({GRAIN_HASH_PROPERTY_NAME} > :Begin{GRAIN_HASH_PROPERTY_NAME} OR {GRAIN_HASH_PROPERTY_NAME} <= :End{GRAIN_HASH_PROPERTY_NAME})";
}
var records = await this.storage.ScanAsync(this.options.TableName, expressionValues, expression, this.Resolve).ConfigureAwait(false);
return new ReminderTableData(records);
}
catch (Exception exc)
{
this.logger.Warn(ErrorCode.ReminderServiceBase,
$"Intermediate error reading reminder entry {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
throw;
}
}
private ReminderEntry Resolve(Dictionary<string, AttributeValue> item)
{
return new ReminderEntry
{
ETag = item[ETAG_PROPERTY_NAME].N,
GrainRef = this.grainReferenceConverter.FromKeyString(item[GRAIN_REFERENCE_PROPERTY_NAME].S),
Period = TimeSpan.Parse(item[PERIOD_PROPERTY_NAME].S),
ReminderName = item[REMINDER_NAME_PROPERTY_NAME].S,
StartAt = DateTime.Parse(item[START_TIME_PROPERTY_NAME].S)
};
}
/// <summary>
/// Remove one row from the reminder table
/// </summary>
/// <param name="grainRef"> specific grain ref to locate the row </param>
/// <param name="reminderName"> reminder name to locate the row </param>
/// <param name="eTag"> the entry's ETag, used for the conditional delete </param>
/// <returns> Return true if the row was removed </returns>
public async Task<bool> RemoveRow(GrainReference grainRef, string reminderName, string eTag)
{
var reminderId = ConstructReminderId(this.serviceId, grainRef, reminderName);
var keys = new Dictionary<string, AttributeValue>
{
{ $"{REMINDER_ID_PROPERTY_NAME}", new AttributeValue(reminderId) },
{ $"{GRAIN_HASH_PROPERTY_NAME}", new AttributeValue { N = grainRef.GetUniformHashCode().ToString() } }
};
try
{
var conditionalValues = new Dictionary<string, AttributeValue> { { CURRENT_ETAG_ALIAS, new AttributeValue { N = eTag } } };
var expression = $"{ETAG_PROPERTY_NAME} = {CURRENT_ETAG_ALIAS}";
await this.storage.DeleteEntryAsync(this.options.TableName, keys, expression, conditionalValues).ConfigureAwait(false);
return true;
}
catch (ConditionalCheckFailedException)
{
return false;
}
}
/// <summary>
/// Test hook to clear reminder table data.
/// </summary>
/// <returns></returns>
public async Task TestOnlyClearTable()
{
var expressionValues = new Dictionary<string, AttributeValue>
{
{ $":{SERVICE_ID_PROPERTY_NAME}", new AttributeValue(this.serviceId) }
};
try
{
var expression = $"{SERVICE_ID_PROPERTY_NAME} = :{SERVICE_ID_PROPERTY_NAME}";
var records = await this.storage.ScanAsync(this.options.TableName, expressionValues, expression,
item => new Dictionary<string, AttributeValue>
{
{ REMINDER_ID_PROPERTY_NAME, item[REMINDER_ID_PROPERTY_NAME] },
{ GRAIN_HASH_PROPERTY_NAME, item[GRAIN_HASH_PROPERTY_NAME] }
}).ConfigureAwait(false);
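// DynamoDB batch-write requests are limited to 25 items, so larger result sets are deleted in batches of 25.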
if (records.Count <= 25)
{
await this.storage.DeleteEntriesAsync(this.options.TableName, records);
}
else
{
List<Task> tasks = new List<Task>();
foreach (var batch in records.BatchIEnumerable(25))
{
tasks.Add(this.storage.DeleteEntriesAsync(this.options.TableName, batch));
}
await Task.WhenAll(tasks);
}
}
catch (Exception exc)
{
this.logger.Warn(ErrorCode.ReminderServiceBase,
$"Intermediate error removing reminder entries {Utils.DictionaryToString(expressionValues)} from table {this.options.TableName}.", exc);
throw;
}
}
/// <summary>
/// Async method to put an entry into the reminder table
/// </summary>
/// <param name="entry"> The entry to put </param>
/// <returns> Return the entry ETag if entry was upsert successfully </returns>
public async Task<string> UpsertRow(ReminderEntry entry)
{
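// A freshly generated random integer is written as the ETag on every upsert; RemoveRow later uses it
// for the conditional (optimistic-concurrency) delete.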
var reminderId = ConstructReminderId(this.serviceId, entry.GrainRef, entry.ReminderName);
var fields = new Dictionary<string, AttributeValue>
{
{ REMINDER_ID_PROPERTY_NAME, new AttributeValue(reminderId) },
{ GRAIN_HASH_PROPERTY_NAME, new AttributeValue { N = entry.GrainRef.GetUniformHashCode().ToString() } },
{ SERVICE_ID_PROPERTY_NAME, new AttributeValue(this.serviceId) },
{ GRAIN_REFERENCE_PROPERTY_NAME, new AttributeValue( entry.GrainRef.ToKeyString()) },
{ PERIOD_PROPERTY_NAME, new AttributeValue(entry.Period.ToString()) },
{ START_TIME_PROPERTY_NAME, new AttributeValue(entry.StartAt.ToString()) },
{ REMINDER_NAME_PROPERTY_NAME, new AttributeValue(entry.ReminderName) },
{ ETAG_PROPERTY_NAME, new AttributeValue { N = ThreadSafeRandom.Next().ToString() } }
};
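            // Note: a fresh random numeric ETag is generated for every upsert; callers should rely on the value
            // returned by this method (rather than a previously cached ETag) for later conditional removes.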
try
{
if (this.logger.IsEnabled(LogLevel.Debug)) this.logger.Debug("UpsertRow entry = {0}, etag = {1}", entry.ToString(), entry.ETag);
await this.storage.PutEntryAsync(this.options.TableName, fields);
entry.ETag = fields[ETAG_PROPERTY_NAME].N;
return entry.ETag;
}
catch (Exception exc)
{
this.logger.Warn(ErrorCode.ReminderServiceBase,
$"Intermediate error updating entry {entry.ToString()} to the table {this.options.TableName}.", exc);
throw;
}
}
private static string ConstructReminderId(string serviceId, GrainReference grainRef, string reminderName)
{
return $"{serviceId}_{grainRef.ToKeyString()}_{reminderName}";
}
}
}
| |
namespace FakeItEasy.Specs
{
using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using FakeItEasy.Configuration;
using FakeItEasy.Tests.TestHelpers;
using FluentAssertions;
using Xbehave;
using Xunit;
public static class AssertingCallCountSpecs
{
public interface IFoo
{
void Method();
int Property { get; set; }
}
public class CallCountAsserter
{
public int NumberOfCalls { get; }
private readonly Expression<Action<IAssertConfiguration>> assertion;
public string AssertionDescription => this.assertion.Body.ToString();
public string AssertionError { get; }
public CallCountAsserter(int numberOfCalls, Expression<Action<IAssertConfiguration>> assertion, string assertionError = "")
{
this.NumberOfCalls = numberOfCalls;
this.assertion = assertion;
this.AssertionError = assertionError;
}
public void AssertCallCount(IAssertConfiguration configuration)
{
this.assertion.Compile().Invoke(configuration);
}
public override string ToString()
{
return $"{this.NumberOfCalls} calls, asserting {this.AssertionDescription}";
}
}
public class CountableCall
{
private readonly Func<IFoo, IAssertConfiguration> callSpecifier;
private readonly string description;
private readonly Delegate invocation;
public CountableCall(Expression<Func<IFoo, IAssertConfiguration>> callSpecifier, Action<IFoo> invocation)
{
this.callSpecifier = callSpecifier.Compile();
this.description = callSpecifier.Body.ToString();
this.invocation = invocation;
}
public CountableCall(Expression<Func<IFoo, IAssertConfiguration>> callSpecifier, Func<IFoo, int> invocation)
{
this.callSpecifier = callSpecifier.Compile();
this.description = callSpecifier.Body.ToString();
this.invocation = invocation;
}
public override string ToString() => this.description;
public IAssertConfiguration BeginAssertion(IFoo fake)
{
return this.callSpecifier.Invoke(fake);
}
public void Invoke(IFoo fake)
{
this.invocation.DynamicInvoke(fake);
}
}
[Scenario]
[MemberData(nameof(MatchingTestCases))]
public static void CallCountConstraintWithMatchingNumberOfCalls(CountableCall call, CallCountAsserter callCountAsserter, IFoo fake, Exception exception)
{
"Given a Fake"
.x(() => fake = A.Fake<IFoo>());
$"And I make {callCountAsserter.NumberOfCalls} calls to the fake"
.x(() =>
{
for (int i = 0; i < callCountAsserter.NumberOfCalls; ++i)
{
call.Invoke(fake);
}
});
$"When I assert {callCountAsserter.AssertionDescription}"
.x(() => exception = Record.Exception(() => callCountAsserter.AssertCallCount(call.BeginAssertion(fake))));
"Then the assertion passes"
.x(() => exception.Should().BeNull());
}
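        // For illustration, one expansion of the scenario above: the CountableCall built from
        // A.CallTo(() => fake.Method()) paired with CallCountAsserter(2, call => call.MustHaveHappenedTwiceExactly())
        // invokes fake.Method() twice and then asserts MustHaveHappenedTwiceExactly(), which passes.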
[Scenario]
[MemberData(nameof(NonMatchingTestCases))]
public static void CallCountConstraintWithNonMatchingNumberOfCalls(CountableCall call, CallCountAsserter callCountAsserter, IFoo fake, Exception exception)
{
"Given a Fake"
.x(() => fake = A.Fake<IFoo>());
$"And I make {callCountAsserter.NumberOfCalls} calls to the fake"
.x(() =>
{
for (int i = 0; i < callCountAsserter.NumberOfCalls; ++i)
{
call.Invoke(fake);
}
});
$"When I assert {callCountAsserter.AssertionDescription}"
.x(() => exception = Record.Exception(() => callCountAsserter.AssertCallCount(call.BeginAssertion(fake))));
"Then the assertion fails"
.x(() => exception.Should().BeAnExceptionOfType<ExpectationException>()
.WithMessageModuloLineEndings($"*\r\nExpected to find it {callCountAsserter.AssertionError}*"));
}
private static IEnumerable<object[]> MatchingTestCases()
{
foreach (var call in AllCountableCalls())
{
foreach (var assertion in MatchingAssertions())
{
yield return new object[] { call, assertion };
}
}
}
private static IEnumerable<object[]> NonMatchingTestCases()
{
foreach (var call in AllCountableCalls())
{
foreach (var assertion in NonMatchingAssertions())
{
yield return new object[] { call, assertion };
}
}
}
private static IEnumerable<CountableCall> AllCountableCalls()
{
return new[]
{
new CountableCall(fake => A.CallTo(() => fake.Method()), fake => fake.Method()),
new CountableCall(fake => A.CallTo(fake), fake => fake.Method()),
new CountableCall(fake => A.CallTo(fake).WithNonVoidReturnType(), fake => fake.Property),
new CountableCall(fake => A.CallToSet(() => fake.Property), fake => fake.Property = 3),
new CountableCall(fake => A.CallToSet(() => fake.Property).To(7), fake => fake.Property = 7)
};
}
private static IEnumerable<CallCountAsserter> MatchingAssertions()
{
return new[]
{
new CallCountAsserter(1, call => call.MustHaveHappened()),
new CallCountAsserter(2, call => call.MustHaveHappened()),
new CallCountAsserter(0, call => call.MustNotHaveHappened()),
new CallCountAsserter(1, call => call.MustHaveHappenedOnceExactly()),
new CallCountAsserter(1, call => call.MustHaveHappenedOnceOrMore()),
new CallCountAsserter(2, call => call.MustHaveHappenedOnceOrMore()),
new CallCountAsserter(0, call => call.MustHaveHappenedOnceOrLess()),
new CallCountAsserter(1, call => call.MustHaveHappenedOnceOrLess()),
new CallCountAsserter(2, call => call.MustHaveHappenedTwiceExactly()),
new CallCountAsserter(2, call => call.MustHaveHappenedTwiceOrMore()),
new CallCountAsserter(3, call => call.MustHaveHappenedTwiceOrMore()),
new CallCountAsserter(0, call => call.MustHaveHappenedTwiceOrLess()),
new CallCountAsserter(1, call => call.MustHaveHappenedTwiceOrLess()),
new CallCountAsserter(2, call => call.MustHaveHappenedTwiceOrLess()),
new CallCountAsserter(0, call => call.MustHaveHappened(0, Times.Exactly)),
new CallCountAsserter(0, call => call.MustHaveHappened(0, Times.OrMore)),
new CallCountAsserter(1, call => call.MustHaveHappened(0, Times.OrMore)),
new CallCountAsserter(0, call => call.MustHaveHappened(0, Times.OrLess)),
new CallCountAsserter(1, call => call.MustHaveHappened(1, Times.Exactly)),
new CallCountAsserter(1, call => call.MustHaveHappened(1, Times.OrMore)),
new CallCountAsserter(2, call => call.MustHaveHappened(1, Times.OrMore)),
new CallCountAsserter(0, call => call.MustHaveHappened(1, Times.OrLess)),
new CallCountAsserter(1, call => call.MustHaveHappened(1, Times.OrLess)),
new CallCountAsserter(2, call => call.MustHaveHappened(2, Times.Exactly)),
new CallCountAsserter(2, call => call.MustHaveHappened(2, Times.OrMore)),
new CallCountAsserter(3, call => call.MustHaveHappened(2, Times.OrMore)),
new CallCountAsserter(0, call => call.MustHaveHappened(2, Times.OrLess)),
new CallCountAsserter(1, call => call.MustHaveHappened(2, Times.OrLess)),
new CallCountAsserter(2, call => call.MustHaveHappened(2, Times.OrLess)),
new CallCountAsserter(3, call => call.MustHaveHappened(3, Times.Exactly)),
new CallCountAsserter(3, call => call.MustHaveHappened(3, Times.OrMore)),
new CallCountAsserter(4, call => call.MustHaveHappened(3, Times.OrMore)),
new CallCountAsserter(0, call => call.MustHaveHappened(3, Times.OrLess)),
new CallCountAsserter(1, call => call.MustHaveHappened(3, Times.OrLess)),
new CallCountAsserter(2, call => call.MustHaveHappened(3, Times.OrLess)),
new CallCountAsserter(3, call => call.MustHaveHappened(3, Times.OrLess)),
new CallCountAsserter(0, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 0)),
new CallCountAsserter(2, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 0)),
new CallCountAsserter(3, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 3 == 0))
};
}
private static IEnumerable<CallCountAsserter> NonMatchingAssertions()
{
return new[]
{
new CallCountAsserter(0, call => call.MustHaveHappened(), "once or more but no calls were made"),
new CallCountAsserter(1, call => call.MustNotHaveHappened(), "never but found it once"),
new CallCountAsserter(2, call => call.MustNotHaveHappened(), "never but found it twice"),
new CallCountAsserter(3, call => call.MustNotHaveHappened(), "never but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappenedOnceExactly(), "once exactly but no calls were made"),
new CallCountAsserter(2, call => call.MustHaveHappenedOnceExactly(), "once exactly but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappenedOnceExactly(), "once exactly but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappenedOnceOrMore(), "once or more but no calls were made"),
new CallCountAsserter(2, call => call.MustHaveHappenedOnceOrLess(), "once or less but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappenedOnceOrLess(), "once or less but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappenedTwiceExactly(), "twice exactly but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappenedTwiceExactly(), "twice exactly but found it once"),
new CallCountAsserter(3, call => call.MustHaveHappenedTwiceExactly(), "twice exactly but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappenedTwiceOrMore(), "twice or more but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappenedTwiceOrMore(), "twice or more but found it once"),
new CallCountAsserter(3, call => call.MustHaveHappenedTwiceOrLess(), "twice or less but found it 3 times"),
new CallCountAsserter(1, call => call.MustHaveHappened(0, Times.Exactly), "never but found it once"),
new CallCountAsserter(2, call => call.MustHaveHappened(0, Times.Exactly), "never but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappened(0, Times.Exactly), "never but found it 3 times"),
new CallCountAsserter(1, call => call.MustHaveHappened(0, Times.OrLess), "never but found it once"),
new CallCountAsserter(2, call => call.MustHaveHappened(0, Times.OrLess), "never but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappened(0, Times.OrLess), "never but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(1, Times.Exactly), "once exactly but no calls"),
new CallCountAsserter(2, call => call.MustHaveHappened(1, Times.Exactly), "once exactly but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappened(1, Times.Exactly), "once exactly but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(1, Times.OrMore), "once or more but no calls were made"),
new CallCountAsserter(2, call => call.MustHaveHappened(1, Times.OrLess), "once or less but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappened(1, Times.OrLess), "once or less but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(2, Times.Exactly), "twice exactly but no calls"),
new CallCountAsserter(1, call => call.MustHaveHappened(2, Times.Exactly), "twice exactly but found it once"),
new CallCountAsserter(3, call => call.MustHaveHappened(2, Times.Exactly), "twice exactly but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(2, Times.OrMore), "twice or more but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappened(2, Times.OrMore), "twice or more but found it once"),
new CallCountAsserter(3, call => call.MustHaveHappened(2, Times.OrLess), "twice or less but found it 3 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(3, Times.Exactly), "3 times exactly but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappened(3, Times.Exactly), "3 times exactly but found it once"),
new CallCountAsserter(2, call => call.MustHaveHappened(3, Times.Exactly), "3 times exactly but found it twice"),
new CallCountAsserter(4, call => call.MustHaveHappened(3, Times.Exactly), "3 times exactly but found it 4 times"),
new CallCountAsserter(0, call => call.MustHaveHappened(3, Times.OrMore), "3 times or more but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappened(3, Times.OrMore), "3 times or more but found it once"),
new CallCountAsserter(2, call => call.MustHaveHappened(3, Times.OrMore), "3 times or more but found it twice"),
new CallCountAsserter(4, call => call.MustHaveHappened(3, Times.OrLess), "3 times or less but found it 4 times"),
new CallCountAsserter(0, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 1), "a number of times matching the predicate 'n => ((n % 2) == 1)' but no calls were made"),
new CallCountAsserter(1, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 0), "a number of times matching the predicate 'n => ((n % 2) == 0)' but found it once"),
new CallCountAsserter(2, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 1), "a number of times matching the predicate 'n => ((n % 2) == 1)' but found it twice"),
new CallCountAsserter(3, call => call.MustHaveHappenedANumberOfTimesMatching(n => n % 2 == 0), "a number of times matching the predicate 'n => ((n % 2) == 0)' but found it 3 times")
};
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Messaging.EventHubs.Core;
using Azure.Messaging.EventHubs.Diagnostics;
namespace Azure.Messaging.EventHubs.Primitives
{
/// <summary>
/// Handles all load balancing concerns for an event processor including claiming, stealing, and relinquishing ownership.
/// </summary>
///
internal class PartitionLoadBalancer
{
/// <summary>The random number generator to use for a specific thread.</summary>
private static readonly ThreadLocal<Random> RandomNumberGenerator = new ThreadLocal<Random>(() => new Random(Interlocked.Increment(ref s_randomSeed)), false);
        /// <summary>The seed to use for initializing the random number generator for a given thread-specific instance.</summary>
private static int s_randomSeed = Environment.TickCount;
/// <summary>
/// Responsible for creation of checkpoints and for ownership claim.
/// </summary>
///
private readonly StorageManager StorageManager;
/// <summary>
        /// A partition distribution dictionary, mapping an owner's identifier to the list of partitions it owns.
/// </summary>
///
private readonly Dictionary<string, List<EventProcessorPartitionOwnership>> ActiveOwnershipWithDistribution = new Dictionary<string, List<EventProcessorPartitionOwnership>>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// The fully qualified Event Hubs namespace that the processor is associated with. This is likely
/// to be similar to <c>{yournamespace}.servicebus.windows.net</c>.
/// </summary>
///
public string FullyQualifiedNamespace { get; }
/// <summary>
/// The name of the Event Hub that the processor is connected to, specific to the
/// Event Hubs namespace that contains it.
/// </summary>
///
public string EventHubName { get; }
/// <summary>
/// The name of the consumer group this load balancer is associated with. Events will be
/// read only in the context of this group.
/// </summary>
///
public string ConsumerGroup { get; }
/// <summary>
/// The identifier of the EventProcessorClient that owns this load balancer.
/// </summary>
///
public string OwnerIdentifier { get; }
/// <summary>
/// The minimum amount of time for an ownership to be considered expired without further updates.
/// </summary>
///
public TimeSpan OwnershipExpirationInterval { get; }
/// <summary>
        /// The minimum amount of time that must elapse between two load balancing verifications.
/// </summary>
///
public TimeSpan LoadBalanceInterval { get; internal set; }
/// <summary>
/// Indicates whether the load balancer believes itself to be in a balanced state
/// when considering its fair share of partitions and whether any partitions
/// remain unclaimed.
/// </summary>
///
public virtual bool IsBalanced { get; private set; }
/// <summary>
/// The partitionIds currently owned by the associated event processor.
/// </summary>
///
public virtual IEnumerable<string> OwnedPartitionIds => InstanceOwnership.Keys;
/// <summary>
/// The instance of <see cref="PartitionLoadBalancerEventSource" /> which can be mocked for testing.
/// </summary>
///
internal PartitionLoadBalancerEventSource Logger { get; set; } = PartitionLoadBalancerEventSource.Log;
/// <summary>
/// The set of partition ownership the associated event processor owns. Partition ids are used as keys.
/// </summary>
///
private Dictionary<string, EventProcessorPartitionOwnership> InstanceOwnership { get; set; } = new Dictionary<string, EventProcessorPartitionOwnership>();
/// <summary>
/// Initializes a new instance of the <see cref="PartitionLoadBalancer" /> class.
/// </summary>
///
/// <param name="storageManager">Responsible for creation of checkpoints and for ownership claim.</param>
/// <param name="identifier">The identifier of the EventProcessorClient that owns this load balancer.</param>
/// <param name="consumerGroup">The name of the consumer group this load balancer is associated with.</param>
/// <param name="fullyQualifiedNamespace">The fully qualified Event Hubs namespace that the processor is associated with.</param>
/// <param name="eventHubName">The name of the Event Hub that the processor is associated with.</param>
/// <param name="ownershipExpirationInterval">The minimum amount of time for an ownership to be considered expired without further updates.</param>
/// <param name="loadBalancingInterval">The minimum amount of time to be elapsed between two load balancing verifications.</param>
///
public PartitionLoadBalancer(StorageManager storageManager,
string identifier,
string consumerGroup,
string fullyQualifiedNamespace,
string eventHubName,
TimeSpan ownershipExpirationInterval,
TimeSpan loadBalancingInterval)
{
Argument.AssertNotNull(storageManager, nameof(storageManager));
Argument.AssertNotNullOrEmpty(identifier, nameof(identifier));
Argument.AssertNotNullOrEmpty(consumerGroup, nameof(consumerGroup));
Argument.AssertNotNullOrEmpty(fullyQualifiedNamespace, nameof(fullyQualifiedNamespace));
Argument.AssertNotNullOrEmpty(eventHubName, nameof(eventHubName));
StorageManager = storageManager;
OwnerIdentifier = identifier;
FullyQualifiedNamespace = fullyQualifiedNamespace;
EventHubName = eventHubName;
ConsumerGroup = consumerGroup;
OwnershipExpirationInterval = ownershipExpirationInterval;
LoadBalanceInterval = loadBalancingInterval;
}
/// <summary>
/// Initializes a new instance of the <see cref="PartitionLoadBalancer" /> class.
/// </summary>
///
protected PartitionLoadBalancer()
{
// Because this constructor is used heavily in testing, initialize the
// critical timing properties to their default option values.
var options = new EventProcessorOptions();
LoadBalanceInterval = options.LoadBalancingUpdateInterval;
OwnershipExpirationInterval = options.PartitionOwnershipExpirationInterval;
}
/// <summary>
/// Performs load balancing between multiple EventProcessorClient instances, claiming others' partitions to enforce
/// a more equal distribution when necessary. It also manages its own partition processing tasks and ownership.
/// </summary>
///
/// <param name="partitionIds">The set of partitionIds available for ownership balancing.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken" /> instance to signal the request to cancel the operation.</param>
///
/// <returns>The claimed ownership. <c>null</c> if this instance is not eligible, if no claimable ownership was found or if the claim attempt failed.</returns>
///
public virtual async ValueTask<EventProcessorPartitionOwnership> RunLoadBalancingAsync(string[] partitionIds,
CancellationToken cancellationToken)
{
            // Renew this instance's partition ownership records so they don't expire.
await RenewOwnershipAsync(cancellationToken).ConfigureAwait(false);
// From the storage service, obtain a complete list of ownership, including expired ones. We may still need
// their eTags to claim orphan partitions.
IEnumerable<EventProcessorPartitionOwnership> completeOwnershipList;
try
{
completeOwnershipList = (await StorageManager.ListOwnershipAsync(FullyQualifiedNamespace, EventHubName, ConsumerGroup, cancellationToken)
.ConfigureAwait(false))
.ToList();
}
catch (TaskCanceledException)
{
throw;
}
catch (OperationCanceledException)
{
throw new TaskCanceledException();
}
catch (Exception ex)
{
// If ownership list retrieval fails, give up on the current cycle. There's nothing more we can do
// without an updated ownership list. Set the EventHubName to null so it doesn't modify the exception
// message. This exception message is used so the processor can retrieve the raw Operation string, and
// adding the EventHubName would append unwanted info to it.
throw new EventHubsException(true, null, Resources.OperationListOwnership, ex);
}
// There's no point in continuing the current cycle if we failed to fetch the completeOwnershipList.
if (completeOwnershipList == default)
{
return default;
}
// Create a partition distribution dictionary from the complete ownership list we have, mapping an owner's identifier to the list of
// partitions it owns. When an event processor goes down and it has only expired ownership, it will not be taken into consideration
// by others. The expiration time defaults to 30 seconds, but it may be overridden by a derived class.
var unclaimedPartitions = new HashSet<string>(partitionIds);
var utcNow = GetDateTimeOffsetNow();
var activeOwnership = default(EventProcessorPartitionOwnership);
ActiveOwnershipWithDistribution.Clear();
ActiveOwnershipWithDistribution[OwnerIdentifier] = new List<EventProcessorPartitionOwnership>();
foreach (EventProcessorPartitionOwnership ownership in completeOwnershipList)
{
if (utcNow.Subtract(ownership.LastModifiedTime) < OwnershipExpirationInterval && !string.IsNullOrEmpty(ownership.OwnerIdentifier))
{
activeOwnership = ownership;
// If a processor crashes and restarts, then it is possible for it to own partitions that it is not currently
// tracking as owned. Test for this case and ensure that ownership is tracked and extended.
if ((string.Equals(ownership.OwnerIdentifier, OwnerIdentifier, StringComparison.OrdinalIgnoreCase)) && (!InstanceOwnership.ContainsKey(ownership.PartitionId)))
{
(_, activeOwnership) = await ClaimOwnershipAsync(ownership.PartitionId, new[] { ownership }, cancellationToken).ConfigureAwait(false);
// If the claim failed, then the ownership period was not extended. Since the original ownership had not
// yet expired prior to the claim attempt, consider the original to be the active ownership for this cycle.
if (activeOwnership == default)
{
activeOwnership = ownership;
}
InstanceOwnership[activeOwnership.PartitionId] = activeOwnership;
}
// Update active ownership and trim the unclaimed partitions.
if (ActiveOwnershipWithDistribution.ContainsKey(activeOwnership.OwnerIdentifier))
{
ActiveOwnershipWithDistribution[activeOwnership.OwnerIdentifier].Add(activeOwnership);
}
else
{
ActiveOwnershipWithDistribution[activeOwnership.OwnerIdentifier] = new List<EventProcessorPartitionOwnership> { activeOwnership };
}
unclaimedPartitions.Remove(activeOwnership.PartitionId);
}
}
// Find an ownership to claim and try to claim it. The method will return null if this instance was not eligible to
// increase its ownership list, if no claimable ownership could be found or if a claim attempt has failed.
var (claimAttempted, claimedOwnership) = await FindAndClaimOwnershipAsync(completeOwnershipList, unclaimedPartitions, partitionIds.Length, cancellationToken).ConfigureAwait(false);
if (claimedOwnership != null)
{
InstanceOwnership[claimedOwnership.PartitionId] = claimedOwnership;
}
// Update the balanced state. Consider the load balanced if this processor has its minimum share of partitions and did not
// attempt to claim a partition.
var minimumDesiredPartitions = partitionIds.Length / ActiveOwnershipWithDistribution.Keys.Count;
IsBalanced = ((InstanceOwnership.Count >= minimumDesiredPartitions) && (!claimAttempted));
return claimedOwnership;
}
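        // Example (illustrative only): a hosting processor typically drives this method in a loop, waiting
        // LoadBalanceInterval between cycles, e.g.:
        //
        //     while (!cancellationToken.IsCancellationRequested)
        //     {
        //         var claimed = await loadBalancer.RunLoadBalancingAsync(partitionIds, cancellationToken);
        //         // if "claimed" is not null, begin processing that partition; also stop processing any
        //         // partitions no longer present in OwnedPartitionIds
        //         await Task.Delay(loadBalancer.LoadBalanceInterval, cancellationToken);
        //     }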
/// <summary>
        /// Relinquishes this instance's partition ownership records so they can be claimed by other processors and clears the OwnedPartitionIds.
/// </summary>
///
/// <param name="cancellationToken">A <see cref="CancellationToken" /> instance to signal the request to cancel the operation.</param>
///
public virtual async Task RelinquishOwnershipAsync(CancellationToken cancellationToken)
{
IEnumerable<EventProcessorPartitionOwnership> ownershipToRelinquish = InstanceOwnership.Values
.Select(ownership => new EventProcessorPartitionOwnership
{
FullyQualifiedNamespace = ownership.FullyQualifiedNamespace,
EventHubName = ownership.EventHubName,
ConsumerGroup = ownership.ConsumerGroup,
OwnerIdentifier = string.Empty, //set ownership to Empty so that it is treated as available to claim
PartitionId = ownership.PartitionId,
LastModifiedTime = ownership.LastModifiedTime,
Version = ownership.Version
});
await StorageManager.ClaimOwnershipAsync(ownershipToRelinquish, cancellationToken).ConfigureAwait(false);
InstanceOwnership.Clear();
}
/// <summary>
/// Finds and tries to claim an ownership if this processor instance is eligible to increase its ownership list.
/// </summary>
///
/// <param name="completeOwnershipEnumerable">A complete enumerable of ownership obtained from the storage service.</param>
/// <param name="unclaimedPartitions">The set of partitionIds that are currently unclaimed.</param>
/// <param name="partitionCount">The count of partitions.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken" /> instance to signal the request to cancel the operation.</param>
///
/// <returns>A tuple indicating whether a claim was attempted and any ownership that was claimed. The claimed ownership will be <c>null</c> if no claim was attempted or if the claim attempt failed.</returns>
///
private ValueTask<(bool WasClaimAttempted, EventProcessorPartitionOwnership ClaimedPartition)> FindAndClaimOwnershipAsync(IEnumerable<EventProcessorPartitionOwnership> completeOwnershipEnumerable,
HashSet<string> unclaimedPartitions,
int partitionCount,
CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested<TaskCanceledException>();
            // The minimum owned partitions count is the minimum number of partitions every event processor needs to own when the distribution
// is balanced. If n = minimumOwnedPartitionsCount, a balanced distribution will only have processors that own n or n + 1 partitions
// each. We can guarantee the partition distribution has at least one key, which corresponds to this event processor instance, even
// if it owns no partitions.
var unevenPartitionDistribution = (partitionCount % ActiveOwnershipWithDistribution.Keys.Count) > 0;
var minimumOwnedPartitionsCount = partitionCount / ActiveOwnershipWithDistribution.Keys.Count;
Logger.MinimumPartitionsPerEventProcessor(minimumOwnedPartitionsCount);
var ownedPartitionsCount = ActiveOwnershipWithDistribution[OwnerIdentifier].Count;
Logger.CurrentOwnershipCount(ownedPartitionsCount, OwnerIdentifier);
// There are two possible situations in which we may need to claim a partition ownership:
//
// - The first one is when we are below the minimum amount of owned partitions. There's nothing more to check, as we need to claim more
// partitions to enforce balancing.
//
// - The second case is a bit tricky. Sometimes the claim must be performed by an event processor that already has reached the minimum
// amount of ownership. This may happen, for instance, when we have 13 partitions and 3 processors, each of them owning 4 partitions.
// The minimum amount of partitions per processor is, in fact, 4, but in this example we still have 1 orphan partition to claim. To
// avoid overlooking this kind of situation, we may want to claim an ownership when we have exactly the minimum amount of ownership,
// but we are making sure there are no better candidates among the other event processors.
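            //
            // Worked example of the second case: with 13 partitions and 3 processors, minimumOwnedPartitionsCount
            // is 13 / 3 = 4 and unevenPartitionDistribution is true (13 % 3 == 1), so a processor that already owns
            // exactly 4 partitions may still claim the leftover partition, provided no other processor owns fewer
            // than 4.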
if (ownedPartitionsCount < minimumOwnedPartitionsCount
|| (ownedPartitionsCount == minimumOwnedPartitionsCount
&& ActiveOwnershipWithDistribution.Keys.Count > 1
&& unevenPartitionDistribution
&& !ActiveOwnershipWithDistribution.Values.Any(partitions => partitions.Count < minimumOwnedPartitionsCount)))
{
// Look for unclaimed partitions. If any, randomly pick one of them to claim.
Logger.UnclaimedPartitions(unclaimedPartitions);
if (unclaimedPartitions.Count > 0)
{
var index = RandomNumberGenerator.Value.Next(unclaimedPartitions.Count);
var returnTask = ClaimOwnershipAsync(unclaimedPartitions.ElementAt(index), completeOwnershipEnumerable, cancellationToken);
return new ValueTask<(bool, EventProcessorPartitionOwnership)>(returnTask);
}
// Only consider stealing partitions if there are no unclaimed partitions left. At first, only processors that have exceeded the
// maximum owned partition count should be targeted.
var maximumOwnedPartitionsCount = minimumOwnedPartitionsCount + 1;
var partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount = new List<string>();
var partitionsOwnedByProcessorWithExactlyMaximumOwnedPartitionsCount = new List<string>();
// Build a list of partitions owned by processors owning exactly maximumOwnedPartitionsCount partitions
// and a list of partitions owned by processors owning more than maximumOwnedPartitionsCount partitions.
                // Ignore the partitions already owned by this processor even though the current processor should never meet either criterion.
foreach (var key in ActiveOwnershipWithDistribution.Keys)
{
var ownedPartitions = ActiveOwnershipWithDistribution[key];
if (ownedPartitions.Count < maximumOwnedPartitionsCount || string.Equals(key, OwnerIdentifier, StringComparison.OrdinalIgnoreCase))
{
// Skip if the common case is true.
continue;
}
if (ownedPartitions.Count == maximumOwnedPartitionsCount)
{
ownedPartitions
.ForEach(ownership => partitionsOwnedByProcessorWithExactlyMaximumOwnedPartitionsCount.Add(ownership.PartitionId));
}
else
{
ownedPartitions
.ForEach(ownership => partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount.Add(ownership.PartitionId));
}
}
// If this processor has less than the minimum or any other processor has more than the maximum, then we need to steal a partition.
if ((ownedPartitionsCount < minimumOwnedPartitionsCount) || (partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount.Count > 0))
{
Logger.ShouldStealPartition(OwnerIdentifier);
// Prefer stealing from a processor that owns more than the maximum number of partitions.
if (partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount.Count > 0)
{
// If any partitions that can be stolen were found, randomly pick one of them to claim.
Logger.StealPartition(OwnerIdentifier);
var index = RandomNumberGenerator.Value.Next(partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount.Count);
var returnTask = ClaimOwnershipAsync(
partitionsOwnedByProcessorWithGreaterThanMaximumOwnedPartitionsCount[index],
completeOwnershipEnumerable,
cancellationToken);
return new ValueTask<(bool, EventProcessorPartitionOwnership)>(returnTask);
}
else if (ownedPartitionsCount < minimumOwnedPartitionsCount)
{
// If there were no processors that have exceeded the maximum owned partition count and we're below the minimum, we
// need to steal from the processors that have exactly the maximum amount to enforce balancing. If this instance has
// already reached the minimum, there's no benefit to stealing, because the distribution wouldn't change.
Logger.StealPartition(OwnerIdentifier);
// Randomly pick a processor to steal from.
var index = RandomNumberGenerator.Value.Next(partitionsOwnedByProcessorWithExactlyMaximumOwnedPartitionsCount.Count);
var returnTask = ClaimOwnershipAsync(
partitionsOwnedByProcessorWithExactlyMaximumOwnedPartitionsCount[index],
completeOwnershipEnumerable,
cancellationToken);
return new ValueTask<(bool, EventProcessorPartitionOwnership)>(returnTask);
}
}
}
// No ownership has been claimed.
return new ValueTask<(bool, EventProcessorPartitionOwnership)>((false, default));
}
/// <summary>
        /// Renews this instance's partition ownership records so they don't expire.
/// </summary>
///
/// <param name="cancellationToken">A <see cref="CancellationToken" /> instance to signal the request to cancel the operation.</param>
///
private async Task RenewOwnershipAsync(CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested<TaskCanceledException>();
Logger.RenewOwnershipStart(OwnerIdentifier);
var utcNow = GetDateTimeOffsetNow();
List<EventProcessorPartitionOwnership> ownershipToRenew = InstanceOwnership.Values
.Where(ownership => (utcNow - ownership.LastModifiedTime) > LoadBalanceInterval)
.Select(ownership => new EventProcessorPartitionOwnership
{
FullyQualifiedNamespace = ownership.FullyQualifiedNamespace,
EventHubName = ownership.EventHubName,
ConsumerGroup = ownership.ConsumerGroup,
OwnerIdentifier = ownership.OwnerIdentifier,
PartitionId = ownership.PartitionId,
LastModifiedTime = utcNow,
Version = ownership.Version
})
.ToList();
try
{
// Update ownerships we renewed and remove the ones we didn't
var newOwnerships = await StorageManager.ClaimOwnershipAsync(ownershipToRenew, cancellationToken)
.ConfigureAwait(false);
foreach (var oldOwnership in ownershipToRenew)
{
InstanceOwnership.Remove(oldOwnership.PartitionId);
}
foreach (var newOwnership in newOwnerships)
{
InstanceOwnership[newOwnership.PartitionId] = newOwnership;
}
}
catch (Exception ex)
{
cancellationToken.ThrowIfCancellationRequested<TaskCanceledException>();
// If ownership renewal fails just give up and try again in the next cycle. The processor may
// end up losing some of its ownership.
Logger.RenewOwnershipError(OwnerIdentifier, ex.Message);
// Set the EventHubName to null so it doesn't modify the exception message. This exception message is
// used so the processor can retrieve the raw Operation string, and adding the EventHubName would append
// unwanted info to it.
throw new EventHubsException(true, null, Resources.OperationRenewOwnership, ex);
}
finally
{
Logger.RenewOwnershipComplete(OwnerIdentifier);
}
}
/// <summary>
/// Tries to claim ownership of the specified partition.
/// </summary>
///
/// <param name="partitionId">The identifier of the Event Hub partition the ownership is associated with.</param>
/// <param name="completeOwnershipEnumerable">A complete enumerable of ownership obtained from the stored service provided by the user.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken" /> instance to signal the request to cancel the operation.</param>
///
/// <returns>A tuple indicating whether a claim was attempted and the claimed ownership. The claimed ownership will be <c>null</c> if the claim attempt failed.</returns>
///
private async Task<(bool WasClaimAttempted, EventProcessorPartitionOwnership ClaimedPartition)> ClaimOwnershipAsync(string partitionId,
IEnumerable<EventProcessorPartitionOwnership> completeOwnershipEnumerable,
CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested<TaskCanceledException>();
Logger.ClaimOwnershipStart(partitionId);
// We need the eTag from the most recent ownership of this partition, even if it's expired. We want to keep the offset and
// the sequence number as well.
var oldOwnership = completeOwnershipEnumerable.FirstOrDefault(ownership => ownership.PartitionId == partitionId);
var newOwnership = new EventProcessorPartitionOwnership
{
FullyQualifiedNamespace = FullyQualifiedNamespace,
EventHubName = EventHubName,
ConsumerGroup = ConsumerGroup,
OwnerIdentifier = OwnerIdentifier,
PartitionId = partitionId,
LastModifiedTime = DateTimeOffset.UtcNow,
Version = oldOwnership?.Version
};
var claimedOwnership = default(IEnumerable<EventProcessorPartitionOwnership>);
try
{
claimedOwnership = await StorageManager.ClaimOwnershipAsync(new List<EventProcessorPartitionOwnership> { newOwnership }, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
cancellationToken.ThrowIfCancellationRequested<TaskCanceledException>();
// If ownership claim fails, just treat it as a usual ownership claim failure.
Logger.ClaimOwnershipError(partitionId, ex.Message);
// Set the EventHubName to null so it doesn't modify the exception message. This exception message is
// used so the processor can retrieve the raw Operation string, and adding the EventHubName would append
// unwanted info to it. This exception also communicates the PartitionId to the caller.
var exception = new EventHubsException(true, null, Resources.OperationClaimOwnership, ex);
exception.SetFailureOperation(exception.Message);
exception.SetFailureData(partitionId);
throw exception;
}
// We are expecting an enumerable with a single element if the claim attempt succeeds.
return (true, claimedOwnership.FirstOrDefault());
}
/// <summary>
/// Queries the value to use for the current date/time. This is abstracted to allow for deterministic
/// values to be used for testing.
/// </summary>
///
/// <returns>The current date and time, in UTC.</returns>
internal virtual DateTimeOffset GetDateTimeOffsetNow()
{
return DateTimeOffset.UtcNow;
}
}
}
| |
//
// AppDomainTest.cs - NUnit Test Cases for AppDomain
//
// Author:
// Sebastien Pouliot ([email protected])
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using NUnit.Framework;
using System;
using System.Collections;
using System.IO;
using System.Reflection;
using System.Reflection.Emit;
using System.Security;
using System.Security.Permissions;
using System.Security.Policy;
using System.Security.Principal;
namespace MonoTests.System {
[TestFixture]
public class AppDomainTest {
private AppDomain ad;
private ArrayList files = new ArrayList ();
[TearDown]
public void TearDown ()
{
if (ad != null) {
try {
AppDomain.Unload (ad);
ad = null;
}
catch {} // do not affect unit test results in TearDown
}
foreach (string fname in files) {
File.Delete (fname);
}
files.Clear ();
}
[Test]
public void SetThreadPrincipal ()
{
IIdentity i = new GenericIdentity ("[email protected]", "rfc822");
IPrincipal p = new GenericPrincipal (i, null);
ad = AppDomain.CreateDomain ("SetThreadPrincipal");
ad.SetThreadPrincipal (p);
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void SetThreadPrincipalNull ()
{
AppDomain.CurrentDomain.SetThreadPrincipal (null);
}
[Test]
[ExpectedException (typeof (PolicyException))]
public void SetThreadPrincipalTwice ()
{
IIdentity i = new GenericIdentity ("[email protected]", "rfc822");
IPrincipal p = new GenericPrincipal (i, null);
ad = AppDomain.CreateDomain ("SetThreadPrincipalTwice");
ad.SetThreadPrincipal (p);
// you only live twice (or so James told me ;-)
ad.SetThreadPrincipal (p);
}
[Test]
[ExpectedException (typeof (AppDomainUnloadedException))]
public void SetThreadPrincipalUnloaded ()
{
ad = AppDomain.CreateDomain ("Ximian");
AppDomain.Unload (ad);
IIdentity i = new GenericIdentity ("[email protected]", "rfc822");
IPrincipal p = new GenericPrincipal (i, null);
ad.SetThreadPrincipal (p);
}
[Test]
public void SetPrincipalPolicy_NoPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.NoPrincipal);
}
[Test]
public void SetPrincipalPolicy_UnauthenticatedPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.UnauthenticatedPrincipal);
}
[Test]
public void SetPrincipalPolicy_WindowsPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.WindowsPrincipal);
}
[Test]
[ExpectedException (typeof (AppDomainUnloadedException))]
public void SetPrincipalPolicyUnloaded ()
{
ad = AppDomain.CreateDomain ("Ximian");
AppDomain.Unload (ad);
ad.SetPrincipalPolicy (PrincipalPolicy.NoPrincipal);
}
[Test]
public void CreateDomain_String ()
{
ad = AppDomain.CreateDomain ("CreateDomain_String");
Assert.IsNotNull (ad.Evidence, "Evidence");
            // Evidence is copied (or referenced?) from the default app domain
// we can't get default so we use the current (which should have copied the default)
Assert.AreEqual (AppDomain.CurrentDomain.Evidence.Count, ad.Evidence.Count, "Evidence.Count");
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void CreateDomain_String_Null ()
{
ad = AppDomain.CreateDomain (null);
}
[Test]
[Category("NotDotNet")]
public void CreateDomain_StringEvidence ()
{
Evidence e = new Evidence ();
ad = AppDomain.CreateDomain ("CreateDomain_StringEvidence", e);
Assert.IsNotNull (ad.Evidence, "Evidence");
Assert.AreEqual (0, ad.Evidence.Count, "Evidence.Count");
e.AddHost (new Zone (SecurityZone.MyComputer));
Assert.AreEqual (0, ad.Evidence.Count, "Evidence.Count");
// evidence isn't copied but referenced
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void CreateDomain_StringNullEvidence ()
{
ad = AppDomain.CreateDomain (null, new Evidence ());
}
[Test]
public void CreateDomain_StringEvidenceNull ()
{
ad = AppDomain.CreateDomain ("CreateDomain_StringEvidenceNull", null);
Assert.IsNotNull (ad.Evidence, "Evidence");
            // Evidence is copied (or referenced?) from the default app domain
// we can't get default so we use the current (which should have copied the default)
Evidence e = AppDomain.CurrentDomain.Evidence;
Assert.AreEqual (e.Count, ad.Evidence.Count, "Evidence.Count-1");
e.AddHost (new Zone (SecurityZone.MyComputer));
Assert.AreEqual (e.Count - 1, ad.Evidence.Count, "Evidence.Count-2");
            // evidence is copied
}
[Test]
[Category("NotDotNet")]
public void CreateDomain_StringEvidenceAppDomainSetup ()
{
Evidence e = new Evidence ();
AppDomainSetup info = new AppDomainSetup ();
info.ApplicationName = "ApplicationName";
ad = AppDomain.CreateDomain ("CreateDomain_StringEvidenceAppDomainSetup", e, info);
Assert.IsNotNull (ad.Evidence, "Evidence");
Assert.AreEqual (0, ad.Evidence.Count, "Evidence.Count");
Assert.IsNotNull (ad.SetupInformation, "SetupInformation");
Assert.AreEqual ("ApplicationName", ad.SetupInformation.ApplicationName);
e.AddHost (new Zone (SecurityZone.MyComputer));
Assert.AreEqual (0, ad.Evidence.Count, "Evidence.Count");
// evidence isn't copied but referenced
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void CreateDomain_StringNullEvidenceAppDomainSetup ()
{
AppDomainSetup info = new AppDomainSetup ();
ad = AppDomain.CreateDomain (null, new Evidence (), info);
}
[Test]
public void CreateDomain_StringEvidenceNullAppDomainSetup ()
{
AppDomainSetup info = new AppDomainSetup ();
info.ApplicationName = "ApplicationName";
ad = AppDomain.CreateDomain ("CreateDomain_StringEvidenceNullAppDomainSetup", null, info);
Assert.IsNotNull (ad.Evidence, "Evidence");
            // Evidence is copied (or referenced?) from the default app domain
// we can't get default so we use the current (which should have copied the default)
Assert.AreEqual (AppDomain.CurrentDomain.Evidence.Count, ad.Evidence.Count, "Evidence.Count");
Assert.AreEqual ("ApplicationName", ad.SetupInformation.ApplicationName, "ApplicationName-1");
info.ApplicationName = "Test";
Assert.AreEqual ("Test", info.ApplicationName, "ApplicationName-2");
Assert.AreEqual ("ApplicationName", ad.SetupInformation.ApplicationName, "ApplicationName-3");
// copied
}
[Test]
[Category("NotDotNet")]
public void CreateDomain_StringEvidenceAppDomainSetupNull ()
{
Evidence e = new Evidence ();
ad = AppDomain.CreateDomain ("CreateDomain_StringEvidenceAppDomainSetupNull", e, null);
Assert.IsNotNull (ad.Evidence, "Evidence");
Assert.AreEqual (0, ad.Evidence.Count, "Evidence.Count");
// SetupInformation is copied from default app domain
Assert.IsNotNull (ad.SetupInformation, "SetupInformation");
}
[Test]
public void SetAppDomainPolicy ()
{
ad = AppDomain.CreateDomain ("SetAppDomainPolicy_Null");
ad.SetAppDomainPolicy (PolicyLevel.CreateAppDomainLevel ());
// not much to see
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void SetAppDomainPolicy_Null ()
{
ad = AppDomain.CreateDomain ("SetAppDomainPolicy_Null");
ad.SetAppDomainPolicy (null);
}
[Test]
[ExpectedException (typeof (PolicyException))]
public void SetAppDomainPolicy_Dual ()
{
ad = AppDomain.CreateDomain ("SetAppDomainPolicy_Dual");
PolicyLevel pl = PolicyLevel.CreateAppDomainLevel ();
PermissionSet ps = new PermissionSet (PermissionState.Unrestricted);
pl.RootCodeGroup.PolicyStatement = new PolicyStatement (ps);
ad.SetAppDomainPolicy (pl);
// only one time!
pl = PolicyLevel.CreateAppDomainLevel ();
ps = new PermissionSet (PermissionState.None);
pl.RootCodeGroup.PolicyStatement = new PolicyStatement (ps);
ad.SetAppDomainPolicy (pl);
}
[Test]
[ExpectedException (typeof (AppDomainUnloadedException))]
public void SetAppDomainPolicy_Unloaded ()
{
ad = AppDomain.CreateDomain ("SetAppDomainPolicy_Unloaded");
AppDomain.Unload (ad);
ad.SetAppDomainPolicy (PolicyLevel.CreateAppDomainLevel ());
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void GetData_Null ()
{
AppDomain.CurrentDomain.GetData (null);
}
[Test]
public void SetData ()
{
AppDomain.CurrentDomain.SetData ("data", "data");
Assert.AreEqual ("data", AppDomain.CurrentDomain.GetData ("data"), "GetData");
AppDomain.CurrentDomain.SetData ("data", null);
Assert.IsNull (AppDomain.CurrentDomain.GetData ("data"), "GetData-Null");
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void SetData_Null ()
{
AppDomain.CurrentDomain.SetData (null, "data");
}
#if NET_2_0
[Test]
public void ApplyPolicy ()
{
ad = AppDomain.CreateDomain ("ApplyPolicy");
string fullname = Assembly.GetExecutingAssembly ().FullName;
string result = ad.ApplyPolicy (fullname);
Assert.AreEqual (fullname, result, "ApplyPolicy");
            // doesn't even require an assembly name
Assert.AreEqual ("123", ad.ApplyPolicy ("123"), "Invalid FullName");
}
[Test]
[ExpectedException (typeof (ArgumentException))]
public void ApplyPolicy_Empty ()
{
ad = AppDomain.CreateDomain ("ApplyPolicy_Empty");
ad.ApplyPolicy (String.Empty);
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void ApplyPolicy_Null ()
{
ad = AppDomain.CreateDomain ("ApplyPolicy_Null");
ad.ApplyPolicy (null);
}
[Test]
public void DomainManager ()
{
Assert.IsNull (AppDomain.CurrentDomain.DomainManager, "CurrentDomain.DomainManager");
ad = AppDomain.CreateDomain ("DomainManager");
Assert.IsNull (ad.DomainManager, "ad.DomainManager");
}
[Test]
public void IsDefaultAppDomain ()
{
ad = AppDomain.CreateDomain ("ReflectionOnlyGetAssemblies");
Assert.IsFalse (ad.IsDefaultAppDomain (), "IsDefaultAppDomain");
// we have no public way to get the default appdomain
}
[Test]
public void ReflectionOnlyGetAssemblies ()
{
ad = AppDomain.CreateDomain ("ReflectionOnlyGetAssemblies");
Assembly [] a = ad.ReflectionOnlyGetAssemblies ();
Assert.IsNotNull (a, "ReflectionOnlyGetAssemblies");
Assert.AreEqual (0, a.Length, "Count");
}
#endif
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// --------------------------------------------------------------------------------------
//
// A class that provides a simple, lightweight implementation of lazy initialization,
// obviating the need for a developer to implement a custom, thread-safe lazy initialization
// solution.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
#pragma warning disable 0420
using System.Diagnostics;
using System.Runtime.ExceptionServices;
using System.Runtime.InteropServices;
using System.Threading;
namespace System
{
internal enum LazyState
{
NoneViaConstructor = 0,
NoneViaFactory = 1,
NoneException = 2,
PublicationOnlyViaConstructor = 3,
PublicationOnlyViaFactory = 4,
PublicationOnlyWait = 5,
PublicationOnlyException = 6,
ExecutionAndPublicationViaConstructor = 7,
ExecutionAndPublicationViaFactory = 8,
ExecutionAndPublicationException = 9,
}
/// <summary>
    /// LazyHelper serves multiple purposes
    /// - minimizing code size of Lazy<T> by implementing as much of the code as possible in a non-generic class;
    ///   this reduces generic code bloat, making class initialization faster
/// - contains singleton objects that are used to handle threading primitives for PublicationOnly mode
/// - allows for instantiation for ExecutionAndPublication so as to create an object for locking on
/// - holds exception information.
/// </summary>
internal class LazyHelper
{
internal readonly static LazyHelper NoneViaConstructor = new LazyHelper(LazyState.NoneViaConstructor);
internal readonly static LazyHelper NoneViaFactory = new LazyHelper(LazyState.NoneViaFactory);
internal readonly static LazyHelper PublicationOnlyViaConstructor = new LazyHelper(LazyState.PublicationOnlyViaConstructor);
internal readonly static LazyHelper PublicationOnlyViaFactory = new LazyHelper(LazyState.PublicationOnlyViaFactory);
internal readonly static LazyHelper PublicationOnlyWaitForOtherThreadToPublish = new LazyHelper(LazyState.PublicationOnlyWait);
internal LazyState State { get; }
private readonly ExceptionDispatchInfo _exceptionDispatch;
/// <summary>
/// Constructor that defines the state
/// </summary>
internal LazyHelper(LazyState state)
{
State = state;
}
/// <summary>
/// Constructor used for exceptions
/// </summary>
internal LazyHelper(LazyThreadSafetyMode mode, Exception exception)
{
switch(mode)
{
case LazyThreadSafetyMode.ExecutionAndPublication:
State = LazyState.ExecutionAndPublicationException;
break;
case LazyThreadSafetyMode.None:
State = LazyState.NoneException;
break;
case LazyThreadSafetyMode.PublicationOnly:
State = LazyState.PublicationOnlyException;
break;
default:
Debug.Fail("internal constructor, this should never occur");
break;
}
_exceptionDispatch = ExceptionDispatchInfo.Capture(exception);
}
internal void ThrowException()
{
Debug.Assert(_exceptionDispatch != null, "execution path is invalid");
_exceptionDispatch.Throw();
}
private LazyThreadSafetyMode GetMode()
{
switch (State)
{
case LazyState.NoneViaConstructor:
case LazyState.NoneViaFactory:
case LazyState.NoneException:
return LazyThreadSafetyMode.None;
case LazyState.PublicationOnlyViaConstructor:
case LazyState.PublicationOnlyViaFactory:
case LazyState.PublicationOnlyWait:
case LazyState.PublicationOnlyException:
return LazyThreadSafetyMode.PublicationOnly;
case LazyState.ExecutionAndPublicationViaConstructor:
case LazyState.ExecutionAndPublicationViaFactory:
case LazyState.ExecutionAndPublicationException:
return LazyThreadSafetyMode.ExecutionAndPublication;
default:
Debug.Fail("Invalid logic; State should always have a valid value");
return default(LazyThreadSafetyMode);
}
}
internal static LazyThreadSafetyMode? GetMode(LazyHelper state)
{
if (state == null)
return null; // we don't know the mode anymore
return state.GetMode();
}
internal static bool GetIsValueFaulted(LazyHelper state) => state?._exceptionDispatch != null;
internal static LazyHelper Create(LazyThreadSafetyMode mode, bool useDefaultConstructor)
{
switch (mode)
{
case LazyThreadSafetyMode.None:
return useDefaultConstructor ? NoneViaConstructor : NoneViaFactory;
case LazyThreadSafetyMode.PublicationOnly:
return useDefaultConstructor ? PublicationOnlyViaConstructor : PublicationOnlyViaFactory;
case LazyThreadSafetyMode.ExecutionAndPublication:
// we need to create an object for ExecutionAndPublication because we use Monitor-based locking
var state = useDefaultConstructor ? LazyState.ExecutionAndPublicationViaConstructor : LazyState.ExecutionAndPublicationViaFactory;
return new LazyHelper(state);
default:
throw new ArgumentOutOfRangeException(nameof(mode), SR.Lazy_ctor_ModeInvalid);
}
}
internal static object CreateViaDefaultConstructor(Type type)
{
try
{
return Activator.CreateInstance(type);
}
catch (MissingMethodException)
{
throw new MissingMemberException(SR.Lazy_CreateValue_NoParameterlessCtorForT);
}
}
internal static LazyThreadSafetyMode GetModeFromIsThreadSafe(bool isThreadSafe)
{
return isThreadSafe ? LazyThreadSafetyMode.ExecutionAndPublication : LazyThreadSafetyMode.None;
}
}
/// <summary>
/// Provides support for lazy initialization.
/// </summary>
/// <typeparam name="T">Specifies the type of element being lazily initialized.</typeparam>
/// <remarks>
/// <para>
/// By default, all public and protected members of <see cref="Lazy{T}"/> are thread-safe and may be used
/// concurrently from multiple threads. These thread-safety guarantees may be removed optionally and per instance
/// using parameters to the type's constructors.
/// </para>
/// </remarks>
[DebuggerTypeProxy(typeof(LazyDebugView<>))]
[DebuggerDisplay("ThreadSafetyMode={Mode}, IsValueCreated={IsValueCreated}, IsValueFaulted={IsValueFaulted}, Value={ValueForDebugDisplay}")]
public class Lazy<T>
{
private static T CreateViaDefaultConstructor()
{
return (T)LazyHelper.CreateViaDefaultConstructor(typeof(T));
}
// _state, a volatile reference, is set to null after _value has been set
private volatile LazyHelper _state;
        // we ensure that _factory is set to null once it has run, so the garbage collector can clean up
        // any referenced items
private Func<T> _factory;
// _value eventually stores the lazily created value. It is valid when _state = null.
private T _value;
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/> class that
/// uses <typeparamref name="T"/>'s default constructor for lazy initialization.
/// </summary>
/// <remarks>
/// An instance created with this constructor may be used concurrently from multiple threads.
/// </remarks>
public Lazy()
: this(null, LazyThreadSafetyMode.ExecutionAndPublication, useDefaultConstructor:true)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/> class that
/// uses a pre-initialized specified value.
/// </summary>
/// <remarks>
/// An instance created with this constructor should be usable by multiple threads
        /// concurrently.
/// </remarks>
public Lazy(T value)
{
_value = value;
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/> class that uses a
/// specified initialization function.
/// </summary>
/// <param name="valueFactory">
/// The <see cref="T:System.Func{T}"/> invoked to produce the lazily-initialized value when it is
/// needed.
/// </param>
/// <exception cref="System.ArgumentNullException"><paramref name="valueFactory"/> is a null
/// reference (Nothing in Visual Basic).</exception>
/// <remarks>
/// An instance created with this constructor may be used concurrently from multiple threads.
/// </remarks>
public Lazy(Func<T> valueFactory)
: this(valueFactory, LazyThreadSafetyMode.ExecutionAndPublication, useDefaultConstructor:false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/>
/// class that uses <typeparamref name="T"/>'s default constructor and a specified thread-safety mode.
/// </summary>
/// <param name="isThreadSafe">true if this instance should be usable by multiple threads concurrently; false if the instance will only be used by one thread at a time.
/// </param>
public Lazy(bool isThreadSafe) :
this(null, LazyHelper.GetModeFromIsThreadSafe(isThreadSafe), useDefaultConstructor:true)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/>
/// class that uses <typeparamref name="T"/>'s default constructor and a specified thread-safety mode.
/// </summary>
/// <param name="mode">The lazy thread-safety mode</param>
/// <exception cref="System.ArgumentOutOfRangeException"><paramref name="mode"/> mode contains an invalid valuee</exception>
public Lazy(LazyThreadSafetyMode mode) :
this(null, mode, useDefaultConstructor:true)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/> class
/// that uses a specified initialization function and a specified thread-safety mode.
/// </summary>
/// <param name="valueFactory">
/// The <see cref="T:System.Func{T}"/> invoked to produce the lazily-initialized value when it is needed.
/// </param>
/// <param name="isThreadSafe">true if this instance should be usable by multiple threads concurrently; false if the instance will only be used by one thread at a time.
/// </param>
/// <exception cref="System.ArgumentNullException"><paramref name="valueFactory"/> is
/// a null reference (Nothing in Visual Basic).</exception>
public Lazy(Func<T> valueFactory, bool isThreadSafe) :
this(valueFactory, LazyHelper.GetModeFromIsThreadSafe(isThreadSafe), useDefaultConstructor:false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="T:System.Threading.Lazy{T}"/> class
/// that uses a specified initialization function and a specified thread-safety mode.
/// </summary>
/// <param name="valueFactory">
/// The <see cref="T:System.Func{T}"/> invoked to produce the lazily-initialized value when it is needed.
/// </param>
/// <param name="mode">The lazy thread-safety mode.</param>
/// <exception cref="System.ArgumentNullException"><paramref name="valueFactory"/> is
/// a null reference (Nothing in Visual Basic).</exception>
/// <exception cref="System.ArgumentOutOfRangeException"><paramref name="mode"/> mode contains an invalid value.</exception>
public Lazy(Func<T> valueFactory, LazyThreadSafetyMode mode)
: this(valueFactory, mode, useDefaultConstructor:false)
{
}
private Lazy(Func<T> valueFactory, LazyThreadSafetyMode mode, bool useDefaultConstructor)
{
if (valueFactory == null && !useDefaultConstructor)
throw new ArgumentNullException(nameof(valueFactory));
_factory = valueFactory;
_state = LazyHelper.Create(mode, useDefaultConstructor);
}
private void ViaConstructor()
{
_value = CreateViaDefaultConstructor();
_state = null; // volatile write, must occur after setting _value
}
private void ViaFactory(LazyThreadSafetyMode mode)
{
try
{
Func<T> factory = _factory;
if (factory == null)
throw new InvalidOperationException(SR.Lazy_Value_RecursiveCallsToValue);
_factory = null;
_value = factory();
_state = null; // volatile write, must occur after setting _value
}
catch (Exception exception)
{
_state = new LazyHelper(mode, exception);
throw;
}
}
private void ExecutionAndPublication(LazyHelper executionAndPublication, bool useDefaultConstructor)
{
lock (executionAndPublication)
{
// it's possible for multiple calls to have piled up behind the lock, so we need to check
// to see if the ExecutionAndPublication object is still the current implementation.
if (ReferenceEquals(_state, executionAndPublication))
{
if (useDefaultConstructor)
{
ViaConstructor();
}
else
{
ViaFactory(LazyThreadSafetyMode.ExecutionAndPublication);
}
}
}
}
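        // Publication-only path: the first thread to CAS _state from the shared initializer to the
        // PublicationOnlyWaitForOtherThreadToPublish sentinel wins and publishes its value; a losing
        // thread simply discards the value it computed and later observes the winner's published value.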
private void PublicationOnly(LazyHelper publicationOnly, T possibleValue)
{
LazyHelper previous = Interlocked.CompareExchange(ref _state, LazyHelper.PublicationOnlyWaitForOtherThreadToPublish, publicationOnly);
if (previous == publicationOnly)
{
_factory = null;
_value = possibleValue;
_state = null; // volatile write, must occur after setting _value
}
}
private void PublicationOnlyViaConstructor(LazyHelper initializer)
{
PublicationOnly(initializer, CreateViaDefaultConstructor());
}
private void PublicationOnlyViaFactory(LazyHelper initializer)
{
Func<T> factory = _factory;
if (factory == null)
{
PublicationOnlyWaitForOtherThreadToPublish();
}
else
{
PublicationOnly(initializer, factory());
}
}
private void PublicationOnlyWaitForOtherThreadToPublish()
{
var spinWait = new SpinWait();
while (!ReferenceEquals(_state, null))
{
// We get here when PublicationOnly temporarily sets _state to LazyHelper.PublicationOnlyWaitForOtherThreadToPublish.
// This temporary state should be quickly followed by _state being set to null.
spinWait.SpinOnce();
}
}
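        // Dispatches to the initialization strategy that was selected at construction time, based on
        // the current LazyHelper state; a null _state means the value has already been published.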
private T CreateValue()
{
// we have to create a copy of state here, and use the copy exclusively from here on in
// so as to ensure thread safety.
var state = _state;
if (state != null)
{
switch (state.State)
{
case LazyState.NoneViaConstructor:
ViaConstructor();
break;
case LazyState.NoneViaFactory:
ViaFactory(LazyThreadSafetyMode.None);
break;
case LazyState.PublicationOnlyViaConstructor:
PublicationOnlyViaConstructor(state);
break;
case LazyState.PublicationOnlyViaFactory:
PublicationOnlyViaFactory(state);
break;
case LazyState.PublicationOnlyWait:
PublicationOnlyWaitForOtherThreadToPublish();
break;
case LazyState.ExecutionAndPublicationViaConstructor:
ExecutionAndPublication(state, useDefaultConstructor:true);
break;
case LazyState.ExecutionAndPublicationViaFactory:
ExecutionAndPublication(state, useDefaultConstructor:false);
break;
default:
state.ThrowException();
break;
}
}
return Value;
}
/// <summary>Creates and returns a string representation of this instance.</summary>
/// <returns>The result of calling <see cref="System.Object.ToString"/> on the <see
/// cref="Value"/>.</returns>
/// <exception cref="T:System.NullReferenceException">
/// The <see cref="Value"/> is null.
/// </exception>
public override string ToString()
{
return IsValueCreated ? Value.ToString() : SR.Lazy_ToString_ValueNotCreated;
}
        /// <summary>Gets the value of the Lazy&lt;T&gt; for debugging display purposes.</summary>
internal T ValueForDebugDisplay
{
get
{
if (!IsValueCreated)
{
return default(T);
}
return _value;
}
}
/// <summary>
        /// Gets the <see cref="LazyThreadSafetyMode"/> used by this instance, or null once the value has been created.
/// </summary>
internal LazyThreadSafetyMode? Mode => LazyHelper.GetMode(_state);
/// <summary>
/// Gets whether the value creation is faulted or not
/// </summary>
internal bool IsValueFaulted => LazyHelper.GetIsValueFaulted(_state);
/// <summary>Gets a value indicating whether the <see cref="T:System.Lazy{T}"/> has been initialized.
/// </summary>
/// <value>true if the <see cref="T:System.Lazy{T}"/> instance has been initialized;
/// otherwise, false.</value>
/// <remarks>
/// The initialization of a <see cref="T:System.Lazy{T}"/> instance may result in either
/// a value being produced or an exception being thrown. If an exception goes unhandled during initialization,
/// <see cref="IsValueCreated"/> will return false.
/// </remarks>
public bool IsValueCreated => _state == null;
/// <summary>Gets the lazily initialized value of the current <see
/// cref="T:System.Threading.Lazy{T}"/>.</summary>
/// <value>The lazily initialized value of the current <see
/// cref="T:System.Threading.Lazy{T}"/>.</value>
/// <exception cref="T:System.MissingMemberException">
/// The <see cref="T:System.Threading.Lazy{T}"/> was initialized to use the default constructor
/// of the type being lazily initialized, and that type does not have a public, parameterless constructor.
/// </exception>
/// <exception cref="T:System.MemberAccessException">
/// The <see cref="T:System.Threading.Lazy{T}"/> was initialized to use the default constructor
/// of the type being lazily initialized, and permissions to access the constructor were missing.
/// </exception>
/// <exception cref="T:System.InvalidOperationException">
/// The <see cref="T:System.Threading.Lazy{T}"/> was constructed with the <see cref="T:System.Threading.LazyThreadSafetyMode.ExecutionAndPublication"/> or
/// <see cref="T:System.Threading.LazyThreadSafetyMode.None"/> and the initialization function attempted to access <see cref="Value"/> on this instance.
/// </exception>
/// <remarks>
/// If <see cref="IsValueCreated"/> is false, accessing <see cref="Value"/> will force initialization.
        /// See <see cref="System.Threading.LazyThreadSafetyMode"/> for more information on how <see cref="T:System.Threading.Lazy{T}"/> will behave if an exception is thrown
        /// from the initialization delegate.
/// </remarks>
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
public T Value => _state == null ? _value : CreateValue();
}
    /// <summary>A debugger view of the Lazy&lt;T&gt; to surface additional debugging properties and
    /// to ensure that the Lazy&lt;T&gt; does not become initialized if it was not already.</summary>
internal sealed class LazyDebugView<T>
{
//The Lazy object being viewed.
private readonly Lazy<T> _lazy;
/// <summary>Constructs a new debugger view object for the provided Lazy object.</summary>
/// <param name="lazy">A Lazy object to browse in the debugger.</param>
public LazyDebugView(Lazy<T> lazy)
{
_lazy = lazy;
}
/// <summary>Returns whether the Lazy object is initialized or not.</summary>
public bool IsValueCreated
{
get { return _lazy.IsValueCreated; }
}
/// <summary>Returns the value of the Lazy object.</summary>
public T Value
{
get
{ return _lazy.ValueForDebugDisplay; }
}
/// <summary>Returns the execution mode of the Lazy object</summary>
public LazyThreadSafetyMode? Mode
{
get { return _lazy.Mode; }
}
        /// <summary>Returns whether an exception was thrown while creating the value of the Lazy object.</summary>
public bool IsValueFaulted
{
get { return _lazy.IsValueFaulted; }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Llvm.NET.Values;
using Llvm.NET.Instructions;
using System.IO;
using System.Text;
namespace Llvm.NET.DebugInfo
{
/// <summary>DebugInfoBuilder is a factory class for creating DebugInformation for an LLVM
/// <see cref="NativeModule"/></summary>
/// <remarks>
/// Many Debug information metadata nodes are created with unresolved references to additional
    /// metadata. To ensure such metadata is resolved, applications should call the <see cref="Finish"/>
/// method to resolve and finalize the metadata. After this point only fully resolved nodes may
/// be added to ensure that the data remains valid.
/// </remarks>
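    /// <example>
    /// A hedged usage sketch (assumes an existing <see cref="NativeModule"/> named <c>module</c>; the
    /// <c>DIBuilder</c> accessor and the <see cref="SourceLanguage"/> value used here are illustrative
    /// assumptions, not verified members of this codebase):
    /// <code>
    /// var diBuilder = module.DIBuilder; // hypothetical accessor for the module's DebugInfoBuilder
    /// var cu = diBuilder.CreateCompileUnit( SourceLanguage.C, "main.c", "mycompiler", false, "", 0 );
    /// var file = diBuilder.CreateFile( "main.c" );
    /// // ... create types, subprograms, and variables against 'cu' and 'file' ...
    /// diBuilder.Finish( ); // resolve any temporary/unresolved metadata before emitting the module
    /// </code>
    /// </example>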
public sealed class DebugInfoBuilder : IDisposable
{
/// <summary>Creates a new <see cref="DICompileUnit"/></summary>
/// <param name="language"><see cref="SourceLanguage"/> for the compilation unit</param>
/// <param name="sourceFilePath">Full path to the source file of this compilation unit</param>
/// <param name="producer">Name of the application processing the compilation unit</param>
/// <param name="optimized">Flag to indicate if the code in this compilation unit is optimized</param>
/// <param name="compilationFlags">Additional tool specific flags</param>
/// <param name="runtimeVersion">Runtime version</param>
/// <returns><see cref="DICompileUnit"/></returns>
public DICompileUnit CreateCompileUnit( SourceLanguage language
, string sourceFilePath
, string producer
, bool optimized
, string compilationFlags
, uint runtimeVersion
)
{
return CreateCompileUnit( language
, Path.GetFileName( sourceFilePath )
, Path.GetDirectoryName( sourceFilePath ) ?? Environment.CurrentDirectory
, producer
, optimized
, compilationFlags
, runtimeVersion
);
}
/// <summary>Creates a new <see cref="DICompileUnit"/></summary>
/// <param name="language"><see cref="SourceLanguage"/> for the compilation unit</param>
/// <param name="fileName">Name of the source file of this compilation unit (without any path)</param>
/// <param name="fileDirectory">Path of the directory containing the file</param>
/// <param name="producer">Name of the application processing the compilation unit</param>
/// <param name="optimized">Flag to indicate if the code in this compilation unit is optimized</param>
/// <param name="compilationFlags">Additional tool specific flags</param>
/// <param name="runtimeVersion">Runtime version</param>
/// <returns><see cref="DICompileUnit"/></returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "DICompileUnit" )]
public DICompileUnit CreateCompileUnit( SourceLanguage language
, string fileName
, string fileDirectory
, string producer
, bool optimized
, string compilationFlags
, uint runtimeVersion
)
{
if( OwningModule.DICompileUnit != null )
throw new InvalidOperationException( "LLVM only allows one DICompileUnit per module" );
var handle = NativeMethods.DIBuilderCreateCompileUnit( BuilderHandle
, ( uint )language
, fileName
, fileDirectory
, producer
, optimized ? 1 : 0
, compilationFlags
, runtimeVersion
);
var retVal = MDNode.FromHandle<DICompileUnit>( handle );
OwningModule.DICompileUnit = retVal;
return retVal;
}
/// <summary>Creates a <see cref="DINamespace"/></summary>
/// <param name="scope">Containing scope for the namespace or null if the namespace is a global one</param>
/// <param name="name">Name of the namespace</param>
/// <param name="file">Source file containing the declaration (may be null if more than one or not known)</param>
/// <param name="line">Line number of the namespace declaration</param>
        /// <returns><see cref="DINamespace"/> created from the parameters</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DINamespace CreateNamespace( DIScope scope, string name, DIFile file, uint line )
{
if( string.IsNullOrWhiteSpace( name ) )
throw new ArgumentException( "name cannot be null or empty", nameof( name ) );
var handle = NativeMethods.DIBuilderCreateNamespace( BuilderHandle
, scope?.MetadataHandle ?? LLVMMetadataRef.Zero
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
);
return MDNode.FromHandle<DINamespace>( handle );
}
/// <summary>Creates a <see cref="DIFile"/></summary>
/// <param name="path">Path of the file (may be <see langword="null"/> or empty)</param>
/// <returns>
/// <see cref="DIFile"/> or <see langword="null"/> if <paramref name="path"/>
        /// is <see langword="null"/>, empty, or all whitespace
/// </returns>
public DIFile CreateFile( string path )
{
if( string.IsNullOrWhiteSpace( path ) )
return null;
return CreateFile( Path.GetFileName( path ), Path.GetDirectoryName( path ) );
}
/// <summary>Creates a <see cref="DIFile"/></summary>
/// <param name="fileName">Name of the file (may be <see langword="null"/> or empty)</param>
/// <param name="directory">Path of the directory containing the file (may be <see langword="null"/> or empty)</param>
/// <returns>
/// <see cref="DIFile"/> or <see langword="null"/> if <paramref name="fileName"/>
        /// is <see langword="null"/>, empty, or all whitespace
/// </returns>
public DIFile CreateFile( string fileName, string directory )
{
if( string.IsNullOrWhiteSpace( fileName ) )
return null;
var handle = NativeMethods.DIBuilderCreateFile( BuilderHandle, fileName, directory ?? string.Empty );
return MDNode.FromHandle<DIFile>( handle );
}
/// <summary>Creates a new <see cref="DILexicalBlock"/></summary>
/// <param name="scope"><see cref="DIScope"/> for the block</param>
/// <param name="file"><see cref="DIFile"/> containing the block</param>
/// <param name="line">Starting line number for the block</param>
/// <param name="column">Starting column for the block</param>
/// <returns>
/// <see cref="DILexicalBlock"/> created from the parameters
/// </returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DILexicalBlock CreateLexicalBlock( DIScope scope, DIFile file, uint line, uint column )
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
var handle = NativeMethods.DIBuilderCreateLexicalBlock( BuilderHandle
, scope.MetadataHandle
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, column
);
return MDNode.FromHandle<DILexicalBlock>( handle );
}
/// <summary>Creates a <see cref="DILexicalBlockFile"/></summary>
/// <param name="scope"><see cref="DIScope"/> for the block</param>
/// <param name="file"><see cref="DIFile"/></param>
/// <param name="discriminator">Discriminator to disambiguate lexical blocks with the same file info</param>
/// <returns>
/// <see cref="DILexicalBlockFile"/> constructed from the parameters
/// </returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DILexicalBlockFile CreateLexicalBlockFile( DIScope scope, DIFile file, uint discriminator )
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( file == null )
throw new ArgumentNullException( nameof( file ) );
var handle = NativeMethods.DIBuilderCreateLexicalBlockFile( BuilderHandle, scope.MetadataHandle, file.MetadataHandle, discriminator );
return MDNode.FromHandle<DILexicalBlockFile>( handle );
}
/// <summary>Create a <see cref="DISubProgram"/> with debug information</summary>
/// <param name="scope"><see cref="DIScope"/> for the function</param>
/// <param name="name">Name of the function as it appears in the source language</param>
/// <param name="mangledName">Linkage (mangled) name of the function</param>
/// <param name="file"><see cref="DIFile"/> containing the function</param>
/// <param name="line">starting line of the function definition</param>
/// <param name="signatureType"><see cref="DISubroutineType"/> for the function's signature type</param>
/// <param name="isLocalToUnit">Flag to indicate if this function is local to the compilation unit or available externally</param>
/// <param name="isDefinition">Flag to indicate if this is a definition or a declaration only</param>
/// <param name="scopeLine">starting line of the first scope of the function's body</param>
/// <param name="debugFlags"><see cref="DebugInfoFlags"/> for this function</param>
/// <param name="isOptimized">Flag to indicate if this function is optimized</param>
/// <param name="function">Underlying LLVM <see cref="Function"/> to attach debug info to</param>
/// <param name="typeParameter">Template parameter [default = null]</param>
/// <param name="declaration">Template declarations [default = null]</param>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DISubProgram CreateFunction( DIScope scope
, string name
, string mangledName
, DIFile file
, uint line
, DISubroutineType signatureType
, bool isLocalToUnit
, bool isDefinition
, uint scopeLine
, DebugInfoFlags debugFlags
, bool isOptimized
, Function function
, MDNode typeParameter = null
, MDNode declaration = null
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( string.IsNullOrWhiteSpace( name ) )
name = string.Empty;
if( string.IsNullOrWhiteSpace( mangledName ) )
mangledName = string.Empty;
if( signatureType == null )
throw new ArgumentNullException( nameof( signatureType ) );
if( function == null )
throw new ArgumentNullException( nameof( function ) );
var handle = NativeMethods.DIBuilderCreateFunction( BuilderHandle
, scope.MetadataHandle
, name
, mangledName
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, signatureType.MetadataHandle
, isLocalToUnit ? 1 : 0
, isDefinition ? 1 : 0
, scopeLine
, ( uint )debugFlags
, isOptimized ? 1 : 0
, function.ValueHandle
, typeParameter?.MetadataHandle ?? LLVMMetadataRef.Zero
, declaration?.MetadataHandle ?? LLVMMetadataRef.Zero
);
return MDNode.FromHandle<DISubProgram>( handle );
}
/// <summary>Creates a new forward declaration to a function</summary>
/// <param name="scope"><see cref="DIScope"/> for the declaration</param>
/// <param name="name">Name of the function as it appears in source</param>
/// <param name="mangledName">mangled name of the function (for linker)</param>
/// <param name="file">Source file location for the function</param>
/// <param name="line">starting line of the declaration</param>
/// <param name="subroutineType">Signature for the function</param>
/// <param name="isLocalToUnit">Flag to indicate if this declaration is local to the compilation unit</param>
/// <param name="isDefinition">Flag to indicate if this is a definition</param>
/// <param name="scopeLine">Line of the first scope block</param>
/// <param name="debugFlags"><see cref="DebugInfoFlags"/> for the function</param>
/// <param name="isOptimized">Flag to indicate if the function is optimized</param>
        /// <returns><see cref="DISubProgram"/> created as a temporary forward declaration</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DISubProgram ForwardDeclareFunction( DIScope scope
, string name
, string mangledName
, DIFile file
, uint line
, DISubroutineType subroutineType
, bool isLocalToUnit
, bool isDefinition
, uint scopeLine
, DebugInfoFlags debugFlags
, bool isOptimized
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( subroutineType == null )
throw new ArgumentNullException( nameof( subroutineType ) );
if( string.IsNullOrWhiteSpace( name ) )
name = string.Empty;
if( string.IsNullOrWhiteSpace( mangledName ) )
mangledName = string.Empty;
var handle = NativeMethods.DIBuilderCreateTempFunctionFwdDecl( BuilderHandle
, scope.MetadataHandle
, name
, mangledName
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, subroutineType.MetadataHandle
, isLocalToUnit ? 1 : 0
, isDefinition ? 1 : 0
, scopeLine
, ( uint )debugFlags
, isOptimized ? 1 : 0
, LLVMValueRef.Zero
, LLVMMetadataRef.Zero
, LLVMMetadataRef.Zero
);
return MDNode.FromHandle<DISubProgram>( handle );
}
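        /// <summary>Creates a local (automatic) variable as a <see cref="DILocalVariable"/></summary>
        /// <param name="scope">Scope for the variable</param>
        /// <param name="name">Name of the variable</param>
        /// <param name="file"><see cref="DIFile"/> containing the declaration</param>
        /// <param name="line">Line number of the declaration</param>
        /// <param name="type">Debug type for the variable</param>
        /// <param name="alwaysPreserve">Flag to indicate if this variable is always preserved for debug view even if optimization would remove it</param>
        /// <param name="debugFlags"><see cref="DebugInfoFlags"/> for this variable</param>
        /// <returns><see cref="DILocalVariable"/> representing the local variable</returns>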
public DILocalVariable CreateLocalVariable( DIScope scope
, string name
, DIFile file
, uint line
, DIType type
, bool alwaysPreserve
, DebugInfoFlags debugFlags
)
{
return CreateLocalVariable( Tag.AutoVariable, scope, name, file, line, type, alwaysPreserve, ( uint )debugFlags, 0 );
}
/// <summary>Creates an argument for a function as a <see cref="DILocalVariable"/></summary>
/// <param name="scope">Scope for the argument</param>
/// <param name="name">Name of the argument</param>
/// <param name="file"><see cref="DIFile"/> containing the function this argument is declared in</param>
/// <param name="line">Line number fort his argument</param>
/// <param name="type">Debug type for this argument</param>
/// <param name="alwaysPreserve">Flag to indicate if this argument is always preserved for debug view even if optimization would remove it</param>
/// <param name="debugFlags"><see cref="DebugInfoFlags"/> for this argument</param>
/// <param name="argNo">One based argument index on the method (e.g the first argument is 1 not 0 )</param>
/// <returns><see cref="DILocalVariable"/> representing the function argument</returns>
public DILocalVariable CreateArgument( DIScope scope
, string name
, DIFile file
, uint line
, DIType type
, bool alwaysPreserve
, DebugInfoFlags debugFlags
, uint argNo
)
{
if (argNo > UInt16.MaxValue)
throw new ArgumentOutOfRangeException(nameof(argNo), "Argument index must fit in 16bit unsigned value");
return CreateLocalVariable( Tag.ArgVariable, scope, name, file, line, type, alwaysPreserve, ( uint )debugFlags, argNo );
}
/// <summary>Construct debug information for a basic type (a.k.a. primitive type)</summary>
/// <param name="name">Name of the type</param>
/// <param name="bitSize">Bit size for the type</param>
/// <param name="bitAlign">Bit alignment for the type</param>
/// <param name="encoding"><see cref="DiTypeKind"/> encoding for the type</param>
        /// <returns><see cref="DIBasicType"/> constructed from the parameters</returns>
public DIBasicType CreateBasicType( string name, ulong bitSize, ulong bitAlign, DiTypeKind encoding )
{
var handle = NativeMethods.DIBuilderCreateBasicType( BuilderHandle, name, bitSize, bitAlign, ( uint )encoding );
return MDNode.FromHandle<DIBasicType>( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DIDerivedType CreatePointerType( DIType pointeeType, string name, ulong bitSize, ulong bitAlign )
{
var handle = NativeMethods.DIBuilderCreatePointerType( BuilderHandle
, pointeeType?.MetadataHandle ?? LLVMMetadataRef.Zero // null == void
, bitSize
, bitAlign
, name ?? string.Empty
);
return MDNode.FromHandle<DIDerivedType>( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DIDerivedType CreateQualifiedType( DIType baseType, QualifiedTypeTag tag )
{
if( baseType == null )
throw new ArgumentNullException( nameof( baseType ) );
var handle = NativeMethods.DIBuilderCreateQualifiedType( BuilderHandle, ( uint )tag, baseType.MetadataHandle );
return MDNode.FromHandle<DIDerivedType>( handle );
}
public DITypeArray CreateTypeArray( params DIType[ ] types ) => CreateTypeArray( ( IEnumerable<DIType> )types );
public DITypeArray CreateTypeArray( IEnumerable<DIType> types )
{
var handles = types.Select( t => t.MetadataHandle ).ToArray( );
var count = handles.LongLength;
if( count == 0 )
handles = new[ ] { default( LLVMMetadataRef ) };
var handle = NativeMethods.DIBuilderGetOrCreateTypeArray( BuilderHandle, out handles[ 0 ], ( ulong )count );
return new DITypeArray( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DISubroutineType CreateSubroutineType( DebugInfoFlags debugFlags, DITypeArray types )
{
if( types == null )
throw new ArgumentNullException( nameof( types ) );
            // NOTE:
            // The LLVM API takes a "DIFile" as the first argument; however, it is ignored because
            // DISubroutineType doesn't have any scope/file information attached.
            // Thus, the file argument here is always null.
var handle = NativeMethods.DIBuilderCreateSubroutineType( BuilderHandle
, LLVMMetadataRef.Zero
, types.MetadataHandle
, ( uint )debugFlags
);
return MDNode.FromHandle<DISubroutineType>( handle );
}
public DISubroutineType CreateSubroutineType( DebugInfoFlags debugFlags )
{
var typeArray = GetOrCreateTypeArray( null );
return CreateSubroutineType( debugFlags, typeArray );
}
public DISubroutineType CreateSubroutineType( DebugInfoFlags debugFlags, DIType returnType, IEnumerable<DIType> types )
{
var typeArray = GetOrCreateTypeArray( ScalarEnumerable.Combine( returnType, types ) );
return CreateSubroutineType( debugFlags, typeArray );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DICompositeType CreateStructType( DIScope scope
, string name
, DIFile file
, uint line
, ulong bitSize
, ulong bitAlign
, uint flags
, DIType derivedFrom
, DINodeArray elements
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( elements == null )
throw new ArgumentNullException( nameof( elements ) );
var handle = NativeMethods.DIBuilderCreateStructType( BuilderHandle
, scope.MetadataHandle
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, bitSize
, bitAlign
, flags
, derivedFrom?.MetadataHandle ?? LLVMMetadataRef.Zero
, elements.Tuple.MetadataHandle
);
return MDNode.FromHandle<DICompositeType>( handle );
}
public DICompositeType CreateStructType( DIScope scope
, string name
, DIFile file
, uint line
, ulong bitSize
, ulong bitAlign
, DebugInfoFlags debugFlags
, DIType derivedFrom
, params DINode[ ] elements
)
{
return CreateStructType( scope, name, file, line, bitSize, bitAlign, ( uint )debugFlags, derivedFrom, GetOrCreateArray( elements ) );
}
public DICompositeType CreateStructType( DIScope scope
, string name
, DIFile file
, uint line
, ulong bitSize
, ulong bitAlign
, DebugInfoFlags debugFlags
, DIType derivedFrom
, IEnumerable<DINode> elements
)
{
return CreateStructType( scope, name, file, line, bitSize, bitAlign, ( uint )debugFlags, derivedFrom, GetOrCreateArray( elements ) );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DIDerivedType CreateMemberType( DIScope scope
, string name
, DIFile file
, uint line
, ulong bitSize
, ulong bitAlign
, ulong bitOffset
, DebugInfoFlags debugFlags
, DIType type
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( type == null )
throw new ArgumentNullException( nameof( type ) );
var handle = NativeMethods.DIBuilderCreateMemberType( BuilderHandle
, scope.MetadataHandle
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, bitSize
, bitAlign
, bitOffset
, ( uint )debugFlags
, type.MetadataHandle
);
return MDNode.FromHandle<DIDerivedType>( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DICompositeType CreateArrayType( ulong bitSize, ulong bitAlign, DIType elementType, DINodeArray subscripts )
{
if( elementType == null )
throw new ArgumentNullException( nameof( elementType ) );
if( subscripts == null )
throw new ArgumentNullException( nameof( subscripts ) );
var handle = NativeMethods.DIBuilderCreateArrayType( BuilderHandle, bitSize, bitAlign, elementType.MetadataHandle, subscripts.Tuple.MetadataHandle );
return MDNode.FromHandle<DICompositeType>( handle );
}
public DICompositeType CreateArrayType( ulong bitSize, ulong bitAlign, DIType elementType, params DINode[ ] subscripts )
{
return CreateArrayType( bitSize, bitAlign, elementType, GetOrCreateArray( subscripts ) );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DIDerivedType CreateTypedef( DIType type, string name, DIFile file, uint line, DINode context )
{
var handle = NativeMethods.DIBuilderCreateTypedef( BuilderHandle
, type?.MetadataHandle ?? LLVMMetadataRef.Zero
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, context?.MetadataHandle ?? LLVMMetadataRef.Zero
);
return MDNode.FromHandle<DIDerivedType>( handle );
}
public DISubRange CreateSubRange( long lo, long count )
{
var handle = NativeMethods.DIBuilderGetOrCreateSubrange( BuilderHandle, lo, count );
return MDNode.FromHandle<DISubRange>( handle );
}
public DINodeArray GetOrCreateArray( IEnumerable<DINode> elements )
{
var buf = elements.Select( d => d?.MetadataHandle ?? LLVMMetadataRef.Zero ).ToArray( );
var actualLen = buf.LongLength;
// for the out parameter trick to work - need to have a valid array with at least one element
if( buf.LongLength == 0 )
buf = new LLVMMetadataRef[ 1 ];
var handle = NativeMethods.DIBuilderGetOrCreateArray( BuilderHandle, out buf[ 0 ], ( ulong )actualLen );
return new DINodeArray( LlvmMetadata.FromHandle<MDTuple>( OwningModule.Context, handle ) );
}
public DITypeArray GetOrCreateTypeArray( params DIType[ ] types ) => GetOrCreateTypeArray( ( IEnumerable<DIType> )types );
        public DITypeArray GetOrCreateTypeArray( IEnumerable<DIType> types )
        {
            // guard against null/empty input so the out-parameter trick below always has a valid element to reference
            var buf = ( types ?? Enumerable.Empty<DIType>( ) ).Select( t => t?.MetadataHandle ?? LLVMMetadataRef.Zero ).ToArray( );
            var actualLen = buf.LongLength;
            if( actualLen == 0 )
                buf = new LLVMMetadataRef[ 1 ];
            var handle = NativeMethods.DIBuilderGetOrCreateTypeArray( BuilderHandle, out buf[ 0 ], ( ulong )actualLen );
            return new DITypeArray( handle );
        }
public DIEnumerator CreateEnumeratorValue( string name, long value )
{
var handle = NativeMethods.DIBuilderCreateEnumeratorValue( BuilderHandle, name, value );
return MDNode.FromHandle<DIEnumerator>( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DICompositeType CreateEnumerationType( DIScope scope
, string name
, DIFile file
, uint lineNumber
, ulong sizeInBits
, ulong alignInBits
, IEnumerable<DIEnumerator> elements
, DIType underlyingType
, string uniqueId = ""
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( underlyingType == null )
throw new ArgumentNullException( nameof( underlyingType ) );
var elementHandles = elements.Select( e => e.MetadataHandle ).ToArray( );
var elementArray = NativeMethods.DIBuilderGetOrCreateArray( BuilderHandle, out elementHandles[ 0 ], ( ulong )elementHandles.LongLength );
var handle = NativeMethods.DIBuilderCreateEnumerationType( BuilderHandle
, scope.MetadataHandle
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, lineNumber
, sizeInBits
, alignInBits
, elementArray
, underlyingType.MetadataHandle
, uniqueId
);
return MDNode.FromHandle<DICompositeType>( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DIGlobalVariable CreateGlobalVariable( DINode scope
, string name
, string linkageName
, DIFile file
, uint lineNo
, DIType type
, bool isLocalToUnit
, Value value
, DINode declaration = null
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
if( type == null )
throw new ArgumentNullException( nameof( type ) );
if( value == null )
throw new ArgumentNullException( nameof( value ) );
var handle = NativeMethods.DIBuilderCreateGlobalVariable( BuilderHandle
, scope.MetadataHandle
, name
, linkageName
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, lineNo
, type.MetadataHandle
, isLocalToUnit
, value.ValueHandle
, declaration?.MetadataHandle ?? LLVMMetadataRef.Zero
);
return MDNode.FromHandle<DIGlobalVariable>( handle );
}
public void Finish( )
{
if( !IsFinished )
{
var unresolvedTemps = from node in OwningModule.Context.Metadata.OfType<MDNode>( )
where node.IsTemporary && !node.IsResolved
select node;
if( unresolvedTemps.Any( ) )
{
var bldr = new StringBuilder( "Temporaries must be resolved before finalizing debug information:\n" );
foreach( var node in unresolvedTemps )
bldr.AppendFormat( "\t{0}\n", node.ToString( ) );
throw new InvalidOperationException( bldr.ToString( ) );
}
NativeMethods.DIBuilderFinalize( BuilderHandle );
IsFinished = true;
}
}
public Instruction InsertDeclare( Value storage, DILocalVariable varInfo, DILocation location, Instruction insertBefore )
{
return InsertDeclare( storage, varInfo, CreateExpression( ), location, insertBefore );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public Instruction InsertDeclare( Value storage, DILocalVariable varInfo, DIExpression expression, DILocation location, Instruction insertBefore )
{
if( storage == null )
throw new ArgumentNullException( nameof( storage ) );
if( varInfo == null )
throw new ArgumentNullException( nameof( varInfo ) );
if( expression == null )
throw new ArgumentNullException( nameof( expression ) );
if( location == null )
throw new ArgumentNullException( nameof( location ) );
if( insertBefore == null )
throw new ArgumentNullException( nameof( insertBefore ) );
var handle = NativeMethods.DIBuilderInsertDeclareBefore( BuilderHandle
, storage.ValueHandle
, varInfo.MetadataHandle
, expression.MetadataHandle
, location.MetadataHandle
, insertBefore.ValueHandle
);
return Value.FromHandle<Instruction>( handle );
}
public CallInstruction InsertDeclare( Value storage, DILocalVariable varInfo, DILocation location, BasicBlock insertAtEnd )
{
return InsertDeclare( storage, varInfo, CreateExpression( ), location, insertAtEnd );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
        public CallInstruction InsertDeclare( Value storage, DILocalVariable varInfo, DIExpression expression, DILocation location, BasicBlock insertAtEnd )
{
if( storage == null )
throw new ArgumentNullException( nameof( storage ) );
if( varInfo == null )
throw new ArgumentNullException( nameof( varInfo ) );
            if( expression == null )
                throw new ArgumentNullException( nameof( expression ) );
if( location == null )
throw new ArgumentNullException( nameof( location ) );
if( insertAtEnd == null )
throw new ArgumentNullException( nameof( insertAtEnd ) );
if (location.Scope.SubProgram != varInfo.Scope.SubProgram)
throw new ArgumentException("Mismatched scopes for location and variable");
            var handle = NativeMethods.DIBuilderInsertDeclareAtEnd( BuilderHandle
                                                                   , storage.ValueHandle
                                                                   , varInfo.MetadataHandle
                                                                   , expression.MetadataHandle
                                                                   , location.MetadataHandle
                                                                   , insertAtEnd.BlockHandle
                                                                   );
return Value.FromHandle<CallInstruction>( handle );
}
public CallInstruction InsertValue( Value value
, UInt64 offset
, DILocalVariable varInfo
, DILocation location
, Instruction insertBefore
)
{
return InsertValue( value, offset, varInfo, null, location, insertBefore );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters" )]
public CallInstruction InsertValue( Value value
, UInt64 offset
, DILocalVariable varInfo
, DIExpression expression
, DILocation location
, Instruction insertBefore
)
{
if( value == null )
throw new ArgumentNullException( nameof( value ) );
if( varInfo == null )
throw new ArgumentNullException( nameof( varInfo ) );
if( expression == null )
throw new ArgumentNullException( nameof( expression ) );
if( location == null )
throw new ArgumentNullException( nameof( location ) );
if( insertBefore == null )
throw new ArgumentNullException( nameof( insertBefore ) );
var handle = NativeMethods.DIBuilderInsertValueBefore( BuilderHandle
, value.ValueHandle
, offset
, varInfo.MetadataHandle
, expression?.MetadataHandle ?? CreateExpression( ).MetadataHandle
, location.MetadataHandle
, insertBefore.ValueHandle
);
var retVal = Value.FromHandle<CallInstruction>( handle );
retVal.IsTailCall = true;
return retVal;
}
public CallInstruction InsertValue( Value value
, UInt64 offset
, DILocalVariable varInfo
, DILocation location
, BasicBlock insertAtEnd
)
{
return InsertValue( value, offset, varInfo, null, location, insertAtEnd );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters")]
public CallInstruction InsertValue( Value value
, DILocalVariable varInfo
, DIExpression expression
, DILocation location
, BasicBlock insertAtEnd
)
{
return InsertValue(value, 0, varInfo, expression, location, insertAtEnd);
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters" )]
public CallInstruction InsertValue( Value value
, UInt64 offset
, DILocalVariable varInfo
, DIExpression expression
, DILocation location
, BasicBlock insertAtEnd
)
{
if( value == null )
throw new ArgumentNullException( nameof( value ) );
if( varInfo == null )
throw new ArgumentNullException( nameof( varInfo ) );
if( expression == null )
throw new ArgumentNullException( nameof( expression ) );
if( location == null )
throw new ArgumentNullException( nameof( location ) );
if( insertAtEnd == null )
throw new ArgumentNullException( nameof( insertAtEnd ) );
if( location.Scope != varInfo.Scope )
throw new ArgumentException( "mismatched scopes" );
if( !location.Describes(insertAtEnd.ContainingFunction ) )
throw new ArgumentException( "location does not describe the specified block's containing function" );
var handle = NativeMethods.DIBuilderInsertValueAtEnd( BuilderHandle
, value.ValueHandle
, offset
, varInfo.MetadataHandle
, expression?.MetadataHandle ?? CreateExpression( ).MetadataHandle
, location.MetadataHandle
, insertAtEnd.BlockHandle
);
var retVal = Value.FromHandle<CallInstruction>( handle );
retVal.IsTailCall = true;
return retVal;
}
public DIExpression CreateExpression( params ExpressionOp[ ] operations ) => CreateExpression( ( IEnumerable<ExpressionOp> )operations );
public DIExpression CreateExpression( IEnumerable<ExpressionOp> operations )
{
var args = operations.Cast<long>( ).ToArray( );
var actualCount = args.LongLength;
if( args.Length == 0 )
args = new long[ 1 ];
var handle = NativeMethods.DIBuilderCreateExpression( BuilderHandle, out args[ 0 ], ( ulong )actualCount );
return new DIExpression( handle );
}
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Specific type required by interop call" )]
public DICompositeType CreateReplaceableCompositeType( Tag tag
, string name
, DINode scope
, DIFile file
, uint line
, uint lang = 0
, ulong sizeInBits = 0
, ulong alignBits = 0
, DebugInfoFlags flags = DebugInfoFlags.None
)
{
if( scope == null )
throw new ArgumentNullException( nameof( scope ) );
var handle = NativeMethods.DIBuilderCreateReplaceableCompositeType( BuilderHandle
, ( uint )tag
, name
, scope.MetadataHandle
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, lang
, sizeInBits
, alignBits
, (uint)flags
);
return MDNode.FromHandle<DICompositeType>( handle );
}
public void Dispose( )
{
if( BuilderHandle.Pointer != IntPtr.Zero )
{
NativeMethods.DIBuilderDestroy( BuilderHandle );
BuilderHandle = default( LLVMDIBuilderRef );
}
}
internal DebugInfoBuilder( NativeModule owningModule )
: this( owningModule, true )
{
}
// keeping this private for now as there doesn't seem to be a good reason to support
// allowUnresolved == false
private DebugInfoBuilder( NativeModule owningModule, bool allowUnresolved )
{
if( owningModule == null )
throw new ArgumentNullException( nameof( owningModule ) );
BuilderHandle = NativeMethods.NewDIBuilder( owningModule.ModuleHandle, allowUnresolved );
OwningModule = owningModule;
}
private DILocalVariable CreateLocalVariable( Tag dwarfTag
, DIScope scope
, string name
, DIFile file
, uint line
, DIType type
, bool alwaysPreserve
, uint flags
, uint argNo
)
{
var handle = NativeMethods.DIBuilderCreateLocalVariable( BuilderHandle
, ( uint )dwarfTag
, scope.MetadataHandle
, name
, file?.MetadataHandle ?? LLVMMetadataRef.Zero
, line
, type.MetadataHandle
, alwaysPreserve ? 1 : 0
, flags
, argNo
);
return MDNode.FromHandle<DILocalVariable>( handle );
}
private readonly NativeModule OwningModule;
private bool IsFinished;
internal LLVMDIBuilderRef BuilderHandle { get; private set; }
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace NorthwindRepository{
/// <summary>
/// Strongly-typed collection for the SalesTotalsByAmount class.
/// </summary>
[Serializable]
public partial class SalesTotalsByAmountCollection : ReadOnlyList<SalesTotalsByAmount, SalesTotalsByAmountCollection>
{
public SalesTotalsByAmountCollection() {}
}
/// <summary>
/// This is Read-only wrapper class for the Sales Totals by Amount view.
/// </summary>
[Serializable]
public partial class SalesTotalsByAmount : ReadOnlyRecord<SalesTotalsByAmount>, IReadOnlyRecord
{
#region Default Settings
protected static void SetSQLProps()
{
GetTableSchema();
}
#endregion
#region Schema Accessor
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
{
SetSQLProps();
}
return BaseSchema;
}
}
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Sales Totals by Amount", TableType.View, DataService.GetInstance("NorthwindRepository"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarSaleAmount = new TableSchema.TableColumn(schema);
colvarSaleAmount.ColumnName = "SaleAmount";
colvarSaleAmount.DataType = DbType.Currency;
colvarSaleAmount.MaxLength = 0;
colvarSaleAmount.AutoIncrement = false;
colvarSaleAmount.IsNullable = true;
colvarSaleAmount.IsPrimaryKey = false;
colvarSaleAmount.IsForeignKey = false;
colvarSaleAmount.IsReadOnly = false;
schema.Columns.Add(colvarSaleAmount);
TableSchema.TableColumn colvarOrderID = new TableSchema.TableColumn(schema);
colvarOrderID.ColumnName = "OrderID";
colvarOrderID.DataType = DbType.Int32;
colvarOrderID.MaxLength = 0;
colvarOrderID.AutoIncrement = false;
colvarOrderID.IsNullable = false;
colvarOrderID.IsPrimaryKey = false;
colvarOrderID.IsForeignKey = false;
colvarOrderID.IsReadOnly = false;
schema.Columns.Add(colvarOrderID);
TableSchema.TableColumn colvarCompanyName = new TableSchema.TableColumn(schema);
colvarCompanyName.ColumnName = "CompanyName";
colvarCompanyName.DataType = DbType.String;
colvarCompanyName.MaxLength = 40;
colvarCompanyName.AutoIncrement = false;
colvarCompanyName.IsNullable = false;
colvarCompanyName.IsPrimaryKey = false;
colvarCompanyName.IsForeignKey = false;
colvarCompanyName.IsReadOnly = false;
schema.Columns.Add(colvarCompanyName);
TableSchema.TableColumn colvarShippedDate = new TableSchema.TableColumn(schema);
colvarShippedDate.ColumnName = "ShippedDate";
colvarShippedDate.DataType = DbType.DateTime;
colvarShippedDate.MaxLength = 0;
colvarShippedDate.AutoIncrement = false;
colvarShippedDate.IsNullable = true;
colvarShippedDate.IsPrimaryKey = false;
colvarShippedDate.IsForeignKey = false;
colvarShippedDate.IsReadOnly = false;
schema.Columns.Add(colvarShippedDate);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["NorthwindRepository"].AddSchema("Sales Totals by Amount",schema);
}
}
#endregion
#region Query Accessor
public static Query CreateQuery()
{
return new Query(Schema);
}
#endregion
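        // Hedged usage sketch (illustrative only; the SubSonic Query members shown below, such as
        // AddWhere and ExecuteReader, are assumptions and are not verified against this codebase):
        //
        //   Query qry = SalesTotalsByAmount.CreateQuery();
        //   qry.AddWhere(SalesTotalsByAmount.Columns.CompanyName, "Alfreds Futterkiste");
        //   using (IDataReader rdr = qry.ExecuteReader()) { /* read SaleAmount, OrderID, ... */ }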
#region .ctors
public SalesTotalsByAmount()
{
SetSQLProps();
SetDefaults();
MarkNew();
}
public SalesTotalsByAmount(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
{
ForceDefaults();
}
MarkNew();
}
public SalesTotalsByAmount(object keyID)
{
SetSQLProps();
LoadByKey(keyID);
}
public SalesTotalsByAmount(string columnName, object columnValue)
{
SetSQLProps();
LoadByParam(columnName,columnValue);
}
#endregion
#region Props
[XmlAttribute("SaleAmount")]
[Bindable(true)]
public decimal? SaleAmount
{
get
{
return GetColumnValue<decimal?>("SaleAmount");
}
set
{
SetColumnValue("SaleAmount", value);
}
}
[XmlAttribute("OrderID")]
[Bindable(true)]
public int OrderID
{
get
{
return GetColumnValue<int>("OrderID");
}
set
{
SetColumnValue("OrderID", value);
}
}
[XmlAttribute("CompanyName")]
[Bindable(true)]
public string CompanyName
{
get
{
return GetColumnValue<string>("CompanyName");
}
set
{
SetColumnValue("CompanyName", value);
}
}
[XmlAttribute("ShippedDate")]
[Bindable(true)]
public DateTime? ShippedDate
{
get
{
return GetColumnValue<DateTime?>("ShippedDate");
}
set
{
SetColumnValue("ShippedDate", value);
}
}
#endregion
#region Columns Struct
public struct Columns
{
public static string SaleAmount = @"SaleAmount";
public static string OrderID = @"OrderID";
public static string CompanyName = @"CompanyName";
public static string ShippedDate = @"ShippedDate";
}
#endregion
#region IAbstractRecord Members
public new CT GetColumnValue<CT>(string columnName) {
return base.GetColumnValue<CT>(columnName);
}
public object GetColumnValue(string columnName) {
return base.GetColumnValue<object>(columnName);
}
#endregion
}
}
| |
namespace Larsu.Hibernate
{
using System;
using NHibernate;
using NHibernate.Engine;
using NHibernate.Hql.Util;
using NHibernate.Proxy;
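    /// <summary>
    /// Extension methods that adapt NHibernate persistence operations (save, merge, delete, transient
    /// checks, and proxy unwrapping) to <see cref="IEntity"/> instances.
    /// </summary>
    /// <example>
    /// A hedged usage sketch (assumes a concrete <c>Customer : IEntity</c> type and an open
    /// <see cref="ISession"/> named <c>session</c>; both are illustrative and not defined in this file):
    /// <code>
    /// var customer = new Customer();
    /// customer.Save(session);                      // Save + Commit inside a transaction
    /// bool? isNew = customer.IsTransient(session); // false once the entity is persisted
    /// var concrete = customer.Unwrap&lt;Customer&gt;(session); // resolves NHibernate proxies to the entity
    /// </code>
    /// </example>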
public static class IEntityExtensions
{
[Obsolete]
public static bool? IsTransient(this IEntity entity)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
var session = SessionManager.GetCurrentSession();
return entity.IsTransient(session);
}
[Obsolete]
public static bool? IsTransient(this IEntity entity, ISession session)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (session == null)
{
throw new ArgumentNullException(nameof(session));
}
var sessionFactoryImpl = (ISessionFactoryImplementor)session.SessionFactory;
var typeName = entity.GetType().AssemblyQualifiedName;
var persister = new SessionFactoryHelper(sessionFactoryImpl).RequireClassPersister(typeName);
return persister.IsTransient(entity, (ISessionImplementor)session);
}
[Obsolete]
public static void Save(this IEntity entity)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
var session = SessionManager.GetCurrentSession();
entity.Save(session);
}
[Obsolete]
public static void Save(this IEntity entity, ISession session)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (session == null)
{
throw new ArgumentNullException(nameof(session));
}
using (var transaction = session.BeginTransaction())
{
session.Save(entity);
transaction.Commit();
}
}
[Obsolete]
public static void Save(this IEntity entity, string name)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
var session = SessionManager.GetCurrentSession(name);
entity.Save(session);
}
[Obsolete]
public static void Save(this IEntity entity, ITransaction transaction)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (transaction == null)
{
throw new ArgumentNullException(nameof(transaction));
}
var session = SessionManager.GetCurrentSession();
if (!transaction.Equals(session.Transaction))
{
throw new InvalidOperationException();
}
session.Save(entity);
}
[Obsolete]
public static void Merge(this IEntity entity)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
var session = SessionManager.GetCurrentSession();
entity.Merge(session);
}
[Obsolete]
public static void Merge(this IEntity entity, ISession session)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (session == null)
{
throw new ArgumentNullException(nameof(session));
}
using (var transaction = session.BeginTransaction())
{
session.Merge(entity);
transaction.Commit();
}
}
[Obsolete]
public static void Merge(this IEntity entity, string name)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
var session = SessionManager.GetCurrentSession(name);
entity.Merge(session);
}
[Obsolete]
public static void Delete(this IEntity entity)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
var session = SessionManager.GetCurrentSession();
entity.Delete(session);
}
[Obsolete]
public static void Delete(this IEntity entity, ISession session)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (session == null)
{
throw new ArgumentNullException(nameof(session));
}
using (var transaction = session.BeginTransaction())
{
session.Delete(entity);
transaction.Commit();
}
}
[Obsolete]
public static void Delete(this IEntity entity, string name)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
var session = SessionManager.GetCurrentSession(name);
entity.Delete(session);
}
[Obsolete]
public static void Delete(this IEntity entity, ITransaction transaction)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (transaction == null)
{
throw new ArgumentNullException(nameof(transaction));
}
var session = SessionManager.GetCurrentSession();
session.Delete(entity);
}
public static Type UnwrapType(this IEntity entity)
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
var type = entity.GetType();
if (entity is INHibernateProxy)
{
return type.BaseType;
}
return type;
}
[Obsolete]
public static TEntity Unwrap<TEntity>(this IEntity entity) where TEntity : class, IEntity
{
var session = SessionManager.GetCurrentSession();
return entity.Unwrap<TEntity>(session);
}
[Obsolete]
public static TEntity Unwrap<TEntity>(this IEntity entity, string name) where TEntity : class, IEntity
{
var session = SessionManager.GetCurrentSession(name);
return entity.Unwrap<TEntity>(session);
}
[Obsolete]
public static TEntity Unwrap<TEntity>(this IEntity entity, ISession session) where TEntity : class, IEntity
{
if (entity == null)
{
throw new ArgumentNullException(nameof(entity));
}
if (session == null)
{
throw new ArgumentNullException(nameof(session));
}
if (!(entity is INHibernateProxy))
{
return (TEntity)entity;
}
var initializer = (entity as INHibernateProxy).HibernateLazyInitializer;
if (initializer.IsUninitialized)
{
return initializer.GetImplementation() as TEntity;
}
var impl = session.GetSessionImplementation();
var unwrapped = impl.PersistenceContext.Unproxy(entity);
return unwrapped as TEntity;
}
}
}
| |
// Copyright 2018 Esri.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
// language governing permissions and limitations under the License.
using System.Windows;
using Esri.ArcGISRuntime.Geometry;
using Esri.ArcGISRuntime.Mapping;
using Esri.ArcGISRuntime.Symbology;
using Esri.ArcGISRuntime.UI;
namespace ArcGISRuntime.WPF.Samples.ClipGeometry
{
[ArcGISRuntime.Samples.Shared.Attributes.Sample(
name: "Clip geometry",
category: "Geometry",
description: "Clip a geometry with another geometry.",
instructions: "Click the \"Clip\" button to clip the blue graphic with the red dashed envelopes.",
tags: new[] { "analysis", "clip", "geometry" })]
public partial class ClipGeometry
{
// Graphics overlay to display input geometries for the clip operation.
private GraphicsOverlay _inputGeometriesGraphicsOverlay;
// Graphic that represents the 1st parameter of the GeometryEngine.Clip operation - it follows the boundary of Colorado.
private Graphic _coloradoGraphic;
// One of the graphics that represents the 2nd parameter of the GeometryEngine.Clip operation - it will be an envelope
// that falls completely outside the boundary of Colorado.
private Graphic _outsideGraphic;
// One of the graphics that represents the 2nd parameter of the GeometryEngine.Clip operation - it will be an envelope
// that is completely contained within the boundary of Colorado.
private Graphic _containedGraphic;
// One of the graphics that represents the 2nd parameter of the GeometryEngine.Clip operation - it will be an envelope
        // that intersects the boundary of Colorado.
private Graphic _intersectingGraphic;
// Graphics overlay to display the resulting geometries from the three GeometryEngine.Clip operations.
private GraphicsOverlay _clipAreasGraphicsOverlay;
public ClipGeometry()
{
InitializeComponent();
Initialize();
}
private void Initialize()
{
// Create a new map using the WebMercator spatial reference.
Map newMap = new Map(SpatialReferences.WebMercator)
{
// Set the basemap of the map to be a topographic layer.
Basemap = new Basemap(BasemapStyle.ArcGISTopographic)
};
// Create a graphics overlay to hold the input geometries for the clip operation.
_inputGeometriesGraphicsOverlay = new GraphicsOverlay();
// Add the input geometries graphics overlay to the MapView.
MyMapView.GraphicsOverlays.Add(_inputGeometriesGraphicsOverlay);
// Create a graphics overlay to hold the resulting geometries from the three GeometryEngine.Clip operations.
_clipAreasGraphicsOverlay = new GraphicsOverlay();
// Add the resulting geometries graphics overlay to the MapView.
MyMapView.GraphicsOverlays.Add(_clipAreasGraphicsOverlay);
// Create a simple line symbol for the 1st parameter of the GeometryEngine.Clip operation - it follows the
// boundary of Colorado.
SimpleLineSymbol coloradoSimpleLineSymbol = new SimpleLineSymbol(
SimpleLineSymbolStyle.Solid, System.Drawing.Color.Blue, 4);
// Create the color that will be used as the fill for the Colorado graphic. It will be a semi-transparent, blue color.
System.Drawing.Color coloradoFillColor = System.Drawing.Color.FromArgb(34, 0, 0, 255);
// Create the simple fill symbol for the Colorado graphic - comprised of a fill style, fill color and outline.
SimpleFillSymbol coloradoSimpleFillSymbol = new SimpleFillSymbol(
SimpleFillSymbolStyle.Solid, coloradoFillColor, coloradoSimpleLineSymbol);
// Create the geometry of the Colorado graphic.
Envelope colorado = new Envelope(
new MapPoint(-11362327.128340, 5012861.290274, SpatialReferences.WebMercator),
new MapPoint(-12138232.018408, 4441198.773776, SpatialReferences.WebMercator));
// Create the graphic for Colorado - comprised of a polygon shape and fill symbol.
_coloradoGraphic = new Graphic(colorado, coloradoSimpleFillSymbol);
// Add the Colorado graphic to the input geometries graphics overlay collection.
_inputGeometriesGraphicsOverlay.Graphics.Add(_coloradoGraphic);
// Create a simple line symbol for the three different clip geometries.
SimpleLineSymbol clipGeomtriesSimpleLineSymbol = new SimpleLineSymbol(
SimpleLineSymbolStyle.Dot, System.Drawing.Color.Red, 5);
// Create an envelope outside Colorado.
Envelope outsideEnvelope = new Envelope(
new MapPoint(-11858344.321294, 5147942.225174, SpatialReferences.WebMercator),
new MapPoint(-12201990.219681, 5297071.577304, SpatialReferences.WebMercator));
// Create the graphic for an envelope outside Colorado - comprised of a polyline shape and line symbol.
_outsideGraphic = new Graphic(outsideEnvelope, clipGeomtriesSimpleLineSymbol);
// Add the envelope outside Colorado graphic to the graphics overlay collection.
_inputGeometriesGraphicsOverlay.Graphics.Add(_outsideGraphic);
// Create an envelope intersecting Colorado.
Envelope intersectingEnvelope = new Envelope(
new MapPoint(-11962086.479298, 4566553.881363, SpatialReferences.WebMercator),
new MapPoint(-12260345.183558, 4332053.378376, SpatialReferences.WebMercator));
// Create the graphic for an envelope intersecting Colorado - comprised of a polyline shape and line symbol.
_intersectingGraphic = new Graphic(intersectingEnvelope, clipGeomtriesSimpleLineSymbol);
// Add the envelope intersecting Colorado graphic to the graphics overlay collection.
_inputGeometriesGraphicsOverlay.Graphics.Add(_intersectingGraphic);
// Create an envelope inside Colorado.
Envelope containedEnvelope = new Envelope(
new MapPoint(-11655182.595204, 4741618.772994, SpatialReferences.WebMercator),
new MapPoint(-11431488.567009, 4593570.068343, SpatialReferences.WebMercator));
// Create the graphic for an envelope inside Colorado - comprised of a polyline shape and line symbol.
_containedGraphic = new Graphic(containedEnvelope, clipGeomtriesSimpleLineSymbol);
// Add the envelope inside Colorado graphic to the graphics overlay collection.
_inputGeometriesGraphicsOverlay.Graphics.Add(_containedGraphic);
// Get the extent of all of the graphics in the graphics overlay, with a little padding, to be used as the initial zoom extent of the map.
Geometry visibleExtent = GetExtentOfGraphicsOverlay(_inputGeometriesGraphicsOverlay, 1.3, SpatialReferences.WebMercator);
// Set the initial visual extent of the map view to the extent of the graphics overlay.
newMap.InitialViewpoint = new Viewpoint(visibleExtent);
// Assign the map to the MapView.
MyMapView.Map = newMap;
}
private Geometry GetExtentOfGraphicsOverlay(GraphicsOverlay inputGraphicsOverlay, double expansionFactor, SpatialReference spatialReferenceType)
{
// Get all of the graphics contained in the graphics overlay.
GraphicCollection inputGraphicCollection = inputGraphicsOverlay.Graphics;
// Create a new envelope builder using the same spatial reference as the graphics.
EnvelopeBuilder unionEnvelopeBuilder = new EnvelopeBuilder(spatialReferenceType);
// Loop through each graphic in the graphic collection.
foreach (Graphic oneGraphic in inputGraphicCollection)
{
// Union the extent of each graphic in the envelope builder.
unionEnvelopeBuilder.UnionOf(oneGraphic.Geometry.Extent);
}
// Expand the envelope builder by the expansion factor.
unionEnvelopeBuilder.Expand(expansionFactor);
// Return the unioned extent plus the expansion factor.
return unionEnvelopeBuilder.Extent;
}
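// Note (added for clarity): GeometryEngine.Clip returns null when the clip envelope does not
// intersect the target geometry, which is why the loop below checks each result before adding a graphic.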
private void ClipButton_Click(object sender, RoutedEventArgs e)
{
try
{
// Remove the Colorado graphic from the input geometries graphics overlay collection. That way it will be easier
// to see the clipped results of the GeometryEngine.Clip operation.
_inputGeometriesGraphicsOverlay.Graphics.Remove(_coloradoGraphic);
// Loop through each graphic in the input geometries for the clip operation.
foreach (Graphic oneGraphic in _inputGeometriesGraphicsOverlay.Graphics)
{
// Perform the clip operation. The first parameter of the clip operation will always be the Colorado graphic.
// The second parameter of the clip operation will be one of the 3 different clip geometries (_outsideGraphic,
// _containedGraphic, or _intersectingGraphic).
Geometry myGeometry = GeometryEngine.Clip(_coloradoGraphic.Geometry, (Envelope)oneGraphic.Geometry);
// Only work on returned geometries that are not null.
if (myGeometry != null)
{
// Create the graphic as a result of the clip operation using the same symbology that was defined for
// the _coloradoGraphic defined in the Initialize() method previously.
Graphic clippedGraphic = new Graphic(myGeometry, _coloradoGraphic.Symbol);
// Add the clipped graphic to the clip areas graphics overlay collection.
_clipAreasGraphicsOverlay.Graphics.Add(clippedGraphic);
}
}
// Disable the button after it has been used.
ClipButton.IsEnabled = false;
}
catch (System.Exception ex)
{
// Display an error message if there is a problem performing the clip operation.
MessageBox.Show(ex.Message, "Geometry Engine Failed!");
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Linq;
using System.Text;
using System.Threading;
using System.Collections.Generic;
using ODataValidator.ValidationService;
using Microsoft.Protocols.TestTools;
namespace Microsoft.Protocols.TestSuites.Validator
{
public class ValidatorAdapter : ManagedAdapterBase, IValidatorAdapter
{
/// <summary>
/// The OData validator service instance used to create and run validation jobs.
/// </summary>
private ODataValidator.ValidationService.ODataValidator validator;
/// <summary>
/// Initialize, generate the transport
/// </summary>
/// <param name="testSite">The initialized test site</param>
public override void Initialize(ITestSite testSite)
{
base.Initialize(testSite);
testSite.DefaultProtocolDocShortName = "ODataValidator";
TripPinService.SetTestSite(testSite);
ConformanceDataService.SetTestSite(testSite);
AtomDataService.SetTestSite(testSite);
}
/// <summary>
/// Sends a validation request for the specified service URI and returns the identifiers of the created validation jobs.
/// </summary>
/// <param name="uri">The service URI to validate.</param>
/// <param name="format">The payload format to validate.</param>
/// <param name="toCrawl">Indicates whether the validator should crawl the resources linked from the URI.</param>
/// <param name="headers">Additional request headers.</param>
/// <param name="isConformance">Indicates whether the request targets conformance validation.</param>
/// <param name="isMetaData">Indicates whether the request targets metadata validation.</param>
/// <param name="levelTypes">The conformance level types to validate.</param>
/// <returns>The derivative job identifiers, with the master job first when present.</returns>
public Guid[] SendRequest(string uri, string format, string toCrawl, string headers, string isConformance = null, string isMetaData = "no", string levelTypes = null)
{
validator = new ODataValidator.ValidationService.ODataValidator();
IEnumerable<JobGroup> groups = validator.UriValidationJobs(uri, format, toCrawl, isMetaData, headers, isConformance, levelTypes);
JobGroup[] JobGroupArray = groups.ToArray<JobGroup>();
for (int i = 0; i < JobGroupArray.Length; i++)
{
if (JobGroupArray[i].MasterJobId == JobGroupArray[i].DerivativeJobId)
{
if (i != 0)
{
JobGroup temp = JobGroupArray[i];
JobGroupArray[i] = JobGroupArray[0];
JobGroupArray[0] = temp;
}
break;
}
}
var jobIDs = (from job in JobGroupArray select job.DerivativeJobId).ToArray();
if (jobIDs.Length == 0)
return new Guid[] { Guid.Empty };
validator.Dispose();
return (from job in jobIDs where job.HasValue select job.Value).ToArray<Guid>();
}
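// Illustrative sketch (not part of the original adapter): shows how SendRequest, IsJobCompleted,
// GetTestResults and ParseResults are typically chained. The URI and argument values below are
// hypothetical placeholders, not values taken from the test suite configuration.
private void ValidateServiceSketch()
{
Guid[] jobIds = SendRequest("http://example.org/odata/service/", "json", "no", string.Empty);
foreach (Guid jobId in jobIds)
{
int ruleCount;
if (IsJobCompleted(jobId, out ruleCount))
{
// Read back the per-rule results once the job reports completion.
List<TestResult> results = GetTestResults(jobId);
ParsedResult parsed = ParseResults(results);
// Inspect parsed and ruleCount as needed.
}
}
}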
/// <summary>
/// Polls the validation store until the specified job completes.
/// </summary>
/// <param name="jobId">The identifier of the validation job.</param>
/// <param name="ruleCount">When the method returns true, receives the number of rules verified by the job.</param>
/// <returns>True when the job has completed; false if a concurrency error occurred while reading the job state.</returns>
public bool IsJobCompleted(Guid jobId, out int ruleCount)
{
ruleCount = 0;
try
{
while (true)
{
using (var ctx = SuiteEntitiesUtility.GetODataValidationSuiteEntities())
{
var job = (from j in ctx.ExtValidationJobs
where j.ID == jobId
select j).FirstOrDefault();
if (job != null && job.Complete.HasValue && job.Complete == true)
{
ruleCount = job.RuleCount.Value;
return true;
}
else
{
Thread.Sleep(5000);
}
}
}
}
catch (System.Data.OptimisticConcurrencyException)
{
return false;
// error occurred while trying to mark operation as complete. This is not a terminal error for this system and
// this is on a threadpool thread so swallow the exception
}
}
/// <summary>
/// Gets the test results recorded for the specified validation job.
/// </summary>
/// <param name="jobId">The identifier of the validation job.</param>
/// <returns>The test results for the job, or an empty list if none could be read.</returns>
public List<TestResult> GetTestResults(Guid jobId)
{
List<TestResult> testResults = new List<TestResult>();
try
{
using (var ctx = SuiteEntitiesUtility.GetODataValidationSuiteEntities())
{
var results = from j in ctx.TestResults
where j.ValidationJobID == jobId
select j;
testResults = results.ToList();
}
}
catch (System.Data.OptimisticConcurrencyException)
{
// error occurred while trying to mark operation as complete. This is not a terminal error for this system and
// this is on a threadpool thread so swallow the exception
}
return testResults;
}
public ParsedResult ParseResults(List<TestResult> results)
{
ParsedResult parsedResult = new ParsedResult();
parsedResult.Parse(results);
return parsedResult;
}
public bool GetRulesCountByRequirementLevel(List<string> RuleNameList, string testResultPath)
{
if (null == RuleNameList || RuleNameList.Count == 0)
return false;
RuleNameList.Sort();
List<string> notInRuleArrayNameList = new List<string>();
// Init level count
string[] levelList = Enum.GetNames(typeof(ODataValidator.RuleEngine.RequirementLevel));
Dictionary<string, int> levelCount = new Dictionary<string, int>();
foreach (var level in levelList)
{
levelCount.Add(level, 0);
}
// Total the level count
var ruleArray = ODataValidator.RuleEngine.RuleCatalogCollection.Instance.ToArray();
foreach (var name in RuleNameList)
{
int ruleIndex = ruleArray.ToList().FindIndex(i => string.Equals(i.Name, name, StringComparison.OrdinalIgnoreCase) == true);
if (ruleIndex >= 0)
{
levelCount[Enum.GetName(typeof(ODataValidator.RuleEngine.RequirementLevel), ruleArray[ruleIndex].RequirementLevel)]++;
}
else // Cannot find this rule in ruleArray
{
notInRuleArrayNameList.Add(name);
}
}
// Write the log to the test result file
using (System.IO.StreamWriter file = new System.IO.StreamWriter(testResultPath, true))
{
file.WriteLine();
foreach (var pair in levelCount)
{
if (pair.Value > 0)
{
file.WriteLine("{0} : {1}", pair.Key, pair.Value);
}
}
if (notInRuleArrayNameList.Count > 0)
{
file.WriteLine("Not in Rule Store : {0}", notInRuleArrayNameList.Count);
foreach (var oname in notInRuleArrayNameList)
{
file.WriteLine(string.Format("\t{0}", oname));
}
}
file.Close();
return true;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Runtime.Serialization.Json
{
using System.Runtime.Serialization;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Xml;
using System.Collections;
using DataContractDictionary = System.Collections.Generic.Dictionary<System.Xml.XmlQualifiedName, DataContract>;
using System.Globalization;
using System.Reflection;
using System.Security;
public sealed class DataContractJsonSerializer : XmlObjectSerializer
{
private const char BACK_SLASH = '\\';
private const char FORWARD_SLASH = '/';
private const char HIGH_SURROGATE_START = (char)0xd800;
private const char LOW_SURROGATE_END = (char)0xdfff;
private const char MAX_CHAR = (char)0xfffe;
private const char WHITESPACE = ' ';
internal IList<Type> knownTypeList;
internal DataContractDictionary knownDataContracts;
private EmitTypeInformation _emitTypeInformation;
private ReadOnlyCollection<Type> _knownTypeCollection;
private int _maxItemsInObjectGraph;
private bool _serializeReadOnlyTypes;
private DateTimeFormat _dateTimeFormat;
private bool _useSimpleDictionaryFormat;
private DataContractJsonSerializerImpl _serializer;
private bool _ignoreExtensionDataObject;
public DataContractJsonSerializer(Type type)
{
_serializer = new DataContractJsonSerializerImpl(type);
}
public DataContractJsonSerializer(Type type, string rootName)
: this(type, rootName, null)
{
}
public DataContractJsonSerializer(Type type, XmlDictionaryString rootName)
: this(type, rootName, null)
{
}
public DataContractJsonSerializer(Type type, IEnumerable<Type> knownTypes)
{
_serializer = new DataContractJsonSerializerImpl(type, knownTypes);
}
public DataContractJsonSerializer(Type type, string rootName, IEnumerable<Type> knownTypes)
: this(type, new DataContractJsonSerializerSettings() { RootName = rootName, KnownTypes = knownTypes })
{
}
public DataContractJsonSerializer(Type type, XmlDictionaryString rootName, IEnumerable<Type> knownTypes)
{
_serializer = new DataContractJsonSerializerImpl(type, rootName, knownTypes);
}
public DataContractJsonSerializer(Type type, DataContractJsonSerializerSettings settings)
{
_serializer = new DataContractJsonSerializerImpl(type, settings);
}
public bool IgnoreExtensionDataObject
{
get { return _ignoreExtensionDataObject; }
}
public ReadOnlyCollection<Type> KnownTypes
{
get
{
if (_knownTypeCollection == null)
{
if (knownTypeList != null)
{
_knownTypeCollection = new ReadOnlyCollection<Type>(knownTypeList);
}
else
{
_knownTypeCollection = new ReadOnlyCollection<Type>(Array.Empty<Type>());
}
}
return _knownTypeCollection;
}
}
internal override DataContractDictionary KnownDataContracts
{
get
{
if (this.knownDataContracts == null && this.knownTypeList != null)
{
// This assignment may be performed concurrently and thus is a race condition.
// It's safe, however, because at worst a new (and identical) dictionary of
// data contracts will be created and re-assigned to this field. Introduction
// of a lock here could lead to deadlocks.
this.knownDataContracts = XmlObjectSerializerContext.GetDataContractsForKnownTypes(this.knownTypeList);
}
return this.knownDataContracts;
}
}
public int MaxItemsInObjectGraph
{
get { return _maxItemsInObjectGraph; }
}
internal bool AlwaysEmitTypeInformation
{
get
{
return _emitTypeInformation == EmitTypeInformation.Always;
}
}
public DateTimeFormat DateTimeFormat
{
get
{
return _dateTimeFormat;
}
}
public EmitTypeInformation EmitTypeInformation
{
get
{
return _emitTypeInformation;
}
}
public bool SerializeReadOnlyTypes
{
get
{
return _serializeReadOnlyTypes;
}
}
public bool UseSimpleDictionaryFormat
{
get
{
return _useSimpleDictionaryFormat;
}
}
internal static void CheckIfTypeIsReference(DataContract dataContract)
{
if (dataContract.IsReference)
{
throw System.ServiceModel.DiagnosticUtility.ExceptionUtility.ThrowHelperError(
XmlObjectSerializer.CreateSerializationException(SR.Format(
SR.JsonUnsupportedForIsReference,
DataContract.GetClrTypeFullName(dataContract.UnderlyingType),
dataContract.IsReference)));
}
}
internal static DataContract GetDataContract(DataContract declaredTypeContract, Type declaredType, Type objectType)
{
DataContract contract = DataContractSerializer.GetDataContract(declaredTypeContract, declaredType, objectType);
CheckIfTypeIsReference(contract);
return contract;
}
public override void WriteObject(Stream stream, object graph)
{
_serializer.WriteObject(stream, graph);
}
public override void WriteObject(XmlWriter writer, object graph)
{
_serializer.WriteObject(writer, graph);
}
public override void WriteObject(XmlDictionaryWriter writer, object graph)
{
_serializer.WriteObject(writer, graph);
}
public override object ReadObject(Stream stream)
{
return _serializer.ReadObject(stream);
}
public override object ReadObject(XmlReader reader)
{
return _serializer.ReadObject(reader);
}
public override object ReadObject(XmlReader reader, bool verifyObjectName)
{
return _serializer.ReadObject(reader, verifyObjectName);
}
public override object ReadObject(XmlDictionaryReader reader)
{
return _serializer.ReadObject(reader);
}
private List<Type> GetKnownTypesFromContext(XmlObjectSerializerContext context, IList<Type> serializerKnownTypeList)
{
List<Type> knownTypesList = new List<Type>();
if (context != null)
{
List<XmlQualifiedName> stableNames = new List<XmlQualifiedName>();
Dictionary<XmlQualifiedName, DataContract>[] entries = context.scopedKnownTypes.dataContractDictionaries;
if (entries != null)
{
for (int i = 0; i < entries.Length; i++)
{
Dictionary<XmlQualifiedName, DataContract> entry = entries[i];
if (entry != null)
{
foreach (KeyValuePair<XmlQualifiedName, DataContract> pair in entry)
{
if (!stableNames.Contains(pair.Key))
{
stableNames.Add(pair.Key);
knownTypesList.Add(pair.Value.UnderlyingType);
}
}
}
}
}
if (serializerKnownTypeList != null)
{
knownTypesList.AddRange(serializerKnownTypeList);
}
}
return knownTypesList;
}
internal static void InvokeOnSerializing(Object value, DataContract contract, XmlObjectSerializerWriteContextComplexJson context)
{
if (contract is ClassDataContract)
{
ClassDataContract classContract = contract as ClassDataContract;
if (classContract.BaseContract != null)
InvokeOnSerializing(value, classContract.BaseContract, context);
if (classContract.OnSerializing != null)
{
bool memberAccessFlag = classContract.RequiresMemberAccessForWrite(null);
try
{
classContract.OnSerializing.Invoke(value, new object[] { context.GetStreamingContext() });
}
catch (SecurityException securityException)
{
if (memberAccessFlag)
{
classContract.RequiresMemberAccessForWrite(securityException);
}
else
{
throw;
}
}
catch (TargetInvocationException targetInvocationException)
{
if (targetInvocationException.InnerException == null)
throw;
// We catch the TargetInvocationException here and throw only the inner exception;
// this is needed to have a consistent exception story in all serializers.
throw targetInvocationException.InnerException;
}
}
}
}
internal static void InvokeOnSerialized(Object value, DataContract contract, XmlObjectSerializerWriteContextComplexJson context)
{
if (contract is ClassDataContract)
{
ClassDataContract classContract = contract as ClassDataContract;
if (classContract.BaseContract != null)
InvokeOnSerialized(value, classContract.BaseContract, context);
if (classContract.OnSerialized != null)
{
bool memberAccessFlag = classContract.RequiresMemberAccessForWrite(null);
try
{
classContract.OnSerialized.Invoke(value, new object[] { context.GetStreamingContext() });
}
catch (SecurityException securityException)
{
if (memberAccessFlag)
{
classContract.RequiresMemberAccessForWrite(securityException);
}
else
{
throw;
}
}
catch (TargetInvocationException targetInvocationException)
{
if (targetInvocationException.InnerException == null)
throw;
// We catch the TargetInvocationException here and throw only the inner exception;
// this is needed to have a consistent exception story in all serializers.
throw targetInvocationException.InnerException;
}
}
}
}
internal static void InvokeOnDeserializing(Object value, DataContract contract, XmlObjectSerializerReadContextComplexJson context)
{
if (contract is ClassDataContract)
{
ClassDataContract classContract = contract as ClassDataContract;
if (classContract.BaseContract != null)
InvokeOnDeserializing(value, classContract.BaseContract, context);
if (classContract.OnDeserializing != null)
{
bool memberAccessFlag = classContract.RequiresMemberAccessForRead(null);
try
{
classContract.OnDeserializing.Invoke(value, new object[] { context.GetStreamingContext() });
}
catch (SecurityException securityException)
{
if (memberAccessFlag)
{
classContract.RequiresMemberAccessForRead(securityException);
}
else
{
throw;
}
}
catch (TargetInvocationException targetInvocationException)
{
if (targetInvocationException.InnerException == null)
throw;
// We catch the TargetInvocationException here and throw only the inner exception;
// this is needed to have a consistent exception story in all serializers.
throw targetInvocationException.InnerException;
}
}
}
}
internal static void InvokeOnDeserialized(object value, DataContract contract, XmlObjectSerializerReadContextComplexJson context)
{
if (contract is ClassDataContract)
{
ClassDataContract classContract = contract as ClassDataContract;
if (classContract.BaseContract != null)
InvokeOnDeserialized(value, classContract.BaseContract, context);
if (classContract.OnDeserialized != null)
{
bool memberAccessFlag = classContract.RequiresMemberAccessForRead(null);
try
{
classContract.OnDeserialized.Invoke(value, new object[] { context.GetStreamingContext() });
}
catch (SecurityException securityException)
{
if (memberAccessFlag)
{
classContract.RequiresMemberAccessForRead(securityException);
}
else
{
throw;
}
}
catch (TargetInvocationException targetInvocationException)
{
if (targetInvocationException.InnerException == null)
throw;
// We catch the TargetInvocationException here and throw only the inner exception;
// this is needed to have a consistent exception story in all serializers.
throw targetInvocationException.InnerException;
}
}
}
}
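// Note (added for clarity): the check below flags the forward slash, the JSON quote character, the
// backslash, control characters below the space character, and characters in the surrogate range or
// at and above U+FFFE, since such characters must be escaped or mapped when emitted as JSON names.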
internal static bool CharacterNeedsEscaping(char ch)
{
return (ch == FORWARD_SLASH || ch == JsonGlobals.QuoteChar || ch < WHITESPACE || ch == BACK_SLASH
|| (ch >= HIGH_SURROGATE_START && (ch <= LOW_SURROGATE_END || ch >= MAX_CHAR)));
}
internal static bool CheckIfJsonNameRequiresMapping(string jsonName)
{
if (jsonName != null)
{
if (!DataContract.IsValidNCName(jsonName))
{
return true;
}
for (int i = 0; i < jsonName.Length; i++)
{
if (CharacterNeedsEscaping(jsonName[i]))
{
return true;
}
}
}
return false;
}
internal static bool CheckIfJsonNameRequiresMapping(XmlDictionaryString jsonName)
{
return (jsonName == null) ? false : CheckIfJsonNameRequiresMapping(jsonName.Value);
}
internal static string ConvertXmlNameToJsonName(string xmlName)
{
return XmlConvert.DecodeName(xmlName);
}
internal static XmlDictionaryString ConvertXmlNameToJsonName(XmlDictionaryString xmlName)
{
return (xmlName == null) ? null : new XmlDictionary().Add(ConvertXmlNameToJsonName(xmlName.Value));
}
internal static object ReadJsonValue(DataContract contract, XmlReaderDelegator reader, XmlObjectSerializerReadContextComplexJson context)
{
return JsonDataContract.GetJsonDataContract(contract).ReadJsonValue(reader, context);
}
internal static void WriteJsonValue(JsonDataContract contract, XmlWriterDelegator writer, object graph, XmlObjectSerializerWriteContextComplexJson context, RuntimeTypeHandle declaredTypeHandle)
{
contract.WriteJsonValue(writer, graph, context, declaredTypeHandle);
}
public override void WriteStartObject(XmlWriter writer, object graph)
{
_serializer.WriteStartObject(writer, graph);
}
public override void WriteStartObject(XmlDictionaryWriter writer, object graph)
{
_serializer.WriteStartObject(writer, graph);
}
public override void WriteObjectContent(XmlWriter writer, object graph)
{
_serializer.WriteObjectContent(writer, graph);
}
public override void WriteObjectContent(XmlDictionaryWriter writer, object graph)
{
_serializer.WriteObjectContent(writer, graph);
}
public override void WriteEndObject(XmlWriter writer)
{
_serializer.WriteEndObject(writer);
}
public override void WriteEndObject(XmlDictionaryWriter writer)
{
_serializer.WriteEndObject(writer);
}
public override object ReadObject(XmlDictionaryReader reader, bool verifyObjectName)
{
return _serializer.ReadObject(reader, verifyObjectName);
}
public override bool IsStartObject(XmlReader reader)
{
return _serializer.IsStartObject(reader);
}
public override bool IsStartObject(XmlDictionaryReader reader)
{
return _serializer.IsStartObject(reader);
}
}
internal sealed class DataContractJsonSerializerImpl : XmlObjectSerializer
{
internal IList<Type> knownTypeList;
internal DataContractDictionary knownDataContracts;
private EmitTypeInformation _emitTypeInformation;
private bool _ignoreExtensionDataObject;
private ReadOnlyCollection<Type> _knownTypeCollection;
private int _maxItemsInObjectGraph;
private DataContract _rootContract; // post-surrogate
private XmlDictionaryString _rootName;
private bool _rootNameRequiresMapping;
private Type _rootType;
private bool _serializeReadOnlyTypes;
private DateTimeFormat _dateTimeFormat;
private bool _useSimpleDictionaryFormat;
public DataContractJsonSerializerImpl(Type type)
: this(type, (IEnumerable<Type>)null)
{
}
public DataContractJsonSerializerImpl(Type type, IEnumerable<Type> knownTypes)
: this(type, null, knownTypes, int.MaxValue, false, false)
{
}
public DataContractJsonSerializerImpl(Type type, XmlDictionaryString rootName, IEnumerable<Type> knownTypes)
: this(type, rootName, knownTypes, int.MaxValue, false, false)
{
}
internal DataContractJsonSerializerImpl(Type type,
XmlDictionaryString rootName,
IEnumerable<Type> knownTypes,
int maxItemsInObjectGraph,
bool ignoreExtensionDataObject,
bool alwaysEmitTypeInformation)
{
EmitTypeInformation emitTypeInformation = alwaysEmitTypeInformation ? EmitTypeInformation.Always : EmitTypeInformation.AsNeeded;
Initialize(type, rootName, knownTypes, maxItemsInObjectGraph, ignoreExtensionDataObject, emitTypeInformation, false, null, false);
}
public DataContractJsonSerializerImpl(Type type, DataContractJsonSerializerSettings settings)
{
if (settings == null)
{
settings = new DataContractJsonSerializerSettings();
}
XmlDictionaryString rootName = (settings.RootName == null) ? null : new XmlDictionary(1).Add(settings.RootName);
Initialize(type, rootName, settings.KnownTypes, settings.MaxItemsInObjectGraph, settings.IgnoreExtensionDataObject,
settings.EmitTypeInformation, settings.SerializeReadOnlyTypes, settings.DateTimeFormat, settings.UseSimpleDictionaryFormat);
}
public ReadOnlyCollection<Type> KnownTypes
{
get
{
if (_knownTypeCollection == null)
{
if (knownTypeList != null)
{
_knownTypeCollection = new ReadOnlyCollection<Type>(knownTypeList);
}
else
{
_knownTypeCollection = new ReadOnlyCollection<Type>(Array.Empty<Type>());
}
}
return _knownTypeCollection;
}
}
internal override DataContractDictionary KnownDataContracts
{
get
{
if (this.knownDataContracts == null && this.knownTypeList != null)
{
// This assignment may be performed concurrently and thus is a race condition.
// It's safe, however, because at worst a new (and identical) dictionary of
// data contracts will be created and re-assigned to this field. Introduction
// of a lock here could lead to deadlocks.
this.knownDataContracts = XmlObjectSerializerContext.GetDataContractsForKnownTypes(this.knownTypeList);
}
return this.knownDataContracts;
}
}
internal int MaxItemsInObjectGraph
{
get { return _maxItemsInObjectGraph; }
}
internal bool AlwaysEmitTypeInformation
{
get
{
return _emitTypeInformation == EmitTypeInformation.Always;
}
}
public EmitTypeInformation EmitTypeInformation
{
get
{
return _emitTypeInformation;
}
}
public bool SerializeReadOnlyTypes
{
get
{
return _serializeReadOnlyTypes;
}
}
public DateTimeFormat DateTimeFormat
{
get
{
return _dateTimeFormat;
}
}
public bool UseSimpleDictionaryFormat
{
get
{
return _useSimpleDictionaryFormat;
}
}
private DataContract RootContract
{
get
{
if (_rootContract == null)
{
_rootContract = DataContract.GetDataContract(_rootType);
CheckIfTypeIsReference(_rootContract);
}
return _rootContract;
}
}
private XmlDictionaryString RootName
{
get
{
return _rootName ?? JsonGlobals.rootDictionaryString;
}
}
public override bool IsStartObject(XmlReader reader)
{
// No need to pass in DateTimeFormat to JsonReaderDelegator: no DateTimes will be read in IsStartObject
return IsStartObjectHandleExceptions(new JsonReaderDelegator(reader));
}
public override bool IsStartObject(XmlDictionaryReader reader)
{
// No need to pass in DateTimeFormat to JsonReaderDelegator: no DateTimes will be read in IsStartObject
return IsStartObjectHandleExceptions(new JsonReaderDelegator(reader));
}
public override object ReadObject(Stream stream)
{
CheckNull(stream, nameof(stream));
return ReadObject(JsonReaderWriterFactory.CreateJsonReader(stream, XmlDictionaryReaderQuotas.Max));
}
public override object ReadObject(XmlReader reader)
{
return ReadObjectHandleExceptions(new JsonReaderDelegator(reader, this.DateTimeFormat), true);
}
public override object ReadObject(XmlReader reader, bool verifyObjectName)
{
return ReadObjectHandleExceptions(new JsonReaderDelegator(reader, this.DateTimeFormat), verifyObjectName);
}
public override object ReadObject(XmlDictionaryReader reader)
{
return ReadObjectHandleExceptions(new JsonReaderDelegator(reader, this.DateTimeFormat), true); // verifyObjectName
}
public override object ReadObject(XmlDictionaryReader reader, bool verifyObjectName)
{
return ReadObjectHandleExceptions(new JsonReaderDelegator(reader, this.DateTimeFormat), verifyObjectName);
}
public override void WriteEndObject(XmlWriter writer)
{
// No need to pass in DateTimeFormat to JsonWriterDelegator: no DateTimes will be written in end object
WriteEndObjectHandleExceptions(new JsonWriterDelegator(writer));
}
public override void WriteEndObject(XmlDictionaryWriter writer)
{
// No need to pass in DateTimeFormat to JsonWriterDelegator: no DateTimes will be written in end object
WriteEndObjectHandleExceptions(new JsonWriterDelegator(writer));
}
public override void WriteObject(Stream stream, object graph)
{
CheckNull(stream, nameof(stream));
XmlDictionaryWriter jsonWriter = JsonReaderWriterFactory.CreateJsonWriter(stream, Encoding.UTF8, false); // ownsStream
WriteObject(jsonWriter, graph);
jsonWriter.Flush();
}
public override void WriteObject(XmlWriter writer, object graph)
{
WriteObjectHandleExceptions(new JsonWriterDelegator(writer, this.DateTimeFormat), graph);
}
public override void WriteObject(XmlDictionaryWriter writer, object graph)
{
WriteObjectHandleExceptions(new JsonWriterDelegator(writer, this.DateTimeFormat), graph);
}
public override void WriteObjectContent(XmlWriter writer, object graph)
{
WriteObjectContentHandleExceptions(new JsonWriterDelegator(writer, this.DateTimeFormat), graph);
}
public override void WriteObjectContent(XmlDictionaryWriter writer, object graph)
{
WriteObjectContentHandleExceptions(new JsonWriterDelegator(writer, this.DateTimeFormat), graph);
}
public override void WriteStartObject(XmlWriter writer, object graph)
{
// No need to pass in DateTimeFormat to JsonWriterDelegator: no DateTimes will be written in start object
WriteStartObjectHandleExceptions(new JsonWriterDelegator(writer), graph);
}
public override void WriteStartObject(XmlDictionaryWriter writer, object graph)
{
// No need to pass in DateTimeFormat to JsonWriterDelegator: no DateTimes will be written in start object
WriteStartObjectHandleExceptions(new JsonWriterDelegator(writer), graph);
}
internal static bool CheckIfJsonNameRequiresMapping(string jsonName)
{
if (jsonName != null)
{
if (!DataContract.IsValidNCName(jsonName))
{
return true;
}
for (int i = 0; i < jsonName.Length; i++)
{
if (XmlJsonWriter.CharacterNeedsEscaping(jsonName[i]))
{
return true;
}
}
}
return false;
}
internal static bool CheckIfJsonNameRequiresMapping(XmlDictionaryString jsonName)
{
return (jsonName == null) ? false : CheckIfJsonNameRequiresMapping(jsonName.Value);
}
internal static bool CheckIfXmlNameRequiresMapping(string xmlName)
{
return (xmlName == null) ? false : CheckIfJsonNameRequiresMapping(ConvertXmlNameToJsonName(xmlName));
}
internal static bool CheckIfXmlNameRequiresMapping(XmlDictionaryString xmlName)
{
return (xmlName == null) ? false : CheckIfXmlNameRequiresMapping(xmlName.Value);
}
internal static string ConvertXmlNameToJsonName(string xmlName)
{
return XmlConvert.DecodeName(xmlName);
}
internal static XmlDictionaryString ConvertXmlNameToJsonName(XmlDictionaryString xmlName)
{
return (xmlName == null) ? null : new XmlDictionary().Add(ConvertXmlNameToJsonName(xmlName.Value));
}
internal static bool IsJsonLocalName(XmlReaderDelegator reader, string elementName)
{
string name;
if (XmlObjectSerializerReadContextComplexJson.TryGetJsonLocalName(reader, out name))
{
return (elementName == name);
}
return false;
}
internal static object ReadJsonValue(DataContract contract, XmlReaderDelegator reader, XmlObjectSerializerReadContextComplexJson context)
{
return JsonDataContract.GetJsonDataContract(contract).ReadJsonValue(reader, context);
}
internal static void WriteJsonNull(XmlWriterDelegator writer)
{
writer.WriteAttributeString(null, JsonGlobals.typeString, null, JsonGlobals.nullString); // prefix // namespace
}
internal static void WriteJsonValue(JsonDataContract contract, XmlWriterDelegator writer, object graph, XmlObjectSerializerWriteContextComplexJson context, RuntimeTypeHandle declaredTypeHandle)
{
contract.WriteJsonValue(writer, graph, context, declaredTypeHandle);
}
internal override Type GetDeserializeType()
{
return _rootType;
}
internal override Type GetSerializeType(object graph)
{
return (graph == null) ? _rootType : graph.GetType();
}
internal override bool InternalIsStartObject(XmlReaderDelegator reader)
{
if (IsRootElement(reader, RootContract, RootName, XmlDictionaryString.Empty))
{
return true;
}
return IsJsonLocalName(reader, RootName.Value);
}
internal override object InternalReadObject(XmlReaderDelegator xmlReader, bool verifyObjectName)
{
if (MaxItemsInObjectGraph == 0)
{
throw XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ExceededMaxItemsQuota, MaxItemsInObjectGraph));
}
if (verifyObjectName)
{
if (!InternalIsStartObject(xmlReader))
{
throw XmlObjectSerializer.CreateSerializationExceptionWithReaderDetails(SR.Format(SR.ExpectingElement, XmlDictionaryString.Empty, RootName), xmlReader);
}
}
else if (!IsStartElement(xmlReader))
{
throw XmlObjectSerializer.CreateSerializationExceptionWithReaderDetails(SR.Format(SR.ExpectingElementAtDeserialize, XmlNodeType.Element), xmlReader);
}
DataContract contract = RootContract;
if (contract.IsPrimitive && object.ReferenceEquals(contract.UnderlyingType, _rootType))// handle Nullable<T> differently
{
return DataContractJsonSerializerImpl.ReadJsonValue(contract, xmlReader, null);
}
XmlObjectSerializerReadContextComplexJson context = XmlObjectSerializerReadContextComplexJson.CreateContext(this, contract);
return context.InternalDeserialize(xmlReader, _rootType, contract, null, null);
}
internal override void InternalWriteEndObject(XmlWriterDelegator writer)
{
writer.WriteEndElement();
}
internal override void InternalWriteObject(XmlWriterDelegator writer, object graph)
{
InternalWriteStartObject(writer, graph);
InternalWriteObjectContent(writer, graph);
InternalWriteEndObject(writer);
}
internal override void InternalWriteObjectContent(XmlWriterDelegator writer, object graph)
{
if (MaxItemsInObjectGraph == 0)
{
throw XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ExceededMaxItemsQuota, MaxItemsInObjectGraph));
}
DataContract contract = RootContract;
Type declaredType = contract.UnderlyingType;
Type graphType = (graph == null) ? declaredType : graph.GetType();
//if (dataContractSurrogate != null)
//{
// graph = DataContractSerializer.SurrogateToDataContractType(dataContractSurrogate, graph, declaredType, ref graphType);
//}
if (graph == null)
{
WriteJsonNull(writer);
}
else
{
if (declaredType == graphType)
{
if (contract.CanContainReferences)
{
XmlObjectSerializerWriteContextComplexJson context = XmlObjectSerializerWriteContextComplexJson.CreateContext(this, contract);
context.OnHandleReference(writer, graph, true); // canContainReferences
context.SerializeWithoutXsiType(contract, writer, graph, declaredType.TypeHandle);
}
else
{
DataContractJsonSerializerImpl.WriteJsonValue(JsonDataContract.GetJsonDataContract(contract), writer, graph, null, declaredType.TypeHandle); // XmlObjectSerializerWriteContextComplexJson
}
}
else
{
XmlObjectSerializerWriteContextComplexJson context = XmlObjectSerializerWriteContextComplexJson.CreateContext(this, RootContract);
contract = DataContractJsonSerializerImpl.GetDataContract(contract, declaredType, graphType);
if (contract.CanContainReferences)
{
context.OnHandleReference(writer, graph, true); // canContainCyclicReference
context.SerializeWithXsiTypeAtTopLevel(contract, writer, graph, declaredType.TypeHandle, graphType);
}
else
{
context.SerializeWithoutXsiType(contract, writer, graph, declaredType.TypeHandle);
}
}
}
}
internal override void InternalWriteStartObject(XmlWriterDelegator writer, object graph)
{
if (_rootNameRequiresMapping)
{
writer.WriteStartElement("a", JsonGlobals.itemString, JsonGlobals.itemString);
writer.WriteAttributeString(null, JsonGlobals.itemString, null, RootName.Value);
}
else
{
writer.WriteStartElement(RootName, XmlDictionaryString.Empty);
}
}
private void AddCollectionItemTypeToKnownTypes(Type knownType)
{
Type itemType;
Type typeToCheck = knownType;
while (CollectionDataContract.IsCollection(typeToCheck, out itemType))
{
if (itemType.IsGenericType && (itemType.GetGenericTypeDefinition() == Globals.TypeOfKeyValue))
{
itemType = Globals.TypeOfKeyValuePair.MakeGenericType(itemType.GenericTypeArguments);
}
this.knownTypeList.Add(itemType);
typeToCheck = itemType;
}
}
private void Initialize(Type type,
IEnumerable<Type> knownTypes,
int maxItemsInObjectGraph,
bool ignoreExtensionDataObject,
EmitTypeInformation emitTypeInformation,
bool serializeReadOnlyTypes,
DateTimeFormat dateTimeFormat,
bool useSimpleDictionaryFormat)
{
CheckNull(type, nameof(type));
_rootType = type;
if (knownTypes != null)
{
this.knownTypeList = new List<Type>();
foreach (Type knownType in knownTypes)
{
this.knownTypeList.Add(knownType);
if (knownType != null)
{
AddCollectionItemTypeToKnownTypes(knownType);
}
}
}
if (maxItemsInObjectGraph < 0)
{
throw new ArgumentOutOfRangeException(nameof(maxItemsInObjectGraph), SR.ValueMustBeNonNegative);
}
_maxItemsInObjectGraph = maxItemsInObjectGraph;
_ignoreExtensionDataObject = ignoreExtensionDataObject;
_emitTypeInformation = emitTypeInformation;
_serializeReadOnlyTypes = serializeReadOnlyTypes;
_dateTimeFormat = dateTimeFormat;
_useSimpleDictionaryFormat = useSimpleDictionaryFormat;
}
private void Initialize(Type type,
XmlDictionaryString rootName,
IEnumerable<Type> knownTypes,
int maxItemsInObjectGraph,
bool ignoreExtensionDataObject,
EmitTypeInformation emitTypeInformation,
bool serializeReadOnlyTypes,
DateTimeFormat dateTimeFormat,
bool useSimpleDictionaryFormat)
{
Initialize(type, knownTypes, maxItemsInObjectGraph, ignoreExtensionDataObject, emitTypeInformation, serializeReadOnlyTypes, dateTimeFormat, useSimpleDictionaryFormat);
_rootName = ConvertXmlNameToJsonName(rootName);
_rootNameRequiresMapping = CheckIfJsonNameRequiresMapping(_rootName);
}
internal static void CheckIfTypeIsReference(DataContract dataContract)
{
if (dataContract.IsReference)
{
throw XmlObjectSerializer.CreateSerializationException(SR.Format(SR.JsonUnsupportedForIsReference, DataContract.GetClrTypeFullName(dataContract.UnderlyingType), dataContract.IsReference));
}
}
internal static DataContract GetDataContract(DataContract declaredTypeContract, Type declaredType, Type objectType)
{
DataContract contract = DataContractSerializer.GetDataContract(declaredTypeContract, declaredType, objectType);
CheckIfTypeIsReference(contract);
return contract;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.IO;
using System.Windows.Forms;
using System.Xml.Serialization;
using CslaGenerator.Controls;
using CslaGenerator.Data;
using CslaGenerator.Metadata;
using CslaGenerator.Util;
using CslaGenerator.Util.PropertyBags;
using DBSchemaInfo.Base;
namespace CslaGenerator
{
public class GeneratorController : IDisposable
{
#region Private Fields
private string[] _commandlineArgs;
private CslaGeneratorUnit _currentUnit = null;
private CslaObjectInfo _currentCslaObject = null;
private AssociativeEntity _currentAssociativeEntitiy = null;
private ProjectProperties _currentPropertiesTab = null;
private string _currentFilePath = string.Empty;
private CSLAgen _frmGenerator = null;
private static ICatalog _catalog = null;
private static GeneratorController _current;
private PropertyContext _propertyContext = new PropertyContext();
private DbSchemaPanel _dbSchemaPanel = null;
public bool IsLoading = false;
#endregion
#region Constructors/Dispose
public GeneratorController()
{
Init();
_current = this;
GetConfig();
}
private void Init()
{
_frmGenerator = new CSLAgen(this);
_frmGenerator.ProjectPanel.SelectedItemsChanged += CslaObjectList_SelectedItemsChanged;
_frmGenerator.ProjectPanel.LastItemRemoved += delegate { _currentCslaObject = null; };
_frmGenerator.ObjectRelationsBuilder.SelectedItemsChanged += AssociativeEntitiesList_SelectedItemsChanged;
_frmGenerator.Show();
}
public void Dispose()
{
}
/// <summary>
/// Processes command line args passed to CSLA Gen. Called after the generator is created.
/// </summary>
private void ProcessCommandLineArgs()
{
if (this.CommandLineArgs.Length > 0)
{
string filename = this.CommandLineArgs[0];
if (File.Exists(filename))
{
// request that the UI load the project, since it keeps track
// of *additional* state (isNew) that this class is unaware of.
_frmGenerator.OpenProjectFile(filename);
}
}
}
#endregion
#region Main (application entry point)
/// <summary>
/// The main entry point for the application.
/// </summary>
/// <param name="args">
/// Command line arguments. First arg can be a filename to load.
/// </param>
[STAThread]
static void Main(string[] args)
{
GeneratorController controller = new GeneratorController();
controller.GeneratorForm.Closing += new CancelEventHandler(controller.GeneratorForm_Closing);
controller.CommandLineArgs = args;
// Process the command line args here so we have a UI; also, we cannot process them in Init without
// modifying more code to take args[].
controller.ProcessCommandLineArgs();
Application.Run();
}
#endregion
#region Public Properties
public CslaGeneratorUnit CurrentUnit
{
get { return _currentUnit; }
private set
{
_frmGenerator.ObjectRelationsBuilderDockPanel.Show(_frmGenerator.DockPanel);
if (_currentUnit != null)
{
if (_currentPropertiesTab != null && !_currentPropertiesTab.IsDisposed)
{
if (_currentPropertiesTab.Visible)
_currentPropertiesTab.Close();
_currentPropertiesTab.Dispose();
}
}
_currentUnit = value;
_currentPropertiesTab = new ProjectProperties();
_currentPropertiesTab.LoadInfo();
_currentPropertiesTab.Show(_frmGenerator.DockPanel);
}
}
internal ProjectProperties ProjectPropertiesTab
{
get
{
return _currentPropertiesTab;
}
}
public string[] CommandLineArgs
{
get { return _commandlineArgs; }
set { _commandlineArgs = value; }
}
internal ProjectProperties CurrentPropertiesTab
{
get
{
if (_currentPropertiesTab != null)
if (_currentPropertiesTab.IsDisposed)
{
_currentPropertiesTab = new ProjectProperties();
_currentPropertiesTab.LoadInfo();
}
return _currentPropertiesTab;
}
}
public CSLAgen GeneratorForm
{
get { return _frmGenerator; }
set { _frmGenerator = value; }
}
public string CurrentFilePath
{
get { return _currentFilePath; }
set { _currentFilePath = value; }
}
public string TemplatesDirectory { get; set; }
#endregion
#region Internal Properties
internal static ICatalog Catalog
{
get { return _catalog; }
set { _catalog = value; }
}
internal static GeneratorController Current
{
get { return _current; }
}
#endregion
#region Public Methods
public void Connect()
{
ConnectionForm frmConn = new ConnectionForm();
DialogResult result = frmConn.ShowDialog();
if (result == DialogResult.OK)
{
Cursor.Current = Cursors.WaitCursor;
BuildSchemaTree(ConnectionFactory.ConnectionString);
Cursor.Current = Cursors.Default;
}
}
public void Load(string fileName)
{
IsLoading = true;
FileStream fs = null;
try
{
Cursor.Current = Cursors.WaitCursor;
fs = File.Open(fileName, FileMode.Open);
XmlSerializer s = new XmlSerializer(typeof(CslaGeneratorUnit));
CurrentUnit = (CslaGeneratorUnit)s.Deserialize(fs);
_currentUnit.ResetParent();
_currentCslaObject = null;
_currentAssociativeEntitiy = null;
_currentFilePath = GetFilePath(fileName);
BindControls();
ConnectionFactory.ConnectionString = _currentUnit.ConnectionString;
// check if this is a valid connection, else let the user enter new connection info
SqlConnection cn = null;
try
{
cn = ConnectionFactory.NewConnection;
cn.Open();
BuildSchemaTree(_currentUnit.ConnectionString);
}
catch //(SqlException e)
{
// call connect function which will allow user to enter new info
Connect();
}
finally
{
if (cn != null && cn.State == ConnectionState.Open)
{
cn.Close();
}
}
_currentUnit.CslaObjects.ListChanged += CslaObjects_ListChanged;
foreach (CslaObjectInfo info in _currentUnit.CslaObjects)
{
info.InheritedType.Parent = info;
}
if (_currentUnit.CslaObjects.Count > 0)
{
if (_frmGenerator.ProjectPanel.ListObjects.Items.Count > 0)
{
_currentCslaObject = (CslaObjectInfo)_frmGenerator.ProjectPanel.ListObjects.Items[0];
// _frmGenerator.PropertyGrid.SelectedObject = new PropertyBag(_currentCslaObject, _propertyContext);
}
else
{
_currentCslaObject = null;
// _frmGenerator.PropertyGrid.SelectedObject = null;
}
if (_dbSchemaPanel != null)
_dbSchemaPanel.CslaObjectInfo = _currentCslaObject;
}
else
{
_currentCslaObject = null;
// _frmGenerator.PropertyGrid.SelectedObject = null;
if (_dbSchemaPanel != null)
_dbSchemaPanel.CslaObjectInfo = null;
}
_frmGenerator.ProjectPanel.ApplyFiltersPresenter();
_currentUnit.AssociativeEntities.ListChanged += AssociativeEntities_ListChanged;
/*if (_currentUnit.AssociativeEntities.Count > 0)
{
_currentAssociativeEntitiy = _currentUnit.AssociativeEntities[0];
}
else
{
_currentAssociativeEntitiy = null;
_frmGenerator.ObjectRelationsBuilder.PropertyGrid1.SelectedObject = null;
_frmGenerator.ObjectRelationsBuilder.PropertyGrid2.SelectedObject = null;
_frmGenerator.ObjectRelationsBuilder.PropertyGrid3.SelectedObject = null;
}*/
}
catch (Exception e)
{
MessageBox.Show(_frmGenerator, @"An error occurred while trying to load: " + Environment.NewLine + e.Message + Environment.NewLine + e.StackTrace, "Loading Error");
}
finally
{
Cursor.Current = Cursors.Default;
fs.Close();
IsLoading = false;
}
}
private void CslaObjects_ListChanged(object sender, ListChangedEventArgs e)
{
if (e.ListChangedType == ListChangedType.ItemChanged)
{
if (e.PropertyDescriptor.Name == "ObjectType" && _frmGenerator.ProjectPanel.FilterTypeIsActive)
ReloadPropertyGrid();
}
}
private void AssociativeEntities_ListChanged(object sender, ListChangedEventArgs e)
{
if (e.ListChangedType == ListChangedType.ItemChanged)
{
if (e.PropertyDescriptor.Name == "ObjectName" || e.PropertyDescriptor.Name == "RelationType")
ReloadBuilderPropertyGrid();
}
}
public void NewCslaUnit()
{
CurrentUnit = new CslaGeneratorUnit();
_currentFilePath = Path.GetTempPath() + @"\" + Guid.NewGuid().ToString();
_currentCslaObject = null;
_currentUnit.ConnectionString = ConnectionFactory.ConnectionString;
BindControls();
EnableButtons();
_frmGenerator.PropertyGrid.SelectedObject = null;
}
public void Save(string fileName)
{
if (!_frmGenerator.ApplyProjectProperties())
return;
FileStream fs = null;
string tempFile = Path.GetTempPath() + Guid.NewGuid().ToString() + ".cslagenerator";
bool success = false;
try
{
Cursor.Current = Cursors.WaitCursor;
fs = File.Open(tempFile, FileMode.Create);
XmlSerializer s = new XmlSerializer(typeof(CslaGeneratorUnit));
s.Serialize(fs, _currentUnit);
success = true;
}
catch (Exception e)
{
MessageBox.Show(_frmGenerator, @"An error occurred while trying to save: " + Environment.NewLine + e.Message, "Save Error");
}
finally
{
Cursor.Current = Cursors.Default;
fs.Close();
}
if (success)
{
File.Delete(fileName);
File.Move(tempFile, fileName);
_currentFilePath = GetFilePath(fileName);
}
}
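// Note (added for clarity): RetrieveFilename below walks backwards to the last backslash and is
// effectively equivalent to Path.GetFileName for backslash-separated paths; it returns an empty
// string when the input contains no backslash.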
public string RetrieveFilename(string fileName)
{
int n = fileName.Length - 1;
int x = 0;
while (n >= 0)
{
x = x + 1;
if (fileName.Substring(n, 1) == @"\")
{
return fileName.Substring(n + 1, x - 1);
}
n = n - 1;
}
return "";
}
#endregion
#region Private Methods
private string GetFilePath(string fileName)
{
System.IO.FileInfo fi = new System.IO.FileInfo(fileName);
return fi.Directory.FullName;
}
private void BindControls()
{
if (_currentUnit != null)
{
_frmGenerator.ProjectNameTextBox.DataBindings.Clear();
_frmGenerator.ProjectNameTextBox.DataBindings.Add("Text", _currentUnit, "ProjectName");
_frmGenerator.TargetDirectoryTextBox.DataBindings.Clear();
_frmGenerator.TargetDirectoryTextBox.DataBindings.Add("Text", _currentUnit, "TargetDirectory");
BindCslaList();
BindRelationsList();
}
}
private void BindCslaList()
{
if (_currentUnit != null)
{
_frmGenerator.ProjectPanel.Objects = _currentUnit.CslaObjects;
_frmGenerator.ProjectPanel.ApplyFilters(true);
_frmGenerator.ProjectPanel.ListObjects.ClearSelected();
if (_frmGenerator.ProjectPanel.ListObjects.Items.Count > 0)
_frmGenerator.ProjectPanel.ListObjects.SelectedIndex = 0;
// make sure the previously stored selection is cleared
_frmGenerator.ProjectPanel.ClearSelectedItems();
}
}
private void BindRelationsList()
{
if (_currentUnit != null)
{
_frmGenerator.ObjectRelationsBuilder.AssociativeEntities = _currentUnit.AssociativeEntities;
_frmGenerator.ObjectRelationsBuilder.FillViews(true);
_frmGenerator.ObjectRelationsBuilder.GetCurrentListBox().ClearSelected();
if (_frmGenerator.ObjectRelationsBuilder.GetCurrentListBox().Items.Count > 0)
_frmGenerator.ObjectRelationsBuilder.GetCurrentListBox().SelectedIndex = 0;
// make sure the previously stored selection is cleared
_frmGenerator.ObjectRelationsBuilder.ClearSelectedItems();
}
}
private void BuildSchemaTree(string connectionString)
{
try
{
//dbSchemaPanel = new DbSchemaPanel(ref _currentUnit, ref _currentCslaObject, connectionString);
_dbSchemaPanel = new DbSchemaPanel(_currentUnit, _currentCslaObject, connectionString);
_dbSchemaPanel.BuildSchemaTree();
_frmGenerator.DbSchemaPanel = _dbSchemaPanel;
_frmGenerator.AddCtrlToMiddlePane(_dbSchemaPanel);
_dbSchemaPanel.SetDbColumnsPctHeight(73);
_dbSchemaPanel.SetDbTreeViewPctHeight(73);
}
catch (Exception)
{
// Rethrow without resetting the stack trace.
throw;
}
}
private void EnableButtons()
{
//TODO: This needs to be applied to menu
//frmGenerator.AddPropertiesButtton.Enabled = true;
//frmGenerator.AddObjectButton.Enabled = true;
//frmGenerator.DeleteObjectButton.Enabled = true;
//frmGenerator.SaveButton.Enabled = true;
//frmGenerator.DuplicateButton.Enabled = true;
//frmGenerator.ConnectButton.Enabled = true;
//frmGenerator.SelectDirectoryButton.Enabled = true;
}
private string GetFileNameWithoutExtension(string fileName)
{
int index = fileName.LastIndexOf(".");
if (index >= 0)
{
return fileName.Substring(0, index);
}
return fileName;
}
private string GetFileExtension(string fileName)
{
int index = fileName.LastIndexOf(".");
if (index >= 0)
{
return fileName.Substring(index + 1);
}
return ".cs";
}
private string GetTemplateName(CslaObjectInfo info)
{
switch (info.ObjectType)
{
case CslaObjectType.EditableRoot:
return ConfigurationManager.AppSettings["EditableRootTemplate"];
case CslaObjectType.EditableChild:
return ConfigurationManager.AppSettings["EditableChildTemplate"];
case CslaObjectType.EditableRootCollection:
return ConfigurationManager.AppSettings["EditableRootCollectionTemplate"];
case CslaObjectType.EditableChildCollection:
return ConfigurationManager.AppSettings["EditableChildCollectionTemplate"];
case CslaObjectType.EditableSwitchable:
return ConfigurationManager.AppSettings["EditableSwitchableTemplate"];
case CslaObjectType.DynamicEditableRoot:
return ConfigurationManager.AppSettings["DynamicEditableRootTemplate"];
case CslaObjectType.DynamicEditableRootCollection:
return ConfigurationManager.AppSettings["DynamicEditableRootCollectionTemplate"];
case CslaObjectType.ReadOnlyObject:
return ConfigurationManager.AppSettings["ReadOnlyObjectTemplate"];
case CslaObjectType.ReadOnlyCollection:
return ConfigurationManager.AppSettings["ReadOnlyCollectionTemplate"];
default:
return String.Empty;
}
}
#endregion
#region Event Handlers
private void GeneratorForm_Closing(object sender, CancelEventArgs e)
{
Application.Exit();
}
private void CslaObjectList_SelectedItemsChanged(object sender, EventArgs e)
{
// fired on "ObjectName" changed for the following scenario:
// Suppose we have a filter on the name,
// and we change the object name in such a way that
// the object isn't visible any longer,
// and it must not be shown on the PropertyGrid
// THEN we need to reload the PropertyGrid
ReloadPropertyGrid();
}
private void AssociativeEntitiesList_SelectedItemsChanged(object sender, EventArgs e)
{
ReloadBuilderPropertyGrid();
}
// changed visibility so ActiveObjects settings can be hidden dynamically
internal void ReloadPropertyGrid()
{
if (_dbSchemaPanel != null)
_dbSchemaPanel.CslaObjectInfo = null;
var selectedItems = new List<CslaObjectInfo>();
foreach (CslaObjectInfo obj in _frmGenerator.ProjectPanel.ListObjects.SelectedItems)
{
selectedItems.Add(obj);
if (!IsLoading && _dbSchemaPanel != null)
{
_currentCslaObject = obj;
_dbSchemaPanel.CslaObjectInfo = obj;
}
}
if (_dbSchemaPanel != null && selectedItems.Count != 1)
{
_currentCslaObject = null;
_dbSchemaPanel.CslaObjectInfo = null;
}
if (selectedItems.Count == 0)
_frmGenerator.PropertyGrid.SelectedObject = null;
else
_frmGenerator.PropertyGrid.SelectedObject = new PropertyBag(selectedItems.ToArray(), _propertyContext);
}
void ReloadBuilderPropertyGrid()
{
var selectedItems = new List<AssociativeEntity>();
var listBoxSelectedItems = _frmGenerator.ObjectRelationsBuilder.GetCurrentListBox().SelectedItems;
foreach (AssociativeEntity obj in listBoxSelectedItems)
{
selectedItems.Add(obj);
if (!IsLoading)
{
_currentAssociativeEntitiy = obj;
}
}
if (selectedItems.Count == 0)
{
_currentAssociativeEntitiy = null;
}
else
{
if (_currentAssociativeEntitiy == null)
_currentAssociativeEntitiy = selectedItems[0];
}
_frmGenerator.ObjectRelationsBuilder.SetAllPropertyGridSelectedObject(_currentAssociativeEntitiy);
}
#endregion
private void GetConfig()
{
var tDir = ConfigTools.Get("TemplatesDirectory");
if (string.IsNullOrEmpty(tDir))
{
tDir = ConfigTools.OriginalGet("TemplatesDirectory");
while (tDir.LastIndexOf(@"\\") == tDir.Length - 2)
{
tDir = tDir.Substring(0, tDir.Length - 1);
}
}
if (string.IsNullOrEmpty(tDir))
{
TemplatesDirectory = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
}
else
{
TemplatesDirectory = tDir;
}
}
#region Nested CslaObjectInfoComparer
private class CslaObjectInfoComparer : IComparer<CslaObjectInfo>
{
#region IComparer<CslaObjectInfo> Members
public int Compare(CslaObjectInfo x, CslaObjectInfo y)
{
return x.ObjectName.CompareTo(y.ObjectName);
}
#endregion
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Runtime.ExceptionServices;
using System.Runtime.InteropServices;
using System.Text;
namespace System
{
public static partial class AppContext
{
[Flags]
private enum SwitchValueState
{
HasFalseValue = 0x1,
HasTrueValue = 0x2,
HasLookedForOverride = 0x4,
UnknownValue = 0x8 // Has no default and could not find an override
}
private static readonly Dictionary<string, SwitchValueState> s_switchMap = new Dictionary<string, SwitchValueState>();
private static Dictionary<String, Object> s_localStore = new Dictionary<String, Object>();
private static string s_defaultBaseDirectory;
static AppContext()
{
// populate the AppContext with the default set of values
AppContextDefaultValues.PopulateDefaultValues();
}
public static string TargetFrameworkName
{
get
{
throw new NotImplementedException();
}
}
public static string BaseDirectory
{
get
{
// The value of APP_CONTEXT_BASE_DIRECTORY key has to be a string and it is not allowed to be any other type.
// Otherwise the caller will get an invalid cast exception.
return (string)GetData("APP_CONTEXT_BASE_DIRECTORY") ??
(s_defaultBaseDirectory ?? (s_defaultBaseDirectory = GetBaseDirectoryCore()));
}
}
public static object GetData(string name)
{
if (name == null)
throw new ArgumentNullException(nameof(name));
object data;
lock (((ICollection)s_localStore).SyncRoot)
{
s_localStore.TryGetValue(name, out data);
}
return data;
}
public static void SetData(string name, object data)
{
if (name == null)
throw new ArgumentNullException(nameof(name));
// SetData should only be used to set values that don't already exist.
object currentVal;
lock (((ICollection)s_localStore).SyncRoot)
{
s_localStore.TryGetValue(name, out currentVal);
}
if (currentVal != null)
{
throw new InvalidOperationException(SR.InvalidOperation_SetData_OnlyOnce);
}
lock (((ICollection)s_localStore).SyncRoot)
{
s_localStore[name] = data;
}
}
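// Illustrative sketch (not part of the original source): GetData returns null for a name that has
// not been set, SetData stores a value once, and setting the same name a second time throws
// InvalidOperationException. The key below is a hypothetical example, not a framework-defined key.
private static void DataStoreUsageSketch()
{
SetData("EXAMPLE_KEY", "example value");
string value = (string)GetData("EXAMPLE_KEY"); // "example value"
// A second SetData("EXAMPLE_KEY", ...) would throw InvalidOperationException here.
}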
public static event UnhandledExceptionEventHandler UnhandledException;
public static event System.EventHandler<FirstChanceExceptionEventArgs> FirstChanceException;
public static event System.EventHandler ProcessExit;
internal static event System.EventHandler Unloading;
private static void OnUnhandledException(object sender, UnhandledExceptionEventArgs e)
{
var unhandledException = UnhandledException;
if (unhandledException != null)
{
unhandledException(sender, e);
}
}
private static void OnFirstChanceException(object sender, FirstChanceExceptionEventArgs e)
{
var firstChanceException = FirstChanceException;
if (firstChanceException != null)
{
firstChanceException(sender, e);
}
}
private static void OnProcessExit(object sender, EventArgs e)
{
var processExit = ProcessExit;
if (processExit != null)
{
processExit(sender, e);
}
}
private static void OnUnloading(object sender, EventArgs e)
{
var unloading = Unloading;
if (unloading != null)
{
unloading(sender, e);
}
}
#region Switch APIs
/// <summary>
/// Try to get the value of the switch.
/// </summary>
/// <param name="switchName">The name of the switch</param>
/// <param name="isEnabled">A variable where to place the value of the switch</param>
/// <returns>A return value of true represents that the switch was set and <paramref name="isEnabled"/> contains the value of the switch</returns>
public static bool TryGetSwitch(string switchName, out bool isEnabled)
{
if (switchName == null)
throw new ArgumentNullException(nameof(switchName));
if (switchName.Length == 0)
throw new ArgumentException(SR.Argument_EmptyName, nameof(switchName));
// By default, the switch is not enabled.
isEnabled = false;
SwitchValueState switchValue;
lock (s_switchMap)
{
if (s_switchMap.TryGetValue(switchName, out switchValue))
{
// The value is in the dictionary.
// There are 3 cases here:
// 1. The value of the switch is 'unknown'. This means that the switch name is not known to the system (either via defaults or checking overrides).
// Example: This is the case when, during a servicing event, a switch is added to System.Xml, which ships before mscorlib. The value of the switch
// will be unknown to mscorlib.dll, and we want to prevent checking the overrides every time we check this switch.
// 2. The switch has a valid value AND we have read the overrides for it
// Example: TryGetSwitch is called for a switch set via SetSwitch
// 3. The switch has the default value and we need to check for overrides
// Example: TryGetSwitch is called for the first time for a switch that has a default value
// 1. The value is unknown
if (switchValue == SwitchValueState.UnknownValue)
{
isEnabled = false;
return false;
}
// We get the value of isEnabled from the value that we stored in the dictionary
isEnabled = (switchValue & SwitchValueState.HasTrueValue) == SwitchValueState.HasTrueValue;
// 2. The switch has a valid value AND we have checked for overrides
if ((switchValue & SwitchValueState.HasLookedForOverride) == SwitchValueState.HasLookedForOverride)
{
return true;
}
// Update the switch in the dictionary to mark it as 'checked for override'
s_switchMap[switchName] = (isEnabled ? SwitchValueState.HasTrueValue : SwitchValueState.HasFalseValue)
| SwitchValueState.HasLookedForOverride;
return true;
}
else
{
// The value is NOT in the dictionary
// In this case we need to see if we have an override defined for the value.
// There are 2 cases:
// 1. The value has an override specified. In this case we need to add the value to the dictionary
// and mark it as checked for overrides
// Example: In a servicing event, System.Xml introduces a switch and an override is specified.
// The value is not found in mscorlib (as System.Xml ships independent of mscorlib)
// 2. The value does not have an override specified
// In this case, we want to capture the fact that we looked for a value and found nothing by adding
// an entry in the dictionary with the 'sentinel' value of 'SwitchValueState.UnknownValue'.
// Example: This will prevent us from trying to find overrides for values that we don't have in the dictionary
// 1. The value has an override specified.
bool overrideValue;
if (AppContextDefaultValues.TryGetSwitchOverride(switchName, out overrideValue))
{
isEnabled = overrideValue;
// Update the switch in the dictionary to mark it as 'checked for override'
s_switchMap[switchName] = (isEnabled ? SwitchValueState.HasTrueValue : SwitchValueState.HasFalseValue)
| SwitchValueState.HasLookedForOverride;
return true;
}
// 2. The value does not have an override.
s_switchMap[switchName] = SwitchValueState.UnknownValue;
}
}
return false; // we did not find a value for the switch
}
/// <summary>
/// Assign a switch a value
/// </summary>
/// <param name="switchName">The name of the switch</param>
/// <param name="isEnabled">The value to assign</param>
public static void SetSwitch(string switchName, bool isEnabled)
{
if (switchName == null)
throw new ArgumentNullException(nameof(switchName));
if (switchName.Length == 0)
throw new ArgumentException(SR.Argument_EmptyName, nameof(switchName));
SwitchValueState switchValue = (isEnabled ? SwitchValueState.HasTrueValue : SwitchValueState.HasFalseValue)
| SwitchValueState.HasLookedForOverride;
lock (s_switchMap)
{
// Store the new value and the fact that we checked in the dictionary
s_switchMap[switchName] = switchValue;
}
}
/// <summary>
/// This method is going to be called from the AppContextDefaultValues class when setting up the
/// default values for the switches. !!!! This method is called during the static constructor so it does not
/// take a lock !!!! If you are planning to use this outside of that, please ensure proper locking.
/// </summary>
internal static void DefineSwitchDefault(string switchName, bool isEnabled)
{
s_switchMap[switchName] = isEnabled ? SwitchValueState.HasTrueValue : SwitchValueState.HasFalseValue;
}
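// Usage sketch (illustrative; the switch name is hypothetical). Library code reads a named
// switch with TryGetSwitch, while the host application opts in via SetSwitch; TryGetSwitch
// returns false when neither a default nor an override is known for the name.
//
//     AppContext.SetSwitch("Switch.MyLib.UseLegacyParser", true);
//     if (AppContext.TryGetSwitch("Switch.MyLib.UseLegacyParser", out bool useLegacy) && useLegacy)
//     {
//         // take the legacy code path
//     }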
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Diagnostics;
using System.IO;
using System.Linq;
namespace Umbraco.Cms.Infrastructure.Persistence
{
/// <summary>
/// Manages LocalDB databases.
/// </summary>
/// <remarks>
/// <para>Latest version is SQL Server 2016 Express LocalDB,
/// see https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/sql-server-2016-express-localdb
/// which can be installed by downloading the Express installer from https://www.microsoft.com/en-us/sql-server/sql-server-downloads
/// (about 5MB), then selecting 'Download Media' to download SqlLocalDB.msi (about 44MB), which you can execute. This installs
/// LocalDB only, though you probably want to install the full Express edition. You may also want to install SQL Server Management
/// Studio, which can be used to connect to LocalDB databases.</para>
/// <para>See also https://github.com/ritterim/automation-sql which is a somewhat simpler version of this.</para>
/// </remarks>
public class LocalDb
{
private int _version;
private bool _hasVersion;
private string _exe;
#region Availability & Version
/// <summary>
/// Gets the LocalDb installed version.
/// </summary>
/// <remarks>If more than one version is installed, returns the highest available. Returns
/// the major version as an integer e.g. 11, 12...</remarks>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
public int Version
{
get
{
EnsureVersion();
if (_version <= 0)
throw new InvalidOperationException("LocalDb is not available.");
return _version;
}
}
/// <summary>
/// Ensures that the LocalDb version is detected.
/// </summary>
private void EnsureVersion()
{
if (_hasVersion) return;
DetectVersion();
_hasVersion = true;
}
/// <summary>
/// Gets a value indicating whether LocalDb is available.
/// </summary>
public bool IsAvailable
{
get
{
EnsureVersion();
return _version > 0;
}
}
/// <summary>
/// Ensures that LocalDb is available.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
private void EnsureAvailable()
{
if (IsAvailable == false)
throw new InvalidOperationException("LocalDb is not available.");
}
/// <summary>
/// Detects LocalDb installed version.
/// </summary>
/// <remarks>If more than one version is installed, the highest available is detected.</remarks>
private void DetectVersion()
{
_hasVersion = true;
_version = -1;
_exe = null;
var programFiles = Environment.GetEnvironmentVariable("ProgramFiles");
// MS SQL Server installs in e.g. "C:\Program Files\Microsoft SQL Server", so
// we want to detect it in "%ProgramFiles%\Microsoft SQL Server" - however, if
// Umbraco runs as a 32bits process (e.g. IISExpress configured as 32bits)
// on a 64bits system, %ProgramFiles% will point to "C:\Program Files (x86)"
// and SQL Server cannot be found. But then, %ProgramW6432% will point to
// the original "C:\Program Files". Using it to fix the path.
// see also: MSDN doc for WOW64 implementation
//
var programW6432 = Environment.GetEnvironmentVariable("ProgramW6432");
if (string.IsNullOrWhiteSpace(programW6432) == false && programW6432 != programFiles)
programFiles = programW6432;
if (string.IsNullOrWhiteSpace(programFiles)) return;
// detect 15, 14, 13, 12, 11
for (var i = 15; i > 10; i--)
{
var exe = Path.Combine(programFiles, $@"Microsoft SQL Server\{i}0\Tools\Binn\SqlLocalDB.exe");
if (File.Exists(exe) == false) continue;
_version = i;
_exe = exe;
break;
}
}
#endregion
#region Instances
/// <summary>
/// Gets the name of existing LocalDb instances.
/// </summary>
/// <returns>The name of existing LocalDb instances.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
public string[] GetInstances()
{
EnsureAvailable();
var rc = ExecuteSqlLocalDb("i", out var output, out var error); // info
if (rc != 0 || error != string.Empty) return null;
return output.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
}
/// <summary>
/// Gets a value indicating whether a LocalDb instance exists.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>A value indicating whether a LocalDb instance with the specified name exists.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
public bool InstanceExists(string instanceName)
{
EnsureAvailable();
var instances = GetInstances();
return instances != null && instances.Contains(instanceName, StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Creates a LocalDb instance.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>A value indicating whether the instance was created without errors.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
public bool CreateInstance(string instanceName)
{
EnsureAvailable();
return ExecuteSqlLocalDb($"c \"{instanceName}\"", out _, out var error) == 0 && error == string.Empty;
}
/// <summary>
/// Drops a LocalDb instance.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>A value indicating whether the instance was dropped without errors.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
/// <remarks>
/// When an instance is dropped all the attached database files are deleted.
/// Successful if the instance does not exist.
/// </remarks>
public bool DropInstance(string instanceName)
{
EnsureAvailable();
var instance = GetInstance(instanceName);
if (instance == null) return true;
instance.DropDatabases(); // else the files remain
// -i force NOWAIT, -k kills
return ExecuteSqlLocalDb($"p \"{instanceName}\" -i", out _, out var error) == 0 && error == string.Empty
&& ExecuteSqlLocalDb($"d \"{instanceName}\"", out _, out error) == 0 && error == string.Empty;
}
/// <summary>
/// Stops a LocalDb instance.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>A value indicating whether the instance was stopped without errors.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
/// <remarks>
/// Successful if the instance does not exist.
/// </remarks>
public bool StopInstance(string instanceName)
{
EnsureAvailable();
if (InstanceExists(instanceName) == false) return true;
// -i force NOWAIT, -k kills
return ExecuteSqlLocalDb($"p \"{instanceName}\" -i", out _, out var error) == 0 && error == string.Empty;
}
/// <summary>
/// Starts a LocalDb instance.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>A value indicating whether the instance was started without errors.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
/// <remarks>
/// Failed if the instance does not exist.
/// </remarks>
public bool StartInstance(string instanceName)
{
EnsureAvailable();
if (InstanceExists(instanceName) == false) return false;
return ExecuteSqlLocalDb($"s \"{instanceName}\"", out _, out var error) == 0 && error == string.Empty;
}
/// <summary>
/// Gets a LocalDb instance.
/// </summary>
/// <param name="instanceName">The name of the instance.</param>
/// <returns>The instance with the specified name if it exists, otherwise null.</returns>
/// <exception cref="InvalidOperationException">Thrown when LocalDb is not available.</exception>
public Instance GetInstance(string instanceName)
{
EnsureAvailable();
return InstanceExists(instanceName) ? new Instance(instanceName) : null;
}
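// Usage sketch (illustrative; the instance name, database name and path are hypothetical).
// A typical flow checks availability, ensures an instance exists, then works with databases
// through the Instance object returned by GetInstance.
//
//     var localDb = new LocalDb();
//     if (localDb.IsAvailable)
//     {
//         if (localDb.InstanceExists("UmbracoTests") == false)
//             localDb.CreateInstance("UmbracoTests");
//         var instance = localDb.GetInstance("UmbracoTests");
//         if (instance.DatabaseExists("TestDb") == false)
//             instance.CreateDatabase("TestDb", @"C:\Temp\LocalDbFiles");
//     }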
#endregion
#region Databases
/// <summary>
/// Represents a LocalDb instance.
/// </summary>
/// <remarks>
/// LocalDb is assumed to be available, and the instance is assumed to exist.
/// </remarks>
public class Instance
{
private readonly string _masterCstr;
/// <summary>
/// Gets the name of the instance.
/// </summary>
public string InstanceName { get; }
/// <summary>
/// Initializes a new instance of the <see cref="Instance"/> class.
/// </summary>
/// <param name="instanceName"></param>
public Instance(string instanceName)
{
InstanceName = instanceName;
_masterCstr = $@"Server=(localdb)\{instanceName};Integrated Security=True;";
}
public static string GetConnectionString(string instanceName, string databaseName)
{
return $@"Server=(localdb)\{instanceName};Integrated Security=True;Database={databaseName};";
}
/// <summary>
/// Gets a LocalDb connection string.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <returns>The connection string for the specified database.</returns>
/// <remarks>
/// The database should exist in the LocalDb instance.
/// </remarks>
public string GetConnectionString(string databaseName)
{
return _masterCstr + $@"Database={databaseName};";
}
/// <summary>
/// Gets a LocalDb connection string for an attached database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <param name="filesPath">The directory containing database files.</param>
/// <returns>The connection string for the specified database.</returns>
/// <remarks>
/// The database should not exist in the LocalDb instance.
/// It will be attached when the first connection is opened, using its full MDF filename
/// (uppercased) as the database name, and remains attached until explicitly detached.
/// </remarks>
public string GetAttachedConnectionString(string databaseName, string filesPath)
{
GetDatabaseFiles(databaseName, filesPath, out _, out _, out _, out var mdfFilename, out _);
return _masterCstr + $@"AttachDbFileName='{mdfFilename}';";
}
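// Example (illustrative values): for an instance named "UmbracoTests",
// GetAttachedConnectionString("MyDb", @"C:\Data") returns
// "Server=(localdb)\UmbracoTests;Integrated Security=True;AttachDbFileName='C:\Data\MyDb.mdf';".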
/// <summary>
/// Gets the name of existing databases.
/// </summary>
/// <returns>The name of existing databases.</returns>
public string[] GetDatabases()
{
var userDatabases = new List<string>();
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var databases = new Dictionary<string, string>();
SetCommand(cmd, @"
SELECT name, filename FROM sys.sysdatabases");
using (var reader = cmd.ExecuteReader())
{
while (reader.Read())
{
databases[reader.GetString(0)] = reader.GetString(1);
}
}
foreach (var database in databases)
{
var dbname = database.Key;
if (dbname == "master" || dbname == "tempdb" || dbname == "model" || dbname == "msdb")
continue;
// TODO: shall we deal with stale databases?
// TODO: is it always ok to assume file names?
//var mdf = database.Value;
//var ldf = mdf.Replace(".mdf", "_log.ldf");
//if (staleOnly && File.Exists(mdf) && File.Exists(ldf))
// continue;
//ExecuteDropDatabase(cmd, dbname, mdf, ldf);
//count++;
userDatabases.Add(dbname);
}
}
return userDatabases.ToArray();
}
/// <summary>
/// Gets a value indicating whether a database exists.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <returns>A value indicating whether a database with the specified name exists.</returns>
/// <remarks>
/// A database exists if it is registered in the instance, and its files exist. If the database
/// is registered but some of its files are missing, the database is dropped.
/// </remarks>
public bool DatabaseExists(string databaseName)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var mdf = GetDatabase(cmd, databaseName);
if (mdf == null) return false;
// it can exist, even though its files have been deleted
// if files exist assume all is ok (should we try to connect?)
var ldf = GetLogFilename(mdf);
if (File.Exists(mdf) && File.Exists(ldf))
return true;
ExecuteDropDatabase(cmd, databaseName, mdf, ldf);
}
return false;
}
/// <summary>
/// Creates a new database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <param name="filesPath">The directory containing database files.</param>
/// <returns>A value indicating whether the database was created without errors.</returns>
/// <remarks>
/// Failed if a database with the specified name already exists in the instance,
/// or if the database files already exist in the specified directory.
/// </remarks>
public bool CreateDatabase(string databaseName, string filesPath)
{
GetDatabaseFiles(databaseName, filesPath, out var logName, out _, out _, out var mdfFilename, out var ldfFilename);
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var mdf = GetDatabase(cmd, databaseName);
if (mdf != null) return false;
// cannot use parameters on CREATE DATABASE
// ie "CREATE DATABASE @0 ..." does not work
SetCommand(cmd, $@"
CREATE DATABASE {QuotedName(databaseName)}
ON (NAME=N{QuotedName(databaseName, '\'')}, FILENAME={QuotedName(mdfFilename, '\'')})
LOG ON (NAME=N{QuotedName(logName, '\'')}, FILENAME={QuotedName(ldfFilename, '\'')})");
var unused = cmd.ExecuteNonQuery();
}
return true;
}
/// <summary>
/// Drops a database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <returns>A value indicating whether the database was dropped without errors.</returns>
/// <remarks>
/// Successful if the database does not exist.
/// Deletes the database files.
/// </remarks>
public bool DropDatabase(string databaseName)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
SetCommand(cmd, @"
SELECT name, filename FROM master.dbo.sysdatabases WHERE ('[' + name + ']' = @0 OR name = @0)",
databaseName);
var mdf = GetDatabase(cmd, databaseName);
if (mdf == null) return true;
ExecuteDropDatabase(cmd, databaseName, mdf);
}
return true;
}
/// <summary>
/// Drops stale databases.
/// </summary>
/// <returns>The number of databases that were dropped.</returns>
/// <remarks>
/// A database is considered stale when its files cannot be found.
/// </remarks>
public int DropStaleDatabases()
{
return DropDatabases(true);
}
/// <summary>
/// Drops databases.
/// </summary>
/// <param name="staleOnly">A value indicating whether to delete only stale database.</param>
/// <returns>The number of databases that were dropped.</returns>
/// <remarks>
/// A database is considered stale when its files cannot be found.
/// </remarks>
public int DropDatabases(bool staleOnly = false)
{
var count = 0;
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var databases = new Dictionary<string, string>();
SetCommand(cmd, @"
SELECT name, filename FROM sys.sysdatabases");
using (var reader = cmd.ExecuteReader())
{
while (reader.Read())
{
databases[reader.GetString(0)] = reader.GetString(1);
}
}
foreach (var database in databases)
{
var dbname = database.Key;
if (dbname == "master" || dbname == "tempdb" || dbname == "model" || dbname == "msdb")
continue;
var mdf = database.Value;
var ldf = mdf.Replace(".mdf", "_log.ldf");
if (staleOnly && File.Exists(mdf) && File.Exists(ldf))
continue;
ExecuteDropDatabase(cmd, dbname, mdf, ldf);
count++;
}
}
return count;
}
/// <summary>
/// Detaches a database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <returns>The directory containing the database files.</returns>
/// <exception cref="InvalidOperationException">Thrown when a database with the specified name does not exist.</exception>
public string DetachDatabase(string databaseName)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var mdf = GetDatabase(cmd, databaseName);
if (mdf == null)
throw new InvalidOperationException("Database does not exist.");
DetachDatabase(cmd, databaseName);
return Path.GetDirectoryName(mdf);
}
}
/// <summary>
/// Attaches a database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <param name="filesPath">The directory containing database files.</param>
/// <exception cref="InvalidOperationException">Thrown when a database with the specified name already exists.</exception>
public void AttachDatabase(string databaseName, string filesPath)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
var mdf = GetDatabase(cmd, databaseName);
if (mdf != null)
throw new InvalidOperationException("Database already exists.");
AttachDatabase(cmd, databaseName, filesPath);
}
}
/// <summary>
/// Gets the file names of a database.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <param name="mdfName">The MDF logical name.</param>
/// <param name="ldfName">The LDF logical name.</param>
/// <param name="mdfFilename">The MDF filename.</param>
/// <param name="ldfFilename">The LDF filename.</param>
public void GetFilenames(string databaseName,
out string mdfName, out string ldfName,
out string mdfFilename, out string ldfFilename)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
GetFilenames(cmd, databaseName, out mdfName, out ldfName, out mdfFilename, out ldfFilename);
}
}
/// <summary>
/// Kills all existing connections.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
public void KillConnections(string databaseName)
{
using (var conn = new SqlConnection(_masterCstr))
using (var cmd = conn.CreateCommand())
{
conn.Open();
SetCommand(cmd, @"
DECLARE @sql VARCHAR(MAX);
SELECT @sql = COALESCE(@sql,'') + 'kill ' + CONVERT(VARCHAR, SPId) + ';'
FROM master.sys.sysprocesses
WHERE DBId = DB_ID(@0) AND SPId <> @@SPId;
EXEC(@sql);",
databaseName);
cmd.ExecuteNonQuery();
}
}
/// <summary>
/// Gets a database.
/// </summary>
/// <param name="cmd">The Sql Command.</param>
/// <param name="databaseName">The name of the database.</param>
/// <returns>The full filename of the MDF file, if the database exists, otherwise null.</returns>
private static string GetDatabase(SqlCommand cmd, string databaseName)
{
SetCommand(cmd, @"
SELECT name, filename FROM master.dbo.sysdatabases WHERE ('[' + name + ']' = @0 OR name = @0)",
databaseName);
string mdf = null;
using (var reader = cmd.ExecuteReader())
{
if (reader.Read())
mdf = reader.GetString(1) ?? string.Empty;
while (reader.Read())
{
}
}
return mdf;
}
/// <summary>
/// Drops a database and its files.
/// </summary>
/// <param name="cmd">The Sql command.</param>
/// <param name="databaseName">The name of the database.</param>
/// <param name="mdf">The name of the database (MDF) file.</param>
/// <param name="ldf">The name of the log (LDF) file.</param>
private static void ExecuteDropDatabase(SqlCommand cmd, string databaseName, string mdf, string ldf = null)
{
try
{
// cannot use parameters on ALTER DATABASE
// ie "ALTER DATABASE @0 ..." does not work
SetCommand(cmd, $@"
ALTER DATABASE {QuotedName(databaseName)} SET SINGLE_USER WITH ROLLBACK IMMEDIATE");
var unused1 = cmd.ExecuteNonQuery();
}
catch (SqlException e)
{
if (e.Message.Contains("Unable to open the physical file") && e.Message.Contains("Operating system error 2:"))
{
// quite probably, the files were missing
// yet, it should be possible to drop the database anyways
// but we'll have to deal with the files
}
else
{
// no idea, throw
throw;
}
}
// cannot use parameters on DROP DATABASE
// ie "DROP DATABASE @0 ..." does not work
SetCommand(cmd, $@"
DROP DATABASE {QuotedName(databaseName)}");
var unused2 = cmd.ExecuteNonQuery();
// be absolutely sure
if (File.Exists(mdf)) File.Delete(mdf);
ldf = ldf ?? GetLogFilename(mdf);
if (File.Exists(ldf)) File.Delete(ldf);
}
/// <summary>
/// Gets the log (LDF) filename corresponding to a database (MDF) filename.
/// </summary>
/// <param name="mdfFilename">The MDF filename.</param>
/// <returns></returns>
private static string GetLogFilename(string mdfFilename)
{
if (mdfFilename.EndsWith(".mdf") == false)
throw new ArgumentException("Not a valid MDF filename (no .mdf extension).", nameof(mdfFilename));
return mdfFilename.Substring(0, mdfFilename.Length - ".mdf".Length) + "_log.ldf";
}
/// <summary>
/// Detaches a database.
/// </summary>
/// <param name="cmd">The Sql command.</param>
/// <param name="databaseName">The name of the database.</param>
private static void DetachDatabase(SqlCommand cmd, string databaseName)
{
// cannot use parameters on ALTER DATABASE
// ie "ALTER DATABASE @0 ..." does not work
SetCommand(cmd, $@"
ALTER DATABASE {QuotedName(databaseName)} SET SINGLE_USER WITH ROLLBACK IMMEDIATE");
var unused1 = cmd.ExecuteNonQuery();
SetCommand(cmd, @"
EXEC sp_detach_db @dbname=@0",
databaseName);
var unused2 = cmd.ExecuteNonQuery();
}
/// <summary>
/// Attaches a database.
/// </summary>
/// <param name="cmd">The Sql command.</param>
/// <param name="databaseName">The name of the database.</param>
/// <param name="filesPath">The directory containing database files.</param>
private static void AttachDatabase(SqlCommand cmd, string databaseName, string filesPath)
{
GetDatabaseFiles(databaseName, filesPath,
out var logName, out _, out _, out var mdfFilename, out var ldfFilename);
// cannot use parameters on CREATE DATABASE
// ie "CREATE DATABASE @0 ..." does not work
SetCommand(cmd, $@"
CREATE DATABASE {QuotedName(databaseName)}
ON (NAME=N{QuotedName(databaseName, '\'')}, FILENAME={QuotedName(mdfFilename, '\'')})
LOG ON (NAME=N{QuotedName(logName, '\'')}, FILENAME={QuotedName(ldfFilename, '\'')})
FOR ATTACH");
var unused = cmd.ExecuteNonQuery();
}
/// <summary>
/// Sets a database command.
/// </summary>
/// <param name="cmd">The command.</param>
/// <param name="sql">The command text.</param>
/// <param name="args">The command arguments.</param>
/// <remarks>
/// The command text must refer to arguments as @0, @1... each referring
/// to the corresponding position in <paramref name="args"/>.
/// </remarks>
private static void SetCommand(SqlCommand cmd, string sql, params object[] args)
{
cmd.CommandType = CommandType.Text;
cmd.CommandText = sql;
cmd.Parameters.Clear();
for (var i = 0; i < args.Length; i++)
cmd.Parameters.AddWithValue("@" + i, args[i]);
}
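// Example (illustrative): positional placeholders map to the args array by index, so
// SetCommand(cmd, "SELECT filename FROM sys.sysdatabases WHERE name = @0", databaseName)
// binds databaseName to the parameter named "@0".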
/// <summary>
/// Gets the file names of a database.
/// </summary>
/// <param name="cmd">The Sql command.</param>
/// <param name="databaseName">The name of the database.</param>
/// <param name="mdfName">The MDF logical name.</param>
/// <param name="ldfName">The LDF logical name.</param>
/// <param name="mdfFilename">The MDF filename.</param>
/// <param name="ldfFilename">The LDF filename.</param>
private void GetFilenames(SqlCommand cmd, string databaseName,
out string mdfName, out string ldfName,
out string mdfFilename, out string ldfFilename)
{
mdfName = ldfName = mdfFilename = ldfFilename = null;
SetCommand(cmd, @"
SELECT DB_NAME(database_id), type_desc, name, physical_name
FROM master.sys.master_files
WHERE database_id=DB_ID(@0)",
databaseName);
using (var reader = cmd.ExecuteReader())
{
while (reader.Read())
{
var type = reader.GetString(1);
if (type == "ROWS")
{
mdfName = reader.GetString(2);
mdfFilename = reader.GetString(3); // physical_name of the data (MDF) file
}
else if (type == "LOG")
{
ldfName = reader.GetString(2);
ldfFilename = reader.GetString(3);
}
}
}
}
}
/// <summary>
/// Copy database files.
/// </summary>
/// <param name="databaseName">The name of the source database.</param>
/// <param name="filesPath">The directory containing source database files.</param>
/// <param name="targetDatabaseName">The name of the target database.</param>
/// <param name="targetFilesPath">The directory containing target database files.</param>
/// <param name="sourceExtension">The source database files extension.</param>
/// <param name="targetExtension">The target database files extension.</param>
/// <param name="overwrite">A value indicating whether to overwrite the target files.</param>
/// <param name="delete">A value indicating whether to delete the source files.</param>
/// <remarks>
/// The <paramref name="targetDatabaseName"/>, <paramref name="targetFilesPath"/>, <paramref name="sourceExtension"/>
/// and <paramref name="targetExtension"/> parameters are optional. If they result in target being identical
/// to source, no copy is performed. If <paramref name="delete"/> is false, nothing happens, otherwise the source
/// files are deleted.
/// If target is not identical to source, files are copied or moved, depending on the value of <paramref name="delete"/>.
/// Extensions are used eg to copy MyDatabase.mdf to MyDatabase.mdf.temp.
/// </remarks>
public void CopyDatabaseFiles(string databaseName, string filesPath,
string targetDatabaseName = null, string targetFilesPath = null,
string sourceExtension = null, string targetExtension = null,
bool overwrite = false, bool delete = false)
{
var nop = (targetFilesPath == null || targetFilesPath == filesPath)
&& (targetDatabaseName == null || targetDatabaseName == databaseName)
&& (sourceExtension == null && targetExtension == null || sourceExtension == targetExtension);
if (nop && delete == false) return;
GetDatabaseFiles(databaseName, filesPath,
out _, out _, out _, out var mdfFilename, out var ldfFilename);
if (sourceExtension != null)
{
mdfFilename += "." + sourceExtension;
ldfFilename += "." + sourceExtension;
}
if (nop)
{
// delete
if (File.Exists(mdfFilename)) File.Delete(mdfFilename);
if (File.Exists(ldfFilename)) File.Delete(ldfFilename);
}
else
{
// copy or copy+delete ie move
GetDatabaseFiles(targetDatabaseName ?? databaseName, targetFilesPath ?? filesPath,
out _, out _, out _, out var targetMdfFilename, out var targetLdfFilename);
if (targetExtension != null)
{
targetMdfFilename += "." + targetExtension;
targetLdfFilename += "." + targetExtension;
}
if (delete)
{
if (overwrite && File.Exists(targetMdfFilename)) File.Delete(targetMdfFilename);
if (overwrite && File.Exists(targetLdfFilename)) File.Delete(targetLdfFilename);
File.Move(mdfFilename, targetMdfFilename);
File.Move(ldfFilename, targetLdfFilename);
}
else
{
File.Copy(mdfFilename, targetMdfFilename, overwrite);
File.Copy(ldfFilename, targetLdfFilename, overwrite);
}
}
}
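// Usage sketch (illustrative names): move MyDb.mdf/MyDb_log.ldf out of the way by renaming
// them with a ".temp" extension, deleting any previous copy:
//
//     CopyDatabaseFiles("MyDb", @"C:\Data", targetExtension: "temp", overwrite: true, delete: true);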
/// <summary>
/// Gets a value indicating whether database files exist.
/// </summary>
/// <param name="databaseName">The name of the source database.</param>
/// <param name="filesPath">The directory containing source database files.</param>
/// <param name="extension">The database files extension.</param>
/// <returns>A value indicating whether the database files exist.</returns>
/// <remarks>
/// Extensions are used eg to copy MyDatabase.mdf to MyDatabase.mdf.temp.
/// </remarks>
public bool DatabaseFilesExist(string databaseName, string filesPath, string extension = null)
{
GetDatabaseFiles(databaseName, filesPath,
out _, out _, out _, out var mdfFilename, out var ldfFilename);
if (extension != null)
{
mdfFilename += "." + extension;
ldfFilename += "." + extension;
}
return File.Exists(mdfFilename) && File.Exists(ldfFilename);
}
/// <summary>
/// Gets the name of the database files.
/// </summary>
/// <param name="databaseName">The name of the database.</param>
/// <param name="filesPath">The directory containing database files.</param>
/// <param name="logName">The name of the log.</param>
/// <param name="baseFilename">The base filename (the MDF filename without the .mdf extension).</param>
/// <param name="baseLogFilename">The base log filename (the LDF filename without the .ldf extension).</param>
/// <param name="mdfFilename">The MDF filename.</param>
/// <param name="ldfFilename">The LDF filename.</param>
private static void GetDatabaseFiles(string databaseName, string filesPath,
out string logName,
out string baseFilename, out string baseLogFilename,
out string mdfFilename, out string ldfFilename)
{
logName = databaseName + "_log";
baseFilename = Path.Combine(filesPath, databaseName);
baseLogFilename = Path.Combine(filesPath, logName);
mdfFilename = baseFilename + ".mdf";
ldfFilename = baseFilename + "_log.ldf";
}
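// Example (illustrative): GetDatabaseFiles("MyDb", @"C:\Data", ...) produces
// logName = "MyDb_log", mdfFilename = @"C:\Data\MyDb.mdf" and ldfFilename = @"C:\Data\MyDb_log.ldf".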
#endregion
#region SqlLocalDB
/// <summary>
/// Executes the SqlLocalDB command.
/// </summary>
/// <param name="args">The arguments.</param>
/// <param name="output">The command standard output.</param>
/// <param name="error">The command error output.</param>
/// <returns>The process exit code.</returns>
/// <remarks>
/// Execution is successful if the exit code is zero, and error is empty.
/// </remarks>
private int ExecuteSqlLocalDb(string args, out string output, out string error)
{
if (_exe == null) // should never happen - we should not execute if not available
{
output = string.Empty;
error = "SqlLocalDB.exe not found";
return -1;
}
using (var p = new Process
{
StartInfo =
{
UseShellExecute = false,
RedirectStandardOutput = true,
RedirectStandardError = true,
FileName = _exe,
Arguments = args,
CreateNoWindow = true,
WindowStyle = ProcessWindowStyle.Hidden
}
})
{
p.Start();
output = p.StandardOutput.ReadToEnd();
error = p.StandardError.ReadToEnd();
p.WaitForExit();
return p.ExitCode;
}
}
/// <summary>
/// Returns a Unicode string with the delimiters added to make the input string a valid SQL Server delimited identifier.
/// </summary>
/// <param name="name">The name to quote.</param>
/// <param name="quote">A quote character.</param>
/// <returns></returns>
/// <remarks>
/// This is a C# implementation of T-SQL QUOTEDNAME.
/// <paramref name="quote"/> is optional, it can be '[' (default), ']', '\'' or '"'.
/// </remarks>
internal static string QuotedName(string name, char quote = '[')
{
switch (quote)
{
case '[':
case ']':
return "[" + name.Replace("]", "]]") + "]";
case '\'':
return "'" + name.Replace("'", "''") + "'";
case '"':
return "\"" + name.Replace("\"", "\"\"") + "\"";
default:
throw new NotSupportedException("Not a valid quote character.");
}
}
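// Examples (illustrative): QuotedName("my]db") returns "[my]]db]" and
// QuotedName("it's", '\'') returns "'it''s'", doubling the embedded delimiter.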
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
namespace System.Runtime.Caching
{
internal class MemoryCacheEntry : MemoryCacheKey
{
private object _value;
private DateTime _utcCreated;
private int _state;
// expiration
private DateTime _utcAbsExp;
private TimeSpan _slidingExp;
private ExpiresEntryRef _expiresEntryRef;
private byte _expiresBucket; // index of the expiration list (bucket)
// usage
private byte _usageBucket; // index of the usage list (== priority-1)
private UsageEntryRef _usageEntryRef; // ref into the usage list
private DateTime _utcLastUpdateUsage; // time we last updated usage
private CacheEntryRemovedCallback _callback;
private SeldomUsedFields _fields; // optimization to reduce working set when the entry has no dependencies
private class SeldomUsedFields
{
internal Collection<ChangeMonitor> _dependencies; // the entry's dependency needs to be disposed when the entry is released
internal Dictionary<MemoryCacheEntryChangeMonitor, MemoryCacheEntryChangeMonitor> _dependents; // dependents must be notified when this entry is removed
internal MemoryCache _cache;
internal Tuple<MemoryCacheStore, MemoryCacheEntry> _updateSentinel; // the MemoryCacheEntry (and its associated store) of the OnUpdateSentinel for this entry, if there is one
}
internal object Value
{
get { return _value; }
}
internal bool HasExpiration()
{
return _utcAbsExp < DateTime.MaxValue;
}
internal DateTime UtcAbsExp
{
get { return _utcAbsExp; }
set { _utcAbsExp = value; }
}
internal DateTime UtcCreated
{
get { return _utcCreated; }
}
internal ExpiresEntryRef ExpiresEntryRef
{
get { return _expiresEntryRef; }
set { _expiresEntryRef = value; }
}
internal byte ExpiresBucket
{
get { return _expiresBucket; }
set { _expiresBucket = value; }
}
internal bool InExpires()
{
return !_expiresEntryRef.IsInvalid;
}
internal TimeSpan SlidingExp
{
get { return _slidingExp; }
}
internal EntryState State
{
get { return (EntryState)_state; }
set { _state = (int)value; }
}
internal byte UsageBucket
{
get { return _usageBucket; }
}
internal UsageEntryRef UsageEntryRef
{
get { return _usageEntryRef; }
set { _usageEntryRef = value; }
}
[SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode", Justification = "Grandfathered suppression from original caching code checkin")]
internal DateTime UtcLastUpdateUsage
{
get { return _utcLastUpdateUsage; }
set { _utcLastUpdateUsage = value; }
}
internal MemoryCacheEntry(string key,
object value,
DateTimeOffset absExp,
TimeSpan slidingExp,
CacheItemPriority priority,
Collection<ChangeMonitor> dependencies,
CacheEntryRemovedCallback removedCallback,
MemoryCache cache) : base(key)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
_utcCreated = DateTime.UtcNow;
_value = value;
_slidingExp = slidingExp;
if (_slidingExp > TimeSpan.Zero)
{
_utcAbsExp = _utcCreated + _slidingExp;
}
else
{
_utcAbsExp = absExp.UtcDateTime;
}
_expiresEntryRef = ExpiresEntryRef.INVALID;
_expiresBucket = 0xff;
_usageEntryRef = UsageEntryRef.INVALID;
if (priority == CacheItemPriority.NotRemovable)
{
_usageBucket = 0xff;
}
else
{
_usageBucket = 0;
}
_callback = removedCallback;
if (dependencies != null)
{
_fields = new SeldomUsedFields();
_fields._dependencies = dependencies;
_fields._cache = cache;
}
}
internal void AddDependent(MemoryCache cache, MemoryCacheEntryChangeMonitor dependent)
{
lock (this)
{
if (State > EntryState.AddedToCache)
{
return;
}
if (_fields == null)
{
_fields = new SeldomUsedFields();
}
if (_fields._cache == null)
{
_fields._cache = cache;
}
if (_fields._dependents == null)
{
_fields._dependents = new Dictionary<MemoryCacheEntryChangeMonitor, MemoryCacheEntryChangeMonitor>();
}
_fields._dependents[dependent] = dependent;
}
}
private void CallCacheEntryRemovedCallback(MemoryCache cache, CacheEntryRemovedReason reason)
{
if (_callback == null)
{
return;
}
CacheEntryRemovedArguments args = new CacheEntryRemovedArguments(cache, reason, new CacheItem(Key, _value));
try
{
_callback(args);
}
catch
{
// swallow any exception thrown by the user-supplied removed-entry callback
}
}
internal void CallNotifyOnChanged()
{
if (_fields != null && _fields._dependencies != null)
{
foreach (ChangeMonitor monitor in _fields._dependencies)
{
monitor.NotifyOnChanged(new OnChangedCallback(this.OnDependencyChanged));
}
}
}
internal bool CompareExchangeState(EntryState value, EntryState comparand)
{
return (Interlocked.CompareExchange(ref _state, (int)value, (int)comparand) == (int)comparand);
}
// Associates this entry with an update sentinel. If this entry has a sliding expiration, we need to
// touch the sentinel so that it doesn't expire.
internal void ConfigureUpdateSentinel(MemoryCacheStore sentinelStore, MemoryCacheEntry sentinelEntry)
{
lock (this)
{
if (_fields == null)
{
_fields = new SeldomUsedFields();
}
_fields._updateSentinel = Tuple.Create(sentinelStore, sentinelEntry);
}
}
internal bool HasUsage()
{
return _usageBucket != 0xff;
}
internal bool InUsage()
{
return !_usageEntryRef.IsInvalid;
}
private void OnDependencyChanged(object state)
{
if (State == EntryState.AddedToCache)
{
_fields._cache.RemoveEntry(this.Key, this, CacheEntryRemovedReason.ChangeMonitorChanged);
}
}
internal void Release(MemoryCache cache, CacheEntryRemovedReason reason)
{
State = EntryState.Closed;
// Are there any cache entries that depend on this entry?
// If so, we need to fire their dependencies.
Dictionary<MemoryCacheEntryChangeMonitor, MemoryCacheEntryChangeMonitor>.KeyCollection deps = null;
// clone the dependents
lock (this)
{
if (_fields != null && _fields._dependents != null && _fields._dependents.Count > 0)
{
deps = _fields._dependents.Keys;
// set to null so RemoveDependent does not attempt to access it, since we're not
// using a copy of the KeyCollection.
_fields._dependents = null;
Debug.Assert(_fields._dependents == null, "_fields._dependents == null");
}
}
if (deps != null)
{
foreach (MemoryCacheEntryChangeMonitor dependent in deps)
{
if (dependent != null)
{
dependent.OnCacheEntryReleased();
}
}
}
CallCacheEntryRemovedCallback(cache, reason);
// Dispose any dependencies
if (_fields != null && _fields._dependencies != null)
{
foreach (ChangeMonitor monitor in _fields._dependencies)
{
monitor.Dispose();
}
}
}
internal void RemoveDependent(MemoryCacheEntryChangeMonitor dependent)
{
lock (this)
{
if (_fields != null && _fields._dependents != null)
{
_fields._dependents.Remove(dependent);
}
}
}
internal void UpdateSlidingExp(DateTime utcNow, CacheExpires expires)
{
if (_slidingExp > TimeSpan.Zero)
{
DateTime utcNewExpires = utcNow + _slidingExp;
if (utcNewExpires - _utcAbsExp >= CacheExpires.MIN_UPDATE_DELTA || utcNewExpires < _utcAbsExp)
{
expires.UtcUpdate(this, utcNewExpires);
}
}
}
internal void UpdateSlidingExpForUpdateSentinel()
{
// We don't need a lock to get information about the update sentinel
SeldomUsedFields fields = _fields;
if (fields != null)
{
Tuple<MemoryCacheStore, MemoryCacheEntry> sentinelInfo = fields._updateSentinel;
// touch the update sentinel to keep it from expiring
if (sentinelInfo != null)
{
MemoryCacheStore sentinelStore = sentinelInfo.Item1;
MemoryCacheEntry sentinelEntry = sentinelInfo.Item2;
sentinelStore.UpdateExpAndUsage(sentinelEntry, updatePerfCounters: false); // perf counters shouldn't be polluted by touching update sentinel entry
}
}
}
internal void UpdateUsage(DateTime utcNow, CacheUsage usage)
{
// update, but not more frequently than once per second.
if (InUsage() && _utcLastUpdateUsage < utcNow - CacheUsage.CORRELATED_REQUEST_TIMEOUT)
{
_utcLastUpdateUsage = utcNow;
usage.Update(this);
if (_fields != null && _fields._dependencies != null)
{
foreach (ChangeMonitor monitor in _fields._dependencies)
{
MemoryCacheEntryChangeMonitor m = monitor as MemoryCacheEntryChangeMonitor;
if (m == null)
{
continue;
}
foreach (MemoryCacheEntry e in m.Dependencies)
{
MemoryCacheStore store = e._fields._cache.GetStore(e);
e.UpdateUsage(utcNow, store.Usage);
}
}
}
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Management.Automation.Internal;
namespace Microsoft.PowerShell.Commands.Internal.Format
{
/// <summary>
/// Class to write object properties in list form by using
/// the host screen interfaces.
/// </summary>
internal class ListWriter
{
/// <summary>
/// Labels already padded with blanks, separator characters, etc.
/// </summary>
private string[] _propertyLabels;
/// <summary>
/// Display length of the property labels in the array (all the same length)
/// </summary>
private int _propertyLabelsDisplayLength = 0;
/// <summary>
/// Column width of the screen.
/// </summary>
private int _columnWidth = 0;
/// <summary>
/// Initializes the writer with the names of the properties to display.
/// </summary>
/// <param name="propertyNames">Names of the properties to display.</param>
/// <param name="screenColumnWidth">Column width of the screen.</param>
/// <param name="dc">Instance of the DisplayCells helper object.</param>
internal void Initialize(string[] propertyNames, int screenColumnWidth, DisplayCells dc)
{
_columnWidth = screenColumnWidth;
if (propertyNames == null || propertyNames.Length == 0)
{
// there is nothing to show
_disabled = true;
return;
}
_disabled = false;
Debug.Assert(propertyNames != null, "propertyNames is null");
Debug.Assert(propertyNames.Length > 0, "propertyNames has zero length");
// assess the useful widths
if ((screenColumnWidth - Separator.Length - MinFieldWidth - MinLabelWidth) < 0)
{
// we do not have enough space for any meaningful display
_disabled = true;
return;
}
// check if we have to truncate the labels
int maxAllowableLabelLength = screenColumnWidth - Separator.Length - MinFieldWidth;
// find out the max display length (cell count) of the property names
_propertyLabelsDisplayLength = 0; // reset max
// cache the cell lengths for each property
Span<int> propertyNameCellCounts = propertyNames.Length <= OutCommandInner.StackAllocThreshold ? stackalloc int[propertyNames.Length] : new int[propertyNames.Length];
for (int k = 0; k < propertyNames.Length; k++)
{
Debug.Assert(propertyNames[k] != null, "propertyNames[k] is null");
propertyNameCellCounts[k] = dc.Length(propertyNames[k]);
if (propertyNameCellCounts[k] > _propertyLabelsDisplayLength)
_propertyLabelsDisplayLength = propertyNameCellCounts[k];
}
if (_propertyLabelsDisplayLength > maxAllowableLabelLength)
{
// need to truncate
_propertyLabelsDisplayLength = maxAllowableLabelLength;
}
_propertyLabels = new string[propertyNames.Length];
for (int k = 0; k < propertyNames.Length; k++)
{
if (propertyNameCellCounts[k] < _propertyLabelsDisplayLength)
{
// shorter than the max, add padding
_propertyLabels[k] = propertyNames[k] + StringUtil.Padding(_propertyLabelsDisplayLength - propertyNameCellCounts[k]);
}
else if (propertyNameCellCounts[k] > _propertyLabelsDisplayLength)
{
// longer than the max, clip
_propertyLabels[k] = propertyNames[k].Substring(0, dc.GetHeadSplitLength(propertyNames[k], _propertyLabelsDisplayLength));
}
else
{
_propertyLabels[k] = propertyNames[k];
}
_propertyLabels[k] += Separator;
}
_propertyLabelsDisplayLength += Separator.Length;
}
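// Example (illustrative): for properties { "Name", "Id" } the labels become
// "Name : " and "Id   : "; each name is padded to the longest display length
// before the separator is appended, so the values line up in a column.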
/// <summary>
/// Write the values of the properties of an object.
/// </summary>
/// <param name="values">Array with the values in form of formatted strings.</param>
/// <param name="lo">LineOutput interface to write to.</param>
internal void WriteProperties(string[] values, LineOutput lo)
{
if (_disabled)
return;
string[] valuesToPrint = null;
if (values == null)
{
// we have nothing, but we have to create an empty array
valuesToPrint = new string[_propertyLabels.Length];
for (int k = 0; k < _propertyLabels.Length; k++)
valuesToPrint[k] = string.Empty;
}
else if (values.Length < _propertyLabels.Length)
{
// need to pad to the end of the array
valuesToPrint = new string[_propertyLabels.Length];
for (int k = 0; k < _propertyLabels.Length; k++)
{
if (k < values.Length)
valuesToPrint[k] = values[k];
else
valuesToPrint[k] = string.Empty;
}
}
else if (values.Length > _propertyLabels.Length)
{
// need to trim
valuesToPrint = new string[_propertyLabels.Length];
for (int k = 0; k < _propertyLabels.Length; k++)
valuesToPrint[k] = values[k];
}
else
{
// perfect match
valuesToPrint = values;
}
Debug.Assert(lo != null, "LineOutput is null");
for (int k = 0; k < _propertyLabels.Length; k++)
{
WriteProperty(k, valuesToPrint[k], lo);
}
}
/// <summary>
/// Helper, writing a single property to the screen.
/// It wraps the value of the property if it is too long to fit.
/// </summary>
/// <param name="k">Index of property to write.</param>
/// <param name="propertyValue">String value of the property to write.</param>
/// <param name="lo">LineOutput interface to write to.</param>
private void WriteProperty(int k, string propertyValue, LineOutput lo)
{
if (propertyValue == null)
propertyValue = string.Empty;
// make sure we honor embedded newlines
string[] lines = StringManipulationHelper.SplitLines(propertyValue);
// padding to use in the lines after the first
string padding = null;
for (int i = 0; i < lines.Length; i++)
{
string prependString = null;
if (i == 0)
prependString = _propertyLabels[k];
else
{
if (padding == null)
padding = StringUtil.Padding(_propertyLabelsDisplayLength);
prependString = padding;
}
WriteSingleLineHelper(prependString, lines[i], lo);
}
}
/// <summary>
/// Internal helper to split a line that is too long to fit and pad it to the left
/// with a given string.
/// </summary>
/// <param name="prependString">String to add to the left.</param>
/// <param name="line">Line to print.</param>
/// <param name="lo">LineOuput to write to.</param>
private void WriteSingleLineHelper(string prependString, string line, LineOutput lo)
{
if (line == null)
line = string.Empty;
// compute the width of the field for the value string (in screen cells)
int fieldCellCount = _columnWidth - _propertyLabelsDisplayLength;
// split the lines
StringCollection sc = StringManipulationHelper.GenerateLines(lo.DisplayCells, line, fieldCellCount, fieldCellCount);
// padding to use in the lines after the first
string padding = StringUtil.Padding(_propertyLabelsDisplayLength);
// display the string collection
for (int k = 0; k < sc.Count; k++)
{
if (k == 0)
{
lo.WriteLine(prependString + sc[k]);
}
else
{
lo.WriteLine(padding + sc[k]);
}
}
}
/// <summary>
/// Set to true when the width of the screen is too small to do anything useful.
/// </summary>
private bool _disabled = false;
private const string Separator = " : ";
/// <summary>
/// Minimum width for the property label field.
/// </summary>
private const int MinLabelWidth = 1;
/// <summary>
/// Minimum width for the property value field.
/// </summary>
private const int MinFieldWidth = 1;
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Data;
using CALI.Database.Contracts;
using CALI.Database.Contracts.Data;
///////////////////////////////////////////////////////////
//Do not modify this file. Use a partial class to extend.//
///////////////////////////////////////////////////////////
// This file contains static implementations of the LogLogic
// Add your own static methods by making a new partial class.
// You cannot override static methods, instead override the methods
// located in LogLogicBase by making a partial class of LogLogic
// and overriding the base methods.
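// Extension sketch (illustrative; the helper below is hypothetical). Custom behaviour goes
// in a separate file that re-opens the partial class, for example:
//
//     namespace CALI.Database.Logic.Data
//     {
//         public partial class LogLogic
//         {
//             public static int? LogNow(string message) =>
//                 InsertNow(Environment.MachineName, message, DateTime.UtcNow);
//         }
//     }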
namespace CALI.Database.Logic.Data
{
public partial class LogLogic
{
//Put your code in a separate file. This is auto generated.
/// <summary>
/// Run Log_Insert.
/// </summary>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <param name="fldRunTime">Value for RunTime</param>
public static int? InsertNow(string fldRunOnMachineName
, string fldLogContents
, DateTime fldRunTime
)
{
return (new LogLogic()).Insert(fldRunOnMachineName
, fldLogContents
, fldRunTime
);
}
/// <summary>
/// Run Log_Insert.
/// </summary>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <param name="fldRunTime">Value for RunTime</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
public static int? InsertNow(string fldRunOnMachineName
, string fldLogContents
, DateTime fldRunTime
, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Insert(fldRunOnMachineName
, fldLogContents
, fldRunTime
, connection, transaction);
}
/// <summary>
/// Insert by providing a populated data row container
/// </summary>
/// <param name="row">The table row data to use</param>
/// <returns>The number of rows affected.</returns>
public static int InsertNow(LogContract row)
{
return (new LogLogic()).Insert(row);
}
/// <summary>
/// Insert by providing a populated data contract
/// </summary>
/// <param name="row">The table row data to use</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int InsertNow(LogContract row, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Insert(row, connection, transaction);
}
/// <summary>
/// Insert the rows in bulk, using the same connection (faster).
/// </summary>
/// <param name="rows">The table rows to Insert</param>
/// <returns>The number of rows affected.</returns>
public static int InsertAllNow(List<LogContract> rows)
{
return (new LogLogic()).InsertAll(rows);
}
/// <summary>
/// Insert the rows in bulk, using the same connection (faster), in a provided transaction scope.
/// </summary>
/// <param name="rows">The table rows to Insert</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int InsertAllNow(List<LogContract> rows, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).InsertAll(rows, connection, transaction);
}
/// <summary>
/// Run Log_Update.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <param name="fldRunTime">Value for RunTime</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateNow(int fldLogId
, string fldRunOnMachineName
, string fldLogContents
, DateTime fldRunTime
)
{
return (new LogLogic()).Update(fldLogId
, fldRunOnMachineName
, fldLogContents
, fldRunTime
);
}
/// <summary>
/// Run Log_Update.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <param name="fldRunTime">Value for RunTime</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateNow(int fldLogId
, string fldRunOnMachineName
, string fldLogContents
, DateTime fldRunTime
, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Update(fldLogId
, fldRunOnMachineName
, fldLogContents
, fldRunTime
, connection, transaction);
}
/// <summary>
/// Update by providing a populated data row container
/// </summary>
/// <param name="row">The table row data to use</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateNow(LogContract row)
{
return (new LogLogic()).Update(row);
}
/// <summary>
/// Update by providing a populated data contract
/// </summary>
/// <param name="row">The table row data to use</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateNow(LogContract row, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Update(row, connection, transaction);
}
/// <summary>
/// Update the rows in bulk, using the same connection (faster).
/// </summary>
/// <param name="rows">The table rows to Update</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateAllNow(List<LogContract> rows)
{
return (new LogLogic()).UpdateAll(rows);
}
/// <summary>
/// Update the rows in bulk, using the same connection (faster), in a provided transaction scope.
/// </summary>
/// <param name="rows">The table rows to Update</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int UpdateAllNow(List<LogContract> rows, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).UpdateAll(rows, connection, transaction);
}
/// <summary>
/// Run Log_Delete.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteNow(int fldLogId
)
{
return (new LogLogic()).Delete(fldLogId
);
}
/// <summary>
/// Run Log_Delete.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteNow(int fldLogId
, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Delete(fldLogId
, connection, transaction);
}
/// <summary>
/// Delete by providing a populated data row container
/// </summary>
/// <param name="row">The table row data to use</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteNow(LogContract row)
{
return (new LogLogic()).Delete(row);
}
/// <summary>
/// Delete by providing a populated data contract
/// </summary>
/// <param name="row">The table row data to use</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteNow(LogContract row, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Delete(row, connection, transaction);
}
/// <summary>
/// Delete the rows in bulk, uses the same connection (faster).
/// </summary>
/// <param name="rows">The table rows to Delete</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteAllNow(List<LogContract> rows)
{
return (new LogLogic()).DeleteAll(rows);
}
/// <summary>
/// Delete the rows in bulk, uses the same connection (faster), in a provided transaction scope.
/// </summary>
/// <param name="rows">The table rows to Delete</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int DeleteAllNow(List<LogContract> rows, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).DeleteAll(rows, connection, transaction);
}
/// <summary>
/// Determine if the table contains a row with the existing values
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <returns>True, if the values exist, or false.</returns>
public static bool ExistsNow(int fldLogId
)
{
return (new LogLogic()).Exists(fldLogId
);
}
/// <summary>
/// Determine if the table contains a row with the existing values
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>True, if the values exist, or false.</returns>
public static bool ExistsNow(int fldLogId
, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).Exists(fldLogId
, connection, transaction);
}
/// <summary>
/// Run Log_Search, and return results as a list of LogRow.
/// </summary>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SearchNow(string fldRunOnMachineName
, string fldLogContents
)
{
var driver = new LogLogic();
driver.Search(fldRunOnMachineName
, fldLogContents
);
return driver.Results;
}
/// <summary>
/// Run Log_Search, and return results as a list of LogRow.
/// </summary>
/// <param name="fldRunOnMachineName">Value for RunOnMachineName</param>
/// <param name="fldLogContents">Value for LogContents</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SearchNow(string fldRunOnMachineName
, string fldLogContents
, SqlConnection connection, SqlTransaction transaction)
{
var driver = new LogLogic();
driver.Search(fldRunOnMachineName
, fldLogContents
, connection, transaction);
return driver.Results;
}
/// <summary>
/// Run Log_SelectAll, and return results as a list of LogRow.
/// </summary>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SelectAllNow()
{
var driver = new LogLogic();
driver.SelectAll();
return driver.Results;
}
/// <summary>
/// Run Log_SelectAll, and return results as a list of LogRow.
/// </summary>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SelectAllNow(SqlConnection connection, SqlTransaction transaction)
{
var driver = new LogLogic();
driver.SelectAll(connection, transaction);
return driver.Results;
}
/// <summary>
/// Run Log_SelectBy_LogId, and return results as a list of LogRow.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SelectBy_LogIdNow(int fldLogId
)
{
var driver = new LogLogic();
driver.SelectBy_LogId(fldLogId
);
return driver.Results;
}
/// <summary>
/// Run Log_SelectBy_LogId, and return results as a list of LogRow.
/// </summary>
/// <param name="fldLogId">Value for LogId</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>A collection of LogRow.</returns>
public static List<LogContract> SelectBy_LogIdNow(int fldLogId
, SqlConnection connection, SqlTransaction transaction)
{
var driver = new LogLogic();
driver.SelectBy_LogId(fldLogId
, connection, transaction);
return driver.Results;
}
/// <summary>
/// Read all Log rows from the provided reader into the list structure of LogRows
/// </summary>
/// <param name="reader">The result of running a sql command.</param>
/// <returns>A populated LogRows or an empty LogRows if there are no results.</returns>
public static List<LogContract> ReadAllNow(SqlDataReader reader)
{
var driver = new LogLogic();
driver.ReadAll(reader);
return driver.Results;
}
/// <summary>
/// Advance the reader by one row, and read the values into a Log
/// </summary>
/// <param name="reader">The result of running a sql command.</param>
/// <returns>A Log or null if there are no results.</returns>
public static LogContract ReadOneNow(SqlDataReader reader)
{
var driver = new LogLogic();
return driver.ReadOne(reader) ? driver.Results[0] : null;
}
/// <summary>
/// Saves the row, either by inserting (when the identity key is null) or by updating (identity key has value).
/// </summary>
/// <param name="row">The data to save</param>
/// <returns>The number of rows affected.</returns>
public static int SaveNow(LogContract row)
{
if(row.LogId == null)
{
return InsertNow(row);
}
else
{
return UpdateNow(row);
}
}
/// <summary>
/// Saves the row, either by inserting (when the identity key is null) or by updating (identity key has value).
/// </summary>
/// <param name="row">The data to save</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int SaveNow(LogContract row, SqlConnection connection, SqlTransaction transaction)
{
if(row.LogId == null)
{
return InsertNow(row, connection, transaction);
}
else
{
return UpdateNow(row, connection, transaction);
}
}
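/// <summary>
/// Illustrative sketch (not generated): saves a row inside a caller-owned transaction,
/// letting SaveNow pick InsertNow when LogId is null and UpdateNow otherwise.
/// The connection string parameter is an assumption supplied by the caller.
/// </summary>
/// <param name="row">The data to save</param>
/// <param name="connectionString">A SQL Server connection string</param>
/// <returns>The number of rows affected.</returns>
private static int SaveExample(LogContract row, string connectionString)
{
using (var connection = new SqlConnection(connectionString))
{
connection.Open();
using (var transaction = connection.BeginTransaction())
{
// LogId == null -> InsertNow(row, connection, transaction); otherwise UpdateNow(row, connection, transaction).
int affected = SaveNow(row, connection, transaction);
transaction.Commit();
return affected;
}
}
}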
/// <summary>
/// Save the rows in bulk, uses the same connection (faster).
/// </summary>
/// <param name="rows">The table rows to Save</param>
/// <returns>The number of rows affected.</returns>
public static int SaveAllNow(List<LogContract> rows)
{
return (new LogLogic()).SaveAll(rows);
}
/// <summary>
/// Save the rows in bulk, uses the same connection (faster), in a provided transaction scope.
/// </summary>
/// <param name="rows">The table rows to Save</param>
/// <param name="connection">The SqlConnection to use</param>
/// <param name="transaction">The SqlTransaction to use</param>
/// <returns>The number of rows affected.</returns>
public static int SaveAllNow(List<LogContract> rows, SqlConnection connection, SqlTransaction transaction)
{
return (new LogLogic()).SaveAll(rows, connection, transaction);
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Management.Scheduler.Models;
namespace Microsoft.WindowsAzure.Management.Scheduler.Models
{
/// <summary>
/// The response structure for the Cloud Service List operation.
/// </summary>
public partial class CloudServiceListResponse : AzureOperationResponse, IEnumerable<CloudServiceListResponse.CloudService>
{
private IList<CloudServiceListResponse.CloudService> _cloudServices;
/// <summary>
/// Optional. The list of cloud service locations for this subscription.
/// </summary>
public IList<CloudServiceListResponse.CloudService> CloudServices
{
get { return this._cloudServices; }
set { this._cloudServices = value; }
}
/// <summary>
/// Initializes a new instance of the CloudServiceListResponse class.
/// </summary>
public CloudServiceListResponse()
{
this.CloudServices = new LazyList<CloudServiceListResponse.CloudService>();
}
/// <summary>
/// Gets the sequence of CloudServices.
/// </summary>
public IEnumerator<CloudServiceListResponse.CloudService> GetEnumerator()
{
return this.CloudServices.GetEnumerator();
}
/// <summary>
/// Gets the sequence of CloudServices.
/// </summary>
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return this.GetEnumerator();
}
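// Usage note (illustrative, not part of the generated contract): because this response
// implements IEnumerable<CloudService>, callers can iterate it directly, for example:
//
// foreach (CloudServiceListResponse.CloudService service in response)
// {
// Console.WriteLine("{0} ({1})", service.Name, service.GeoRegion);
// }
//
// where "response" is assumed to be the result of a cloud service list operation.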
/// <summary>
/// Defines a cloud service-supporting region in which an item is
/// located.
/// </summary>
public partial class CloudService
{
private string _description;
/// <summary>
/// Optional. The description of the cloud service region.
/// </summary>
public string Description
{
get { return this._description; }
set { this._description = value; }
}
private string _geoRegion;
/// <summary>
/// Optional. The geographical region in which this cloud service
/// can run.
/// </summary>
public string GeoRegion
{
get { return this._geoRegion; }
set { this._geoRegion = value; }
}
private string _label;
/// <summary>
/// Optional. The label of the cloud service region.
/// </summary>
public string Label
{
get { return this._label; }
set { this._label = value; }
}
private string _name;
/// <summary>
/// Optional. The name of the cloud service region.
/// </summary>
public string Name
{
get { return this._name; }
set { this._name = value; }
}
private IList<CloudServiceListResponse.CloudService.AddOnResource> _resources;
/// <summary>
/// Optional. A list of existing resources installed into a cloud
/// service region.
/// </summary>
public IList<CloudServiceListResponse.CloudService.AddOnResource> Resources
{
get { return this._resources; }
set { this._resources = value; }
}
/// <summary>
/// Initializes a new instance of the CloudService class.
/// </summary>
public CloudService()
{
this.Resources = new LazyList<CloudServiceListResponse.CloudService.AddOnResource>();
}
/// <summary>
/// A store add-on item.
/// </summary>
public partial class AddOnResource
{
private string _eTag;
/// <summary>
/// Optional. The ETag for this resource.
/// </summary>
public string ETag
{
get { return this._eTag; }
set { this._eTag = value; }
}
private string _name;
/// <summary>
/// Optional. The user-input name of this item.
/// </summary>
public string Name
{
get { return this._name; }
set { this._name = value; }
}
private string _namespace;
/// <summary>
/// Optional. The namespace in which this item resides.
/// </summary>
public string Namespace
{
get { return this._namespace; }
set { this._namespace = value; }
}
private IDictionary<string, string> _outputItems;
/// <summary>
/// Optional. Output items associated with an individual
/// resource.
/// </summary>
public IDictionary<string, string> OutputItems
{
get { return this._outputItems; }
set { this._outputItems = value; }
}
private string _plan;
/// <summary>
/// Optional. The plan for this item as selected by the user.
/// </summary>
public string Plan
{
get { return this._plan; }
set { this._plan = value; }
}
private string _schemaVersion;
/// <summary>
/// Optional. The schema version for this resource.
/// </summary>
public string SchemaVersion
{
get { return this._schemaVersion; }
set { this._schemaVersion = value; }
}
private string _state;
/// <summary>
/// Optional. The state of this resource.
/// </summary>
public string State
{
get { return this._state; }
set { this._state = value; }
}
private CloudServiceListResponse.CloudService.AddOnResource.OperationStatus _status;
/// <summary>
/// Optional. Operation status items associated with an
/// individual resource.
/// </summary>
public CloudServiceListResponse.CloudService.AddOnResource.OperationStatus Status
{
get { return this._status; }
set { this._status = value; }
}
private string _type;
/// <summary>
/// Optional. The type of store item.
/// </summary>
public string Type
{
get { return this._type; }
set { this._type = value; }
}
private IList<CloudServiceListResponse.CloudService.AddOnResource.UsageLimit> _usageLimits;
/// <summary>
/// Optional. Usage meters associated with an individual
/// resource.
/// </summary>
public IList<CloudServiceListResponse.CloudService.AddOnResource.UsageLimit> UsageLimits
{
get { return this._usageLimits; }
set { this._usageLimits = value; }
}
/// <summary>
/// Initializes a new instance of the AddOnResource class.
/// </summary>
public AddOnResource()
{
this.OutputItems = new LazyDictionary<string, string>();
this.UsageLimits = new LazyList<CloudServiceListResponse.CloudService.AddOnResource.UsageLimit>();
}
/// <summary>
/// The operation status of an individual resource item.
/// </summary>
public partial class OperationStatus
{
private Error _error;
/// <summary>
/// Optional. The error details for operations that failed.
/// </summary>
public Error Error
{
get { return this._error; }
set { this._error = value; }
}
private string _result;
/// <summary>
/// Optional. The result of this operation status.
/// </summary>
public string Result
{
get { return this._result; }
set { this._result = value; }
}
private string _type;
/// <summary>
/// Optional. The type of this operation status.
/// </summary>
public string Type
{
get { return this._type; }
set { this._type = value; }
}
/// <summary>
/// Initializes a new instance of the OperationStatus class.
/// </summary>
public OperationStatus()
{
}
}
/// <summary>
/// Describes the current utilization and metering of a
/// resource item.
/// </summary>
public partial class UsageLimit
{
private string _amountIncluded;
/// <summary>
/// Optional. Defines the limit of this usage included in
/// this resource's plan.
/// </summary>
public string AmountIncluded
{
get { return this._amountIncluded; }
set { this._amountIncluded = value; }
}
private string _amountUsed;
/// <summary>
/// Optional. The amount of this resource that has already
/// been used.
/// </summary>
public string AmountUsed
{
get { return this._amountUsed; }
set { this._amountUsed = value; }
}
private string _name;
/// <summary>
/// Optional. The name of this usage limit.
/// </summary>
public string Name
{
get { return this._name; }
set { this._name = value; }
}
private string _unit;
/// <summary>
/// Optional. The unit in which this usage limit is
/// measured.
/// </summary>
public string Unit
{
get { return this._unit; }
set { this._unit = value; }
}
/// <summary>
/// Initializes a new instance of the UsageLimit class.
/// </summary>
public UsageLimit()
{
}
}
}
}
}
}
| |
#region usings
using System;
using System.ComponentModel.Composition;
using System.Runtime.InteropServices;
using System.Collections;
using System.Collections.Generic;
using SlimDX;
using SlimDX.DirectInput;
using VVVV.PluginInterfaces.V1;
using VVVV.PluginInterfaces.V2;
using VVVV.Utils.VColor;
using VVVV.Utils.VMath;
using VVVV.Core.Logging;
#endregion usings
namespace mp.essentials.Nodes.Devices
{
[PluginInfo(
Name = "GameController",
Category = "Devices",
Version = "DirectInput",
Tags = "Joystick, Gamepad, Analog",
Author = "microdee"
)]
public class DirectInputDevicesGameControllerNode : IPluginEvaluate
{
#region fields & pins
[Input("Window Handle", DefaultValue = -1, IsSingle = true)]
public ISpread<int> FHandle;
[Input("Foreground", DefaultValue = 0, IsSingle = true)]
public ISpread<bool> FFrg;
[Input("Exclusive", DefaultValue = 0, IsSingle = true)]
public ISpread<bool> FExclusive;
[Input("Reinitialize", DefaultValue = 0, IsBang = true, IsSingle = true)]
public ISpread<bool> FInit;
[Input("Device", EnumName = "DirectInputGameControllerDevices", IsSingle = true)]
public IDiffSpread<EnumEntry> FDevice;
[Output("Name")]
public ISpread<string> FName;
[Output("XYZ ")]
public ISpread<Vector3D> FXYZ;
[Output("Velocity ")]
public ISpread<Vector3D> Fv;
[Output("Acceleration ")]
public ISpread<Vector3D> Fa;
[Output("Rotation ")]
public ISpread<Vector3D> FRot;
[Output("Angular Velocity ")]
public ISpread<Vector3D> FAv;
[Output("Angular Acceleration ")]
public ISpread<Vector3D> FAa;
[Output("Torque ")]
public ISpread<Vector3D> Ft;
[Output("Force ")]
public ISpread<Vector3D> Ff;
[Output("Sliders")]
public ISpread<ISpread<int>> FSliders;
[Output("Point of View")]
public ISpread<ISpread<int>> FPoV;
[Output("Buttons")]
public ISpread<ISpread<bool>> FButtons;
[Output("Objects")]
public ISpread<ISpread<DeviceObjectInstance>> FObjects;
[Import()]
public ILogger FLogger;
#endregion fields & pins
[DllImport("C:\\Windows\\System32\\user32.dll")]
public static extern IntPtr GetForegroundWindow();
private DirectInput dinput = new DirectInput();
private List<Joystick> Joysticks = new List<Joystick>();
private bool init = true;
private void InitDevice() {
Joysticks.Clear();
foreach(DeviceInstance di in dinput.GetDevices(DeviceClass.GameController, DeviceEnumerationFlags.AttachedOnly))
{
if(FDevice[0].Name == "All" || FDevice[0].Name == di.InstanceName)
Joysticks.Add(new Joystick(dinput, di.InstanceGuid));
}
IntPtr handle = (FHandle[0]==-1) ? GetForegroundWindow() : new IntPtr(FHandle[0]);
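// Cooperative level selection applied below:
// - Foreground and not Exclusive -> Foreground | Nonexclusive (input only while the window has focus, shared with other applications)
// - Foreground and Exclusive -> Foreground | Exclusive (input only while focused, other applications are locked out)
// - not Foreground -> Background | Nonexclusive (input is delivered even when the window is not focused)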
if(FFrg[0] && (!FExclusive[0])) {
foreach(Joystick J in Joysticks)
{
J.SetCooperativeLevel(
handle,
CooperativeLevel.Foreground | CooperativeLevel.Nonexclusive
);
}
}
if(FFrg[0] && FExclusive[0]) {
foreach(Joystick J in Joysticks)
{
J.SetCooperativeLevel(
handle,
CooperativeLevel.Foreground | CooperativeLevel.Exclusive
);
}
}
if(!FFrg[0]) {
foreach(Joystick J in Joysticks)
{
J.SetCooperativeLevel(
handle,
CooperativeLevel.Background | CooperativeLevel.Nonexclusive
);
}
}
foreach(Joystick J in Joysticks)
{
J.Acquire();
}
}
private void UpdateDeviceList()
{
var s = new string[]{"All"};
EnumManager.UpdateEnum("DirectInputGameControllerDevices", "All", s);
foreach(DeviceInstance di in dinput.GetDevices(DeviceClass.GameController, DeviceEnumerationFlags.AttachedOnly))
{
EnumManager.AddEntry("DirectInputGameControllerDevices", di.InstanceName);
}
}
[ImportingConstructor]
public DirectInputDevicesGameControllerNode()
{
UpdateDeviceList();
}
//called when data for any output pin is requested
public void Evaluate(int SpreadMax)
{
if(FInit[0] || init || FDevice.IsChanged)
{
if(FInit[0])
{
}
InitDevice();
init = false;
}
FName.SliceCount = Joysticks.Count;
FXYZ.SliceCount = Joysticks.Count;
Fv.SliceCount = Joysticks.Count;
Fa.SliceCount = Joysticks.Count;
FRot.SliceCount = Joysticks.Count;
FAv.SliceCount = Joysticks.Count;
FAa.SliceCount = Joysticks.Count;
Ft.SliceCount = Joysticks.Count;
Ff.SliceCount = Joysticks.Count;
FSliders.SliceCount = Joysticks.Count;
FPoV.SliceCount = Joysticks.Count;
FButtons.SliceCount = Joysticks.Count;
FObjects.SliceCount = Joysticks.Count;
for(int i=0; i<Joysticks.Count; i++)
{
Joystick J = Joysticks[i];
JoystickState Js = J.GetCurrentState();
FName[i] = J.Information.InstanceName;
FXYZ[i] = new Vector3D(Js.X, Js.Y, Js.Z);
Fv[i] = new Vector3D(Js.VelocityX, Js.VelocityY, Js.VelocityZ);
Fa[i] = new Vector3D(Js.AccelerationX, Js.AccelerationY, Js.AccelerationZ);
FRot[i] = new Vector3D(Js.RotationX, Js.RotationY, Js.RotationZ);
FAv[i] = new Vector3D(Js.AngularVelocityX, Js.AngularVelocityY, Js.AngularVelocityZ);
FAa[i] = new Vector3D(Js.AngularAccelerationX, Js.AngularAccelerationY, Js.AngularAccelerationZ);
Ft[i] = new Vector3D(Js.TorqueX, Js.TorqueY, Js.TorqueZ);
Ff[i] = new Vector3D(Js.ForceX, Js.ForceY, Js.ForceZ);
int[] sliders = Js.GetSliders();
FSliders[i].SliceCount = sliders.Length;
for(int j=0; j<sliders.Length; j++)
{
FSliders[i][j] = sliders[j];
}
int[] pov = Js.GetPointOfViewControllers();
FPoV[i].SliceCount = pov.Length;
for(int j=0; j<pov.Length; j++)
{
FPoV[i][j] = pov[j];
}
bool[] buttons = Js.GetButtons();
FButtons[i].SliceCount = buttons.Length;
for(int j=0; j<buttons.Length; j++)
{
FButtons[i][j] = buttons[j];
}
IList<DeviceObjectInstance> objs = J.GetObjects();
FObjects[i].SliceCount = objs.Count;
for(int j=0; j<objs.Count; j++)
{
FObjects[i][j] = objs[j];
}
}
}
}
[PluginInfo(
Name = "DeviceObject",
Category = "Devices",
Version = "DirectInput",
Author = "microdee"
)]
public class DirectInputDevicesDeviceObjectNode : IPluginEvaluate
{
[Input("Device Object")]
public ISpread<DeviceObjectInstance> FDOI;
[Output("Collection Number")]
public ISpread<int> FCollectionNumber;
[Output("Designator Index")]
public ISpread<int> FDesignatorIndex;
[Output("Dimension")]
public ISpread<int> FDimension;
[Output("Exponent")]
public ISpread<int> FExponent;
[Output("Force Feedback Resolution")]
public ISpread<int> FFFR;
[Output("Maximum Force Feedback")]
public ISpread<int> FMFF;
[Output("Name")]
public ISpread<string> FName;
[Output("Object Type")]
public ISpread<string> FType;
[Output("Offset")]
public ISpread<int> FOffset;
[Output("Report ID")]
public ISpread<int> FReportID;
[Output("Usage")]
public ISpread<int> FUsage;
[Output("Usage Page")]
public ISpread<int> FUsagePage;
public void Evaluate(int SpreadMax)
{
FCollectionNumber.SliceCount = FDOI.SliceCount;
FDesignatorIndex.SliceCount = FDOI.SliceCount;
FDimension.SliceCount = FDOI.SliceCount;
FExponent.SliceCount = FDOI.SliceCount;
FFFR.SliceCount = FDOI.SliceCount;
FMFF.SliceCount = FDOI.SliceCount;
FName.SliceCount = FDOI.SliceCount;
FType.SliceCount = FDOI.SliceCount;
FOffset.SliceCount = FDOI.SliceCount;
FReportID.SliceCount = FDOI.SliceCount;
FUsage.SliceCount = FDOI.SliceCount;
FUsagePage.SliceCount = FDOI.SliceCount;
for(int i=0; i<FDOI.SliceCount; i++)
{
FCollectionNumber[i] = FDOI[i].CollectionNumber;
FDesignatorIndex[i] = FDOI[i].DesignatorIndex;
FDimension[i] = FDOI[i].Dimension;
FExponent[i] = FDOI[i].Exponent;
FFFR[i] = FDOI[i].ForceFeedbackResolution;
FMFF[i] = FDOI[i].MaximumForceFeedback;
FName[i] = FDOI[i].Name;
FType[i] = FDOI[i].ObjectType.ToString();
FOffset[i] = FDOI[i].Offset;
FReportID[i] = FDOI[i].ReportId;
FUsage[i] = FDOI[i].Usage;
FUsagePage[i] = FDOI[i].UsagePage;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using AutoMapper;
using FizzWare.NBuilder;
using Moq;
using NUnit.Framework;
using ReMi.BusinessEntities.DeploymentTool;
using ReMi.BusinessEntities.Products;
using ReMi.BusinessEntities.ReleaseCalendar;
using ReMi.TestUtils.UnitTests;
using ReMi.Contracts.Plugins.Data.SourceControl;
using ReMi.Contracts.Plugins.Services.SourceControl;
using ReMi.DataAccess.BusinessEntityGateways.Products;
using ReMi.DataAccess.BusinessEntityGateways.ReleaseCalendar;
using ReMi.DataAccess.BusinessEntityGateways.ReleasePlan;
using ReMi.DataAccess.BusinessEntityGateways.SourceControl;
using ReMi.DataAccess.Exceptions;
using ReMi.Queries.ReleasePlan;
using ReMi.QueryHandlers.ReleasePlan;
namespace ReMi.QueryHandlers.Tests.ReleasePlan
{
public class GetReleaseChangesHandlerTests : TestClassFor<GetReleaseChangesHandler>
{
private Mock<IReleaseWindowGateway> _releaseWindowGatewayMock;
private Mock<ISourceControlChangeGateway> _changesGatewayMock;
private Mock<IReleaseJobGateway> _releaseJobGatewayMock;
private Mock<IReleaseRepositoryGateway> _releaseRepositoryGatewayMock;
private Mock<IProductGateway> _packageGatewayMock;
private Mock<IMappingEngine> _mappingEngineMock;
private Mock<ISourceControl> _sourceControlMock;
protected override GetReleaseChangesHandler ConstructSystemUnderTest()
{
return new GetReleaseChangesHandler
{
ReleaseWindowGatewayFactory = () => _releaseWindowGatewayMock.Object,
MappingEngine = _mappingEngineMock.Object,
SourceControlChangeGatewayFactory = () => _changesGatewayMock.Object,
SourceControlService = _sourceControlMock.Object,
ProductGatewayFactory = () => _packageGatewayMock.Object,
ReleaseJobGatewayFactory = () => _releaseJobGatewayMock.Object,
ReleaseRepositoryGatewayFactory = () => _releaseRepositoryGatewayMock.Object
};
}
protected override void TestInitialize()
{
_releaseWindowGatewayMock = new Mock<IReleaseWindowGateway>(MockBehavior.Strict);
_sourceControlMock = new Mock<ISourceControl>(MockBehavior.Strict);
_packageGatewayMock = new Mock<IProductGateway>(MockBehavior.Strict);
_changesGatewayMock = new Mock<ISourceControlChangeGateway>(MockBehavior.Strict);
_mappingEngineMock = new Mock<IMappingEngine>(MockBehavior.Strict);
_releaseJobGatewayMock = new Mock<IReleaseJobGateway>(MockBehavior.Strict);
_releaseRepositoryGatewayMock = new Mock<IReleaseRepositoryGateway>(MockBehavior.Strict);
base.TestInitialize();
}
[Test]
public void Handle_ShouldThrowException_WhenNoPackageAssignToReleaseWindow()
{
var request = new GetReleaseChangesRequest { ReleaseWindowId = Guid.NewGuid() };
var releaseWindow = Builder<ReleaseWindow>.CreateNew()
.With(x => x.ExternalId, request.ReleaseWindowId)
.Build();
_releaseWindowGatewayMock.Setup(x => x.GetByExternalId(request.ReleaseWindowId, true, false))
.Returns(releaseWindow);
_releaseWindowGatewayMock.Setup(x => x.Dispose());
var ex = Assert.Throws<ProductShouldBeAssignedException>(() => Sut.Handle(request));
Assert.IsTrue(ex.Message.Contains(request.ReleaseWindowId.ToString()));
}
[Test]
public void Handle_ShouldGetSourceCodeChangesFromDataBase_WhenReleaseIsApprovedAndRequestIsNotBackground()
{
var request = new GetReleaseChangesRequest { ReleaseWindowId = Guid.NewGuid() };
var releaseWindow = Builder<ReleaseWindow>.CreateNew()
.With(x => x.ExternalId, request.ReleaseWindowId)
.With(x => x.ApprovedOn, RandomData.RandomDateTime())
.With(x => x.Products, new[] { "product" })
.Build();
var changes = Builder<SourceControlChange>.CreateListOfSize(5).Build();
_releaseWindowGatewayMock.Setup(x => x.GetByExternalId(request.ReleaseWindowId, true, false))
.Returns(releaseWindow);
_changesGatewayMock.Setup(x => x.GetChanges(releaseWindow.ExternalId))
.Returns(changes);
_releaseWindowGatewayMock.Setup(x => x.Dispose());
_changesGatewayMock.Setup(x => x.Dispose());
var result = Sut.Handle(request);
CollectionAssert.AreEquivalent(changes, result.Changes);
_releaseWindowGatewayMock.Verify(x => x.GetByExternalId(It.IsAny<Guid>(), It.IsAny<bool>(), It.IsAny<bool>()), Times.Once);
_changesGatewayMock.Verify(x => x.GetChanges(It.IsAny<Guid>()), Times.Once);
}
[Test]
public void Handle_ShouldGetSourceCodeChangesFromServiceWhichAreNotYetInDatabase_WhenReleaseNotApprovedAndRequestIsNotBackground()
{
var package = Builder<Product>.CreateNew().Build();
var releaseWindow = Builder<ReleaseWindow>.CreateNew()
.With(x => x.ExternalId, Guid.NewGuid())
.With(x => x.Products, new[] { package.Description })
.With(x => x.ApprovedOn, null)
.Build();
var jobs = Builder<ReleaseJob>.CreateListOfSize(5).Build();
var request = new GetReleaseChangesRequest { ReleaseWindowId = releaseWindow.ExternalId };
var changes = Builder<SourceControlChange>.CreateListOfSize(5).Build();
_releaseWindowGatewayMock.Setup(x => x.GetByExternalId(request.ReleaseWindowId, true, false))
.Returns(releaseWindow);
_packageGatewayMock.Setup(x => x.GetProducts(releaseWindow.ExternalId))
.Returns(new[] { package });
_releaseJobGatewayMock.Setup(x => x.GetReleaseJobs(releaseWindow.ExternalId, false))
.Returns(jobs);
_sourceControlMock.Setup(x => x.GetSourceControlRetrieveMode(
It.Is<IEnumerable<Guid>>(i => i.Contains(package.ExternalId))))
.Returns(new Dictionary<Guid, SourceControlRetrieveMode>
{
{ package.ExternalId, SourceControlRetrieveMode.DeploymentJobs }
});
_sourceControlMock.Setup(x => x.GetChangesByReleaseJobs(
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(new[] { package.ExternalId })),
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(jobs.Select(j => j.JobId)))))
.Returns(changes);
_changesGatewayMock.Setup(x => x.FilterExistingChangesByProduct(
It.Is<IEnumerable<string>>(p => p.SequenceEqual(changes.Select(c => c.Identifier))),
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(new[] { package.ExternalId }))))
.Returns(changes.Take(3).Select(x => x.Identifier).ToArray());
_releaseWindowGatewayMock.Setup(x => x.Dispose());
_packageGatewayMock.Setup(x => x.Dispose());
_releaseJobGatewayMock.Setup(x => x.Dispose());
_changesGatewayMock.Setup(x => x.Dispose());
var result = Sut.Handle(request);
CollectionAssert.AreEquivalent(changes.Skip(3), result.Changes);
_releaseWindowGatewayMock.Verify(x => x.GetByExternalId(It.IsAny<Guid>(), It.IsAny<bool>(), It.IsAny<bool>()), Times.Once);
_packageGatewayMock.Verify(x => x.GetProducts(It.IsAny<Guid>()), Times.Once);
_releaseJobGatewayMock.Verify(x => x.GetReleaseJobs(It.IsAny<Guid>(), It.IsAny<bool>()), Times.Once);
_sourceControlMock.Verify(x => x.GetChangesByReleaseJobs(It.IsAny<IEnumerable<Guid>>(), It.IsAny<IEnumerable<Guid>>()), Times.Once);
_changesGatewayMock.Verify(x => x.FilterExistingChangesByProduct(
It.IsAny<IEnumerable<string>>(), It.IsAny<IEnumerable<Guid>>()), Times.Once);
_changesGatewayMock.Verify(x => x.GetChanges(It.IsAny<Guid>()), Times.Never);
}
[Test]
public void Handle_ShouldGetSourceCodeChangesFromServiceWithoutFiltering_WhenReleaseNotApprovedAndRequestIsBackground()
{
var package = Builder<Product>.CreateNew().Build();
var releaseWindow = Builder<ReleaseWindow>.CreateNew()
.With(x => x.ExternalId, Guid.NewGuid())
.With(x => x.Products, new[] { package.Description })
.With(x => x.ApprovedOn, null)
.Build();
var jobs = Builder<ReleaseJob>.CreateListOfSize(5).Build();
var request = new GetReleaseChangesRequest { ReleaseWindowId = releaseWindow.ExternalId, IsBackground = true };
var changes = Builder<SourceControlChange>.CreateListOfSize(5).Build();
_releaseWindowGatewayMock.Setup(x => x.GetByExternalId(request.ReleaseWindowId, true, false))
.Returns(releaseWindow);
_packageGatewayMock.Setup(x => x.GetProducts(releaseWindow.ExternalId))
.Returns(new[] { package });
_releaseJobGatewayMock.Setup(x => x.GetReleaseJobs(releaseWindow.ExternalId, false))
.Returns(jobs);
_sourceControlMock.Setup(x => x.GetSourceControlRetrieveMode(
It.Is<IEnumerable<Guid>>(i => i.Contains(package.ExternalId))))
.Returns(new Dictionary<Guid, SourceControlRetrieveMode>
{
{ package.ExternalId, SourceControlRetrieveMode.DeploymentJobs }
});
_sourceControlMock.Setup(x => x.GetChangesByReleaseJobs(
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(new[] { package.ExternalId })),
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(jobs.Select(j => j.JobId)))))
.Returns(changes);
_releaseWindowGatewayMock.Setup(x => x.Dispose());
_packageGatewayMock.Setup(x => x.Dispose());
_releaseJobGatewayMock.Setup(x => x.Dispose());
_changesGatewayMock.Setup(x => x.Dispose());
var result = Sut.Handle(request);
CollectionAssert.AreEquivalent(changes, result.Changes);
_releaseWindowGatewayMock.Verify(x => x.GetByExternalId(It.IsAny<Guid>(), It.IsAny<bool>(), It.IsAny<bool>()), Times.Once);
_packageGatewayMock.Verify(x => x.GetProducts(It.IsAny<Guid>()), Times.Once);
_releaseJobGatewayMock.Verify(x => x.GetReleaseJobs(It.IsAny<Guid>(), It.IsAny<bool>()), Times.Once);
_sourceControlMock.Verify(x => x.GetChangesByReleaseJobs(It.IsAny<IEnumerable<Guid>>(), It.IsAny<IEnumerable<Guid>>()), Times.Once);
_changesGatewayMock.Verify(x => x.FilterExistingChangesByProduct(
It.IsAny<IEnumerable<string>>(), It.IsAny<IEnumerable<Guid>>()), Times.Never);
_changesGatewayMock.Verify(x => x.GetChanges(It.IsAny<Guid>()), Times.Never);
}
[Test]
public void Handle_ShouldGetSourceCodeChangesByRepositoriesFromServiceWithoutFiltering_WhenReleaseNotApprovedAndRequestIsBackground()
{
var package = Builder<Product>.CreateNew().Build();
var releaseWindow = Builder<ReleaseWindow>.CreateNew()
.With(x => x.ExternalId, Guid.NewGuid())
.With(x => x.Products, new[] { package.Description })
.With(x => x.ApprovedOn, null)
.Build();
var repositories = Builder<ReleaseRepository>.CreateListOfSize(5).Build();
var request = new GetReleaseChangesRequest { ReleaseWindowId = releaseWindow.ExternalId, IsBackground = true };
var changes = Builder<SourceControlChange>.CreateListOfSize(5).Build();
_releaseWindowGatewayMock.Setup(x => x.GetByExternalId(request.ReleaseWindowId, true, false))
.Returns(releaseWindow);
_packageGatewayMock.Setup(x => x.GetProducts(releaseWindow.ExternalId))
.Returns(new[] { package });
_releaseRepositoryGatewayMock.Setup(x => x.GetReleaseRepositories(request.ReleaseWindowId))
.Returns(repositories);
_sourceControlMock.Setup(x => x.GetSourceControlRetrieveMode(
It.Is<IEnumerable<Guid>>(i => i.Contains(package.ExternalId))))
.Returns(new Dictionary<Guid, SourceControlRetrieveMode>
{
{ package.ExternalId, SourceControlRetrieveMode.RepositoryIdentifier }
});
_sourceControlMock.Setup(x => x.GetChangesByRepository(
It.Is<IEnumerable<Guid>>(p => p.SequenceEqual(new[] { package.ExternalId })),
It.Is<IEnumerable<ReleaseRepository>>(p => p.SequenceEqual(repositories.Where(r => r.IsIncluded)))))
.Returns(changes);
_releaseRepositoryGatewayMock.Setup(x => x.Dispose());
_releaseWindowGatewayMock.Setup(x => x.Dispose());
_packageGatewayMock.Setup(x => x.Dispose());
_releaseJobGatewayMock.Setup(x => x.Dispose());
_changesGatewayMock.Setup(x => x.Dispose());
var result = Sut.Handle(request);
CollectionAssert.AreEquivalent(changes, result.Changes);
_releaseWindowGatewayMock.Verify(x => x.GetByExternalId(It.IsAny<Guid>(), It.IsAny<bool>(), It.IsAny<bool>()), Times.Once);
_packageGatewayMock.Verify(x => x.GetProducts(It.IsAny<Guid>()), Times.Once);
_releaseRepositoryGatewayMock.Verify(x => x.GetReleaseRepositories(It.IsAny<Guid>()), Times.Once);
_releaseJobGatewayMock.Verify(x => x.GetReleaseJobs(It.IsAny<Guid>(), It.IsAny<bool>()), Times.Never);
_sourceControlMock.Verify(x => x.GetChangesByRepository(It.IsAny<IEnumerable<Guid>>(), It.IsAny<IEnumerable<ReleaseRepository>>()), Times.Once);
_sourceControlMock.Verify(x => x.GetChangesByReleaseJobs(It.IsAny<IEnumerable<Guid>>(), It.IsAny<IEnumerable<Guid>>()), Times.Never);
_changesGatewayMock.Verify(x => x.FilterExistingChangesByProduct(
It.IsAny<IEnumerable<string>>(), It.IsAny<IEnumerable<Guid>>()), Times.Never);
_changesGatewayMock.Verify(x => x.GetChanges(It.IsAny<Guid>()), Times.Never);
}
}
}
| |
using System.Collections.Generic;
using System.Threading.Tasks;
using Content.Server.GameObjects.Components.GUI;
using Content.Server.GameObjects.Components.Items.Storage;
using Content.Server.GameObjects.Components.Weapon.Ranged.Barrels;
using Content.Shared.GameObjects.Components.Weapons.Ranged.Barrels;
using Content.Shared.Interfaces;
using Content.Shared.Interfaces.GameObjects.Components;
using Robust.Server.GameObjects;
using Robust.Server.GameObjects.Components.Container;
using Robust.Server.Interfaces.GameObjects;
using Robust.Shared.GameObjects;
using Robust.Shared.Interfaces.GameObjects;
using Robust.Shared.Localization;
using Robust.Shared.Serialization;
namespace Content.Server.GameObjects.Components.Weapon.Ranged.Ammunition
{
/// <summary>
/// Used to load certain ranged weapons quickly
/// </summary>
[RegisterComponent]
public class SpeedLoaderComponent : Component, IAfterInteract, IInteractUsing, IMapInit, IUse
{
public override string Name => "SpeedLoader";
private BallisticCaliber _caliber;
public int Capacity => _capacity;
private int _capacity;
private Container _ammoContainer;
private Stack<IEntity> _spawnedAmmo;
private int _unspawnedCount;
public int AmmoLeft => _spawnedAmmo.Count + _unspawnedCount;
private string _fillPrototype;
public override void ExposeData(ObjectSerializer serializer)
{
base.ExposeData(serializer);
serializer.DataField(ref _caliber, "caliber", BallisticCaliber.Unspecified);
serializer.DataField(ref _capacity, "capacity", 6);
serializer.DataField(ref _fillPrototype, "fillPrototype", null);
_spawnedAmmo = new Stack<IEntity>(_capacity);
}
public override void Initialize()
{
base.Initialize();
_ammoContainer = ContainerManagerComponent.Ensure<Container>($"{Name}-container", Owner, out var existing);
if (existing)
{
foreach (var ammo in _ammoContainer.ContainedEntities)
{
_unspawnedCount--;
_spawnedAmmo.Push(ammo);
}
}
}
void IMapInit.MapInit()
{
_unspawnedCount += _capacity;
UpdateAppearance();
}
private void UpdateAppearance()
{
if (Owner.TryGetComponent(out AppearanceComponent appearanceComponent))
{
appearanceComponent?.SetData(MagazineBarrelVisuals.MagLoaded, true);
appearanceComponent?.SetData(AmmoVisuals.AmmoCount, AmmoLeft);
appearanceComponent?.SetData(AmmoVisuals.AmmoMax, Capacity);
}
}
public bool TryInsertAmmo(IEntity user, IEntity entity)
{
if (!entity.TryGetComponent(out AmmoComponent ammoComponent))
{
return false;
}
if (ammoComponent.Caliber != _caliber)
{
Owner.PopupMessage(user, Loc.GetString("Wrong caliber"));
return false;
}
if (AmmoLeft >= Capacity)
{
Owner.PopupMessage(user, Loc.GetString("No room"));
return false;
}
_spawnedAmmo.Push(entity);
_ammoContainer.Insert(entity);
UpdateAppearance();
return true;
}
private bool UseEntity(IEntity user)
{
if (!user.TryGetComponent(out HandsComponent handsComponent))
{
return false;
}
var ammo = TakeAmmo();
if (ammo == null)
{
return false;
}
var itemComponent = ammo.GetComponent<ItemComponent>();
if (!handsComponent.CanPutInHand(itemComponent))
{
ServerRangedBarrelComponent.EjectCasing(ammo);
}
else
{
handsComponent.PutInHand(itemComponent);
}
UpdateAppearance();
return true;
}
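// Ammo is served from the container first; once it is empty, remaining rounds are lazily
// spawned from _fillPrototype until _unspawnedCount reaches zero, after which TakeAmmo returns null.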
private IEntity TakeAmmo()
{
if (_spawnedAmmo.TryPop(out var entity))
{
_ammoContainer.Remove(entity);
return entity;
}
if (_unspawnedCount > 0)
{
entity = Owner.EntityManager.SpawnEntity(_fillPrototype, Owner.Transform.Coordinates);
_unspawnedCount--;
}
return entity;
}
void IAfterInteract.AfterInteract(AfterInteractEventArgs eventArgs)
{
if (eventArgs.Target == null)
{
return;
}
// This area is dirty, but I'm not sure of an easier way to do it besides adding an interface or something
bool changed = false;
if (eventArgs.Target.TryGetComponent(out RevolverBarrelComponent revolverBarrel))
{
for (var i = 0; i < Capacity; i++)
{
var ammo = TakeAmmo();
if (ammo == null)
{
break;
}
if (revolverBarrel.TryInsertBullet(eventArgs.User, ammo))
{
changed = true;
continue;
}
// Take the ammo back
TryInsertAmmo(eventArgs.User, ammo);
break;
}
} else if (eventArgs.Target.TryGetComponent(out BoltActionBarrelComponent boltActionBarrel))
{
for (var i = 0; i < Capacity; i++)
{
var ammo = TakeAmmo();
if (ammo == null)
{
break;
}
if (boltActionBarrel.TryInsertBullet(eventArgs.User, ammo))
{
changed = true;
continue;
}
// Take the ammo back
TryInsertAmmo(eventArgs.User, ammo);
break;
}
}
if (changed)
{
UpdateAppearance();
}
}
async Task<bool> IInteractUsing.InteractUsing(InteractUsingEventArgs eventArgs)
{
return TryInsertAmmo(eventArgs.User, eventArgs.Using);
}
bool IUse.UseEntity(UseEntityEventArgs eventArgs)
{
return UseEntity(eventArgs.User);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Net.Security;
using System.Runtime.InteropServices;
using System.Security.Authentication;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Win32.SafeHandles;
namespace System.Net.Security
{
internal static class SslStreamPal
{
private static readonly StreamSizes s_streamSizes = new StreamSizes();
public static Exception GetException(SecurityStatusPal status)
{
return status.Exception ?? new Interop.OpenSsl.SslException((int)status.ErrorCode);
}
internal const bool StartMutualAuthAsAnonymous = false;
internal const bool CanEncryptEmptyMessage = false;
public static void VerifyPackageInfo()
{
}
public static SecurityStatusPal AcceptSecurityContext(ref SafeFreeCredentials credential, ref SafeDeleteContext context,
SecurityBuffer inputBuffer, SecurityBuffer outputBuffer, bool remoteCertRequired)
{
return HandshakeInternal(credential, ref context, inputBuffer, outputBuffer, true, remoteCertRequired);
}
public static SecurityStatusPal InitializeSecurityContext(ref SafeFreeCredentials credential, ref SafeDeleteContext context,
string targetName, SecurityBuffer inputBuffer, SecurityBuffer outputBuffer)
{
return HandshakeInternal(credential, ref context, inputBuffer, outputBuffer, false, false);
}
public static SecurityStatusPal InitializeSecurityContext(SafeFreeCredentials credential, ref SafeDeleteContext context, string targetName, SecurityBuffer[] inputBuffers, SecurityBuffer outputBuffer)
{
Debug.Assert(inputBuffers.Length == 2);
Debug.Assert(inputBuffers[1].token == null);
return HandshakeInternal(credential, ref context, inputBuffers[0], outputBuffer, false, false);
}
public static SafeFreeCredentials AcquireCredentialsHandle(X509Certificate certificate,
SslProtocols protocols, EncryptionPolicy policy, bool isServer)
{
return new SafeFreeSslCredentials(certificate, protocols, policy);
}
public static SecurityStatusPal EncryptMessage(SafeDeleteContext securityContext, byte[] input, int offset, int size, int headerSize, int trailerSize, ref byte[] output, out int resultSize)
{
return EncryptDecryptHelper(securityContext, input, offset, size, true, ref output, out resultSize);
}
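// Note: decryption happens in place - the plaintext overwrites the ciphertext in "buffer"
// (offset is expected to be 0 here), and on OK or Renegotiate "count" is updated to the
// decrypted length.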
public static SecurityStatusPal DecryptMessage(SafeDeleteContext securityContext, byte[] buffer, ref int offset, ref int count)
{
int resultSize;
SecurityStatusPal retVal = EncryptDecryptHelper(securityContext, buffer, offset, count, false, ref buffer, out resultSize);
if (retVal.ErrorCode == SecurityStatusPalErrorCode.OK ||
retVal.ErrorCode == SecurityStatusPalErrorCode.Renegotiate)
{
count = resultSize;
}
return retVal;
}
public static ChannelBinding QueryContextChannelBinding(SafeDeleteContext securityContext, ChannelBindingKind attribute)
{
ChannelBinding bindingHandle;
if (attribute == ChannelBindingKind.Endpoint)
{
bindingHandle = EndpointChannelBindingToken.Build(securityContext);
if (bindingHandle == null)
{
throw Interop.OpenSsl.CreateSslException(SR.net_ssl_invalid_certificate);
}
}
else
{
bindingHandle = Interop.OpenSsl.QueryChannelBinding(
((SafeDeleteSslContext)securityContext).SslContext,
attribute);
}
return bindingHandle;
}
public static void QueryContextStreamSizes(SafeDeleteContext securityContext, out StreamSizes streamSizes)
{
streamSizes = s_streamSizes;
}
public static void QueryContextConnectionInfo(SafeDeleteContext securityContext, out SslConnectionInfo connectionInfo)
{
connectionInfo = new SslConnectionInfo(((SafeDeleteSslContext)securityContext).SslContext);
}
private static SecurityStatusPal HandshakeInternal(SafeFreeCredentials credential, ref SafeDeleteContext context,
SecurityBuffer inputBuffer, SecurityBuffer outputBuffer, bool isServer, bool remoteCertRequired)
{
Debug.Assert(!credential.IsInvalid);
try
{
if ((null == context) || context.IsInvalid)
{
context = new SafeDeleteSslContext(credential as SafeFreeSslCredentials, isServer, remoteCertRequired);
}
byte[] output = null;
int outputSize;
bool done;
if (null == inputBuffer)
{
done = Interop.OpenSsl.DoSslHandshake(((SafeDeleteSslContext)context).SslContext, null, 0, 0, out output, out outputSize);
}
else
{
done = Interop.OpenSsl.DoSslHandshake(((SafeDeleteSslContext)context).SslContext, inputBuffer.token, inputBuffer.offset, inputBuffer.size, out output, out outputSize);
}
outputBuffer.size = outputSize;
outputBuffer.offset = 0;
outputBuffer.token = outputSize > 0 ? output : null;
return new SecurityStatusPal(done ? SecurityStatusPalErrorCode.OK : SecurityStatusPalErrorCode.ContinueNeeded);
}
catch (Exception exc)
{
return new SecurityStatusPal(SecurityStatusPalErrorCode.InternalError, exc);
}
}
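// Typical driver loop (sketch): the caller invokes InitializeSecurityContext or AcceptSecurityContext
// repeatedly, sending outputBuffer.token to the peer and feeding the peer's reply back in as the
// next inputBuffer, until the returned status is OK rather than ContinueNeeded.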
private static SecurityStatusPal EncryptDecryptHelper(SafeDeleteContext securityContext, byte[] input, int offset, int size, bool encrypt, ref byte[] output, out int resultSize)
{
resultSize = 0;
try
{
Interop.Ssl.SslErrorCode errorCode = Interop.Ssl.SslErrorCode.SSL_ERROR_NONE;
SafeSslHandle scHandle = ((SafeDeleteSslContext)securityContext).SslContext;
if (encrypt)
{
resultSize = Interop.OpenSsl.Encrypt(scHandle, input, offset, size, ref output, out errorCode);
}
else
{
Debug.Assert(offset == 0, "Expected offset 0 when decrypting");
Debug.Assert(ReferenceEquals(input, output), "Expected input==output when decrypting");
resultSize = Interop.OpenSsl.Decrypt(scHandle, input, size, out errorCode);
}
switch (errorCode)
{
case Interop.Ssl.SslErrorCode.SSL_ERROR_RENEGOTIATE:
return new SecurityStatusPal(SecurityStatusPalErrorCode.Renegotiate);
case Interop.Ssl.SslErrorCode.SSL_ERROR_ZERO_RETURN:
return new SecurityStatusPal(SecurityStatusPalErrorCode.ContextExpired);
case Interop.Ssl.SslErrorCode.SSL_ERROR_NONE:
case Interop.Ssl.SslErrorCode.SSL_ERROR_WANT_READ:
return new SecurityStatusPal(SecurityStatusPalErrorCode.OK);
default:
return new SecurityStatusPal(SecurityStatusPalErrorCode.InternalError, new Interop.OpenSsl.SslException((int)errorCode));
}
}
catch (Exception ex)
{
return new SecurityStatusPal(SecurityStatusPalErrorCode.InternalError, ex);
}
}
public static SecurityStatusPal ApplyAlertToken(ref SafeFreeCredentials credentialsHandle, SafeDeleteContext securityContext, TlsAlertType alertType, TlsAlertMessage alertMessage)
{
// There doesn't seem to be an exposed API for writing an alert,
// the API seems to assume that all alerts are generated internally by
// SSLHandshake.
return new SecurityStatusPal(SecurityStatusPalErrorCode.OK);
}
public static SecurityStatusPal ApplyShutdownToken(ref SafeFreeCredentials credentialsHandle, SafeDeleteContext securityContext)
{
SafeDeleteSslContext sslContext = ((SafeDeleteSslContext)securityContext);
// Unset the quiet shutdown option initially configured.
Interop.Ssl.SslSetQuietShutdown(sslContext.SslContext, 0);
int status = Interop.Ssl.SslShutdown(sslContext.SslContext);
if (status == 0)
{
// Call SSL_shutdown again for a bi-directional shutdown.
status = Interop.Ssl.SslShutdown(sslContext.SslContext);
}
if (status == 1)
return new SecurityStatusPal(SecurityStatusPalErrorCode.OK);
Interop.Ssl.SslErrorCode code = Interop.Ssl.SslGetError(sslContext.SslContext, status);
if (code == Interop.Ssl.SslErrorCode.SSL_ERROR_WANT_READ ||
code == Interop.Ssl.SslErrorCode.SSL_ERROR_WANT_WRITE)
{
return new SecurityStatusPal(SecurityStatusPalErrorCode.OK);
}
else
{
return new SecurityStatusPal(SecurityStatusPalErrorCode.InternalError, new Interop.OpenSsl.SslException((int)code));
}
}
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
/*============================================================
**
** Class: ResourceWriter
**
** <OWNER>[....]</OWNER>
**
**
** Purpose: Default way to write strings to a CLR resource
** file.
**
**
===========================================================*/
namespace System.Resources {
using System;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
#if FEATURE_SERIALIZATION
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
#endif // FEATURE_SERIALIZATION
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
using System.Security;
using System.Security.Permissions;
// Generates a binary .resources file in the system default format
// from name and value pairs. Create one with a unique file name,
// call AddResource() at least once, then call Generate() to write
// the .resources file to disk, then call Close() to close the file.
//
// The resources generally aren't written out in the same order
// they were added.
//
// See the RuntimeResourceSet overview for details on the system
// default file format.
//
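// Example (illustrative sketch; the file name and payloads are assumptions):
//
// using (ResourceWriter rw = new ResourceWriter("strings.resources"))
// {
// rw.AddResource("Greeting", "Hello, world"); // string resource
// rw.AddResource("Icon", iconBytes); // byte[] resource
// rw.Generate(); // write the .resources file to disk
// }
//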
[System.Runtime.InteropServices.ComVisible(true)]
public sealed class ResourceWriter : IResourceWriter
{
private Func<Type, String> typeConverter;
// Set this delegate to allow multi-targeting for .resources files.
public Func<Type, String> TypeNameConverter
{
get
{
return typeConverter;
}
set
{
typeConverter = value;
}
}
// For cases where users can't create an instance of the deserialized
// type in memory, and need to pass us serialized blobs instead.
// LocStudio's managed code parser will do this in some cases.
private class PrecannedResource
{
internal String TypeName;
internal byte[] Data;
internal PrecannedResource(String typeName, byte[] data)
{
TypeName = typeName;
Data = data;
}
}
private class StreamWrapper
{
internal Stream m_stream;
internal bool m_closeAfterWrite;
internal StreamWrapper(Stream s, bool closeAfterWrite)
{
m_stream = s;
m_closeAfterWrite = closeAfterWrite;
}
}
// An initial size for our internal sorted list, to avoid extra resizes.
private const int _ExpectedNumberOfResources = 1000;
private const int AverageNameSize = 20 * 2; // chars in little endian Unicode
private const int AverageValueSize = 40;
private Dictionary<String, Object> _resourceList;
private Stream _output;
private Dictionary<String, Object> _caseInsensitiveDups;
private Dictionary<String, PrecannedResource> _preserializedData;
private const int _DefaultBufferSize = 4096;
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
public ResourceWriter(String fileName)
{
if (fileName==null)
throw new ArgumentNullException("fileName");
Contract.EndContractBlock();
_output = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None);
_resourceList = new Dictionary<String, Object>(_ExpectedNumberOfResources, FastResourceComparer.Default);
_caseInsensitiveDups = new Dictionary<String, Object>(StringComparer.OrdinalIgnoreCase);
}
public ResourceWriter(Stream stream)
{
if (stream==null)
throw new ArgumentNullException("stream");
if (!stream.CanWrite)
throw new ArgumentException(Environment.GetResourceString("Argument_StreamNotWritable"));
Contract.EndContractBlock();
_output = stream;
_resourceList = new Dictionary<String, Object>(_ExpectedNumberOfResources, FastResourceComparer.Default);
_caseInsensitiveDups = new Dictionary<String, Object>(StringComparer.OrdinalIgnoreCase);
}
// Adds a string resource to the list of resources to be written to a file.
// They aren't written until Generate() is called.
//
public void AddResource(String name, String value)
{
if (name==null)
throw new ArgumentNullException("name");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
_resourceList.Add(name, value);
}
// Adds a resource of type Object to the list of resources to be
// written to a file. They aren't written until Generate() is called.
//
public void AddResource(String name, Object value)
{
if (name==null)
throw new ArgumentNullException("name");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
// needed for binary compat
if (value != null && value is Stream)
{
AddResourceInternal(name, (Stream)value, false);
}
else
{
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
_resourceList.Add(name, value);
}
}
// Adds a resource of type Stream to the list of resources to be
// written to a file. They aren't written until Generate() is called.
// Doesn't close the Stream when done.
//
public void AddResource(String name, Stream value)
{
if (name == null)
throw new ArgumentNullException("name");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
AddResourceInternal(name, value, false);
}
// Adds a resource of type Stream to the list of resources to be
// written to a file. They aren't written until Generate() is called.
// closeAfterWrite parameter indicates whether to close the stream when done.
//
public void AddResource(String name, Stream value, bool closeAfterWrite)
{
if (name == null)
throw new ArgumentNullException("name");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
AddResourceInternal(name, value, closeAfterWrite);
}
private void AddResourceInternal(String name, Stream value, bool closeAfterWrite)
{
if (value == null)
{
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
_resourceList.Add(name, value);
}
else
{
// make sure the Stream is seekable
if (!value.CanSeek)
throw new ArgumentException(Environment.GetResourceString("NotSupported_UnseekableStream"));
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
_resourceList.Add(name, new StreamWrapper(value, closeAfterWrite));
}
}
// Adds a named byte array as a resource to the list of resources to
// be written to a file. They aren't written until Generate() is called.
//
public void AddResource(String name, byte[] value)
{
if (name==null)
throw new ArgumentNullException("name");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
_resourceList.Add(name, value);
}
public void AddResourceData(String name, String typeName, byte[] serializedData)
{
if (name == null)
throw new ArgumentNullException("name");
if (typeName == null)
throw new ArgumentNullException("typeName");
if (serializedData == null)
throw new ArgumentNullException("serializedData");
Contract.EndContractBlock();
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
// Check for duplicate resources whose names vary only by case.
_caseInsensitiveDups.Add(name, null);
if (_preserializedData == null)
_preserializedData = new Dictionary<String, PrecannedResource>(FastResourceComparer.Default);
_preserializedData.Add(name, new PrecannedResource(typeName, serializedData));
}
// Closes the output stream.
public void Close()
{
Dispose(true);
}
private void Dispose(bool disposing)
{
if (disposing) {
if (_resourceList != null) {
Generate();
}
if (_output != null) {
_output.Close();
}
}
_output = null;
_caseInsensitiveDups = null;
// _resourceList is set to null by Generate.
}
public void Dispose()
{
Dispose(true);
}
// After calling AddResource, Generate() writes out all resources to the
// output stream in the system default format.
// If an exception occurs during object serialization or during IO,
// the .resources file is closed and deleted, since it is most likely
// invalid.
[SecuritySafeCritical] // Asserts permission to create & delete a temp file.
public void Generate()
{
if (_resourceList == null)
throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ResourceWriterSaved"));
BinaryWriter bw = new BinaryWriter(_output, Encoding.UTF8);
List<String> typeNames = new List<String>();
// Write out the ResourceManager header
// Write out magic number
bw.Write(ResourceManager.MagicNumber);
// Write out ResourceManager header version number
bw.Write(ResourceManager.HeaderVersionNumber);
MemoryStream resMgrHeaderBlob = new MemoryStream(240);
BinaryWriter resMgrHeaderPart = new BinaryWriter(resMgrHeaderBlob);
// Write out class name of IResourceReader capable of handling
// this file.
resMgrHeaderPart.Write(MultitargetingHelpers.GetAssemblyQualifiedName(typeof(ResourceReader),typeConverter));
// Write out class name of the ResourceSet class best suited to
// handling this file.
// This needs to be the same even with multi-targeting. It's the
// full name -- not the assembly qualified name.
resMgrHeaderPart.Write(ResourceManager.ResSetTypeName);
resMgrHeaderPart.Flush();
// Write number of bytes to skip over to get past ResMgr header
bw.Write((int)resMgrHeaderBlob.Length);
// Write the rest of the ResMgr header
bw.Write(resMgrHeaderBlob.GetBuffer(), 0, (int)resMgrHeaderBlob.Length);
// End ResourceManager header
// Write out the RuntimeResourceSet header
// Version number
bw.Write(RuntimeResourceSet.Version);
#if RESOURCE_FILE_FORMAT_DEBUG
// Write out a tag so we know whether to enable or disable
// debugging support when reading the file.
bw.Write("***DEBUG***");
#endif
// number of resources
int numResources = _resourceList.Count;
if (_preserializedData != null)
numResources += _preserializedData.Count;
bw.Write(numResources);
// Store values in temporary streams to write at end of file.
int[] nameHashes = new int[numResources];
int[] namePositions = new int[numResources];
int curNameNumber = 0;
MemoryStream nameSection = new MemoryStream(numResources * AverageNameSize);
BinaryWriter names = new BinaryWriter(nameSection, Encoding.Unicode);
// The data section can be very large, and worse yet, we can grow the byte[] used
// for the data section repeatedly. When using large resources like ~100 images,
// this can lead to both a fragmented large object heap as well as allocating about
// 2-3x of our storage needs in extra overhead. Using a temp file can avoid this.
// Assert permission to get a temp file name, which requires two permissions.
// Additionally, we may be running under an account that doesn't have permission to
// write to the temp directory (enforced via a Windows ACL). Fall back to a MemoryStream.
Stream dataSection = null; // Either a FileStream or a MemoryStream
String tempFile = null;
PermissionSet permSet = new PermissionSet(PermissionState.None);
permSet.AddPermission(new EnvironmentPermission(PermissionState.Unrestricted));
permSet.AddPermission(new FileIOPermission(PermissionState.Unrestricted));
try {
permSet.Assert();
tempFile = Path.GetTempFileName();
File.SetAttributes(tempFile, FileAttributes.Temporary | FileAttributes.NotContentIndexed);
// Explicitly opening with FileOptions.DeleteOnClose to avoid complicated File.Delete
// (safe from races w/ antivirus software, etc)
dataSection = new FileStream(tempFile, FileMode.Open, FileAccess.ReadWrite, FileShare.Read,
4096, FileOptions.DeleteOnClose | FileOptions.SequentialScan);
}
catch (UnauthorizedAccessException) {
// In case we're running under an account that can't access a temp directory.
dataSection = new MemoryStream();
}
catch (IOException) {
// In case Path.GetTempFileName fails because no unique file names are available
dataSection = new MemoryStream();
}
finally {
PermissionSet.RevertAssert();
}
using(dataSection) {
BinaryWriter data = new BinaryWriter(dataSection, Encoding.UTF8);
#if FEATURE_SERIALIZATION
IFormatter objFormatter = new BinaryFormatter(null, new StreamingContext(StreamingContextStates.File | StreamingContextStates.Persistence));
#endif // FEATURE_SERIALIZATION
#if RESOURCE_FILE_FORMAT_DEBUG
// Write NAMES right before the names section.
names.Write(new byte[] { (byte) 'N', (byte) 'A', (byte) 'M', (byte) 'E', (byte) 'S', (byte) '-', (byte) '-', (byte) '>'});
// Write DATA at the end of the name table section.
data.Write(new byte[] { (byte) 'D', (byte) 'A', (byte) 'T', (byte) 'A', (byte) '-', (byte) '-', (byte)'-', (byte)'>'});
#endif
// We've stored our resources internally in a Hashtable, which
// makes no guarantees about the ordering while enumerating.
// While we do our own sorting of the resource names based on their
// hash values, that's only sorting the nameHashes and namePositions
// arrays. That's all that is strictly required for correctness,
// but for ease of generating a patch in the future that
// modifies just .resources files, we should re-sort them.
SortedList sortedResources = new SortedList(_resourceList, FastResourceComparer.Default);
if (_preserializedData != null) {
foreach (KeyValuePair<String, PrecannedResource> entry in _preserializedData)
sortedResources.Add(entry.Key, entry.Value);
}
IDictionaryEnumerator items = sortedResources.GetEnumerator();
// Write resource name and position to the file, and the value
// to our temporary buffer. Save Type as well.
while (items.MoveNext()) {
nameHashes[curNameNumber] = FastResourceComparer.HashFunction((String)items.Key);
namePositions[curNameNumber++] = (int)names.Seek(0, SeekOrigin.Current);
names.Write((String)items.Key); // key
names.Write((int)data.Seek(0, SeekOrigin.Current)); // virtual offset of value.
#if RESOURCE_FILE_FORMAT_DEBUG
names.Write((byte) '*');
#endif
Object value = items.Value;
ResourceTypeCode typeCode = FindTypeCode(value, typeNames);
// Write out type code
Write7BitEncodedInt(data, (int)typeCode);
// Write out value
PrecannedResource userProvidedResource = value as PrecannedResource;
if (userProvidedResource != null) {
data.Write(userProvidedResource.Data);
}
else {
#if FEATURE_SERIALIZATION
WriteValue(typeCode, value, data, objFormatter);
#else
WriteValue(typeCode, value, data);
#endif
}
#if RESOURCE_FILE_FORMAT_DEBUG
data.Write(new byte[] { (byte) 'S', (byte) 'T', (byte) 'O', (byte) 'P'});
#endif
}
// At this point, the ResourceManager header has been written.
// Finish RuntimeResourceSet header
// Write size & contents of class table
bw.Write(typeNames.Count);
for (int i = 0; i < typeNames.Count; i++)
bw.Write(typeNames[i]);
// Write out the name-related items for lookup.
// Note that the hash array and the namePositions array must
// be sorted in parallel.
Array.Sort(nameHashes, namePositions);
// Prepare to write sorted name hashes (alignment fixup)
// Note: For 64-bit machines, these MUST be aligned on 8 byte
// boundaries! Pointers on IA64 must be aligned! And we'll
// run faster on X86 machines too.
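// Example (illustrative): if the stream position is 0x1A3, the low three bits are 3,
// so 8 - 3 = 5 padding characters ('P','A','D','P','A') are written before the hash array.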
bw.Flush();
int alignBytes = ((int)bw.BaseStream.Position) & 7;
if (alignBytes > 0) {
for (int i = 0; i < 8 - alignBytes; i++)
bw.Write("PAD"[i % 3]);
}
// Write out sorted name hashes.
// Align to 8 bytes.
Contract.Assert((bw.BaseStream.Position & 7) == 0, "ResourceWriter: Name hashes array won't be 8 byte aligned! Ack!");
#if RESOURCE_FILE_FORMAT_DEBUG
bw.Write(new byte[] { (byte) 'H', (byte) 'A', (byte) 'S', (byte) 'H', (byte) 'E', (byte) 'S', (byte) '-', (byte) '>'} );
#endif
foreach (int hash in nameHashes)
bw.Write(hash);
#if RESOURCE_FILE_FORMAT_DEBUG
Console.Write("Name hashes: ");
foreach(int hash in nameHashes)
Console.Write(hash.ToString("x")+" ");
Console.WriteLine();
#endif
// Write relative positions of all the names in the file.
// Note: this data is 4 byte aligned, occurring immediately
// after the 8 byte aligned name hashes (whose length may
// potentially be odd).
Contract.Assert((bw.BaseStream.Position & 3) == 0, "ResourceWriter: Name positions array won't be 4 byte aligned! Ack!");
#if RESOURCE_FILE_FORMAT_DEBUG
bw.Write(new byte[] { (byte) 'P', (byte) 'O', (byte) 'S', (byte) '-', (byte) '-', (byte) '-', (byte) '-', (byte) '>' } );
#endif
foreach (int pos in namePositions)
bw.Write(pos);
#if RESOURCE_FILE_FORMAT_DEBUG
Console.Write("Name positions: ");
foreach(int pos in namePositions)
Console.Write(pos.ToString("x")+" ");
Console.WriteLine();
#endif
// Flush all BinaryWriters to their underlying streams.
bw.Flush();
names.Flush();
data.Flush();
// Write offset to data section
int startOfDataSection = (int)(bw.Seek(0, SeekOrigin.Current) + nameSection.Length);
startOfDataSection += 4; // We're writing an int to store this data, adding more bytes to the header
BCLDebug.Log("RESMGRFILEFORMAT", "Generate: start of DataSection: 0x" + startOfDataSection.ToString("x", CultureInfo.InvariantCulture) + " nameSection length: " + nameSection.Length);
bw.Write(startOfDataSection);
// Write name section.
bw.Write(nameSection.GetBuffer(), 0, (int)nameSection.Length);
names.Close();
// Write data section.
Contract.Assert(startOfDataSection == bw.Seek(0, SeekOrigin.Current), "ResourceWriter::Generate - start of data section is wrong!");
dataSection.Position = 0;
dataSection.CopyTo(bw.BaseStream);
data.Close();
} // using(dataSection) <--- Closes dataSection, which was opened w/ FileOptions.DeleteOnClose
bw.Flush();
// Indicate we've called Generate
_resourceList = null;
}
// Finds the ResourceTypeCode for a type, or adds this type to the
// types list.
private ResourceTypeCode FindTypeCode(Object value, List<String> types)
{
if (value == null)
return ResourceTypeCode.Null;
Type type = value.GetType();
if (type == typeof(String))
return ResourceTypeCode.String;
else if (type == typeof(Int32))
return ResourceTypeCode.Int32;
else if (type == typeof(Boolean))
return ResourceTypeCode.Boolean;
else if (type == typeof(Char))
return ResourceTypeCode.Char;
else if (type == typeof(Byte))
return ResourceTypeCode.Byte;
else if (type == typeof(SByte))
return ResourceTypeCode.SByte;
else if (type == typeof(Int16))
return ResourceTypeCode.Int16;
else if (type == typeof(Int64))
return ResourceTypeCode.Int64;
else if (type == typeof(UInt16))
return ResourceTypeCode.UInt16;
else if (type == typeof(UInt32))
return ResourceTypeCode.UInt32;
else if (type == typeof(UInt64))
return ResourceTypeCode.UInt64;
else if (type == typeof(Single))
return ResourceTypeCode.Single;
else if (type == typeof(Double))
return ResourceTypeCode.Double;
else if (type == typeof (Decimal))
return ResourceTypeCode.Decimal;
else if (type == typeof(DateTime))
return ResourceTypeCode.DateTime;
else if (type == typeof(TimeSpan))
return ResourceTypeCode.TimeSpan;
else if (type == typeof(byte[]))
return ResourceTypeCode.ByteArray;
else if (type == typeof(StreamWrapper))
return ResourceTypeCode.Stream;
// This is a user type, or a precanned resource. Find type
// table index. If not there, add new element.
String typeName;
if (type == typeof(PrecannedResource)) {
typeName = ((PrecannedResource)value).TypeName;
if (typeName.StartsWith("ResourceTypeCode.", StringComparison.Ordinal)) {
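// e.g. "ResourceTypeCode.ByteArray" becomes "ByteArray"; "ResourceTypeCode." is 17 characters long.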
typeName = typeName.Substring(17); // Remove through '.'
ResourceTypeCode typeCode = (ResourceTypeCode)Enum.Parse(typeof(ResourceTypeCode), typeName);
return typeCode;
}
}
else
{
typeName = MultitargetingHelpers.GetAssemblyQualifiedName(type, typeConverter);
}
int typeIndex = types.IndexOf(typeName);
if (typeIndex == -1) {
typeIndex = types.Count;
types.Add(typeName);
}
return (ResourceTypeCode)(typeIndex + ResourceTypeCode.StartOfUserTypes);
}
#if FEATURE_SERIALIZATION
private void WriteValue(ResourceTypeCode typeCode, Object value, BinaryWriter writer, IFormatter objFormatter)
#else
private void WriteValue(ResourceTypeCode typeCode, Object value, BinaryWriter writer)
#endif // FEATURE_SERIALIZATION
{
Contract.Requires(writer != null);
switch(typeCode) {
case ResourceTypeCode.Null:
break;
case ResourceTypeCode.String:
writer.Write((String) value);
break;
case ResourceTypeCode.Boolean:
writer.Write((bool) value);
break;
case ResourceTypeCode.Char:
writer.Write((UInt16) (char) value);
break;
case ResourceTypeCode.Byte:
writer.Write((byte) value);
break;
case ResourceTypeCode.SByte:
writer.Write((sbyte) value);
break;
case ResourceTypeCode.Int16:
writer.Write((Int16) value);
break;
case ResourceTypeCode.UInt16:
writer.Write((UInt16) value);
break;
case ResourceTypeCode.Int32:
writer.Write((Int32) value);
break;
case ResourceTypeCode.UInt32:
writer.Write((UInt32) value);
break;
case ResourceTypeCode.Int64:
writer.Write((Int64) value);
break;
case ResourceTypeCode.UInt64:
writer.Write((UInt64) value);
break;
case ResourceTypeCode.Single:
writer.Write((Single) value);
break;
case ResourceTypeCode.Double:
writer.Write((Double) value);
break;
case ResourceTypeCode.Decimal:
writer.Write((Decimal) value);
break;
case ResourceTypeCode.DateTime:
// Use DateTime's ToBinary & FromBinary.
Int64 data = ((DateTime) value).ToBinary();
writer.Write(data);
break;
case ResourceTypeCode.TimeSpan:
writer.Write(((TimeSpan) value).Ticks);
break;
// Special Types
case ResourceTypeCode.ByteArray:
{
byte[] bytes = (byte[]) value;
writer.Write(bytes.Length);
writer.Write(bytes, 0, bytes.Length);
break;
}
case ResourceTypeCode.Stream:
{
StreamWrapper sw = (StreamWrapper)value;
if (sw.m_stream.GetType() == typeof(MemoryStream))
{
MemoryStream ms = (MemoryStream)sw.m_stream;
if (ms.Length > Int32.MaxValue)
throw new ArgumentException(Environment.GetResourceString("ArgumentOutOfRange_StreamLength"));
int offset, len;
ms.InternalGetOriginAndLength(out offset, out len);
byte[] bytes = ms.InternalGetBuffer();
writer.Write(len);
writer.Write(bytes, offset, len);
}
else
{
Stream s = sw.m_stream;
// we've already verified that the Stream is seekable
if (s.Length > Int32.MaxValue)
throw new ArgumentException(Environment.GetResourceString("ArgumentOutOfRange_StreamLength"));
s.Position = 0;
writer.Write((int)s.Length);
byte[] buffer = new byte[_DefaultBufferSize];
int read = 0;
while ((read = s.Read(buffer, 0, buffer.Length)) != 0)
{
writer.Write(buffer, 0, read);
}
if (sw.m_closeAfterWrite)
{
s.Close();
}
}
break;
}
default:
Contract.Assert(typeCode >= ResourceTypeCode.StartOfUserTypes, String.Format(CultureInfo.InvariantCulture, "ResourceReader: Unsupported ResourceTypeCode in .resources file! {0}", typeCode));
#if FEATURE_SERIALIZATION
objFormatter.Serialize(writer.BaseStream, value);
break;
#else
throw new NotSupportedException(Environment.GetResourceString("NotSupported_ResourceObjectSerialization"));
#endif // FEATURE_SERIALIZATION
}
}
private static void Write7BitEncodedInt(BinaryWriter store, int value) {
Contract.Requires(store != null);
// Write out an int 7 bits at a time. The high bit of the byte,
// when on, tells reader to continue reading more bytes.
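// Example (illustrative): 300 (0x12C) is written as the two bytes 0xAC, 0x02.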
uint v = (uint) value; // support negative numbers
while (v >= 0x80) {
store.Write((byte) (v | 0x80));
v >>= 7;
}
store.Write((byte)v);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The main goal of this test is to make sure that a producer and a consumer with
* separate connections can communicate properly and consistently despite a waiting time
* between messages. TestStability with 500 messages and the WITHSLEEP producer is the most
* important case, as a 30 s delay is executed at every 100th message (100, 200, 300, ...).
*/
using System;
using System.Threading;
using Apache.NMS.Test;
using NUnit.Framework;
namespace Apache.NMS.Stomp.Test
{
[TestFixture]
public class NMSTestStability : NMSTestSupport
{
// TODO set proper configuration parameters
private const string destination = "TEST.Stability";
private static int numberOfMessages = 0;
private static IConnection producerConnection = null;
private static IConnection consumerConnection = null;
private static Thread consumerThread = null;
private static Thread producerThread = null;
private static long consumerMessageCounter = 0;
private static long producerMessageCounter = 0;
private static string possibleConsumerException = "";
private static string possibleProducerException = "";
private static bool consumerReady = false;
[SetUp]
public void Init()
{
if (producerConnection != null)
{
producerConnection.Close();
}
if (consumerConnection != null)
{
consumerConnection.Close();
}
if (consumerThread != null)
{
consumerThread.Abort();
}
if (producerThread != null)
{
producerThread.Abort();
}
producerConnection = null;
consumerConnection = null;
consumerThread = null;
producerThread = null;
producerConnection = CreateConnection();
consumerConnection = CreateConnection();
numberOfMessages = 0;
consumerMessageCounter = 0;
producerMessageCounter = 0;
possibleConsumerException = "";
possibleProducerException = "";
consumerReady = false;
//Giving time for the topic to clear out
Thread.Sleep(2500);
}
[TearDown]
public void Dispose()
{
if (producerConnection != null)
{
producerConnection.Close();
}
if (consumerConnection != null)
{
consumerConnection.Close();
}
if (consumerThread != null)
{
consumerThread.Abort();
}
if (producerThread != null)
{
producerThread.Abort();
}
producerConnection = null;
consumerConnection = null;
consumerThread = null;
producerThread = null;
}
#if !NETCF
public enum ProducerTestType
{
CONTINUOUS,
WITHSLEEP
}
[Test]
public void TestStability(
[Values(5, 50, 500)]
int testMessages,
[Values(ProducerTestType.CONTINUOUS, ProducerTestType.WITHSLEEP)]
ProducerTestType producerType)
{
//At 100, 200, 300, ... a delay of 30 seconds is executed in the producer to provoke an unexpected disconnect (possibly due to a malformed ACK).
numberOfMessages = testMessages;
consumerThread = new Thread(NMSTestStability.ConsumerThread);
consumerThread.Start();
if(ProducerTestType.CONTINUOUS == producerType)
{
producerThread = new Thread(NMSTestStability.ProducerThreadContinuous);
}
else
{
producerThread = new Thread(NMSTestStability.ProducerThreadWithSleep);
}
producerThread.Start();
Thread.Sleep(100);
Assert.IsTrue(consumerThread.IsAlive && producerThread.IsAlive);
while (consumerThread.IsAlive && producerThread.IsAlive)
{
Thread.Sleep(100);
}
Assert.IsEmpty(possibleConsumerException);
Assert.IsEmpty(possibleProducerException);
Assert.AreEqual(numberOfMessages, producerMessageCounter);
Assert.AreEqual(numberOfMessages, consumerMessageCounter);
}
#endif
#region Consumer
private static void ConsumerThread()
{
ISession session = consumerConnection.CreateSession();
IMessageConsumer consumer;
IDestination dest = session.GetTopic(destination);
consumer = session.CreateConsumer(dest);
consumer.Listener += new MessageListener(OnMessage);
consumerConnection.ExceptionListener += new ExceptionListener(OnConsumerExceptionListener);
consumerConnection.Start();
consumerReady = true;
while (true)
{
Thread.Sleep(100);
}
}
private static void OnMessage(IMessage receivedMsg)
{
consumerMessageCounter++;
}
public static void OnConsumerExceptionListener(Exception ex)
{
possibleConsumerException = ex.Message;
Thread.CurrentThread.Abort();
}
#endregion Consumer
#region Producer
private static void ProducerThreadWithSleep()
{
ISession session = producerConnection.CreateSession();
IMessageProducer producer;
IDestination dest = session.GetTopic(destination);
producer = session.CreateProducer(dest);
producer.DeliveryMode = MsgDeliveryMode.NonPersistent;
producerConnection.ExceptionListener += new ExceptionListener(OnProducerExceptionListener);
producerConnection.Start();
ITextMessage message;
while (!consumerReady)
{
Thread.Sleep(100);
}
for (int c = 0; c < numberOfMessages; c++)
{
message = session.CreateTextMessage(c.ToString());
message.NMSType = "testType";
producer.Send(message);
producerMessageCounter++;
//Focal point of this test: induce a "long" delay between two messages without any other communication
//on the topic. Note that these delays occur only at every 100th message, and that messages can still be
//sent after the delay, before a possible disconnect.
if ((c + 1) % 100 == 0)
{
Thread.Sleep(30000);
}
else
{
Thread.Sleep(10);
}
}
}
private static void ProducerThreadContinuous()
{
ISession session = producerConnection.CreateSession();
IMessageProducer producer;
IDestination dest = session.GetTopic(destination);
producer = session.CreateProducer(dest);
producer.DeliveryMode = MsgDeliveryMode.NonPersistent;
producerConnection.ExceptionListener += new ExceptionListener(OnProducerExceptionListener);
producerConnection.Start();
ITextMessage message;
while (!consumerReady)
{
Thread.Sleep(100);
}
for (int c = 0; c < numberOfMessages; c++)
{
message = session.CreateTextMessage(c.ToString());
message.NMSType = "testType";
producer.Send(message);
producerMessageCounter++;
Thread.Sleep(10);
}
}
public static void OnProducerExceptionListener(Exception ex)
{
possibleProducerException = ex.Message;
Thread.CurrentThread.Abort();
}
#endregion Producer
}
}
| |
// <copyright file="BitSpan.cs" company="LeetABit">
// Copyright (c) Hubert Bukowski. All rights reserved.
// Licensed under the MIT License.
// See LICENSE file in the project root for full license information.
// </copyright>
namespace LeetABit.Binary
{
using System;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using LeetABit.Binary.Properties;
/// <summary>
/// Provides bitwise access to the span of bytes.
/// </summary>
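/// <example>
/// A minimal sketch (illustrative; assumes the default most-significant-bit-first ordering and that
/// <c>SplitBitIndex</c> maps a bit index to a byte index plus a bit offset within that byte):
/// <code>
/// byte[] buffer = { 0b1010_0000 };
/// BitSpan bits = new BitSpan(buffer);
/// bool b0 = bits[0];   // true  -- most significant bit of buffer[0]
/// bool b1 = bits[1];   // false
/// bool b2 = bits[2];   // true
/// </code>
/// </example>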
public readonly ref struct BitSpan
{
/// <summary>
/// Holds a span that holds source bytes.
/// </summary>
private readonly Span<byte> bits;
/// <summary>
/// Holds an index of the first bit of the bit span.
/// </summary>
private readonly int startBitIndex;
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using entire span of bytes as the bit source
/// and most significant bit ordering.
/// </summary>
/// <param name="span">
/// Span that holds source bits.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(Span<byte> span)
: this(span, 0, span.GetBitLength(), BitwiseOrder.MostSignificantFirst)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using span of bytes as the bit source,
/// information about bit index within the source data which determines first bit of the bit span that
/// ends at the end of the source data and most significant bit ordering.
/// </summary>
/// <param name="span">
/// Span that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="span"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="span"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(Span<byte> span, int startBitIndex)
: this(span, startBitIndex, span.GetBitLength() - startBitIndex, BitwiseOrder.MostSignificantFirst)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using span of bytes as the bit source,
/// information about first bit index, length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="span">
/// Span that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="span"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="span"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> applied to <paramref name="startBitIndex"/> does not fit into the number of bits available.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(Span<byte> span, int startBitIndex, int bitLength)
: this(span, startBitIndex, bitLength, BitwiseOrder.MostSignificantFirst)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using span of bytes as the bit source,
/// information about first bit index, length of the bit span and bit ordering.
/// </summary>
/// <param name="span">
/// Span that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <param name="bitOrder">
/// Order of the bits within each source byte.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="span"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="span"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> applied to <paramref name="startBitIndex"/> does not fit into the number of bits available.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(Span<byte> span, int startBitIndex, int bitLength, BitwiseOrder bitOrder)
{
Requires.BitBlockWithinAvailableSpan(startBitIndex, bitLength, span.GetBitLength(), nameof(startBitIndex), nameof(bitLength));
this.bits = span;
this.startBitIndex = startBitIndex;
this.Length = bitLength;
this.BitOrder = bitOrder;
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using entire array of bytes as the bit source
/// and most significant bit ordering.
/// </summary>
/// <param name="array">
/// Array that holds source bits.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(byte[]? array)
: this(new Span<byte>(array))
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using array of bytes as the bit source,
/// information about bit index within the source data which determines first bit of the bit span that
/// ends at the end of the source data and most significant bit ordering.
/// </summary>
/// <param name="array">
/// Array that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="array"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="array"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(byte[]? array, int startBitIndex)
: this(new Span<byte>(array), startBitIndex)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using array of bytes as the bit source,
/// information about first bit index, length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="array">
/// Array that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="array"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="array"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> applied to <paramref name="startBitIndex"/> does not fit into the number of bits available.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(byte[]? array, int startBitIndex, int bitLength)
: this(new Span<byte>(array), startBitIndex, bitLength)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using array of bytes as the bit source,
/// information about first bit index, length of the bit span and bit ordering.
/// </summary>
/// <param name="array">
/// Array that holds source bits.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <param name="bitOrder">
/// Order of the bits within each source byte.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="array"/> is not empty and <paramref name="startBitIndex"/> is greater than the number of bits available.
/// <para>-or-</para>
/// <paramref name="array"/> is empty and <paramref name="startBitIndex"/> is different than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> applied to <paramref name="startBitIndex"/> does not fit into the number of bits available.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(byte[]? array, int startBitIndex, int bitLength, BitwiseOrder bitOrder)
: this(new Span<byte>(array), startBitIndex, bitLength, bitOrder)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using memory address as the bit source,
/// information about bit length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="address">
/// Pointer to the bit source memory.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[CLSCompliant(false)]
public unsafe BitSpan(void* address, int bitLength)
: this(new Span<byte>(address, BitCalculations.GetElementCountNeeded<byte>(bitLength)), 0, bitLength)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using memory address as the bit source,
/// information about first bit index, length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="address">
/// Pointer to the bit source memory.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[CLSCompliant(false)]
public unsafe BitSpan(void* address, int startBitIndex, int bitLength)
: this(new Span<byte>(address, BitCalculations.GetElementCountNeeded<byte>(startBitIndex + bitLength)), startBitIndex, bitLength)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using memory address as the bit source,
/// information about first bit index, length of the bit span and bit ordering.
/// </summary>
/// <param name="address">
/// Pointer to the bit source memory.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <param name="bitOrder">
/// Order of the bits within each source byte.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[CLSCompliant(false)]
public unsafe BitSpan(void* address, int startBitIndex, int bitLength, BitwiseOrder bitOrder)
: this(new Span<byte>(address, BitCalculations.GetElementCountNeeded<byte>(startBitIndex + bitLength)), startBitIndex, bitLength, bitOrder)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using reference to the first bit source byte,
/// information about bit length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="reference">
/// Reference to the first bit source byte.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(ref byte reference, int bitLength)
: this(ref reference, 0, bitLength)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using reference to the first bit source byte,
/// information about first bit index, length of the bit span and most significant bit ordering.
/// </summary>
/// <param name="reference">
/// Reference to the first bit source byte.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(ref byte reference, int startBitIndex, int bitLength)
: this(MemoryMarshal.CreateSpan<byte>(ref reference, BitCalculations.GetElementCountNeeded<byte>(startBitIndex + bitLength)), startBitIndex, bitLength)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BitSpan"/> struct using reference to the first bit source byte,
/// information about first bit index, length of the bit span and bit ordering.
/// </summary>
/// <param name="reference">
/// Reference to the first bit source byte.
/// </param>
/// <param name="startBitIndex">
/// Index of the first bit of the span.
/// </param>
/// <param name="bitLength">
/// Length in bits of the span.
/// </param>
/// <param name="bitOrder">
/// Order of the bits within each source byte.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// </exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan(ref byte reference, int startBitIndex, int bitLength, BitwiseOrder bitOrder)
: this(MemoryMarshal.CreateSpan<byte>(ref reference, BitCalculations.GetElementCountNeeded<byte>(startBitIndex + bitLength)), startBitIndex, bitLength, bitOrder)
{
}
/// <summary>
/// Gets an empty bit span.
/// </summary>
public static BitSpan Empty
{
get
{
return default;
}
}
/// <summary>
/// Gets the number of bits in the span.
/// </summary>
public int Length
{
get;
}
/// <summary>
/// Gets the order in which the bits within each byte are ordered.
/// </summary>
public BitwiseOrder BitOrder
{
get;
}
/// <summary>
/// Gets a value indicating whether the current bit block is empty.
/// </summary>
public bool IsEmpty
{
get
{
return this.Length == 0;
}
}
/// <summary>
/// Gets a value of the bit located at the specified bit index.
/// </summary>
/// <param name="bitIndex">
/// Bit index for which a bit value shall be obtained.
/// </param>
/// <returns>
/// <see langword="true"/> if the bit at the specified bit position is set;
/// otherwise, <see langword="false"/>.
/// </returns>
public bool this[int bitIndex]
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get
{
_ = Requires.ArgumentLessThanOrEquals(bitIndex, this.Length, nameof(bitIndex), Resources.Exception_ArgumentOutOfRange_BitIndexOutsideOfBitsSpan);
int bitIndexWithinByte = this.startBitIndex + bitIndex; // account for the span's first-bit offset
int byteIndex = BitCalculations.SplitBitIndex<byte>(ref bitIndexWithinByte);
return (this.bits[byteIndex] & GetByteMask(bitIndexWithinByte, 1, this.BitOrder)) != 0;
}
}
/// <summary>
/// Defines an implicit conversion of a <see cref="Span{T}">Span<byte></see> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="span">
/// The span to be converted to a <see cref="BitSpan"/>.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static implicit operator BitSpan(Span<byte> span)
{
return FromSpan(span);
}
/// <summary>
/// Defines an implicit conversion of a <see cref="Array">byte[]</see> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="array">
/// The array to be converted to a <see cref="BitSpan"/>.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static implicit operator BitSpan(byte[]? array)
{
return FromByteArray(array);
}
/// <summary>
/// Defines an implicit conversion of an <see cref="ArraySegment{T}">ArraySegment<byte></see> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="arraySegment">
/// The array segment to be converted to a <see cref="BitSpan"/>.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static implicit operator BitSpan(ArraySegment<byte> arraySegment)
{
return FromArraySegment(arraySegment);
}
/// <summary>
/// Defines an implicit conversion of a <see cref="BitSpan"/> to a <see cref="ReadOnlyBitSpan"/>.
/// </summary>
/// <param name="bitSpan">
/// The bit span to be converted to a <see cref="ReadOnlyBitSpan"/>.
/// </param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
[SuppressMessage("Usage", "CA2225:Operator overloads have named alternates", Justification = "Alternative method is named AsReadOnly by convention.")]
public static implicit operator ReadOnlyBitSpan(BitSpan bitSpan)
{
return bitSpan.AsReadOnly();
}
/// <summary>
/// Returns a value that indicates whether two <see cref="BitSpan"/> objects are equal.
/// </summary>
/// <param name="left">
/// The first block to compare.
/// </param>
/// <param name="right">
/// The second block to compare.
/// </param>
/// <returns>
/// <see langword="true"/> if the two <see cref="BitSpan"/> objects are equal;
/// otherwise, <see langword="false"/>.
/// </returns>
public static bool operator ==(BitSpan left, BitSpan right)
{
return left.bits == right.bits
&& left.startBitIndex == right.startBitIndex
&& left.Length == right.Length
&& left.BitOrder == right.BitOrder;
}
/// <summary>
/// Returns a value that indicates whether two <see cref="BitSpan"/> objects are not equal.
/// </summary>
/// <param name="left">
/// The first block to compare.
/// </param>
/// <param name="right">
/// The second block to compare.
/// </param>
/// <returns>
/// <see langword="true"/> if the two <see cref="BitSpan"/> objects are not equal;
/// otherwise, <see langword="false"/>.
/// </returns>
public static bool operator !=(BitSpan left, BitSpan right)
{
return !(left == right);
}
/// <summary>
/// Defines conversion of a <see cref="Span{T}">Span<byte></see> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="span">
/// The span to be converted to a <see cref="BitSpan"/>.
/// </param>
/// <returns>
/// A bit span that corresponds to the span.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static BitSpan FromSpan(Span<byte> span)
{
return new BitSpan(span);
}
/// <summary>
/// Defines conversion of a byte <see cref="Array"/> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="array">
/// The array to be converted to a <see cref="BitSpan"/>.
/// </param>
/// <returns>
/// A span that corresponds to the array.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static BitSpan FromByteArray(byte[]? array)
{
return new BitSpan(array);
}
/// <summary>
/// Defines conversion of an <see cref="ArraySegment{T}">ArraySegment<byte></see> to a <see cref="BitSpan"/>.
/// </summary>
/// <param name="arraySegment">
/// The array segment to be converted to a <see cref="BitSpan"/>.
/// </param>
/// <returns>
/// A span that corresponds to the array segment.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static BitSpan FromArraySegment(ArraySegment<byte> arraySegment)
{
return new BitSpan(arraySegment);
}
/// <summary>
/// Calls to this method are not supported.
/// </summary>
/// <param name="obj">
/// Not supported.
/// </param>
/// <returns>
/// Method not supported.
/// </returns>
/// <exception cref="NotSupportedException">
/// Thrown always as calls to this method are not supported.
/// </exception>
[Obsolete("Equals() on BitSpan will always throw an exception. Use == instead.")]
[EditorBrowsable(EditorBrowsableState.Never)]
[SuppressMessage("Blocker Code Smell", "S3877:Exceptions should not be thrown from unexpected methods", Justification = "Exception required by ref struct language feature.")]
[SuppressMessage("Design", "CA1065:Do not raise exceptions in unexpected locations", Justification = "Exception required by ref struct language feature.")]
public override bool Equals(object? obj)
{
throw new NotSupportedException(Resources.Exception_NotSupported_CannotCallEqualsOnBitBlock);
}
/// <summary>
/// Calls to this method are not supported.
/// </summary>
/// <returns>
/// Method not supported.
/// </returns>
/// <exception cref="NotSupportedException">
/// Thrown always as calls to this method are not supported.
/// </exception>
[Obsolete("GetHashCode() on BitSpan will always throw an exception.")]
[EditorBrowsable(EditorBrowsableState.Never)]
[SuppressMessage("Blocker Code Smell", "S3877:Exceptions should not be thrown from unexpected methods", Justification = "Exception required by ref struct language feature.")]
[SuppressMessage("Design", "CA1065:Do not raise exceptions in unexpected locations", Justification = "Exception required by ref struct language feature.")]
public override int GetHashCode()
{
throw new NotSupportedException(Resources.Exception_NotSupported_CannotCallGetHashCodeOnBitBlock);
}
/// <summary>
/// Returns the string representation of this <see cref="BitSpan"/> object.
/// </summary>
/// <returns>
/// The string representation of this <see cref="BitSpan"/> object.
/// </returns>
public override string ToString()
{
return $"{typeof(BitSpan).FullName}[{this.Length}]";
}
/// <summary>
/// Forms a slice out of the current bit span starting at a specified bit index for a specified bit length.
/// </summary>
/// <param name="startBitIndex">
/// The bit index at which to begin the slice.
/// </param>
/// <param name="bitLength">
/// The desired bit length for the slice.
/// </param>
/// <returns>
/// A bit span that consists of <paramref name="bitLength"/> bits from the current bit span starting at <paramref name="startBitIndex"/>.
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="startBitIndex"/> is not greater than zero.
/// <para>-or-</para>
/// <paramref name="startBitIndex"/> is greater than specified bit size.
/// <para>-or-</para>
/// <paramref name="bitLength"/> is less than zero.
/// <para>-or-</para>
/// <paramref name="bitLength"/> applied to <paramref name="startBitIndex"/> does not fit into the available bit size.
/// </exception>
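/// <remarks>
/// For example (illustrative), <c>bits.Slice(2, 3)</c> exposes bits 2, 3 and 4 of the current bit span.
/// </remarks>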
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public BitSpan Slice(int startBitIndex, int bitLength)
{
Requires.BitBlockWithinAvailableSpan(startBitIndex, bitLength, this.Length, nameof(startBitIndex), nameof(bitLength));
return new BitSpan(this.bits, this.startBitIndex + startBitIndex, bitLength);
}
/// <summary>
/// Creates a read-only bit span that represents the same bit span as the current instance.
/// </summary>
/// <returns>
/// A newly created instance of the <see cref="ReadOnlyBitSpan"/> read-only ref <see langword="struct"/> that represents the same bit span as the current instance.
/// </returns>
public ReadOnlyBitSpan AsReadOnly()
{
return new ReadOnlyBitSpan(this.bits, this.startBitIndex, this.Length, this.BitOrder);
}
/// <summary>
/// Copies as much bits as possible from the current bit span to the destination.
/// </summary>
/// <param name="destination">
/// Destination span for copied bits.
/// </param>
/// <returns>
/// Number of bits copied.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int CopyBitsTo(BitSpan destination)
{
return destination.CopyBitsFrom(this.bits, this.startBitIndex, this.Length, this.BitOrder);
}
/// <summary>
/// Gets a bit mask composed of one single block of '1' bits.
/// </summary>
/// <param name="startBitIndex">
/// Index of the first bit at which the block of '1' bits shall start.
/// </param>
/// <param name="bitLength">
/// Number of '1' bits in the block.
/// </param>
/// <param name="bitOrder">
/// Determines which bit in the value is to be considered as bit at the index 0.
/// </param>
/// <returns>
/// Value whose only '1' bits are located in a consecutive block of the specified length
/// starting at the specified bit index.
/// </returns>
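/// <remarks>
/// For example, <c>GetByteMask(2, 3, BitwiseOrder.MostSignificantFirst)</c> returns <c>0b0011_1000</c> (0x38),
/// while <c>GetByteMask(2, 3, BitwiseOrder.LeastSignificantFirst)</c> returns <c>0b0001_1100</c> (0x1C).
/// </remarks>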
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte GetByteMask(int startBitIndex, int bitLength, BitwiseOrder bitOrder)
{
return (byte)(bitOrder == BitwiseOrder.MostSignificantFirst
? ((byte)(byte.MaxValue << (BitCalculations.BitSizeOf<byte>() - bitLength))) >> startBitIndex
: (byte.MaxValue >> (BitCalculations.BitSizeOf<byte>() - bitLength)) << startBitIndex);
}
/// <summary>
/// Copies as much bits as possible from the specified source read-only bit span to the current span as destination.
/// </summary>
/// <param name="source">
/// Source read-only span with bits to copy.
/// </param>
/// <param name="sourceBitIndex">
/// Index of the bit in the <paramref name="source"/> from which the copy shall begin.
/// </param>
/// <param name="sourceBitCount">
/// Number of bits in the <paramref name="source"/> to copy.
/// </param>
/// <param name="sourceBitOrder">
/// Order in which the bits within each source bytes are ordered.
/// </param>
/// <returns>
/// Number of bits copied.
/// </returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal int CopyBitsFrom(ReadOnlySpan<byte> source, int sourceBitIndex, int sourceBitCount, BitwiseOrder sourceBitOrder)
{
int destinationBitIndex = this.startBitIndex;
int bitsToCopy = Math.Min(this.Length, sourceBitCount);
int sourceByteIndex = BitCalculations.SplitBitIndex<byte>(ref sourceBitIndex);
int destinationByteIndex = BitCalculations.SplitBitIndex<byte>(ref destinationBitIndex);
CopyBits(source[sourceByteIndex..], sourceBitIndex, sourceBitOrder, this.bits[destinationByteIndex..], destinationBitIndex, this.BitOrder, bitsToCopy);
return bitsToCopy;
}
/// <summary>
/// Copies specified number of bits from the source span to the destination.
/// </summary>
/// <param name="source">
/// Source span with bits to copy.
/// </param>
/// <param name="sourceBitIndex">
/// Index of the first source bit to copy.
/// </param>
/// <param name="sourceBitOrder">
/// Order in which the bits within each source bytes are ordered.
/// </param>
/// <param name="destination">
/// Destination for the copied bits.
/// </param>
/// <param name="destinationBitIndex">
/// Index of the location where the first bit shall be copied.
/// </param>
/// <param name="destinationBitOrder">
/// Order in which the bits within each destination bytes are ordered.
/// </param>
/// <param name="bitsToCopy">
/// Number of bits to copy.
/// </param>
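/// <remarks>
/// Illustrative example: copying from source bit offset 3 to destination bit offset 5 means each destination
/// byte must be assembled from two adjacent source bytes shifted by 2 bits, so the shifting path is taken;
/// with equal offsets the bytes line up and only per-byte masking is needed.
/// </remarks>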
private static void CopyBits(
ReadOnlySpan<byte> source,
int sourceBitIndex,
BitwiseOrder sourceBitOrder,
Span<byte> destination,
int destinationBitIndex,
BitwiseOrder destinationBitOrder,
int bitsToCopy)
{
if (sourceBitIndex != destinationBitIndex)
{
CopyBitsWithShift(source, sourceBitIndex, sourceBitOrder, destination, destinationBitIndex, destinationBitOrder, bitsToCopy);
}
else
{
CopyBitsWithoutShift(source, sourceBitOrder, destination, destinationBitOrder, destinationBitIndex, bitsToCopy);
}
}
/// <summary>
/// Copies bits from the source span to the destination span, shifting bits where the source and destination bit offsets differ.
/// </summary>
/// <param name="source">
/// Source span with bits to copy.
/// </param>
/// <param name="sourceBitIndex">
/// Index of the first source bit to copy.
/// </param>
/// <param name="sourceBitOrder">
/// Order in which the bits within each source bytes are ordered.
/// </param>
/// <param name="destination">
/// Destination for the copied bits.
/// </param>
/// <param name="destinationBitIndex">
/// Index of the location where the first bit shall be copied.
/// </param>
/// <param name="destinationBitOrder">
/// Order in which the bits within each destination bytes are ordered.
/// </param>
/// <param name="bitCount">
/// Number of bits to copy.
/// </param>
private static void CopyBitsWithShift(
ReadOnlySpan<byte> source,
int sourceBitIndex,
BitwiseOrder sourceBitOrder,
Span<byte> destination,
int destinationBitIndex,
BitwiseOrder destinationBitOrder,
int bitCount)
{
int shiftCount = destinationBitIndex - sourceBitIndex;
int shiftCountTowardsFirstBit = (shiftCount > 0 ? BitCalculations.BitSizeOf<byte>() : 0) - shiftCount;
int shiftCountTowardsLastBit = BitCalculations.BitSizeOf<byte>() - shiftCountTowardsFirstBit;
int sourceIndex = 0;
int destinationIndex = 0;
byte previousValue = 0;
int maskStart = destinationBitIndex;
if (shiftCount < 0)
{
sourceIndex = 1;
previousValue = EnsureBitOrder(source[0], sourceBitOrder, destinationBitOrder);
}
while (bitCount > 0)
{
byte nextValue = (sourceIndex < source.Length) ? EnsureBitOrder(source[sourceIndex++], sourceBitOrder, destinationBitOrder) : (byte)0;
byte sourceValue = (destinationBitOrder == BitwiseOrder.MostSignificantFirst)
? (byte)((previousValue << shiftCountTowardsFirstBit) | (nextValue >> shiftCountTowardsLastBit))
: (byte)((previousValue >> shiftCountTowardsFirstBit) | (nextValue << shiftCountTowardsLastBit));
previousValue = nextValue;
ref byte destinationValue = ref destination[destinationIndex++];
int bitsInThisStep = Math.Min(BitCalculations.BitSizeOf<byte>() - maskStart, bitCount);
byte mask = GetByteMask(maskStart, bitsInThisStep, destinationBitOrder);
destinationValue ^= (byte)((destinationValue ^ sourceValue) & mask);
bitCount -= bitsInThisStep;
maskStart = 0;
}
}
/// <summary>
/// Copies bits from memory aligned part of the source and destination spans.
/// </summary>
/// <param name="source">
/// Source span with bits to copy.
/// </param>
/// <param name="sourceBitOrder">
/// Order in which the bits within each source bytes are ordered.
/// </param>
/// <param name="destination">
/// Destination for the copied bits.
/// </param>
/// <param name="destinationBitOrder">
/// Order in which the bits within each destination bytes are ordered.
/// </param>
/// <param name="bitIndex">
/// Bit index of the first bit to copy, identical for both the source and the destination.
/// </param>
/// <param name="bitCount">
/// Number of bits to copy.
/// </param>
private static void CopyBitsWithoutShift(
ReadOnlySpan<byte> source,
BitwiseOrder sourceBitOrder,
Span<byte> destination,
BitwiseOrder destinationBitOrder,
int bitIndex,
int bitCount)
{
int sourceIndex = 0;
int destinationIndex = 0;
int maskStart = bitIndex;
while (bitCount > 0)
{
byte sourceValue = EnsureBitOrder(source[sourceIndex++], sourceBitOrder, destinationBitOrder);
ref byte destinationValue = ref destination[destinationIndex++];
int bitsInThisStep = Math.Min(BitCalculations.BitSizeOf<byte>() - maskStart, bitCount);
byte mask = GetByteMask(maskStart, bitsInThisStep, destinationBitOrder);
destinationValue ^= (byte)((destinationValue ^ sourceValue) & mask);
bitCount -= bitsInThisStep;
maskStart = 0;
}
}
/// <summary>
/// Reverses bit order of the specified value if the value bit order does not match the expected order.
/// </summary>
/// <param name="value">
/// Value which bits shall be in the expected bit order.
/// </param>
/// <param name="valueBitOrder">
/// Bit order of the input <paramref name="value"/>.
/// </param>
/// <param name="expectedBitOrder">
/// Expected bit order.
/// </param>
/// <returns>
/// A <see cref="byte"/> value that has the same bits as input <paramref name="value"/> but in expected order.
/// </returns>
private static byte EnsureBitOrder(byte value, BitwiseOrder valueBitOrder, BitwiseOrder expectedBitOrder)
{
return (valueBitOrder == expectedBitOrder) ? value : ReverseBits(value);
}
/// <summary>
/// Reverses bits in the specified <see cref="byte"/> value.
/// </summary>
/// <param name="value">
/// Value which bits shall be reversed.
/// </param>
/// <returns>
/// A <see cref="byte"/> value that has the same bits as input <paramref name="value"/> but in reverse order.
/// </returns>
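/// <remarks>
/// For example, reversing <c>0b1100_0001</c> yields <c>0b1000_0011</c>.
/// </remarks>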
private static byte ReverseBits(byte value)
{
return (Unsafe.SizeOf<nuint>() == sizeof(ulong))
? ReverseBitsUsingUInt64(value)
: ReverseBitsUsingUInt32(value);
}
/// <summary>
/// Reverses bits in the specified <see cref="byte"/> value using <see cref="uint"/> operations.
/// </summary>
/// <param name="value">
/// Value which bits shall be reversed.
/// </param>
/// <returns>
/// A <see cref="byte"/> value that has the same bits as input <paramref name="value"/> but in reverse order.
/// </returns>
private static byte ReverseBitsUsingUInt32(byte value)
{
const uint BitSpreader1 = 0b_0000_1000_0000_0010U;
const uint BitSpreader2 = 0b_1000_0000_0010_0000U;
const ulong BitSelector1 = 0b_0010_0010_0001_0001_0000U;
const ulong BitSelector2 = 0b_1000_1000_0100_0100_0000U;
const ulong BitAccumulator = 0b_0001_0000_0001_0000_0001U;
return (byte)(((((value * BitSpreader1) & BitSelector1) | ((value * BitSpreader2) & BitSelector2)) * BitAccumulator) >> (BitCalculations.BitSizeOf<uint>() >> 1));
}
/// <summary>
/// Reverses bits in the specified <see cref="byte"/> value using <see cref="ulong"/> operations.
/// </summary>
/// <param name="value">
/// Value which bits shall be reversed.
/// </param>
/// <returns>
/// A <see cref="byte"/> value that has the same bits as input <paramref name="value"/> but in reverse order.
/// </returns>
private static byte ReverseBitsUsingUInt64(byte value)
{
const ulong BitSpreader = 0b_1000_0000_0010_0000_0000_1000_0000_0010UL;
const ulong BitSelector = 0b_0000_1000_1000_0100_0100_0010_0010_0001_0001_0000UL;
const ulong BitAccumulator = 0b_0000_0001_0000_0001_0000_0001_0000_0001_0000_0001UL;
return (byte)((((value * BitSpreader) & BitSelector) * BitAccumulator) >> (BitCalculations.BitSizeOf<ulong>() >> 1));
}
}
}
|