using System;
using System.Reflection;
using IBatisNet.Common.Test.Domain;
using IBatisNet.Common.Utilities;
using IBatisNet.Common.Utilities.Objects.Members;
using NUnit.Framework;
namespace IBatisNet.Common.Test.NUnit.CommonTests.Utilities
{
[TestFixture]
public class ProtectedFieldAccessorTest
{
protected ISetAccessorFactory factorySet = null;
protected IGetAccessorFactory factoryGet = null;
protected ISetAccessor intSetAccessor = null;
protected IGetAccessor intGetAccessor = null;
protected ISetAccessor longSetAccessor = null;
protected IGetAccessor longGetAccessor = null;
protected ISetAccessor sbyteSetAccessor = null;
protected IGetAccessor sbyteGetAccessor = null;
protected ISetAccessor datetimeSetAccessor = null;
protected IGetAccessor datetimeGetAccessor = null;
protected ISetAccessor decimalSetAccessor = null;
protected IGetAccessor decimalGetAccessor = null;
protected ISetAccessor byteSetAccessor = null;
protected IGetAccessor byteGetAccessor = null;
protected ISetAccessor stringSetAccessor = null;
protected IGetAccessor stringGetAccessor = null;
protected ISetAccessor charSetAccessor = null;
protected IGetAccessor charGetAccessor = null;
protected ISetAccessor shortSetAccessor = null;
protected IGetAccessor shortGetAccessor = null;
protected ISetAccessor ushortSetAccessor = null;
protected IGetAccessor ushortGetAccessor = null;
protected ISetAccessor uintSetAccessor = null;
protected IGetAccessor uintGetAccessor = null;
protected ISetAccessor ulongSetAccessor = null;
protected IGetAccessor ulongGetAccessor = null;
protected ISetAccessor boolSetAccessor = null;
protected IGetAccessor boolGetAccessor = null;
protected ISetAccessor doubleSetAccessor = null;
protected IGetAccessor doubleGetAccessor = null;
protected ISetAccessor floatSetAccessor = null;
protected IGetAccessor floatGetAccessor = null;
protected ISetAccessor guidSetAccessor = null;
protected IGetAccessor guidGetAccessor = null;
protected ISetAccessor timespanSetAccessor = null;
protected IGetAccessor timespanGetAccessor = null;
protected ISetAccessor accountSetAccessor = null;
protected IGetAccessor accountGetAccessor = null;
protected ISetAccessor enumSetAccessor = null;
protected IGetAccessor enumGetAccessor = null;
#if dotnet2
protected ISetAccessor nullableSetAccessor = null;
protected IGetAccessor nullableGetAccessor = null;
#endif
#region SetUp & TearDown
/// <summary>
/// SetUp
/// </summary>
[SetUp]
public void SetUp()
{
intSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedInt");
intGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedInt");
longSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedLong");
longGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedLong");
sbyteSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedSbyte");
sbyteGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedSbyte");
stringSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedString");
stringGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedString");
datetimeSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedDateTime");
datetimeGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedDateTime");
decimalSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedDecimal");
decimalGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedDecimal");
byteSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedByte");
byteGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedByte");
charSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedChar");
charGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedChar");
shortSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedShort");
shortGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedShort");
ushortSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedUshort");
ushortGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedUshort");
uintSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedUint");
uintGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedUint");
ulongSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedUlong");
ulongGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedUlong");
boolSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedBool");
boolGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedBool");
doubleSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedDouble");
doubleGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedDouble");
floatSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedFloat");
floatGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedFloat");
guidSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedGuid");
guidGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedGuid");
timespanSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedTimeSpan");
timespanGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedTimeSpan");
accountSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedAccount");
accountGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedAccount");
enumSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedDay");
enumGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedDay");
#if dotnet2
nullableSetAccessor = factorySet.CreateSetAccessor(typeof(Property), "protectedintNullable");
nullableGetAccessor = factoryGet.CreateGetAccessor(typeof(Property), "protectedintNullable");
#endif
}
/// <summary>
/// TearDown
/// </summary>
[TearDown]
public void Dispose()
{
}
#endregion
/// <summary>
/// Initialize the accessor factories.
/// </summary>
[TestFixtureSetUp]
protected virtual void SetUpFixture()
{
factoryGet = new GetAccessorFactory(true);
factorySet = new SetAccessorFactory(true);
}
/// <summary>
/// Release the accessor factories.
/// </summary>
[TestFixtureTearDown]
protected virtual void TearDownFixture()
{
factoryGet = null;
factorySet = null;
}
private FieldInfo GetFieldInfo(string fieldName)
{
return typeof(Property).GetField(fieldName, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
}
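// Hedged sketch of the round-trip pattern every test below follows (the value is illustrative, the
// members are the ones declared in this fixture): seed the protected field through reflection, then
// drive it through the generated accessors and verify both directions.
//
//   Property prop = new Property();
//   FieldInfo fieldInfo = GetFieldInfo("protectedInt");
//   intSetAccessor.Set(prop, 57);                       // write through the ISetAccessor
//   Assert.AreEqual(57, fieldInfo.GetValue(prop));      // verify via reflection
//   Assert.AreEqual(57, intGetAccessor.Get(prop));      // read back through the IGetAccessor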
/// <summary>
/// Test setting null on an integer field.
/// </summary>
[Test]
public void TestSetNullOnIntegerField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedInt");
fieldInfo.SetValue(prop, -99);
// Property accessor
intSetAccessor.Set(prop, null);
Assert.AreEqual(0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting an integer field.
/// </summary>
[Test]
public void TestSetIntegerField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedInt");
fieldInfo.SetValue(prop, -99);
// Property accessor
int test = 57;
intSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting an integer field.
/// </summary>
[Test]
public void TestGetIntegerField()
{
int test = -99;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedInt");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, intGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a long field.
/// </summary>
[Test]
public void TestSetNullOnLongField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedLong");
fieldInfo.SetValue(prop, 78945566664213223);
// Property accessor
longSetAccessor.Set(prop, null);
Assert.AreEqual((long)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a long field.
/// </summary>
[Test]
public void TestSetLongField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedLong");
fieldInfo.SetValue(prop, 78945566664213223);
// Property accessor
long test = 123456789987456;
longSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a long field.
/// </summary>
[Test]
public void TestGetLongField()
{
long test = 78945566664213223;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedLong");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, longGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on an sbyte field.
/// </summary>
[Test]
public void TestSetNullOnSbyteField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedSbyte");
fieldInfo.SetValue(prop, (SByte)78);
// Property accessor
sbyteSetAccessor.Set(prop, null);
Assert.AreEqual((sbyte)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting an sbyte field.
/// </summary>
[Test]
public void TestSetSbyteField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedSbyte");
fieldInfo.SetValue(prop, (SByte)78);
// Property accessor
sbyte test = 19;
sbyteSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting an sbyte field.
/// </summary>
[Test]
public void TestGetSbyteField()
{
sbyte test = 78;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedSbyte");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, sbyteGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a string field.
/// </summary>
[Test]
public void TestSetNullOnStringField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedString");
fieldInfo.SetValue(prop, "abc");
// Property accessor
stringSetAccessor.Set(prop, null);
Assert.IsNull(fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a string field.
/// </summary>
[Test]
public void TestSetStringField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedString");
fieldInfo.SetValue(prop, "abc");
// Property accessor
string test = "wxc";
stringSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a string field.
/// </summary>
[Test]
public void TestGetStringField()
{
string test = "abc";
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedString");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, stringGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a DateTime field.
/// </summary>
[Test]
public void TestSetNullOnDateTimeField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDateTime");
fieldInfo.SetValue(prop, DateTime.Now);
// Property accessor
datetimeSetAccessor.Set(prop, null);
Assert.AreEqual(DateTime.MinValue, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a DateTime field.
/// </summary>
[Test]
public void TestSetDateTimeField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDateTime");
fieldInfo.SetValue(prop, DateTime.Now);
// Property accessor
DateTime test = new DateTime(1987, 11, 25);
datetimeSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a DateTime field.
/// </summary>
[Test]
public void TestGetDateTimeField()
{
DateTime test = new DateTime(1987, 11, 25);
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDateTime");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, datetimeGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a decimal field.
/// </summary>
[Test]
public void TestSetNullOnDecimalField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDecimal");
fieldInfo.SetValue(prop, 45.187M);
// Property accessor
decimalSetAccessor.Set(prop, null);
Assert.AreEqual(0.0M, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a decimal field.
/// </summary>
[Test]
public void TestSetDecimalField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDecimal");
fieldInfo.SetValue(prop, 45.187M);
// Property accessor
Decimal test = 789456.141516M;
decimalSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a decimal field.
/// </summary>
[Test]
public void TestGetDecimalField()
{
Decimal test = 789456.141516M;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDecimal");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, decimalGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a byte field.
/// </summary>
[Test]
public void TestSetNullOnByteField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedByte");
fieldInfo.SetValue(prop, (Byte)78);
// Property accessor
byteSetAccessor.Set(prop, null);
Assert.AreEqual((byte)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a byte field.
/// </summary>
[Test]
public void TestSetByteField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedByte");
fieldInfo.SetValue(prop, (Byte)15);
// Property accessor
byte test = 94;
byteSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a byte field.
/// </summary>
[Test]
public void TestGetByteField()
{
byte test = 78;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedByte");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, byteGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a char field.
/// </summary>
[Test]
public void TestSetNullOnCharField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedChar");
fieldInfo.SetValue(prop, 'r');
// Property accessor
charSetAccessor.Set(prop, null);
Assert.AreEqual('\0', fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a char field.
/// </summary>
[Test]
public void TestSetCharField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedChar");
fieldInfo.SetValue(prop, 'b');
// Property accessor
char test = 'j';
charSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a char field.
/// </summary>
[Test]
public void TestGetCharField()
{
char test = 'z';
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedChar");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, charGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a short field.
/// </summary>
[Test]
public void TestSetNullOnShortField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedShort");
fieldInfo.SetValue(prop, (short)5);
// Property accessor
shortSetAccessor.Set(prop, null);
Assert.AreEqual((short)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a short field.
/// </summary>
[Test]
public void TestSetShortField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedShort");
fieldInfo.SetValue(prop, (short)9);
// Property accessor
short test = 45;
shortSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a short field.
/// </summary>
[Test]
public void TestGetShortField()
{
short test = 99;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedShort");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, shortGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a ushort field.
/// </summary>
[Test]
public void TestSetNullOnUShortField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUshort");
fieldInfo.SetValue(prop, (ushort)5);
// Property accessor
ushortSetAccessor.Set(prop, null);
Assert.AreEqual((ushort)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a ushort field.
/// </summary>
[Test]
public void TestSetUShortField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUshort");
fieldInfo.SetValue(prop, (ushort)9);
// Property accessor
ushort test = 45;
ushortSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a ushort field.
/// </summary>
[Test]
public void TestGetUShortField()
{
ushort test = 99;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUshort");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, ushortGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a uint field.
/// </summary>
[Test]
public void TestSetNullOnUIntField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUint");
fieldInfo.SetValue(prop, (UInt32)5);
// Property accessor
uintSetAccessor.Set(prop, null);
Assert.AreEqual((uint)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a uint field.
/// </summary>
[Test]
public void TestSetUIntField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUint");
fieldInfo.SetValue(prop, (UInt32)9);
// Property accessor
uint test = 45;
uintSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a uint field.
/// </summary>
[Test]
public void TestGetUIntField()
{
uint test = 99;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUint");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, uintGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a ulong field.
/// </summary>
[Test]
public void TestSetNullOnULongField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUlong");
fieldInfo.SetValue(prop, (UInt64) 5L);
// Property accessor
ulongSetAccessor.Set(prop, null);
Assert.AreEqual((ulong)0, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a ulong field.
/// </summary>
[Test]
public void TestSetULongField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUlong");
fieldInfo.SetValue(prop, (UInt64)45464646578);
// Property accessor
ulong test = 45;
ulongSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a ulong field.
/// </summary>
[Test]
public void TestGetULongField()
{
ulong test = 99;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedUlong");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, ulongGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a bool field.
/// </summary>
[Test]
public void TestSetNullOnBoolField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedBool");
fieldInfo.SetValue(prop, true);
// Property accessor
boolSetAccessor.Set(prop, null);
Assert.AreEqual(false, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a bool field.
/// </summary>
[Test]
public void TestSetBoolField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedBool");
fieldInfo.SetValue(prop, false);
// Property accessor
bool test = true;
boolSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a bool field.
/// </summary>
[Test]
public void TestGetBoolField()
{
bool test = false;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedBool");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, boolGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a double field.
/// </summary>
[Test]
public void TestSetNullOnDoubleField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDouble");
fieldInfo.SetValue(prop, 788956.56D);
// Property accessor
doubleSetAccessor.Set(prop, null);
Assert.AreEqual(0.0D, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a double field.
/// </summary>
[Test]
public void TestSetDoubleField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDouble");
fieldInfo.SetValue(prop, 56789123.45888D);
// Property accessor
double test = 788956.56D;
doubleSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a double field.
/// </summary>
[Test]
public void TestGetDoubleField()
{
double test = 788956.56D;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDouble");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, doubleGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a float field.
/// </summary>
[Test]
public void TestSetNullOnFloatField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedFloat");
fieldInfo.SetValue(prop, 565.45F);
// Property accessor
floatSetAccessor.Set(prop, null);
Assert.AreEqual(0.0D, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a float field.
/// </summary>
[Test]
public void TestSetFloatField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedFloat");
fieldInfo.SetValue(prop, 565.45F);
// Property accessor
float test = 4567.45F;
floatSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a float field.
/// </summary>
[Test]
public void TestGetFloatField()
{
float test = 565.45F;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedFloat");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, floatGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a Guid field.
/// </summary>
[Test]
public void TestSetNullOnGuidField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedGuid");
fieldInfo.SetValue(prop, Guid.NewGuid());
// Property accessor
guidSetAccessor.Set(prop, null);
Assert.AreEqual(Guid.Empty, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a Guid field.
/// </summary>
[Test]
public void TestSetGuidField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedGuid");
fieldInfo.SetValue(prop, Guid.NewGuid());
// Property accessor
Guid test = Guid.NewGuid();
guidSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a Guid field.
/// </summary>
[Test]
public void TestGetGuidField()
{
Guid test = Guid.NewGuid();
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedGuid");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, guidGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on a TimeSpan field.
/// </summary>
[Test]
public void TestSetNullOnTimeSpanField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedTimeSpan");
fieldInfo.SetValue(prop, new TimeSpan(5, 12, 57, 21, 13));
// Property accessor
timespanSetAccessor.Set(prop, null);
Assert.AreEqual(new TimeSpan(0, 0, 0), fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting a TimeSpan field.
/// </summary>
[Test]
public void TestSetTimeSpanField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedTimeSpan");
fieldInfo.SetValue(prop, new TimeSpan(5, 12, 57, 21, 13));
// Property accessor
TimeSpan test = new TimeSpan(15, 5, 21, 45, 35);
timespanSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a TimeSpan field.
/// </summary>
[Test]
public void TestGetTimeSpanField()
{
TimeSpan test = new TimeSpan(5, 12, 57, 21, 13);
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedTimeSpan");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, timespanGetAccessor.Get(prop));
}
/// <summary>
/// Test setting null on an object field.
/// </summary>
[Test]
public void TestSetNullOnAccountField()
{
Property prop = new Property();
Account ac = new Account();
ac.FirstName = "test";
FieldInfo fieldInfo = GetFieldInfo("protectedAccount");
fieldInfo.SetValue(prop, ac);
// Property accessor
accountSetAccessor.Set(prop, null);
Assert.AreEqual(null, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting an object field.
/// </summary>
[Test]
public void TestGetAccountField()
{
Account test = new Account();
test.FirstName = "Gilles";
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedAccount");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(HashCodeProvider.GetIdentityHashCode(test), HashCodeProvider.GetIdentityHashCode(fieldInfo.GetValue(prop)));
Assert.AreEqual(test.FirstName, ((Account)accountGetAccessor.Get(prop)).FirstName);
}
/// <summary>
/// Test setting an object field.
/// </summary>
[Test]
public void TestSetAccountField()
{
Property prop = new Property();
Account ac = new Account();
ac.FirstName = "test";
FieldInfo fieldInfo = GetFieldInfo("protectedAccount");
fieldInfo.SetValue(prop, ac);
// Property accessor
string firstName = "Gilles";
Account test = new Account();
test.FirstName = firstName;
accountSetAccessor.Set(prop, test);
Assert.AreEqual(firstName, ((Account)fieldInfo.GetValue(prop) ).FirstName);
}
/// <summary>
/// Test setting null on an enum field.
/// </summary>
[Test]
public void TestSetNullOnEnumField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDay");
// Property accessor
enumSetAccessor.Set(prop, null);
Assert.AreEqual(0, (int)fieldInfo.GetValue(prop));
}
/// <summary>
/// Test setting an enum field.
/// </summary>
[Test]
public void TestSetEnumField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDay");
fieldInfo.SetValue(prop, Days.Thu);
// Property accessor
Days test = Days.Wed;
enumSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting an enum field.
/// </summary>
[Test]
public void TestGetEnumField()
{
Days test = Days.Wed;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedDay");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, enumGetAccessor.Get(prop));
}
#if dotnet2
/// <summary>
/// Test setting null on a nullable int field.
/// </summary>
[Test]
public void TestSetNullOnNullableIntField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedintNullable");
fieldInfo.SetValue(prop, 85);
// Property accessor
nullableSetAccessor.Set(prop, null);
Assert.AreEqual(null, fieldInfo.GetValue(prop));
}
/// <summary>
/// Test getting a nullable int field.
/// </summary>
[Test]
public void TestGetNullableIntField()
{
Int32? test = 55;
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedintNullable");
fieldInfo.SetValue(prop, test);
// Property accessor
Assert.AreEqual(test, nullableGetAccessor.Get(prop));
}
/// <summary>
/// Test setting a nullable int field.
/// </summary>
[Test]
public void TestSetNullableIntField()
{
Property prop = new Property();
FieldInfo fieldInfo = GetFieldInfo("protectedintNullable");
fieldInfo.SetValue(prop, 99);
// Property accessor
Int32? test = 55;
nullableSetAccessor.Set(prop, test);
Assert.AreEqual(test, fieldInfo.GetValue(prop));
}
#endif
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using RestSharp.Portable.OAuth2.Configuration;
using RestSharp.Portable.OAuth2.Infrastructure;
using RestSharp.Portable.OAuth2.Models;
namespace RestSharp.Portable.OAuth2
{
/// <summary>
/// Base class for OAuth2 client implementation.
/// </summary>
public abstract class OAuth2Client : IClient
{
private const string _accessTokenKey = "access_token";
private const string _refreshTokenKey = "refresh_token";
private const string _expiresKey = "expires_in";
private const string _tokenTypeKey = "token_type";
private const string _grantTypeAuthorizationKey = "authorization_code";
private const string _grantTypeRefreshTokenKey = "refresh_token";
private readonly IRequestFactory _factory;
/// <summary>
/// Initializes a new instance of the <see cref="OAuth2Client"/> class.
/// </summary>
/// <param name="factory">The factory.</param>
/// <param name="configuration">The configuration.</param>
protected OAuth2Client(IRequestFactory factory, IClientConfiguration configuration)
{
ExpirationSafetyMargin = TimeSpan.FromSeconds(5);
_factory = factory;
Configuration = configuration;
}
/// <summary>
/// Gets the client configuration object.
/// </summary>
public IClientConfiguration Configuration { get; }
/// <summary>
/// Gets the friendly name of the provider (OAuth2 service).
/// </summary>
public abstract string Name { get; }
/// <summary>
/// Gets the state (any additional information that was provided by the application and is posted back by the service).
/// </summary>
public string State { get; private set; }
/// <summary>
/// Gets or sets the access token returned by the provider. Can be used for further calls to the provider API.
/// </summary>
public string AccessToken { get; protected set; }
/// <summary>
/// Gets or sets the refresh token returned by the provider. Can be used for further calls to the provider API.
/// </summary>
public string RefreshToken { get; protected set; }
/// <summary>
/// Gets the token type returned by the provider. Can be used for further calls to the provider API.
/// </summary>
public string TokenType { get; private set; }
/// <summary>
/// Gets or sets the time when the access token expires.
/// </summary>
public DateTime? ExpiresAt { get; protected set; }
/// <summary>
/// Gets or sets a safety margin that is used to determine whether the access token has expired.
/// </summary>
public TimeSpan ExpirationSafetyMargin { get; set; }
/// <summary>
/// Gets the instance of the request factory.
/// </summary>
protected IRequestFactory Factory => _factory;
/// <summary>
/// Gets the URI of the service which issues the access code.
/// </summary>
protected abstract Endpoint AccessCodeServiceEndpoint { get; }
/// <summary>
/// Gets the URI of the service which issues the access token.
/// </summary>
protected abstract Endpoint AccessTokenServiceEndpoint { get; }
/// <summary>
/// Gets the URI of the service which provides information about the user
/// who is currently logged in.
/// </summary>
protected abstract Endpoint UserInfoServiceEndpoint { get; }
private string GrantType { get; set; }
/// <summary>
/// Returns the URI of the service which should be called in order to start the authentication process.
/// This URI should be used for rendering the login link.
/// </summary>
/// <param name="state">Any additional information that will be posted back by the service.</param>
/// <returns>A string containing the login link URI</returns>
public virtual async Task<string> GetLoginLinkUri(string state = null)
{
var client = _factory.CreateClient(AccessCodeServiceEndpoint);
var request = _factory.CreateRequest(AccessCodeServiceEndpoint);
request.AddObject(
new
{
response_type = "code",
client_id = Configuration.ClientId,
redirect_uri = Configuration.RedirectUri,
scope = Configuration.Scope,
state
},
new[] { (string.IsNullOrEmpty(Configuration.Scope) ? "scope" : null) },
PropertyFilterMode.Exclude);
await BeforeGetLoginLinkUri(
new BeforeAfterRequestArgs()
{
Client = client,
Request = request,
Configuration = Configuration,
});
return client.BuildUri(request).ToString();
}
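// Hedged usage sketch: a concrete provider client (the type and variable names below are assumptions,
// not part of this library) builds the login link and the hosting application redirects the browser to
// it; the provider later posts "code" and "state" back to Configuration.RedirectUri.
//
//   var client = new SomeProviderClient(requestFactory, clientConfiguration);
//   string loginUri = await client.GetLoginLinkUri(state: "xyz");
//   // redirect the user agent to loginUri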
/// <summary>
/// Obtains user information using the RestSharp.Portable.OAuth2 service and data provided via the callback request.
/// </summary>
/// <param name="parameters">Callback request payload (parameters).</param>
/// <returns>The user information</returns>
public async Task<UserInfo> GetUserInfo(ILookup<string, string> parameters)
{
GrantType = _grantTypeAuthorizationKey;
CheckErrorAndSetState(parameters);
await QueryAccessToken(parameters);
return await GetUserInfo();
}
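// Hedged callback sketch: once the provider redirects back, the query-string parameters of that request
// (parsed into an ILookup by the host application; the parsing itself is outside this class) are handed
// to GetUserInfo, which exchanges the authorization code for an access token and then queries the
// user-info endpoint.
//
//   ILookup<string, string> callbackParameters = /* parsed from the redirect request */;
//   UserInfo user = await client.GetUserInfo(callbackParameters);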
/// <summary>
/// Issues the query for the access token and returns it.
/// </summary>
/// <param name="parameters">Callback request payload (parameters).</param>
/// <returns>The access token</returns>
public async Task<string> GetToken(ILookup<string, string> parameters)
{
GrantType = _grantTypeAuthorizationKey;
CheckErrorAndSetState(parameters);
await QueryAccessToken(parameters);
return AccessToken;
}
/// <summary>
/// Gets the current access token and optionally refreshes it if it has expired.
/// </summary>
/// <param name="refreshToken">The refresh token to use (null == default)</param>
/// <param name="forceUpdate">Enforce an update of the access token?</param>
/// <param name="safetyMargin">A custom safety margin to check if the access token is expired</param>
/// <returns>The current access token</returns>
public async Task<string> GetCurrentToken(string refreshToken = null, bool forceUpdate = false, TimeSpan? safetyMargin = null)
{
bool refreshRequired =
forceUpdate
|| (ExpiresAt != null && DateTime.Now >= (ExpiresAt - (safetyMargin ?? ExpirationSafetyMargin)))
|| string.IsNullOrEmpty(AccessToken);
if (refreshRequired)
{
string refreshTokenValue;
if (!string.IsNullOrEmpty(refreshToken))
{
RefreshToken = refreshToken;
refreshTokenValue = refreshToken;
}
else if (!string.IsNullOrEmpty(RefreshToken))
refreshTokenValue = RefreshToken;
else
throw new Exception("Token never fetched and refresh token not provided.");
var parameters = new Dictionary<string, string>()
{
{ _refreshTokenKey, refreshTokenValue },
};
GrantType = _grantTypeRefreshTokenKey;
await QueryAccessToken(parameters.ToLookup(x => x.Key, x => x.Value, StringComparer.OrdinalIgnoreCase));
}
return AccessToken;
}
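// Hedged sketch of the refresh behaviour implemented above: the cached AccessToken is returned as long
// as it is not within the safety margin of ExpiresAt; otherwise a refresh_token grant is issued first.
// Variable names are illustrative.
//
//   string token = await client.GetCurrentToken();                        // refresh only if needed
//   string fresh = await client.GetCurrentToken(forceUpdate: true);       // always refresh
//   string other = await client.GetCurrentToken(refreshToken: storedRt);  // refresh with a stored token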
/// <summary>
/// Parse the response, search for a key and return its value.
/// </summary>
/// <param name="content">The content to parse</param>
/// <param name="key">The key to query</param>
/// <returns>The value for the queried key</returns>
/// <exception cref="UnexpectedResponseException">Thrown when the key wasn't found</exception>
protected static string ParseStringResponse(string content, string key)
{
var values = ParseStringResponse(content, new[] { key })[key].ToList();
if (values.Count == 0)
throw new UnexpectedResponseException(key);
return values.First();
}
/// <summary>
/// Parse the response for the given keys using either JSON or form URL encoded parameters.
/// </summary>
/// <param name="content">The content to parse</param>
/// <param name="keys">The keys to query</param>
/// <returns>The values for the queried keys</returns>
protected static ILookup<string, string> ParseStringResponse(string content, params string[] keys)
{
var result = new List<KeyValuePair<string, string>>();
try
{
// response can be sent in JSON format
var jobj = JObject.Parse(content);
foreach (var key in keys)
{
foreach (var token in jobj.SelectTokens(key))
if (token.HasValues)
{
foreach (var value in token.Values())
result.Add(new KeyValuePair<string, string>(key, (string)value));
}
else
result.Add(new KeyValuePair<string, string>(key, (string)token));
}
}
catch (JsonReaderException)
{
// or it can be in "query string" format (param1=val1¶m2=val2)
var collection = content.ParseQueryString();
foreach (var key in keys)
{
foreach (var item in collection[key])
result.Add(new KeyValuePair<string, string>(key, item));
}
}
return result.ToLookup(x => x.Key, x => x.Value, StringComparer.OrdinalIgnoreCase);
}
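// Hedged examples of the two response shapes the parser above accepts (values are illustrative):
//
//   JSON:        {"access_token":"abc","token_type":"Bearer","expires_in":3600}
//   form/query:  access_token=abc&token_type=Bearer&expires_in=3600
//
// In both cases ParseStringResponse(content, "access_token") yields "abc".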
/// <summary>
/// Parse the access token response using either JSON or form url encoded parameters
/// </summary>
/// <param name="content">The content to parse the access token from</param>
/// <returns>The access token</returns>
protected virtual string ParseAccessTokenResponse(string content)
{
return ParseStringResponse(content, _accessTokenKey);
}
/// <summary>
/// Should return the parsed <see cref="UserInfo"/> using the content received from the provider.
/// </summary>
/// <param name="response">The response which is received from the provider.</param>
/// <returns>The found user information</returns>
protected abstract UserInfo ParseUserInfo(IRestResponse response);
/// <summary>
/// Called just before building the request URI when everything is ready.
/// Allows adding extra parameters to the request or doing any other needed preparations.
/// </summary>
/// <param name="args">The request/response arguments</param>
/// <returns>The task this handler is processed on</returns>
protected virtual Task BeforeGetLoginLinkUri(BeforeAfterRequestArgs args)
{
#if USE_TASKEX
return TaskEx.FromResult(0);
#else
return Task.FromResult(0);
#endif
}
/// <summary>
/// Called before the request to get the access token
/// </summary>
/// <param name="args">The request/response arguments</param>
protected virtual void BeforeGetAccessToken(BeforeAfterRequestArgs args)
{
args.Request.AddObject(
new
{
client_id = Configuration.ClientId,
client_secret = Configuration.ClientSecret,
grant_type = GrantType
});
if (GrantType == _grantTypeRefreshTokenKey)
{
args.Request.AddObject(
new
{
refresh_token = args.Parameters.GetOrThrowUnexpectedResponse(_refreshTokenKey),
});
}
else
{
args.Request.AddObject(
new
{
code = args.Parameters.GetOrThrowUnexpectedResponse("code"),
redirect_uri = Configuration.RedirectUri,
});
}
}
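// Hedged sketch of the request body assembled above (parameter values are illustrative):
//
//   authorization_code grant:
//     client_id=...&client_secret=...&grant_type=authorization_code&code=<callback code>&redirect_uri=<configured URI>
//   refresh_token grant:
//     client_id=...&client_secret=...&grant_type=refresh_token&refresh_token=<stored token>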
/// <summary>
/// Called just after obtaining response with access token from service.
/// Allows reading extra data returned along with the access token.
/// </summary>
/// <param name="args">The request/response arguments</param>
protected virtual void AfterGetAccessToken(BeforeAfterRequestArgs args)
{
}
/// <summary>
/// Called just before issuing the request to the service when everything is ready.
/// Allows adding extra parameters to the request or doing any other needed preparations.
/// </summary>
/// <param name="args">The request/response arguments</param>
protected virtual void BeforeGetUserInfo(BeforeAfterRequestArgs args)
{
}
/// <summary>
/// Obtains user information using the provider API.
/// </summary>
/// <returns>The queried user information</returns>
protected virtual async Task<UserInfo> GetUserInfo()
{
var client = _factory.CreateClient(UserInfoServiceEndpoint);
client.Authenticator = new OAuth2UriQueryParameterAuthenticator(this);
var request = _factory.CreateRequest(UserInfoServiceEndpoint);
BeforeGetUserInfo(
new BeforeAfterRequestArgs
{
Client = client,
Request = request,
Configuration = Configuration
});
var response = await client.ExecuteAndVerify(request);
var result = ParseUserInfo(response);
result.ProviderName = Name;
return result;
}
private void CheckErrorAndSetState(ILookup<string, string> parameters)
{
const string errorFieldName = "error";
var error = parameters[errorFieldName].ToList();
if (error.Any(x => !string.IsNullOrEmpty(x)))
throw new UnexpectedResponseException(errorFieldName, string.Join("\n", error));
State = string.Join(",", parameters["state"]);
}
/// <summary>
/// Issues the query for the access token and parses the response.
/// </summary>
/// <param name="parameters">Callback request payload (parameters).</param>
/// <returns>The task the query is performed on</returns>
private async Task QueryAccessToken(ILookup<string, string> parameters)
{
var client = _factory.CreateClient(AccessTokenServiceEndpoint);
var request = _factory.CreateRequest(AccessTokenServiceEndpoint, Method.POST);
BeforeGetAccessToken(
new BeforeAfterRequestArgs
{
Client = client,
Request = request,
Parameters = parameters,
Configuration = Configuration
});
var response = await client.ExecuteAndVerify(request);
var content = response.Content;
AccessToken = ParseAccessTokenResponse(content);
if (GrantType != _grantTypeRefreshTokenKey)
RefreshToken = ParseStringResponse(content, new[] { _refreshTokenKey })[_refreshTokenKey].FirstOrDefault();
TokenType = ParseStringResponse(content, new[] { _tokenTypeKey })[_tokenTypeKey].FirstOrDefault();
var expiresIn = ParseStringResponse(content, new[] { _expiresKey })[_expiresKey].Select(x => Convert.ToInt32(x, 10)).FirstOrDefault();
ExpiresAt = expiresIn != 0 ? (DateTime?)DateTime.Now.AddSeconds(expiresIn) : null;
AfterGetAccessToken(
new BeforeAfterRequestArgs
{
Response = response,
Parameters = parameters
});
}
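// Hedged example of a token response the parsing above understands (values are illustrative):
//
//   {"access_token":"abc","refresh_token":"def","token_type":"Bearer","expires_in":3600}
//
// access_token is mandatory (ParseAccessTokenResponse throws UnexpectedResponseException otherwise);
// refresh_token, token_type and expires_in are optional and, when present, populate RefreshToken,
// TokenType and ExpiresAt.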
}
}
using EdiEngine.Common.Enums;
using EdiEngine.Common.Definitions;
using EdiEngine.Standards.X12_004010.Segments;
namespace EdiEngine.Standards.X12_004010.Maps
{
public class M_200 : MapLoop
{
public M_200() : base(null)
{
Content.AddRange(new MapBaseEntity[] {
new BGN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new CRO() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new AAA() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new AMT() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
new NX1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new NX2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new L_N1(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 20 },
new L_LX(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 2 },
new L_TLN(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1000 },
new L_RO(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 500 },
new L_CCI(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new L_INQ(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 100 },
new L_VAR(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 25 },
new LS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new L_NTE(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
new LE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new LS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new L_REF(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
new LE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
});
}
//1000
public class L_N1 : MapLoop
{
public L_N1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
});
}
}
//2000
public class L_LX : MapLoop
{
public L_LX(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new LX() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new L_IN1(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 15 },
new L_NX1(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 10 },
new L_N1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//2100
public class L_IN1 : MapLoop
{
public L_IN1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new IN1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new IN2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new DMG() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N10() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
new QTY() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 15 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//2200
public class L_NX1 : MapLoop
{
public L_NX1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new NX1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new NX2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N10() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new ARS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//2300
public class L_N1_1 : MapLoop
{
public L_N1_1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 4 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
new L_SOI(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
});
}
}
//2310
public class L_SOI : MapLoop
{
public L_SOI(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new SOI() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new EMS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N10() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new L_AIN(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 7 },
});
}
}
//2311
public class L_AIN : MapLoop
{
public L_AIN(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new AIN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
});
}
}
//3000
public class L_TLN : MapLoop
{
public L_TLN(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new TLN() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new N1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 4 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 7 },
new TBI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new PPD() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 15 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
new L_AMT(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 6 },
});
}
}
//3100
public class L_AMT : MapLoop
{
public L_AMT(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new AMT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//4000
public class L_RO : MapLoop
{
public L_RO(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new RO() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new CDS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new TBI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
new L_AMT_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 6 },
new L_N1_2(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
});
}
}
//4100
public class L_AMT_1 : MapLoop
{
public L_AMT_1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new AMT() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//4200
public class L_N1_2 : MapLoop
{
public L_N1_2(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
});
}
}
//5000
public class L_CCI : MapLoop
{
public L_CCI(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new CCI() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
new MSG() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 4 },
});
}
}
//6000
public class L_INQ : MapLoop
{
public L_INQ(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new INQ() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new TBI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new N1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new YNQ() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
});
}
}
//7000
public class L_VAR : MapLoop
{
public L_VAR(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new VAR() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 50 },
new IN1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new IN2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new DMG() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new N10() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new L_NX1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
new L_N1_3(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
new L_SCM(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
});
}
}
//7100
public class L_NX1_1 : MapLoop
{
public L_NX1_1(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new NX1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new NX2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
});
}
}
//7200
public class L_N1_3 : MapLoop
{
public L_N1_3(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
new EMS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
new DTP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
});
}
}
//7300
public class L_SCM : MapLoop
{
public L_SCM(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new SCM() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new SCS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
});
}
}
//8000
public class L_NTE : MapLoop
{
public L_NTE(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new NTE() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new TBI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
new MSG() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
});
}
}
//9000
public class L_REF : MapLoop
{
public L_REF(MapLoop parentLoop) : base(parentLoop)
{
Content.AddRange(new MapBaseEntity[] {
new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
new G32() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 999999 },
new NTE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
});
}
}
}
}
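// Structural note derived from the loop definitions above (not from the X12 standard text):
// M_200 is a header (BGN, CRO, AAA, DTP, REF, AMT, NX1, NX2) followed by nested loops, roughly:
//
//   1000 L_N1    party name/address (N1, N2, N3, N4, REF, PER)
//   2000 L_LX    assigned number, nesting 2100 L_IN1, 2200 L_NX1 and 2300 L_N1_1
//                (which in turn nests 2310 L_SOI and 2311 L_AIN)
//   3000 L_TLN   with 3100 L_AMT
//   4000 L_RO    with 4100 L_AMT_1 and 4200 L_N1_2
//   5000 L_CCI, 6000 L_INQ, 7000 L_VAR (7100 L_NX1_1, 7200 L_N1_3, 7300 L_SCM)
//   8000 L_NTE and 9000 L_REF, each bounded by an optional LS/LE pair
//
// MaxOccurs and RequirementDesignator values mirror the AddRange calls above.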
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Elasticsearch.Net;
using Newtonsoft.Json;
namespace Nest
{
/// <summary>
/// Provides the connection settings for NEST's <see cref="ElasticClient"/>
/// </summary>
public class ConnectionSettings : ConnectionSettingsBase<ConnectionSettings>
{
public ConnectionSettings(Uri uri = null)
: this(new SingleNodeConnectionPool(uri ?? new Uri("http://localhost:9200"))) { }
public ConnectionSettings(IConnectionPool connectionPool)
: this(connectionPool, null, new SerializerFactory()) { }
public ConnectionSettings(IConnectionPool connectionPool, IConnection connection)
: this(connectionPool, connection, new SerializerFactory()) { }
public ConnectionSettings(IConnectionPool connectionPool, Func<ConnectionSettings, IElasticsearchSerializer> serializerFactory)
#pragma warning disable CS0618 // Type or member is obsolete
: this(connectionPool, null, serializerFactory) { }
#pragma warning restore CS0618 // Type or member is obsolete
public ConnectionSettings(IConnectionPool connectionPool, IConnection connection, ISerializerFactory serializerFactory)
: base(connectionPool, connection, serializerFactory, s => serializerFactory.Create(s)) { }
[Obsolete("Please use the constructor taking ISerializerFactory instead of a Func")]
public ConnectionSettings(IConnectionPool connectionPool, IConnection connection, Func<ConnectionSettings, IElasticsearchSerializer> serializerFactory)
: base(connectionPool, connection, null, s => serializerFactory?.Invoke(s)) { }
}
/// <summary>
/// Provides the connection settings for NEST's <see cref="ElasticClient"/>
/// </summary>
[Browsable(false)]
[EditorBrowsable(EditorBrowsableState.Never)]
public abstract class ConnectionSettingsBase<TConnectionSettings> : ConnectionConfiguration<TConnectionSettings>, IConnectionSettingsValues
where TConnectionSettings : ConnectionSettingsBase<TConnectionSettings>, IConnectionSettingsValues
{
private string _defaultIndex;
string IConnectionSettingsValues.DefaultIndex => this._defaultIndex;
private readonly Inferrer _inferrer;
Inferrer IConnectionSettingsValues.Inferrer => _inferrer;
private Func<Type, string> _defaultTypeNameInferrer;
Func<Type, string> IConnectionSettingsValues.DefaultTypeNameInferrer => _defaultTypeNameInferrer;
private readonly FluentDictionary<Type, string> _defaultIndices;
FluentDictionary<Type, string> IConnectionSettingsValues.DefaultIndices => _defaultIndices;
private readonly FluentDictionary<Type, string> _defaultTypeNames;
FluentDictionary<Type, string> IConnectionSettingsValues.DefaultTypeNames => _defaultTypeNames;
private Func<string, string> _defaultFieldNameInferrer;
Func<string, string> IConnectionSettingsValues.DefaultFieldNameInferrer => _defaultFieldNameInferrer;
private readonly FluentDictionary<Type, string> _idProperties = new FluentDictionary<Type, string>();
FluentDictionary<Type, string> IConnectionSettingsValues.IdProperties => _idProperties;
private readonly FluentDictionary<MemberInfo, IPropertyMapping> _propertyMappings = new FluentDictionary<MemberInfo, IPropertyMapping>();
FluentDictionary<MemberInfo, IPropertyMapping> IConnectionSettingsValues.PropertyMappings => _propertyMappings;
private readonly ISerializerFactory _serializerFactory;
ISerializerFactory IConnectionSettingsValues.SerializerFactory => _serializerFactory;
protected ConnectionSettingsBase(
IConnectionPool connectionPool,
IConnection connection,
ISerializerFactory serializerFactory,
Func<TConnectionSettings, IElasticsearchSerializer> serializerFactoryFunc
)
: base(connectionPool, connection, serializerFactoryFunc)
{
this._defaultTypeNameInferrer = (t => t.Name.ToLowerInvariant());
this._defaultFieldNameInferrer = (p => p.ToCamelCase());
this._defaultIndices = new FluentDictionary<Type, string>();
this._defaultTypeNames = new FluentDictionary<Type, string>();
this._serializerFactory = serializerFactory ?? new SerializerFactory();
this._inferrer = new Inferrer(this);
}
protected ConnectionSettingsBase(
IConnectionPool connectionPool,
IConnection connection,
Func<TConnectionSettings, IElasticsearchSerializer> serializerFactoryFunc
)
: this(connectionPool, connection, null, serializerFactoryFunc) { }
IElasticsearchSerializer IConnectionSettingsValues.StatefulSerializer(JsonConverter converter) =>
this._serializerFactory.CreateStateful(this, converter);
/// <summary>
/// The default serializer for requests and responses
/// </summary>
/// <returns></returns>
protected override IElasticsearchSerializer DefaultSerializer(TConnectionSettings settings) => new JsonNetSerializer(settings);
/// <summary>
/// Registers a type name inferrer that lower-cases and pluralizes type names. This used to be the default behaviour prior to NEST 0.90.
/// </summary>
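/// <remarks>
/// A minimal usage sketch (the resulting type name shown is illustrative):
/// <code>
/// var settings = new ConnectionSettings(new Uri("http://localhost:9200"))
///     .PluralizeTypeNames(); // a CLR type named Person would then map to something like "people"
/// </code>
/// </remarks>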
public TConnectionSettings PluralizeTypeNames()
{
this._defaultTypeNameInferrer = this.LowerCaseAndPluralizeTypeNameInferrer;
return (TConnectionSettings)this;
}
/// <summary>
/// The default index to use when no index is specified.
/// </summary>
/// <param name="defaultIndex">When null/empty/not set might throw
/// <see cref="NullReferenceException"/> later on when not specifying index explicitly while indexing.
/// </param>
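/// <example>
/// A minimal sketch, assuming a local node (the index name is illustrative):
/// <code>
/// var settings = new ConnectionSettings(new Uri("http://localhost:9200"))
///     .DefaultIndex("my-application");
/// var client = new ElasticClient(settings);
/// </code>
/// </example>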
public TConnectionSettings DefaultIndex(string defaultIndex)
{
this._defaultIndex = defaultIndex;
return (TConnectionSettings)this;
}
private string LowerCaseAndPluralizeTypeNameInferrer(Type type)
{
type.ThrowIfNull(nameof(type));
return type.Name.MakePlural().ToLowerInvariant();
}
/// <summary>
/// By default NEST camelCases property names (EmailAddress => emailAddress) that do not carry an explicit name,
/// either via an ElasticProperty attribute or because they are part of a dictionary whose keys should be treated verbatim.
/// <pre>
/// Here you can register a function that transforms these property names (different casing, pre- or suffixing).
/// </pre>
/// </summary>
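/// <example>
/// For instance, to keep property names exactly as declared on the CLR type (a sketch):
/// <code>
/// settings.DefaultFieldNameInferrer(p => p);
/// </code>
/// </example>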
public TConnectionSettings DefaultFieldNameInferrer(Func<string, string> fieldNameInferrer)
{
this._defaultFieldNameInferrer = fieldNameInferrer;
return (TConnectionSettings)this;
}
/// <summary>
/// Allows you to override how type names should be represented; the default calls .ToLowerInvariant() on the type's name.
/// </summary>
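/// <example>
/// A sketch that suffixes every inferred type name (the suffix is illustrative):
/// <code>
/// settings.DefaultTypeNameInferrer(t => t.Name.ToLowerInvariant() + "_v2");
/// </code>
/// </example>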
public TConnectionSettings DefaultTypeNameInferrer(Func<Type, string> typeNameInferrer)
{
typeNameInferrer.ThrowIfNull(nameof(typeNameInferrer));
this._defaultTypeNameInferrer = typeNameInferrer;
return (TConnectionSettings)this;
}
/// <summary>
/// Map types to index names. Takes precedence over DefaultIndex().
/// </summary>
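/// <example>
/// A sketch mapping a hypothetical document type to its own index:
/// <code>
/// settings.MapDefaultTypeIndices(m => m.Add(typeof(MyDocument), "my-documents"));
/// </code>
/// </example>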
public TConnectionSettings MapDefaultTypeIndices(Action<FluentDictionary<Type, string>> mappingSelector)
{
mappingSelector.ThrowIfNull(nameof(mappingSelector));
mappingSelector(this._defaultIndices);
return (TConnectionSettings)this;
}
/// <summary>
/// Allows you to override type names for specific CLR types; takes priority over the global DefaultTypeNameInferrer().
/// </summary>
public TConnectionSettings MapDefaultTypeNames(Action<FluentDictionary<Type, string>> mappingSelector)
{
mappingSelector.ThrowIfNull(nameof(mappingSelector));
mappingSelector(this._defaultTypeNames);
return (TConnectionSettings)this;
}
public TConnectionSettings MapIdPropertyFor<TDocument>(Expression<Func<TDocument, object>> objectPath)
{
objectPath.ThrowIfNull(nameof(objectPath));
var memberInfo = new MemberInfoResolver(objectPath);
var fieldName = memberInfo.Members.Single().Name;
if (this._idProperties.ContainsKey(typeof(TDocument)))
{
if (this._idProperties[typeof(TDocument)].Equals(fieldName))
return (TConnectionSettings)this;
throw new ArgumentException("Cannot map '{0}' as the id property for type '{1}': it already has '{2}' mapped."
.F(fieldName, typeof(TDocument).Name, this._idProperties[typeof(TDocument)]));
}
this._idProperties.Add(typeof(TDocument), fieldName);
return (TConnectionSettings)this;
}
public TConnectionSettings MapPropertiesFor<TDocument>(Action<PropertyMappingDescriptor<TDocument>> propertiesSelector)
where TDocument : class
{
propertiesSelector.ThrowIfNull(nameof(propertiesSelector));
var mapper = new PropertyMappingDescriptor<TDocument>();
propertiesSelector(mapper);
ApplyPropertyMappings(mapper.Mappings);
return (TConnectionSettings)this;
}
private void ApplyPropertyMappings<TDocument>(IList<IClrTypePropertyMapping<TDocument>> mappings)
where TDocument : class
{
foreach (var mapping in mappings)
{
var e = mapping.Property;
var memberInfoResolver = new MemberInfoResolver(e);
if (memberInfoResolver.Members.Count > 1)
throw new ArgumentException("MapFieldNameFor can only map direct properties");
if (memberInfoResolver.Members.Count < 1)
throw new ArgumentException("Expression {0} does contain any member access".F(e));
var memberInfo = memberInfoResolver.Members.Last();
if (_propertyMappings.ContainsKey(memberInfo))
{
var newName = mapping.NewName;
var mappedAs = _propertyMappings[memberInfo].Name;
var typeName = typeof(TDocument).Name;
if (mappedAs.IsNullOrEmpty() && newName.IsNullOrEmpty())
throw new ArgumentException("Property mapping '{0}' on type is already ignored"
.F(e, newName, mappedAs, typeName));
if (mappedAs.IsNullOrEmpty())
throw new ArgumentException("Property mapping '{0}' on type {3} can not be mapped to '{1}' it already has an ignore mapping"
.F(e, newName, mappedAs, typeName));
if (newName.IsNullOrEmpty())
throw new ArgumentException("Property mapping '{0}' on type {3} can not be ignored it already has a mapping to '{2}'"
.F(e, newName, mappedAs, typeName));
throw new ArgumentException("Property mapping '{0}' on type {3} can not be mapped to '{1}' already mapped as '{2}'"
.F(e, newName, mappedAs, typeName));
}
_propertyMappings.Add(memberInfo, mapping.ToPropertyMapping());
}
}
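/// <summary>
/// Registers index name, type name, id property and property mappings for <typeparamref name="TDocument"/> in a single call.
/// </summary>
/// <example>
/// A sketch using a hypothetical document type (member names are illustrative):
/// <code>
/// settings.InferMappingFor&lt;MyDocument&gt;(m => m
///     .IndexName("my-documents")
///     .TypeName("my_document")
///     .IdProperty(d => d.Id));
/// </code>
/// </example>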
public TConnectionSettings InferMappingFor<TDocument>(Func<ClrTypeMappingDescriptor<TDocument>, IClrTypeMapping<TDocument>> selector)
where TDocument : class
{
var inferMapping = selector(new ClrTypeMappingDescriptor<TDocument>());
if (!inferMapping.IndexName.IsNullOrEmpty())
this._defaultIndices.Add(inferMapping.Type, inferMapping.IndexName);
if (!inferMapping.TypeName.IsNullOrEmpty())
this._defaultTypeNames.Add(inferMapping.Type, inferMapping.TypeName);
if (inferMapping.IdProperty != null)
#pragma warning disable CS0618 // Type or member is obsolete but will be private in the future OK to call here
this.MapIdPropertyFor<TDocument>(inferMapping.IdProperty);
#pragma warning restore CS0618
if (inferMapping.Properties != null)
this.ApplyPropertyMappings<TDocument>(inferMapping.Properties);
return (TConnectionSettings)this;
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using BTDB.Buffer;
using BTDB.StreamLayer;
namespace BTDB.KVDBLayer;
class ChunkStorageInKV : IChunkStorage
{
struct StorageValue
{
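// Packed layout of ContentLengthCompressedIsLeaf:
//   bit 0 = leaf flag, bit 1 = compressed flag, the remaining bits (value / 4) = content length.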
internal uint FileId;
internal uint FileOfs;
internal uint ContentLengthCompressedIsLeaf;
internal uint ContentLength
{
get { return ContentLengthCompressedIsLeaf / 4; }
set { ContentLengthCompressedIsLeaf = (ContentLengthCompressedIsLeaf & 1) + value * 4; }
}
internal bool Compressed
{
get { return (ContentLengthCompressedIsLeaf & 2) != 0; }
set
{
if (value) ContentLengthCompressedIsLeaf |= 2u; else ContentLengthCompressedIsLeaf &= ~2u;
}
}
internal bool Leaf
{
get { return (ContentLengthCompressedIsLeaf & 1) != 0; }
set
{
if (value) ContentLengthCompressedIsLeaf |= 1u; else ContentLengthCompressedIsLeaf &= ~1u;
}
}
}
readonly long _subDBId;
readonly IFileCollectionWithFileInfos _fileCollection;
readonly long _maxFileSize;
volatile int _keyLen;
readonly ConcurrentDictionary<ByteStructs.Key20, StorageValue> _dict20 = new ConcurrentDictionary<ByteStructs.Key20, StorageValue>(new ByteStructs.Key20EqualityComparer());
readonly object _pureValueFileLock = new object();
IFileCollectionFile? _pureValueFile;
ISpanWriter? _pureValueFileWriter;
IFileCollectionFile? _hashIndexFile;
ISpanWriter? _hashIndexWriter;
public ChunkStorageInKV(long subDBId, IFileCollectionWithFileInfos fileCollection, long maxFileSize)
{
_subDBId = subDBId;
_fileCollection = fileCollection;
_maxFileSize = maxFileSize;
_keyLen = -1;
LoadFiles();
}
void LoadFiles()
{
var hashKeyIndexFiles = new List<KeyValuePair<uint, long>>();
foreach (var pair in _fileCollection.FileInfos)
{
if (pair.Value.SubDBId != _subDBId) continue;
if (pair.Value.FileType == KVFileType.HashKeyIndex)
{
hashKeyIndexFiles.Add(new KeyValuePair<uint, long>(pair.Key, pair.Value.Generation));
}
}
if (hashKeyIndexFiles.Count == 0)
return;
hashKeyIndexFiles.Sort((x, y) => x.Value < y.Value ? -1 : x.Value > y.Value ? 1 : 0);
LoadHashKeyIndex(hashKeyIndexFiles[hashKeyIndexFiles.Count - 1].Key);
}
void LoadHashKeyIndex(uint hashKeyIndexFileId)
{
var reader = new SpanReader(_fileCollection.GetFile(hashKeyIndexFileId).GetExclusiveReader());
_keyLen = (int)((IHashKeyIndex)_fileCollection.FileInfoByIdx(hashKeyIndexFileId)).KeyLen;
HashKeyIndex.SkipHeader(ref reader);
var keyBuf = ByteBuffer.NewSync(new byte[_keyLen]);
while (!reader.Eof)
{
var value = new StorageValue();
value.FileId = reader.ReadVUInt32();
value.FileOfs = reader.ReadVUInt32();
value.ContentLengthCompressedIsLeaf = reader.ReadVUInt32();
reader.ReadBlock(keyBuf);
_dict20.TryAdd(new ByteStructs.Key20(keyBuf), value);
}
}
void CheckOrInitKeyLen(int keyLen)
{
if (_keyLen == -1)
{
if (keyLen != 20) throw new ArgumentException("Length of Key must be 20 bytes");
#pragma warning disable 420
Interlocked.CompareExchange(ref _keyLen, keyLen, -1);
#pragma warning restore 420
}
if (_keyLen != keyLen)
{
throw new ArgumentException("Key length is different from stored");
}
}
public IChunkStorageTransaction StartTransaction()
{
return new ChunkStorageTransaction(this);
}
class ChunkStorageTransaction : IChunkStorageTransaction
{
readonly ChunkStorageInKV _chunkStorageInKV;
public ChunkStorageTransaction(ChunkStorageInKV chunkStorageInKV)
{
_chunkStorageInKV = chunkStorageInKV;
}
public void Dispose()
{
lock (_chunkStorageInKV._pureValueFileLock)
{
_chunkStorageInKV.FlushFiles();
}
}
public void Put(ByteBuffer key, ByteBuffer content, bool isLeaf)
{
_chunkStorageInKV.CheckOrInitKeyLen(key.Length);
var key20 = new ByteStructs.Key20(key);
var d = _chunkStorageInKV._dict20;
StorageValue val;
again:
if (d.TryGetValue(key20, out val))
{
if (val.ContentLength != content.Length) throw new InvalidOperationException("Hash collision or error in memory");
if (!isLeaf && val.Leaf)
{
var newval = val;
newval.Leaf = false;
if (!d.TryUpdate(key20, newval, val)) goto again;
lock (_chunkStorageInKV._pureValueFileLock)
{
_chunkStorageInKV.StoreHashUpdate(key, newval);
}
}
return;
}
lock (_chunkStorageInKV._pureValueFileLock)
{
val = _chunkStorageInKV.StoreContent(content);
val.Leaf = isLeaf;
if (!d.TryAdd(key20, val))
{
goto again;
}
_chunkStorageInKV.StoreHashUpdate(key, val);
}
}
public Task<ByteBuffer> Get(ByteBuffer key)
{
_chunkStorageInKV.CheckOrInitKeyLen(key.Length);
var tcs = new TaskCompletionSource<ByteBuffer>();
var key20 = new ByteStructs.Key20(key);
var d = _chunkStorageInKV._dict20;
StorageValue val;
if (d.TryGetValue(key20, out val))
{
var buf = new byte[val.ContentLength];
_chunkStorageInKV._fileCollection.GetFile(val.FileId).RandomRead(buf, val.FileOfs, false);
tcs.SetResult(ByteBuffer.NewAsync(buf));
}
else
{
tcs.SetResult(ByteBuffer.NewEmpty());
}
return tcs.Task;
}
}
void FlushFiles()
{
if (_pureValueFileWriter != null)
{
_pureValueFile!.HardFlushTruncateSwitchToDisposedMode();
}
if (_hashIndexWriter != null)
{
_hashIndexFile!.HardFlushTruncateSwitchToDisposedMode();
}
}
StorageValue StoreContent(ByteBuffer content)
{
var result = new StorageValue();
result.Compressed = false;
result.ContentLength = (uint)content.Length;
if (_pureValueFile == null)
StartNewPureValueFile();
result.FileId = _pureValueFile!.Index;
result.FileOfs = (uint)_pureValueFileWriter!.GetCurrentPositionWithoutWriter();
_pureValueFileWriter.WriteBlockWithoutWriter(ref MemoryMarshal.GetReference(content.AsSyncReadOnlySpan()), (uint)content.Length);
_pureValueFile.Flush();
if (_pureValueFileWriter.GetCurrentPositionWithoutWriter() >= _maxFileSize)
{
_pureValueFile.HardFlushTruncateSwitchToReadOnlyMode();
StartNewPureValueFile();
}
return result;
}
void StartNewPureValueFile()
{
_pureValueFile = _fileCollection.AddFile("hpv");
_pureValueFileWriter = _pureValueFile.GetAppenderWriter();
var fileInfo = new FilePureValuesWithId(_subDBId, _fileCollection.NextGeneration(), _fileCollection.Guid);
var writer = new SpanWriter(_pureValueFileWriter);
fileInfo.WriteHeader(ref writer);
writer.Sync();
_pureValueFile.Flush();
_fileCollection.SetInfo(_pureValueFile.Index, fileInfo);
}
void StoreHashUpdate(ByteBuffer key, StorageValue storageValue)
{
if (_hashIndexWriter == null)
{
StartNewHashIndexFile();
}
var writer = new SpanWriter(_hashIndexWriter!);
writer.WriteVUInt32(storageValue.FileId);
writer.WriteVUInt32(storageValue.FileOfs);
writer.WriteVUInt32(storageValue.ContentLengthCompressedIsLeaf);
writer.WriteBlock(key);
writer.Sync();
}
void StartNewHashIndexFile()
{
_hashIndexFile = _fileCollection.AddFile("hid");
_hashIndexWriter = _hashIndexFile.GetExclusiveAppenderWriter();
var fileInfo = new HashKeyIndex(_subDBId, _fileCollection.NextGeneration(), _fileCollection.Guid, (uint)_keyLen);
var writer = new SpanWriter(_hashIndexWriter);
fileInfo.WriteHeader(ref writer);
writer.Sync();
_fileCollection.SetInfo(_hashIndexFile.Index, fileInfo);
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.IsolatedStorage;
using System.Linq;
using System.Linq.Expressions;
using System.Net.Http;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Android.App;
using Android.Content;
using Android.Content.Res;
using Android.OS;
using Android.Util;
using Android.Views;
using Xamarin.Forms.Internals;
using Xamarin.Forms.Platform.Android;
using Resource = Android.Resource;
using Trace = System.Diagnostics.Trace;
namespace Xamarin.Forms
{
public static class Forms
{
const int TabletCrossover = 600;
static bool? s_supportsProgress;
static bool? s_isLollipopOrNewer;
public static Context Context { get; internal set; }
public static bool IsInitialized { get; private set; }
internal static bool IsLollipopOrNewer
{
get
{
if (!s_isLollipopOrNewer.HasValue)
s_isLollipopOrNewer = (int)Build.VERSION.SdkInt >= 21;
return s_isLollipopOrNewer.Value;
}
}
internal static bool SupportsProgress
{
get
{
var activity = Context as Activity;
if (!s_supportsProgress.HasValue)
{
int progressCircularId = Context.Resources.GetIdentifier("progress_circular", "id", "android");
if (progressCircularId > 0 && activity != null)
s_supportsProgress = activity.FindViewById(progressCircularId) != null;
else
s_supportsProgress = true;
}
return s_supportsProgress.Value;
}
}
internal static AndroidTitleBarVisibility TitleBarVisibility { get; set; }
// Provide backwards compat for Forms.Init and AndroidActivity
// Why is bundle a param if never used?
public static void Init(Context activity, Bundle bundle)
{
Assembly resourceAssembly = Assembly.GetCallingAssembly();
SetupInit(activity, resourceAssembly);
}
public static void Init(Context activity, Bundle bundle, Assembly resourceAssembly)
{
SetupInit(activity, resourceAssembly);
}
/// <summary>
/// Sets title bar visibility programmatically. Must be called after Xamarin.Forms.Forms.Init() method
/// </summary>
/// <param name="visibility">Title bar visibility enum</param>
public static void SetTitleBarVisibility(AndroidTitleBarVisibility visibility)
{
if((Activity)Context == null)
throw new NullReferenceException("Must be called after Xamarin.Forms.Forms.Init() method");
TitleBarVisibility = visibility;
if (TitleBarVisibility == AndroidTitleBarVisibility.Never)
{
if (!((Activity)Context).Window.Attributes.Flags.HasFlag(WindowManagerFlags.Fullscreen))
((Activity)Context).Window.AddFlags(WindowManagerFlags.Fullscreen);
}
else
{
if (((Activity)Context).Window.Attributes.Flags.HasFlag(WindowManagerFlags.Fullscreen))
((Activity)Context).Window.ClearFlags(WindowManagerFlags.Fullscreen);
}
}
public static event EventHandler<ViewInitializedEventArgs> ViewInitialized;
internal static void SendViewInitialized(this VisualElement self, global::Android.Views.View nativeView)
{
EventHandler<ViewInitializedEventArgs> viewInitialized = ViewInitialized;
if (viewInitialized != null)
viewInitialized(self, new ViewInitializedEventArgs { View = self, NativeView = nativeView });
}
static void SetupInit(Context activity, Assembly resourceAssembly)
{
Context = activity;
ResourceManager.Init(resourceAssembly);
Color.Accent = GetAccentColor();
if (!IsInitialized)
Log.Listeners.Add(new DelegateLogListener((c, m) => Trace.WriteLine(m, c)));
Device.PlatformServices = new AndroidPlatformServices();
// use field and not property to avoid exception in getter
if (Device.info != null)
{
((AndroidDeviceInfo)Device.info).Dispose();
Device.info = null;
}
Device.Info = new AndroidDeviceInfo(activity);
var ticker = Ticker.Default as AndroidTicker;
if (ticker != null)
ticker.Dispose();
Ticker.Default = new AndroidTicker();
if (!IsInitialized)
{
Registrar.RegisterAll(new[] { typeof(ExportRendererAttribute), typeof(ExportCellAttribute), typeof(ExportImageSourceHandlerAttribute) });
}
int minWidthDp = Context.Resources.Configuration.SmallestScreenWidthDp;
Device.Idiom = minWidthDp >= TabletCrossover ? TargetIdiom.Tablet : TargetIdiom.Phone;
if (ExpressionSearch.Default == null)
ExpressionSearch.Default = new AndroidExpressionSearch();
IsInitialized = true;
}
static Color GetAccentColor()
{
Color rc;
using (var value = new TypedValue())
{
if (Context.Theme.ResolveAttribute(global::Android.Resource.Attribute.ColorAccent, value, true)) // Android 5.0+
{
rc = Color.FromUint((uint)value.Data);
}
else if(Context.Theme.ResolveAttribute(Context.Resources.GetIdentifier("colorAccent", "attr", Context.PackageName), value, true)) // < Android 5.0
{
rc = Color.FromUint((uint)value.Data);
}
else // fallback to old code if nothing works (don't know if that ever happens)
{
// Detect if legacy device and use appropriate accent color
// Hardcoded because could not get color from the theme drawable
var sdkVersion = (int)Build.VERSION.SdkInt;
if (sdkVersion <= 10)
{
// legacy theme button pressed color
rc = Color.FromHex("#fffeaa0c");
}
else
{
// Holo dark light blue
rc = Color.FromHex("#ff33b5e5");
}
}
}
return rc;
}
class AndroidDeviceInfo : DeviceInfo
{
bool disposed;
readonly Context _formsActivity;
readonly Size _pixelScreenSize;
readonly double _scalingFactor;
Orientation _previousOrientation = Orientation.Undefined;
public AndroidDeviceInfo(Context formsActivity)
{
_formsActivity = formsActivity;
CheckOrientationChanged(_formsActivity.Resources.Configuration.Orientation);
// This will not be an implementation of IDeviceInfoProvider when running inside the context
// of layoutlib, which is what the Android Designer does.
if (_formsActivity is IDeviceInfoProvider)
((IDeviceInfoProvider) _formsActivity).ConfigurationChanged += ConfigurationChanged;
using (DisplayMetrics display = formsActivity.Resources.DisplayMetrics)
{
_scalingFactor = display.Density;
_pixelScreenSize = new Size(display.WidthPixels, display.HeightPixels);
ScaledScreenSize = new Size(_pixelScreenSize.Width / _scalingFactor, _pixelScreenSize.Height / _scalingFactor);
}
}
public override Size PixelScreenSize
{
get { return _pixelScreenSize; }
}
public override Size ScaledScreenSize { get; }
public override double ScalingFactor
{
get { return _scalingFactor; }
}
protected override void Dispose(bool disposing)
{
if (disposing && !disposed) {
disposed = true;
if (_formsActivity is IDeviceInfoProvider)
((IDeviceInfoProvider) _formsActivity).ConfigurationChanged -= ConfigurationChanged;
}
base.Dispose(disposing);
}
void CheckOrientationChanged(Orientation orientation)
{
if (!_previousOrientation.Equals(orientation))
CurrentOrientation = orientation.ToDeviceOrientation();
_previousOrientation = orientation;
}
void ConfigurationChanged(object sender, EventArgs e)
{
CheckOrientationChanged(_formsActivity.Resources.Configuration.Orientation);
}
}
class AndroidExpressionSearch : ExpressionVisitor, IExpressionSearch
{
List<object> _results;
Type _targetType;
public List<T> FindObjects<T>(Expression expression) where T : class
{
_results = new List<object>();
_targetType = typeof(T);
Visit(expression);
return _results.Select(o => o as T).ToList();
}
protected override Expression VisitMember(MemberExpression node)
{
if (node.Expression is ConstantExpression && node.Member is FieldInfo)
{
object container = ((ConstantExpression)node.Expression).Value;
object value = ((FieldInfo)node.Member).GetValue(container);
if (_targetType.IsInstanceOfType(value))
_results.Add(value);
}
return base.VisitMember(node);
}
}
class AndroidPlatformServices : IPlatformServices
{
static readonly MD5CryptoServiceProvider Checksum = new MD5CryptoServiceProvider();
double _buttonDefaultSize;
double _editTextDefaultSize;
double _labelDefaultSize;
double _largeSize;
double _mediumSize;
double _microSize;
double _smallSize;
static Handler s_handler;
public void BeginInvokeOnMainThread(Action action)
{
if (s_handler == null || s_handler.Looper != Looper.MainLooper)
{
s_handler = new Handler(Looper.MainLooper);
}
s_handler.Post(action);
}
public Ticker CreateTicker()
{
return new AndroidTicker();
}
public Assembly[] GetAssemblies()
{
return AppDomain.CurrentDomain.GetAssemblies();
}
public string GetMD5Hash(string input)
{
byte[] bytes = Checksum.ComputeHash(Encoding.UTF8.GetBytes(input));
var ret = new char[32];
for (var i = 0; i < 16; i++)
{
ret[i * 2] = (char)Hex(bytes[i] >> 4);
ret[i * 2 + 1] = (char)Hex(bytes[i] & 0xf);
}
return new string(ret);
}
public double GetNamedSize(NamedSize size, Type targetElementType, bool useOldSizes)
{
if (_smallSize == 0)
{
_smallSize = ConvertTextAppearanceToSize(Resource.Attribute.TextAppearanceSmall, Resource.Style.TextAppearanceDeviceDefaultSmall, 12);
_mediumSize = ConvertTextAppearanceToSize(Resource.Attribute.TextAppearanceMedium, Resource.Style.TextAppearanceDeviceDefaultMedium, 14);
_largeSize = ConvertTextAppearanceToSize(Resource.Attribute.TextAppearanceLarge, Resource.Style.TextAppearanceDeviceDefaultLarge, 18);
_buttonDefaultSize = ConvertTextAppearanceToSize(Resource.Attribute.TextAppearanceButton, Resource.Style.TextAppearanceDeviceDefaultWidgetButton, 14);
_editTextDefaultSize = ConvertTextAppearanceToSize(Resource.Style.TextAppearanceWidgetEditText, Resource.Style.TextAppearanceDeviceDefaultWidgetEditText, 18);
_labelDefaultSize = _smallSize;
// as decreed by the android docs, ALL HAIL THE ANDROID DOCS, ALL GLORY TO THE DOCS, PRAISE HYPNOTOAD
_microSize = Math.Max(1, _smallSize - (_mediumSize - _smallSize));
}
if (useOldSizes)
{
switch (size)
{
case NamedSize.Default:
if (typeof(Button).IsAssignableFrom(targetElementType))
return _buttonDefaultSize;
if (typeof(Label).IsAssignableFrom(targetElementType))
return _labelDefaultSize;
if (typeof(Editor).IsAssignableFrom(targetElementType) || typeof(Entry).IsAssignableFrom(targetElementType) || typeof(SearchBar).IsAssignableFrom(targetElementType))
return _editTextDefaultSize;
return 14;
case NamedSize.Micro:
return 10;
case NamedSize.Small:
return 12;
case NamedSize.Medium:
return 14;
case NamedSize.Large:
return 18;
default:
throw new ArgumentOutOfRangeException("size");
}
}
switch (size)
{
case NamedSize.Default:
if (typeof(Button).IsAssignableFrom(targetElementType))
return _buttonDefaultSize;
if (typeof(Label).IsAssignableFrom(targetElementType))
return _labelDefaultSize;
if (typeof(Editor).IsAssignableFrom(targetElementType) || typeof(Entry).IsAssignableFrom(targetElementType))
return _editTextDefaultSize;
return _mediumSize;
case NamedSize.Micro:
return _microSize;
case NamedSize.Small:
return _smallSize;
case NamedSize.Medium:
return _mediumSize;
case NamedSize.Large:
return _largeSize;
default:
throw new ArgumentOutOfRangeException("size");
}
}
public async Task<Stream> GetStreamAsync(Uri uri, CancellationToken cancellationToken)
{
using (var client = new HttpClient())
using (HttpResponseMessage response = await client.GetAsync(uri, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
Log.Warning("HTTP Request", $"Could not retrieve {uri}, status code {response.StatusCode}");
return null;
}
return await response.Content.ReadAsStreamAsync();
}
}
public IIsolatedStorageFile GetUserStoreForApplication()
{
return new _IsolatedStorageFile(IsolatedStorageFile.GetUserStoreForApplication());
}
public bool IsInvokeRequired
{
get
{
return Looper.MainLooper != Looper.MyLooper();
}
}
public string RuntimePlatform => Device.Android;
public void OpenUriAction(Uri uri)
{
global::Android.Net.Uri aUri = global::Android.Net.Uri.Parse(uri.ToString());
var intent = new Intent(Intent.ActionView, aUri);
Context.StartActivity(intent);
}
public void StartTimer(TimeSpan interval, Func<bool> callback)
{
var handler = new Handler(Looper.MainLooper);
handler.PostDelayed(() =>
{
if (callback())
StartTimer(interval, callback);
handler.Dispose();
handler = null;
}, (long)interval.TotalMilliseconds);
}
double ConvertTextAppearanceToSize(int themeDefault, int deviceDefault, double defaultValue)
{
double myValue;
if (TryGetTextAppearance(themeDefault, out myValue))
return myValue;
if (TryGetTextAppearance(deviceDefault, out myValue))
return myValue;
return defaultValue;
}
static int Hex(int v)
{
if (v < 10)
return '0' + v;
return 'a' + v - 10;
}
static bool TryGetTextAppearance(int appearance, out double val)
{
val = 0;
try
{
using (var value = new TypedValue())
{
if (Context.Theme.ResolveAttribute(appearance, value, true))
{
var textSizeAttr = new[] { Resource.Attribute.TextSize };
const int indexOfAttrTextSize = 0;
using (TypedArray array = Context.ObtainStyledAttributes(value.Data, textSizeAttr))
{
val = Context.FromPixels(array.GetDimensionPixelSize(indexOfAttrTextSize, -1));
return true;
}
}
}
}
catch (Exception ex)
{
Log.Warning("Xamarin.Forms.Platform.Android.AndroidPlatformServices", "Error retrieving text appearance: {0}", ex);
}
return false;
}
public class _IsolatedStorageFile : IIsolatedStorageFile
{
readonly IsolatedStorageFile _isolatedStorageFile;
public _IsolatedStorageFile(IsolatedStorageFile isolatedStorageFile)
{
_isolatedStorageFile = isolatedStorageFile;
}
public Task CreateDirectoryAsync(string path)
{
_isolatedStorageFile.CreateDirectory(path);
return Task.FromResult(true);
}
public Task<bool> GetDirectoryExistsAsync(string path)
{
return Task.FromResult(_isolatedStorageFile.DirectoryExists(path));
}
public Task<bool> GetFileExistsAsync(string path)
{
return Task.FromResult(_isolatedStorageFile.FileExists(path));
}
public Task<DateTimeOffset> GetLastWriteTimeAsync(string path)
{
return Task.FromResult(_isolatedStorageFile.GetLastWriteTime(path));
}
public Task<Stream> OpenFileAsync(string path, FileMode mode, FileAccess access)
{
Stream stream = _isolatedStorageFile.OpenFile(path, (System.IO.FileMode)mode, (System.IO.FileAccess)access);
return Task.FromResult(stream);
}
public Task<Stream> OpenFileAsync(string path, FileMode mode, FileAccess access, FileShare share)
{
Stream stream = _isolatedStorageFile.OpenFile(path, (System.IO.FileMode)mode, (System.IO.FileAccess)access, (System.IO.FileShare)share);
return Task.FromResult(stream);
}
}
}
}
}
| |
using System;
using System.Diagnostics;
using System.IO;
using System.Text.RegularExpressions;
using StructureMap;
using System.Text;
namespace Sep.Git.Tfs.Core
{
public class GitHelpers : IGitHelpers
{
protected readonly TextWriter realStdout;
private readonly IContainer _container;
/// <summary>
/// Starting with version 1.7.10, Git uses UTF-8.
/// Use this encoding for Git input and output.
/// </summary>
private static Encoding _encoding = new UTF8Encoding(false, true);
public GitHelpers(TextWriter stdout, IContainer container)
{
realStdout = stdout;
_container = container;
}
/// <summary>
/// Runs the given git command, and returns the contents of its STDOUT.
/// </summary>
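/// <example>
/// A minimal sketch (the command and variable names are illustrative):
/// <code>
/// string headSha = git.Command("rev-parse", "HEAD");
/// </code>
/// </example>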
public string Command(params string[] command)
{
string retVal = null;
CommandOutputPipe(stdout => retVal = stdout.ReadToEnd(), command);
return retVal;
}
/// <summary>
/// Runs the given git command, and returns the first line of its STDOUT.
/// </summary>
public string CommandOneline(params string[] command)
{
string retVal = null;
CommandOutputPipe(stdout => retVal = stdout.ReadLine(), command);
return retVal;
}
/// <summary>
/// Runs the given git command, and passes STDOUT through to the current process's STDOUT.
/// </summary>
public void CommandNoisy(params string[] command)
{
CommandOutputPipe(stdout => realStdout.Write(stdout.ReadToEnd()), command);
}
/// <summary>
/// Runs the given git command, and redirects STDOUT to the provided action.
/// </summary>
public void CommandOutputPipe(Action<TextReader> handleOutput, params string[] command)
{
Time(command, () =>
{
AssertValidCommand(command);
var process = Start(command, RedirectStdout);
handleOutput(process.StandardOutput);
Close(process);
});
}
/// <summary>
/// Runs the given git command, and returns a reader for STDOUT. NOTE: The returned value MUST be disposed!
/// </summary>
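/// <example>
/// A sketch showing the required disposal (the arguments are illustrative):
/// <code>
/// using (TextReader log = git.CommandOutputPipe("log", "--oneline"))
/// {
///     string line;
///     while ((line = log.ReadLine()) != null)
///         Console.WriteLine(line);
/// }
/// </code>
/// </example>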
public TextReader CommandOutputPipe(params string[] command)
{
AssertValidCommand(command);
var process = Start(command, RedirectStdout);
return new ProcessStdoutReader(this, process);
}
class ProcessStdoutReader : TextReader
{
private readonly GitProcess process;
private readonly GitHelpers helper;
public ProcessStdoutReader(GitHelpers helper, GitProcess process)
{
this.helper = helper;
this.process = process;
}
public override void Close()
{
helper.Close(process);
}
public override System.Runtime.Remoting.ObjRef CreateObjRef(Type requestedType)
{
return process.StandardOutput.CreateObjRef(requestedType);
}
protected override void Dispose(bool disposing)
{
if(disposing && process != null)
{
Close();
}
base.Dispose(disposing);
}
public override bool Equals(object obj)
{
return process.StandardOutput.Equals(obj);
}
public override int GetHashCode()
{
return process.StandardOutput.GetHashCode();
}
public override object InitializeLifetimeService()
{
return process.StandardOutput.InitializeLifetimeService();
}
public override int Peek()
{
return process.StandardOutput.Peek();
}
public override int Read()
{
return process.StandardOutput.Read();
}
public override int Read(char[] buffer, int index, int count)
{
return process.StandardOutput.Read(buffer, index, count);
}
public override int ReadBlock(char[] buffer, int index, int count)
{
return process.StandardOutput.ReadBlock(buffer, index, count);
}
public override string ReadLine()
{
return process.StandardOutput.ReadLine();
}
public override string ReadToEnd()
{
return process.StandardOutput.ReadToEnd();
}
public override string ToString()
{
return process.StandardOutput.ToString();
}
}
public void CommandInputPipe(Action<TextWriter> action, params string[] command)
{
Time(command, () =>
{
AssertValidCommand(command);
var process = Start(command, RedirectStdin);
action(process.StandardInput.WithEncoding(_encoding));
Close(process);
});
}
public void CommandInputOutputPipe(Action<TextWriter, TextReader> interact, params string[] command)
{
Time(command, () =>
{
AssertValidCommand(command);
var process = Start(command, Ext.And<ProcessStartInfo>(RedirectStdin, RedirectStdout));
interact(process.StandardInput.WithEncoding(_encoding), process.StandardOutput);
Close(process);
});
}
private void Time(string[] command, Action action)
{
var start = DateTime.Now;
try
{
action();
}
finally
{
var end = DateTime.Now;
Trace.WriteLine(String.Format("[{0}] {1}", end - start, String.Join(" ", command)), "git command time");
}
}
private void Close(GitProcess process)
{
// if caller doesn't read entire stdout to the EOF - it is possible that
// child process will hang waiting until there will be free space in stdout
// buffer to write the rest of the output.
// See https://github.com/git-tfs/git-tfs/issues/121 for details.
if (process.StartInfo.RedirectStandardOutput)
{
process.StandardOutput.BaseStream.CopyTo(Stream.Null);
process.StandardOutput.Close();
}
if (!process.WaitForExit((int)TimeSpan.FromSeconds(10).TotalMilliseconds))
throw new GitCommandException("Command did not terminate.", process);
if(process.ExitCode != 0)
throw new GitCommandException(string.Format("Command exited with error code: {0}\n{1}", process.ExitCode, process.StandardErrorString), process);
}
private void RedirectStdout(ProcessStartInfo startInfo)
{
startInfo.RedirectStandardOutput = true;
startInfo.StandardOutputEncoding = _encoding;
}
private void RedirectStderr(ProcessStartInfo startInfo)
{
startInfo.RedirectStandardError = true;
startInfo.StandardErrorEncoding = _encoding;
}
private void RedirectStdin(ProcessStartInfo startInfo)
{
startInfo.RedirectStandardInput = true;
// there is no StandardInputEncoding property, use extension method StreamWriter.WithEncoding instead
}
private GitProcess Start(string[] command)
{
return Start(command, x => {});
}
protected virtual GitProcess Start(string [] command, Action<ProcessStartInfo> initialize)
{
var startInfo = new ProcessStartInfo();
startInfo.FileName = "git";
startInfo.SetArguments(command);
startInfo.CreateNoWindow = true;
startInfo.UseShellExecute = false;
startInfo.EnvironmentVariables["GIT_PAGER"] = "cat";
RedirectStderr(startInfo);
initialize(startInfo);
Trace.WriteLine("Starting process: " + startInfo.FileName + " " + startInfo.Arguments, "git command");
var process = new GitProcess(Process.Start(startInfo));
process.ConsumeStandardError();
return process;
}
/// <summary>
/// Wraps the given action and, if a GitCommandException is thrown, rethrows it as a new exception with the given message.
/// </summary>
/// <param name="exceptionMessage">A friendlier message to wrap the GitCommandException with. {0} is replaced with the command line and {1} is replaced with the exit code.</param>
/// <param name="action"></param>
public void WrapGitCommandErrors(string exceptionMessage, Action action)
{
try
{
action();
}
catch (GitCommandException e)
{
throw new Exception(String.Format(exceptionMessage, e.Process.StartInfo.FileName + " " + e.Process.StartInfo.Arguments, e.Process.ExitCode), e);
}
}
public IGitRepository MakeRepository(string dir)
{
return _container
.With("gitDir").EqualTo(dir)
.GetInstance<IGitRepository>();
}
private static readonly Regex ValidCommandName = new Regex("^[a-z0-9A-Z_-]+$");
private static void AssertValidCommand(string[] command)
{
if(command.Length < 1 || !ValidCommandName.IsMatch(command[0]))
throw new Exception("bad git command: " + (command.Length == 0 ? "" : command[0]));
}
protected class GitProcess
{
Process _process;
public GitProcess(Process process)
{
_process = process;
}
public static implicit operator Process(GitProcess process)
{
return process._process;
}
public string StandardErrorString { get; private set; }
public void ConsumeStandardError()
{
StandardErrorString = "";
_process.ErrorDataReceived += StdErrReceived;
_process.BeginErrorReadLine();
}
private void StdErrReceived(object sender, DataReceivedEventArgs e)
{
if (e.Data != null && e.Data.Trim() != "")
{
var data = e.Data;
Trace.WriteLine(data.TrimEnd(), "git stderr");
StandardErrorString += data;
}
}
// Delegate a bunch of things to the Process.
public ProcessStartInfo StartInfo { get { return _process.StartInfo; } }
public int ExitCode { get { return _process.ExitCode; } }
public StreamWriter StandardInput { get { return _process.StandardInput; } }
public StreamReader StandardOutput { get { return _process.StandardOutput; } }
public bool WaitForExit(int milliseconds)
{
return _process.WaitForExit(milliseconds);
}
}
}
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://github.com/jskeet/dotnet-protobufs/
// Original C++/Java/Python code:
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Collections.Generic;
using Google.ProtocolBuffers.DescriptorProtos;
namespace Google.ProtocolBuffers.Descriptors
{
/// <summary>
/// Describes a message type.
/// </summary>
public sealed class MessageDescriptor : IndexedDescriptorBase<DescriptorProto, MessageOptions>
{
private readonly MessageDescriptor containingType;
private readonly IList<MessageDescriptor> nestedTypes;
private readonly IList<EnumDescriptor> enumTypes;
private readonly IList<FieldDescriptor> fields;
private readonly IList<FieldDescriptor> extensions;
private bool hasRequiredFields;
internal MessageDescriptor(DescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int typeIndex)
: base(proto, file, ComputeFullName(file, parent, proto.Name), typeIndex)
{
containingType = parent;
nestedTypes = DescriptorUtil.ConvertAndMakeReadOnly(proto.NestedTypeList,
(type, index) =>
new MessageDescriptor(type, file, this, index));
enumTypes = DescriptorUtil.ConvertAndMakeReadOnly(proto.EnumTypeList,
(type, index) =>
new EnumDescriptor(type, file, this, index));
// TODO(jonskeet): Sort fields first?
fields = DescriptorUtil.ConvertAndMakeReadOnly(proto.FieldList,
(field, index) =>
new FieldDescriptor(field, file, this, index, false));
extensions = DescriptorUtil.ConvertAndMakeReadOnly(proto.ExtensionList,
(field, index) =>
new FieldDescriptor(field, file, this, index, true));
file.DescriptorPool.AddSymbol(this);
}
/// <value>
/// If this is a nested type, get the outer descriptor, otherwise null.
/// </value>
public MessageDescriptor ContainingType
{
get { return containingType; }
}
/// <value>
/// An unmodifiable list of this message type's fields.
/// </value>
public IList<FieldDescriptor> Fields
{
get { return fields; }
}
/// <value>
/// An unmodifiable list of this message type's extensions.
/// </value>
public IList<FieldDescriptor> Extensions
{
get { return extensions; }
}
/// <value>
/// An unmodifiable list of this message type's nested types.
/// </value>
public IList<MessageDescriptor> NestedTypes
{
get { return nestedTypes; }
}
/// <value>
/// An unmodifiable list of this message type's enum types.
/// </value>
public IList<EnumDescriptor> EnumTypes
{
get { return enumTypes; }
}
/// <summary>
/// Returns a pre-computed result as to whether this message
/// has required fields. This includes optional fields whose message
/// types in turn have required fields, and any extension fields.
/// </summary>
internal bool HasRequiredFields
{
get { return hasRequiredFields; }
}
/// <summary>
/// Determines if the given field number is an extension.
/// </summary>
public bool IsExtensionNumber(int number)
{
foreach (DescriptorProto.Types.ExtensionRange range in Proto.ExtensionRangeList)
{
if (range.Start <= number && number < range.End)
{
return true;
}
}
return false;
}
/// <summary>
/// Finds a field by field name.
/// </summary>
/// <param name="name">The unqualified name of the field (e.g. "foo").</param>
/// <returns>The field's descriptor, or null if not found.</returns>
public FieldDescriptor FindFieldByName(String name)
{
return File.DescriptorPool.FindSymbol<FieldDescriptor>(FullName + "." + name);
}
/// <summary>
/// Finds a field by field number.
/// </summary>
/// <param name="number">The field number within this message type.</param>
/// <returns>The field's descriptor, or null if not found.</returns>
public FieldDescriptor FindFieldByNumber(int number)
{
return File.DescriptorPool.FindFieldByNumber(this, number);
}
/// <summary>
/// Finds a field by its property name, as it would be generated by protogen.
/// </summary>
/// <param name="propertyName">The property name within this message type.</param>
/// <returns>The field's descriptor, or null if not found.</returns>
public FieldDescriptor FindFieldByPropertyName(string propertyName)
{
// For reasonably short messages, this will be more efficient than a dictionary
// lookup. It also means we don't need to do things lazily with locks etc.
foreach (FieldDescriptor field in Fields)
{
if (field.CSharpOptions.PropertyName == propertyName)
{
return field;
}
}
return null;
}
/// <summary>
/// Finds a nested descriptor by name. This is valid for fields, nested
/// message types and enums.
/// </summary>
/// <param name="name">The unqualified name of the descriptor, e.g. "Foo"</param>
/// <returns>The descriptor, or null if not found.</returns>
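/// <example>
/// A sketch looking up a nested enum type (the name is illustrative):
/// <code>
/// EnumDescriptor phoneType = messageDescriptor.FindDescriptor&lt;EnumDescriptor&gt;("PhoneType");
/// </code>
/// </example>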
public T FindDescriptor<T>(string name)
where T : class, IDescriptor
{
return File.DescriptorPool.FindSymbol<T>(FullName + "." + name);
}
/// <summary>
/// Looks up and cross-links all fields, nested types, and extensions.
/// </summary>
internal void CrossLink()
{
foreach (MessageDescriptor message in nestedTypes)
{
message.CrossLink();
}
foreach (FieldDescriptor field in fields)
{
field.CrossLink();
}
foreach (FieldDescriptor extension in extensions)
{
extension.CrossLink();
}
}
internal void CheckRequiredFields()
{
IDictionary<MessageDescriptor, byte> alreadySeen = new Dictionary<MessageDescriptor, byte>();
hasRequiredFields = CheckRequiredFields(alreadySeen);
}
private bool CheckRequiredFields(IDictionary<MessageDescriptor, byte> alreadySeen)
{
if (alreadySeen.ContainsKey(this))
{
// The type is already in the cache. This means that either:
// a. The type has no required fields.
// b. We are in the midst of checking if the type has required fields,
// somewhere up the stack. In this case, we know that if the type
// has any required fields, they'll be found when we return to it,
// and the whole call to HasRequiredFields() will return true.
// Therefore, we don't have to check if this type has required fields
// here.
return false;
}
alreadySeen[this] = 0; // Value is irrelevant; we want set semantics
// If the type allows extensions, an extension with message type could contain
// required fields, so we have to be conservative and assume such an
// extension exists.
if (Proto.ExtensionRangeCount != 0)
{
return true;
}
foreach (FieldDescriptor field in Fields)
{
if (field.IsRequired)
{
return true;
}
if (field.MappedType == MappedType.Message)
{
if (field.MessageType.CheckRequiredFields(alreadySeen))
{
return true;
}
}
}
return false;
}
/// <summary>
/// See FileDescriptor.ReplaceProto
/// </summary>
internal override void ReplaceProto(DescriptorProto newProto)
{
base.ReplaceProto(newProto);
for (int i = 0; i < nestedTypes.Count; i++)
{
nestedTypes[i].ReplaceProto(newProto.GetNestedType(i));
}
for (int i = 0; i < enumTypes.Count; i++)
{
enumTypes[i].ReplaceProto(newProto.GetEnumType(i));
}
for (int i = 0; i < fields.Count; i++)
{
fields[i].ReplaceProto(newProto.GetField(i));
}
for (int i = 0; i < extensions.Count; i++)
{
extensions[i].ReplaceProto(newProto.GetExtension(i));
}
}
}
}
| |
// <copyright file="HistogramTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2013 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System.Linq;
namespace MathNet.Numerics.UnitTests.StatisticsTests
{
using System;
using System.Collections.Generic;
using NUnit.Framework;
using Statistics;
/// <summary>
/// Histogram tests.
/// </summary>
[TestFixture, Category("Statistics")]
public class HistogramTests
{
/// <summary>
/// Dataset array.
/// </summary>
readonly double[] _smallDataset = {0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5};
/// <summary>
/// Dataset array with small absolute values
/// </summary>
/// <remarks>
/// These values are chosen to precisely match the upper bounds of 9 buckets,
/// from 0.5e-22 to 9.5E-22
/// </remarks>
readonly double[] _smallValueDataset =
{
0.5e-22, 1.5E-22, 2.5E-22, 3.4999999999999996E-22, 4.4999999999999989E-22,
5.4999999999999983E-22, 6.4999999999999986E-22, 7.4999999999999988E-22,
8.4999999999999982E-22, 9.5E-22
};
/// <summary>
/// Can create empty bucket.
/// </summary>
[Test]
public void CanCreateEmptyBucket()
{
var b = new Bucket(0.0, 1.0);
}
/// <summary>
/// Can create filled bucket.
/// </summary>
[Test]
public void CanCreateFilledBucket()
{
var b = new Bucket(0.0, 1.0, 10.0);
}
/// <summary>
/// Empty bucket with bad bounds fails.
/// </summary>
[Test]
public void EmptyBucketWithBadBoundsFails()
{
Assert.That(() => new Bucket(1.0, 0.5), Throws.ArgumentException);
}
/// <summary>
/// Empty bucket with bad count fails.
/// </summary>
[Test]
public void EmptyBucketWithBadCountFails()
{
Assert.That(() => new Bucket(1.0, 0.5, -1.0), Throws.ArgumentException);
}
/// <summary>
/// Can get bucket width.
/// </summary>
[Test]
public void CanGetBucketWidth()
{
var b = new Bucket(0.0, 1.0, 10.0);
Assert.AreEqual(1.0, b.Width);
}
/// <summary>
/// Can get bucket count.
/// </summary>
[Test]
public void CanGetBucketCount()
{
var b = new Bucket(0.0, 1.0, 10.0);
Assert.AreEqual(10.0, b.Count);
}
/// <summary>
/// Can get bucket lower bound.
/// </summary>
[Test]
public void CanGetBucketLowerBound()
{
var b = new Bucket(0.0, 1.0, 10.0);
Assert.AreEqual(0.0, b.LowerBound);
}
/// <summary>
/// Can get bucket upper bound.
/// </summary>
[Test]
public void CanGetBucketUpperBound()
{
var b = new Bucket(0.0, 1.0, 10.0);
Assert.AreEqual(1.0, b.UpperBound);
}
/// <summary>
/// Validate contains.
/// </summary>
/// <param name="x">Point values.</param>
/// <param name="r">Expected result.</param>
[TestCase(0.0, -1)]
[TestCase(1.0, 0)]
[TestCase(1.05, 0)]
[TestCase(2.0, 1)]
[TestCase(-1.0, -1)]
public void ValidateContains(double x, int r)
{
var b = new Bucket(0.0, 1.5, 10.0);
Assert.AreEqual(r, b.Contains(x));
}
/// <summary>
/// Can create empty histogram.
/// </summary>
[Test]
public void CanCreateEmptyHistogram()
{
var h = new Histogram();
}
/// <summary>
/// Can add bucket.
/// </summary>
[Test]
public void CanAddBucket()
{
var h = new Histogram();
h.AddBucket(new Bucket(0.0, 1.0));
}
/// <summary>
/// Can get bucket index of.
/// </summary>
/// <param name="x">Point to check.</param>
/// <param name="i">Bucket index.</param>
[TestCase(0.5, 0)]
[TestCase(1.0, 0)]
[TestCase(10.0, 3)]
[TestCase(10000.0, 4)]
public void CanGetBucketIndexOf(double x, double i)
{
var h = new Histogram();
h.AddBucket(new Bucket(0.0, 1.0));
h.AddBucket(new Bucket(1.0, 2.0));
h.AddBucket(new Bucket(2.0, 3.0));
h.AddBucket(new Bucket(3.0, 20.0));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity));
Assert.AreEqual(i, h.GetBucketIndexOf(x));
}
/// <summary>
/// Can get bucket index of fails when bucket doesn't exist.
/// </summary>
[Test]
public void CanGetBucketIndexOfFailsWhenBucketDoesNotExist()
{
var h = new Histogram();
h.AddBucket(new Bucket(0.0, 1.0));
h.AddBucket(new Bucket(1.0, 2.0));
h.AddBucket(new Bucket(2.0, 3.0));
h.AddBucket(new Bucket(3.0, 20.0));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity));
Assert.That(() => { var i = h.GetBucketIndexOf(0.0); }, Throws.ArgumentException);
Assert.That(() => { var i = h.GetBucketIndexOf(-1.0); }, Throws.ArgumentException);
}
/// <summary>
/// Can get bucket of.
/// </summary>
[Test]
public void CanGetBucketOf()
{
var h = new Histogram();
var b = new Bucket(0.0, 1.0);
h.AddBucket(b);
h.AddBucket(new Bucket(1.0, 2.0));
h.AddBucket(new Bucket(2.0, 3.0));
h.AddBucket(new Bucket(3.0, 20.0));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity));
Assert.AreEqual(b, h.GetBucketOf(0.1));
}
/// <summary>
/// Validate item.
/// </summary>
[Test]
public void ValidateItem()
{
var h = new Histogram();
var b = new Bucket(0.0, 1.0);
var c = new Bucket(3.0, 20.0);
h.AddBucket(b);
h.AddBucket(c);
h.AddBucket(new Bucket(1.0, 2.0));
h.AddBucket(new Bucket(2.0, 3.0));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity));
Assert.AreEqual(b, h[0]);
Assert.AreEqual(c, h[3]);
}
/// <summary>
/// Can get bucket count in histogram.
/// </summary>
[Test]
public void CanGetBucketCountInHistogram()
{
var h = new Histogram();
h.AddBucket(new Bucket(0.0, 1.0));
h.AddBucket(new Bucket(1.0, 2.0));
h.AddBucket(new Bucket(2.0, 3.0));
h.AddBucket(new Bucket(3.0, 20.0));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity));
Assert.AreEqual(5, h.BucketCount);
}
/// <summary>
/// Can get total count.
/// </summary>
[Test]
public void CanGetTotalCount()
{
var h = new Histogram();
h.AddBucket(new Bucket(0.0, 1.0, 1));
h.AddBucket(new Bucket(1.0, 2.0, 1));
h.AddBucket(new Bucket(2.0, 3.0, 1));
h.AddBucket(new Bucket(3.0, 20.0, 1));
h.AddBucket(new Bucket(20.0, Double.PositiveInfinity, 1));
Assert.AreEqual(5, h.DataCount);
}
/// <summary>
/// Can create equal spaced histogram.
/// </summary>
[Test]
public void CanCreateEqualSpacedHistogram()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2);
}
/// <summary>
/// Fail create equal spaced histogram with no data.
/// </summary>
[Test]
public void FailCreateEqualSpacedHistogramWithNoData()
{
Assert.That(() => new Histogram(new List<double>(), 10), Throws.ArgumentException);
}
/// <summary>
/// Can create equal spaced histogram with given lower and upper bounds.
/// </summary>
[Test]
public void CanCreateEqualSpacedHistogramWithGivenLowerAndUpperBound()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2, 0.0, 20.0);
}
/// <summary>
/// Can add data single.
/// </summary>
[Test]
public void CanAddDataSingle()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2);
h.AddData(7.0);
Assert.AreEqual(2, h[1].Count);
}
/// <summary>
/// Can add data list.
/// </summary>
[Test]
public void CanAddDataList()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2);
h.AddData(new[] {7.0, 8.0});
Assert.AreEqual(3, h[1].Count);
}
/// <summary>
/// Add data increases the upper bound.
/// </summary>
[Test]
public void AddDataIncreasesUpperBound()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2);
h.AddData(20.0);
Assert.AreEqual(2, h[1].Count);
}
/// <summary>
/// Add data decreases the lower bound.
/// </summary>
[Test]
public void AddDataDecreasesLowerBound()
{
var h = new Histogram(new[] {1.0, 5.0, 10.0}, 2);
h.AddData(0.0);
Assert.AreEqual(3, h[0].Count);
}
/// <summary>
/// Add data equal to the lower bound of a histogram.
/// </summary>
[Test]
public void AddDataEqualToLowerBound()
{
var h = new Histogram(new[] { 1.0, 5.0, 10.0 }, 3, 0.0, 10.0);
Assert.DoesNotThrow(() => h.AddData(0.0));
Assert.AreEqual(2, h[0].Count);
}
/// <summary>
/// Small dataset histogram without bounds.
/// </summary>
[Test]
public void SmallDatasetHistogramWithoutBounds()
{
var hist = new Histogram(_smallDataset, 9);
Assert.AreEqual(9, hist.BucketCount);
for (var i = 1; i < 9; i++)
{
Assert.AreEqual(1.0, hist[i].Count);
}
Assert.AreEqual(2.0, hist[0].Count);
Assert.AreEqual(0.5.Decrement(), hist.LowerBound);
Assert.AreEqual(9.5, hist.UpperBound);
}
/// <summary>
/// Small dataset histogram with bounds.
/// </summary>
[Test]
public void SmallDatasetHistogramWithBounds()
{
var hist = new Histogram(_smallDataset, 10, 0.0, 10.0);
Assert.AreEqual(10, hist.BucketCount);
for (var i = 0; i < 10; i++)
{
Assert.AreEqual(1.0, hist[i].Count);
}
Assert.AreEqual(0.0, hist.LowerBound);
Assert.AreEqual(10.0, hist.UpperBound);
}
/// <summary>
/// Dataset of small values histogram without bounds.
/// </summary>
[Test]
public void SmallValuesHistogramWithoutBounds()
{
var hist = new Histogram(_smallValueDataset, 9);
Assert.AreEqual(9, hist.BucketCount);
for (var i = 1; i < 9; i++)
{
Assert.AreEqual(1.0, hist[i].Count);
}
Assert.AreEqual(2.0, hist[0].Count);
Assert.AreEqual(0.5e-22.Decrement(), hist.LowerBound);
Assert.AreEqual(9.5e-22, hist.UpperBound);
}
/// <summary>
/// Dataset of small values histogram with bounds.
/// </summary>
[Test]
public void SmallValuesHistogramWithBounds()
{
var hist = new Histogram(_smallValueDataset, 10, 0.0, 10e-22);
Assert.AreEqual(10, hist.BucketCount);
for (var i = 0; i < 10; i++)
{
Assert.AreEqual(1.0, hist[i].Count);
}
Assert.AreEqual(0.0, hist.LowerBound);
Assert.AreEqual(10.0e-22, hist.UpperBound);
}
/// <summary>
/// Attempt to construct a dataset with small valued buckets
/// </summary>
[Test]
public void SmallValuesManyBucketsHistogramWithBounds()
{
var hist = new Histogram(_smallValueDataset, 100, 0.0, 10e-22);
Assert.AreEqual(100, hist.BucketCount);
Assert.AreEqual(0.0, hist.LowerBound);
Assert.AreEqual(10.0e-22, hist.UpperBound);
}
}
}
| |
/***********************************************************************************************************************
* TorrentDotNET - A BitTorrent library based on the .NET platform *
* Copyright (C) 2004, Peter Ward *
* *
* This library is free software; you can redistribute it and/or modify it under the terms of the *
* GNU Lesser General Public License as published by the Free Software Foundation; *
* either version 2.1 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License along with this library; *
* if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA *
***********************************************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using Net = System.Net;
using Sockets = System.Net.Sockets;
using Threading = System.Threading;
using IO = System.IO;
namespace BitTorrent
{
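// TorrentCheckQueue runs file-integrity checks for queued torrents, either inline or on a
// thread-pool thread, optionally starting each torrent once its check completes.
// A minimal usage sketch (the torrent variable is illustrative):
//   var checkQueue = new TorrentCheckQueue( useBackgroundThread: true, startEachTorrentAfterCheck: true );
//   checkQueue.AddTorrent( torrent );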
public class TorrentCheckQueue
{
private bool mInProgress = false;
private bool mUseThread = false;
private bool mStartEachTorrentAfterCheck = false;
private Queue<Torrent> mTorrents = new Queue<Torrent>();
public bool InProgress
{
get { return mInProgress; }
}
public TorrentCheckQueue( bool useBackgroundThread, bool startEachTorrentAfterCheck )
{
mUseThread = useBackgroundThread;
mStartEachTorrentAfterCheck = startEachTorrentAfterCheck;
}
public void AddTorrent( Torrent torrent )
{
mTorrents.Enqueue( torrent );
if ( !mInProgress )
{
mInProgress = true;
if ( mUseThread )
Threading.ThreadPool.QueueUserWorkItem( new Threading.WaitCallback( PrivateStart ) );
else
PrivateStart( null );
}
}
private void PrivateStart( object o )
{
while ( mTorrents.Count > 0 )
{
Torrent torrent = mTorrents.Dequeue();
torrent.CheckFileIntegrity( false );
if ( mStartEachTorrentAfterCheck )
torrent.Start();
}
mInProgress = false;
}
}
// Main session class. All torrents are created and started from here.
public class Session : System.IDisposable
{
private List<Torrent> mTorrents = new List<Torrent>();
private Sockets.Socket mListener;
private ByteField20 mLocalPeerId;
public List<Torrent> Torrents
{
get { return this.mTorrents; }
}
public ByteField20 LocalPeerID
{
get { return mLocalPeerId; }
}
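// Builds a 20-byte peer id: a "-TNxxyy-" client/version prefix followed by random alphanumeric padding.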
private static ByteField20 DefaultCalculatePeerId()
{
// calculate our peer id
string peerIdString = "-TN" + string.Format("{0:00}", Config.MajorVersionNumber)
+ string.Format("{0:00}", Config.MinorVersionNumber) + "-";
ByteField20 peerId = new ByteField20();
System.Array.Copy(System.Text.ASCIIEncoding.ASCII.GetBytes(peerIdString), 0, peerId.Data, 0, peerIdString.Length);
const string peerChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
System.Random rand = new System.Random();
for (int i=peerIdString.Length; i<20; ++i)
{
peerId.Data[i] = (byte)peerChars[rand.Next(0, peerChars.Length)]; // Next's upper bound is exclusive, so use Length to include the last character
}
return peerId;
}
/// <summary>
/// This should be called once to prevent errors when communicating with the tracker. It is a workaround for a bug in the
/// .NET Framework when parsing HTTP headers that contain 'unsafe' characters.
/// </summary>
/// <returns>True if unsafe header parsing was successfully enabled; otherwise false.</returns>
private static bool SetAllowUnsafeHeaderParsing20()
{
//Get the assembly that contains the internal class
System.Reflection.Assembly aNetAssembly = System.Reflection.Assembly.GetAssembly( typeof( System.Net.Configuration.SettingsSection ) );
if ( aNetAssembly != null )
{
//Use the assembly in order to get the internal type for the internal class
System.Type aSettingsType = aNetAssembly.GetType( "System.Net.Configuration.SettingsSectionInternal" );
if ( aSettingsType != null )
{
//Use the internal static property to get an instance of the internal settings class.
//If the static instance hasn't been created already, the property will create it for us.
object anInstance = aSettingsType.InvokeMember( "Section",
System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.GetProperty | System.Reflection.BindingFlags.NonPublic, null, null, new object[] { } );
if ( anInstance != null )
{
//Locate the private bool field that tells the framework whether unsafe header parsing should be allowed or not
System.Reflection.FieldInfo aUseUnsafeHeaderParsing = aSettingsType.GetField( "useUnsafeHeaderParsing", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance );
if ( aUseUnsafeHeaderParsing != null )
{
aUseUnsafeHeaderParsing.SetValue( anInstance, true );
return true;
}
}
}
}
return false;
}
public Session()
: this( DefaultCalculatePeerId() )
{
}
public Session( ByteField20 localPeerId )
{
mLocalPeerId = localPeerId;
if ( Config.ActiveConfig.ProxyURL != "" ) // set web proxy
{
System.Net.WebRequest.DefaultWebProxy = new Net.WebProxy( Config.ActiveConfig.ProxyURL );
}
SetAllowUnsafeHeaderParsing20();
this.StartServer();
}
public Torrent CreateTorrent( string metafilename )
{
Torrent torrent = new Torrent( this, metafilename );
mTorrents.Add( torrent );
return torrent;
}
public void Dispose()
{
foreach ( Torrent torrent in this.mTorrents )
{
torrent.Dispose();
}
this.mTorrents.Clear();
this.StopServer();
}
public void ProcessWaitingData()
{
foreach ( Torrent torrent in this.mTorrents )
{
torrent.ProcessWaitingData();
}
}
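// Binds a listening socket to the first free port in [MinServerPort, MaxServerPort] and begins accepting incoming peers asynchronously.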
private void StartServer()
{
Net.IPAddress addr = (Config.ActiveConfig.IPToBindServerTo != "" ? Net.IPAddress.Parse(Config.ActiveConfig.IPToBindServerTo) : Net.IPAddress.Any);
int port = Config.ActiveConfig.MinServerPort;
// attempt to find a port within the given port range
while (true)
{
try
{
this.mListener = new Sockets.Socket(Sockets.AddressFamily.InterNetwork, Sockets.SocketType.Stream, Sockets.ProtocolType.Tcp);
this.mListener.Bind(new Net.IPEndPoint(addr, port));
this.mListener.Listen(10);
Config.ActiveConfig.ChosenPort = port;
break;
}
catch (Sockets.SocketException)
{
if (++port > Config.ActiveConfig.MaxServerPort)
throw;
}
}
this.mListener.BeginAccept(new System.AsyncCallback(OnAccept), null);
}
private void StopServer()
{
if (this.mListener != null)
this.mListener.Close();
this.mListener = null;
}
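// Returns the loaded torrent whose metainfo digest matches the given info hash, or null if none matches.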
public Torrent FindTorrent(ByteField20 infoDigest)
{
lock ( this.mTorrents )
{
foreach ( Torrent torrent in this.mTorrents )
{
if (torrent.Metainfo.InfoDigest.Equals(infoDigest))
{
return torrent;
}
}
return null;
}
}
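// Async accept callback: reads the incoming handshake, matches its info hash to a loaded torrent, completes the handshake and registers the peer, or closes the socket otherwise.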
private void OnAccept(System.IAsyncResult result)
{
Sockets.Socket socket;
try
{
// Accept connections from other peers, find the appropriate torrent and add the peer to it
socket = this.mListener.EndAccept(result);
}
catch (System.Exception)
{
if (this.mListener != null)
this.mListener.Close();
this.mListener = null;
return;
}
try
{
ByteField20 infoDigest = new ByteField20(), peerId = new ByteField20();
Sockets.NetworkStream netStream = new Sockets.NetworkStream(socket, true);
PeerProtocol.ReceiveHandshake(netStream, ref infoDigest);
Torrent torrent = this.FindTorrent(infoDigest);
if (torrent != null)
{
// found it, finish handshaking and add the peer to the list
PeerProtocol.SendHandshake(netStream, torrent.Metainfo.InfoDigest);
PeerProtocol.SendPeerId(netStream, mLocalPeerId );
if ( !PeerProtocol.ReceivePeerId( netStream, ref peerId ))
{ // NAT check, discard
socket.Close();
}
else
{
if ( !peerId.Equals( mLocalPeerId ) ) // make sure we aren't connecting to ourselves
{
Net.IPEndPoint endPoint = (Net.IPEndPoint)socket.RemoteEndPoint;
PeerInformation peerInformation = new PeerInformation( endPoint.Address.ToString(), endPoint.Port, peerId );
// add the peer to the torrent
torrent.AddPeer( socket, netStream, peerInformation );
}
else
socket.Close();
}
}
else
socket.Close();
}
catch (System.Exception e)
{
Config.LogException( e );
socket.Close();
}
this.mListener.BeginAccept(new System.AsyncCallback(OnAccept), null);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
/// <summary>
/// System.MathF.Sin(System.Single)
/// </summary>
public class MathFSin
{
public static int Main(string[] args)
{
MathFSin test = new MathFSin();
TestLibrary.TestFramework.BeginTestCase("Testing System.MathF.Sin(System.Single).");
if (test.RunTests())
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("PASS");
return 100;
}
else
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("FAIL");
return 0;
}
}
public bool RunTests()
{
bool retVal = true;
TestLibrary.TestFramework.LogInformation("[Positive]");
retVal = PosTest1() && retVal;
retVal = PosTest2() && retVal;
retVal = PosTest3() && retVal;
retVal = PosTest4() && retVal;
retVal = PosTest5() && retVal;
retVal = PosTest6() && retVal;
retVal = PosTest7() && retVal;
retVal = PosTest8() && retVal;
return retVal;
}
public bool PosTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest1: Verify the result when radian is 0.");
try
{
float f = MathF.Sin(0);
if (f != 0)
{
TestLibrary.TestFramework.LogError("P01.1", "The result is error when radian is 0!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P01.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest2()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest2: Verify the result is 1 when radian is MathF.PI/2.");
try
{
float f = MathF.Sin(MathF.PI / 2);
if (f != 1)
{
TestLibrary.TestFramework.LogError("P02.1", "The result is error when radian is MathF.PI/2!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P02.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest3()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest3: Verify the result is -1 when radian is -MathF.PI/2.");
try
{
float f = MathF.Sin(-MathF.PI / 2);
if (f != -1)
{
TestLibrary.TestFramework.LogError("P03.1", "The result is error when radian is -MathF.PI/2!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P03.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest4()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest4: Verify the result is 1/2 when radian is MathF.PI/6.");
try
{
float f = MathF.Round(MathF.Sin(MathF.PI / 6), 2);
if (f != 0.5f)
{
TestLibrary.TestFramework.LogError("P04.1", "The result is error when radian is MathF.PI/6!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P04.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest5()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest5: Verify the result is -1/2 when radian is -MathF.PI/6.");
try
{
float f = MathF.Round(MathF.Sin(-MathF.PI / 6), 2);
if (f != -0.5f)
{
TestLibrary.TestFramework.LogError("P05.1", "The result is error when radian is -MathF.PI/6!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P05.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest6()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest6: Verify the result is NaN when radian is PositiveInfinity.");
try
{
float f = MathF.Sin(float.PositiveInfinity);
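// NaN is not equal to itself via ==, so CompareTo (which treats two NaNs as equal) is used for the check.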
if (f.CompareTo(float.NaN) != 0)
{
TestLibrary.TestFramework.LogError("P06.1", "The result is error when radian is PositiveInfinity!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P06.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest7()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest7: Verify the result is NaN when radian is NegativeInfinity.");
try
{
float f = MathF.Sin(float.NegativeInfinity);
if (f.CompareTo(float.NaN) != 0)
{
TestLibrary.TestFramework.LogError("P07.1", "The result is error when radian is NegativeInfinity!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P07.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
public bool PosTest8()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest8: Verify the result is NaN when radian is NaN.");
try
{
float f = MathF.Sin(float.NaN);
if (f.CompareTo(float.NaN) != 0)
{
TestLibrary.TestFramework.LogError("P08.1", "The result is error when radian is NaN!");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("P08.2", "Unexpected exception occurs: " + e);
retVal = false;
}
return retVal;
}
}
| |
/*
* Copyright (c) InWorldz Halcyon Developers
* Copyright (c) Contributors, http://opensimulator.org/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Reflection;
using System.Xml;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Serialization;
using OpenSim.Framework.Serialization.External;
using OpenSim.Framework.Communications;
using OpenSim.Framework.Communications.Cache;
using OpenSim.Framework.Communications.Osp;
using OpenSim.Region.CoreModules.World.Archiver;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Region.CoreModules.Avatar.Inventory.Archiver
{
public class InventoryArchiveWriteRequest
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
/// <value>
/// Used to select all inventory nodes in a folder but not the folder itself
/// </value>
private const string STAR_WILDCARD = "*";
private InventoryArchiverModule m_module;
private CachedUserInfo m_userInfo;
private string m_invPath;
protected TarArchiveWriter m_archiveWriter;
protected UuidGatherer m_assetGatherer;
/// <value>
/// Used to collect the uuids of the assets that we need to save into the archive
/// </value>
protected Dictionary<UUID, int> m_assetUuids = new Dictionary<UUID, int>();
/// <value>
/// Used to collect the uuids of the users that we need to save into the archive
/// </value>
protected Dictionary<UUID, int> m_userUuids = new Dictionary<UUID, int>();
/// <value>
/// The stream to which the inventory archive will be saved.
/// </value>
private Stream m_saveStream;
/// <summary>
/// Constructor
/// </summary>
public InventoryArchiveWriteRequest(
InventoryArchiverModule module, CachedUserInfo userInfo, string invPath, string savePath)
: this(
module,
userInfo,
invPath,
new GZipStream(new FileStream(savePath, FileMode.Create), CompressionMode.Compress))
{
}
/// <summary>
/// Constructor
/// </summary>
public InventoryArchiveWriteRequest(
InventoryArchiverModule module, CachedUserInfo userInfo, string invPath, Stream saveStream)
{
m_module = module;
m_userInfo = userInfo;
m_invPath = invPath;
m_saveStream = saveStream;
m_assetGatherer = new UuidGatherer(m_module.CommsManager.AssetCache);
}
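/// <summary>
/// Callback invoked once the asset request has completed; closes the archive and notifies the module whether the save succeeded.
/// </summary>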
protected void ReceivedAllAssets(ICollection<UUID> assetsFoundUuids, ICollection<UUID> assetsNotFoundUuids)
{
Exception reportedException = null;
bool succeeded = true;
try
{
m_archiveWriter.Close();
}
catch (IOException e)
{
m_saveStream.Close();
reportedException = e;
succeeded = false;
}
m_module.TriggerInventoryArchiveSaved(succeeded, m_userInfo, m_invPath, m_saveStream, reportedException);
}
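/// <summary>
/// Serializes a single inventory item into the archive, recording its creator and gathering the asset UUIDs it references.
/// </summary>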
protected void SaveInvItem(InventoryItemBase inventoryItem, string path)
{
string filename = string.Format("{0}{1}_{2}.xml", path, inventoryItem.Name, inventoryItem.ID);
// Record the creator of this item for user record purposes (which might go away soon)
m_userUuids[inventoryItem.CreatorIdAsUuid] = 1;
InventoryItemBase saveItem = (InventoryItemBase)inventoryItem.Clone();
saveItem.CreatorId = OspResolver.MakeOspa(saveItem.CreatorIdAsUuid, m_module.CommsManager);
string serialization = UserInventoryItemSerializer.Serialize(saveItem);
m_archiveWriter.WriteFile(filename, serialization);
m_assetGatherer.GatherAssetUuids(saveItem.AssetID, (AssetType)saveItem.AssetType, m_assetUuids);
}
/// <summary>
/// Save an inventory folder
/// </summary>
/// <param name="inventoryFolder">The inventory folder to save</param>
/// <param name="path">The path to which the folder should be saved</param>
/// <param name="saveThisFolderItself">If true, save this folder itself. If false, only saves contents</param>
protected void SaveInvFolder(InventoryFolderImpl inventoryFolder, string path, bool saveThisFolderItself)
{
if (saveThisFolderItself)
{
path +=
string.Format(
"{0}{1}{2}/",
inventoryFolder.Name,
ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR,
inventoryFolder.ID);
// We need to make sure that we record empty folders
m_archiveWriter.WriteDir(path);
}
List<InventoryFolderImpl> childFolders = inventoryFolder.RequestListOfFolderImpls();
List<InventoryItemBase> items = inventoryFolder.RequestListOfItems();
/*
Dictionary identicalFolderNames = new Dictionary<string, int>();
foreach (InventoryFolderImpl folder in inventories)
{
if (!identicalFolderNames.ContainsKey(folder.Name))
identicalFolderNames[folder.Name] = 0;
else
identicalFolderNames[folder.Name] = identicalFolderNames[folder.Name]++;
int folderNameNumber = identicalFolderName[folder.Name];
SaveInvDir(
folder,
string.Format(
"{0}{1}{2}/",
path, ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR, folderNameNumber));
}
*/
foreach (InventoryFolderImpl childFolder in childFolders)
{
SaveInvFolder(childFolder, path, true);
}
foreach (InventoryItemBase item in items)
{
SaveInvItem(item, path);
}
}
/// <summary>
/// Execute the inventory write request
/// </summary>
public void Execute()
{
/*InventoryFolderImpl inventoryFolder = null;
InventoryItemBase inventoryItem = null;
if (!m_userInfo.HasReceivedInventory)
{
// If the region server has access to the user admin service (by which users are created),
// then we'll assume that it's okay to fiddle with the user's inventory even if they are not on the
// server.
//
// FIXME: FetchInventory should probably be assumed to be async anyway, since even standalones might
// use a remote inventory service, though this is vanishingly rare at the moment.
if (null == m_module.CommsManager.UserAdminService)
{
m_log.ErrorFormat(
"[INVENTORY ARCHIVER]: Have not yet received inventory info for user {0} {1}",
m_userInfo.UserProfile.Name, m_userInfo.UserProfile.ID);
return;
}
else
{
m_userInfo.FetchInventory();
}
}
bool foundStar = false;
// Eliminate double slashes and any leading / on the path. This might be better done within InventoryFolderImpl
// itself (possibly at a small loss in efficiency).
string[] components
= m_invPath.Split(new string[] { InventoryFolderImpl.PATH_DELIMITER }, StringSplitOptions.RemoveEmptyEntries);
int maxComponentIndex = components.Length - 1;
// If the path terminates with a STAR then later on we want to archive all nodes in the folder but not the
// folder itself. This may get more sophisticated later on
if (maxComponentIndex >= 0 && components[maxComponentIndex] == STAR_WILDCARD)
{
foundStar = true;
maxComponentIndex--;
}
m_invPath = String.Empty;
for (int i = 0; i <= maxComponentIndex; i++)
{
m_invPath += components[i] + InventoryFolderImpl.PATH_DELIMITER;
}
// Annoyingly Split actually returns the original string if the input string consists only of delimiters
// Therefore if we still start with a / after the split, then we need the root folder
if (String.IsNullOrEmpty(m_invPath))
{
inventoryFolder = m_userInfo.RootFolder;
}
else
{
m_invPath = m_invPath.Remove(m_invPath.LastIndexOf(InventoryFolderImpl.PATH_DELIMITER));
inventoryFolder = m_userInfo.RootFolder.FindFolderByPath(m_invPath);
}
// The path may point to an item instead
if (inventoryFolder == null)
{
inventoryItem = m_userInfo.RootFolder.FindItemByPath(m_invPath);
}
m_archiveWriter = new TarArchiveWriter(m_saveStream);
if (null == inventoryFolder)
{
if (null == inventoryItem)
{
// We couldn't find the path indicated
m_saveStream.Close();
m_module.TriggerInventoryArchiveSaved(
false, m_userInfo, m_invPath, m_saveStream,
new Exception(string.Format("Could not find inventory entry at path {0}", m_invPath)));
return;
}
else
{
m_log.DebugFormat(
"[INVENTORY ARCHIVER]: Found item {0} {1} at {2}",
inventoryItem.Name, inventoryItem.ID, m_invPath);
SaveInvItem(inventoryItem, ArchiveConstants.INVENTORY_PATH);
}
}
else
{
m_log.DebugFormat(
"[INVENTORY ARCHIVER]: Found folder {0} {1} at {2}",
inventoryFolder.Name, inventoryFolder.ID, m_invPath);
//recurse through all dirs getting dirs and files
SaveInvFolder(inventoryFolder, ArchiveConstants.INVENTORY_PATH, !foundStar);
}
SaveUsers();
new AssetsRequest(
new AssetsArchiver(m_archiveWriter), m_assetUuids.Keys,
m_module.CommsManager.AssetCache, ReceivedAllAssets).Execute();*/
throw new NotImplementedException();
}
/// <summary>
/// Save information for the users that we've collected.
/// </summary>
protected void SaveUsers()
{
m_log.InfoFormat("[INVENTORY ARCHIVER]: Saving user information for {0} users", m_userUuids.Count);
foreach (UUID creatorId in m_userUuids.Keys)
{
// Record the creator of this item
CachedUserInfo creator
= m_module.CommsManager.UserService.GetUserDetails(creatorId);
if (creator != null)
{
m_archiveWriter.WriteFile(
ArchiveConstants.USERS_PATH + creator.UserProfile.Name + ".xml",
UserProfileSerializer.Serialize(creator.UserProfile));
}
else
{
m_log.WarnFormat("[INVENTORY ARCHIVER]: Failed to get creator profile for {0}", creatorId);
}
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Data.SqlClient;
using System.Diagnostics;
using System.Linq;
using System.Reactive.Concurrency;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Its.Log.Instrumentation;
using Microsoft.Its.Recipes;
using Microsoft.Reactive.Testing;
using NUnit.Framework;
using Sample.Domain.Ordering;
using Sample.Domain.Projections;
using Assert = NUnit.Framework.Assert;
namespace Microsoft.Its.Domain.Sql.Tests
{
[Category("Catchups")]
[TestFixture]
public class ReadModelRollingCatchupTests : RollingCatchupTest
{
}
[Category("Catchups")]
[TestFixture]
public abstract class RollingCatchupTest : EventStoreDbTest
{
[Test]
public void Events_committed_to_the_event_store_are_caught_up_by_multiple_independent_read_model_stores()
{
var productName = Any.Paragraph(4);
var projector1 = new Projector<Order.ItemAdded>(() => new ReadModels1DbContext())
{
OnUpdate = (work, e) => new ReadModels1DbContext().DisposeAfter(db => UpdateReservedInventory(db, e))
};
var projector2 = new Projector<Order.ItemAdded>(() => new ReadModels2DbContext())
{
OnUpdate = (work, e) => new ReadModels2DbContext().DisposeAfter(db => UpdateReservedInventory(db, e))
};
var numberOfEvents = Any.Int(10, 50);
using (var disposables = new CompositeDisposable())
using (var catchup1 = CreateReadModelCatchup<ReadModels1DbContext>(projector1))
using (var catchup2 = CreateReadModelCatchup<ReadModels2DbContext>(projector2))
{
catchup1.Progress.ForEachAsync(p => Console.WriteLine("catchup1: " + p));
catchup2.Progress.ForEachAsync(p => Console.WriteLine("catchup2: " + p));
Action<string, ThreadStart> startThread =
(name, start) =>
{
var thread = new Thread(() =>
{
Console.WriteLine("starting thread (" + Thread.CurrentThread.ManagedThreadId + ")");
start();
Console.WriteLine("ended thread (" + Thread.CurrentThread.ManagedThreadId + ")");
});
thread.Name = name;
thread.Start();
disposables.Add(Disposable.Create(thread.Abort));
};
Events.Write(numberOfEvents, i => new Order.ItemAdded
{
ProductName = productName,
Quantity = 1,
AggregateId = Any.Guid()
});
// TODO: (Events_committed_to_the_event_store_are_caught_up_by_multiple_independent_read_model_stores) is this leading to intermittent test failures by leaving a dangling app lock?
startThread("catchup1", () =>
{
catchup1.Run().Wait();
catchup1.Dispose();
});
startThread("catchup2", () =>
{
catchup2.Run().Wait();
catchup2.Dispose();
});
Console.WriteLine("Waiting on catchups to complete");
// wait on both catchups to complete
catchup1
.Progress
.Merge(catchup2.Progress)
.Where(p => p.IsEndOfBatch)
.Take(2)
.Timeout(DefaultTimeout)
.Wait();
}
Action<DbContext> verify = db =>
{
var readModelInfoName = ReadModelInfo.NameForProjector(projector1);
var readModelInfos = db.Set<ReadModelInfo>();
Console.WriteLine(new { readModelInfos }.ToLogString());
readModelInfos
.Single(i => i.Name == readModelInfoName)
.CurrentAsOfEventId
.Should()
.Be(HighestEventId + numberOfEvents);
var productInventories = db.Set<ProductInventory>();
Console.WriteLine(new { productInventories }.ToLogString());
productInventories
.Single(pi => pi.ProductName == productName)
.QuantityReserved
.Should()
.Be(numberOfEvents);
};
Console.WriteLine("verifying ReadModels1DbContext...");
new ReadModels1DbContext().DisposeAfter(r => verify(r));
Console.WriteLine("verifying ReadModels2DbContext...");
new ReadModels2DbContext().DisposeAfter(r => verify(r));
}
[Test]
public void Rolling_catchup_can_be_run_based_on_event_store_polling()
{
var numberOfEvents = 50;
Console.WriteLine("writing " + numberOfEvents + " starting at " + HighestEventId);
// start the catchup in polling mode
Projector<Order.ItemAdded> projector = null;
var reading = Task.Run(() =>
{
projector = new Projector<Order.ItemAdded>(() => new ReadModels1DbContext());
using (var catchup = CreateReadModelCatchup<ReadModels1DbContext>(projector).PollEventStore())
{
catchup.Progress
.Do(s => Console.WriteLine(s))
.FirstAsync(s => s.IsEndOfBatch && s.CurrentEventId == numberOfEvents + HighestEventId)
.Timeout(DefaultTimeout)
.Wait();
}
});
// now start writing a bunch of new events
var writing = Task.Run(() => Enumerable.Range(1, numberOfEvents).ForEach(_ =>
{
// add a little variation into the intervals at which new events are written
Thread.Sleep(Any.PositiveInt(1000));
Events.Write(1);
}));
writing.Wait();
reading.Wait();
using (var db = new ReadModels1DbContext())
{
var readModelInfoName = ReadModelInfo.NameForProjector(projector);
db.Set<ReadModelInfo>()
.Single(i => i.Name == readModelInfoName)
.CurrentAsOfEventId
.Should()
.Be(HighestEventId + numberOfEvents);
}
}
[Test]
public void EventStore_polling_polls_again_immediately_if_new_events_were_written_while_the_previous_batch_was_processing()
{
TaskScheduler.UnobservedTaskException += (sender, args) =>
{
Console.WriteLine(args.Exception.ToLogString());
};
Events.Write(1);
var writeAdditionalEvent = true;
var scheduler = new TestScheduler();
var projector = new Projector<Order.ItemAdded>(() => new ReadModels1DbContext())
{
OnUpdate = (work, e) =>
{
if (writeAdditionalEvent)
{
writeAdditionalEvent = false;
Events.Write(1);
}
}
};
var statusReports = new List<ReadModelCatchupStatus>();
using (var catchup = CreateReadModelCatchup<ReadModels1DbContext>(projector))
using (catchup.Progress.Subscribe(s =>
{
statusReports.Add(s);
}))
{
catchup.PollEventStore(TimeSpan.FromSeconds(30), scheduler);
scheduler.AdvanceBy(TimeSpan.FromSeconds(30).Ticks);
statusReports.Count(s => s.IsStartOfBatch)
.Should()
.Be(2);
}
}
[Test]
public async Task EventStore_polling_waits_for_specified_interval_if_no_new_events_have_been_written()
{
Events.Write(1);
var projector = new Projector<Order.Shipped>(() => new ReadModels1DbContext());
var statusReports = new List<ReadModelCatchupStatus>();
var scheduler = new TestScheduler();
using (var catchup = CreateReadModelCatchup<ReadModels1DbContext>(projector))
{
// catch up to the event store
await catchup.Run();
catchup.Progress
.ForEachAsync(s =>
{
statusReports.Add(s);
Console.WriteLine(s);
});
// act
catchup.PollEventStore(TimeSpan.FromSeconds(30), scheduler);
Console.WriteLine("start polling");
scheduler.AdvanceBy(TimeSpan.FromSeconds(59).Ticks);
}
// assert
statusReports.Count.Should().Be(1);
}
[Test]
public void EventStore_polling_continues_even_if_connection_gets_closed_during_replay()
{
Events.Write(3);
var scheduler = new TestScheduler();
var projector = new Projector<Order.ItemAdded>(() => new ReadModels1DbContext())
{
OnUpdate = (work, e) => { }
};
var statusReports = new List<ReadModelCatchupStatus>();
var catchup = CreateReadModelCatchup<ReadModels1DbContext>(projector);
DbConnection dbConnection = new SqlConnection();
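// Hijack the event store context factory so the test can capture the underlying connection and forcibly close it mid-replay.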
catchup.CreateEventStoreDbContext = () =>
{
var context = new EventStoreDbContext();
dbConnection = ((IObjectContextAdapter) context).ObjectContext.Connection;
return context;
};
using (catchup)
{
catchup.Progress
.ForEachAsync(s =>
{
Console.WriteLine(s);
if (!s.IsStartOfBatch && !s.IsEndOfBatch)
{
Console.WriteLine("closing the connection");
// close the connection
dbConnection.Close();
}
statusReports.Add(s);
});
catchup.PollEventStore(TimeSpan.FromSeconds(5), scheduler);
// Advance to trigger the first catchup
scheduler.AdvanceBy(TimeSpan.FromSeconds(5).Ticks);
// Trigger an empty batch replay
scheduler.AdvanceBy(TimeSpan.FromSeconds(5).Ticks);
Events.Write(2);
// Advance to trigger the polling catchup
scheduler.AdvanceBy(TimeSpan.FromSeconds(9).Ticks);
statusReports.Count(s => s.IsStartOfBatch)
.Should()
.Be(3);
}
}
[Test]
public void EventStore_polling_replays_new_events_inserted_after_previous_catchup_completed()
{
Events.Write(3);
var scheduler = new TestScheduler();
var projector = new Projector<Order.ItemAdded>(() => new ReadModels1DbContext())
{
OnUpdate = (work, e) => { }
};
var statusReports = new List<ReadModelCatchupStatus>();
using (var catchup = CreateReadModelCatchup<ReadModels1DbContext>(projector))
{
catchup.Progress
.ForEachAsync(s =>
{
statusReports.Add(s);
Console.WriteLine(s);
});
catchup.PollEventStore(TimeSpan.FromSeconds(5), scheduler);
// Advance to trigger the first catchup
scheduler.AdvanceBy(TimeSpan.FromSeconds(5).Ticks);
// Trigger an empty batch replay
scheduler.AdvanceBy(TimeSpan.FromSeconds(5).Ticks);
Events.Write(2);
// Advance to trigger the polling catchup
scheduler.AdvanceBy(TimeSpan.FromSeconds(9).Ticks);
statusReports.Count(s => s.IsStartOfBatch)
.Should().Be(3);
}
}
[Test]
public void When_one_concurrent_catchup_instance_terminates_due_to_deliberate_disposal_then_another_tries_to_take_over_immediately()
{
// arrange
int numberOfEventsToWrite = 15;
Events.Write(numberOfEventsToWrite);
var scheduler = new TestScheduler();
var projector1 = new Projector<IEvent>(() => new ReadModels1DbContext());
var projector2 = new Projector<IEvent>(() => new ReadModels1DbContext());
var catchup1StatusReports = new List<ReadModelCatchupStatus>();
var catchup2StatusReports = new List<ReadModelCatchupStatus>();
using (var catchup1 = CreateReadModelCatchup<ReadModels1DbContext>(projector1))
using (var catchup2 = CreateReadModelCatchup<ReadModels1DbContext>(projector2))
{
catchup1.Progress.ForEachAsync(s =>
{
catchup1StatusReports.Add(s);
Console.WriteLine("catchup1: " + s);
// when we've processed at least one event, cancel this catchup
if (!s.IsStartOfBatch && s.NumberOfEventsProcessed > 0)
{
catchup1.Dispose();
}
});
catchup2.Progress.ForEachAsync(s =>
{
catchup2StatusReports.Add(s);
Console.WriteLine("catchup2: " + s);
});
// act
catchup1.PollEventStore(TimeSpan.FromSeconds(3), scheduler);
scheduler.Schedule(TimeSpan.FromSeconds(.5),
() =>
{
Console.WriteLine("scheduling catchup2 polling");
catchup2.PollEventStore(TimeSpan.FromSeconds(3), scheduler);
});
scheduler.AdvanceBy(TimeSpan.FromSeconds(3.5).Ticks);
}
// assert
catchup1StatusReports.Count(s => !s.IsStartOfBatch)
.Should()
.Be(1, "sanity check that catchup1 polled");
catchup2StatusReports.Count(s => !s.IsStartOfBatch)
.Should()
.Be(numberOfEventsToWrite - 1);
catchup2StatusReports.Count(s => s.IsStartOfBatch)
.Should()
.Be(1, "sanity check that catchup2 polled");
var expected = Enumerable.Range((int) HighestEventId + 1, numberOfEventsToWrite);
Console.WriteLine("expected: " + expected.ToLogString());
var processed = catchup1StatusReports.Concat(catchup2StatusReports).Where(s => !s.IsStartOfBatch).Select(s => s.CurrentEventId).ToArray();
Console.WriteLine("actual: " + processed.ToLogString());
processed.ShouldBeEquivalentTo(expected);
}
[Test]
public void When_one_concurrent_catchup_instance_terminates_due_to_eventstore_connection_loss_then_another_tries_to_take_over_immediately()
{
// arrange
int numberOfEventsToWrite = 100;
Events.Write(numberOfEventsToWrite);
var projector1 = new Projector<IEvent>(() => new ReadModels1DbContext());
var projector2 = new Projector<IEvent>(() => new ReadModels1DbContext());
var catchup1StatusReports = new List<ReadModelCatchupStatus>();
var catchup2StatusReports = new List<ReadModelCatchupStatus>();
var catchup1 = CreateReadModelCatchup<ReadModels1DbContext>(projector1);
DbConnection dbConnection1 = new SqlConnection();
catchup1.CreateEventStoreDbContext = () =>
{
var context = new EventStoreDbContext();
dbConnection1 = ((IObjectContextAdapter) context).ObjectContext.Connection;
return context;
};
using (catchup1)
using (var catchup2 = CreateReadModelCatchup<ReadModels1DbContext>(projector2))
{
catchup1.Progress.ForEachAsync(s =>
{
catchup1StatusReports.Add(s);
Console.WriteLine("catchup1: " + s);
// close the event store connection as soon as progress is reported, to simulate connection loss
dbConnection1.Close();
});
catchup2.Progress.ForEachAsync(s =>
{
catchup2StatusReports.Add(s);
Console.WriteLine("catchup2: " + s);
});
// act
catchup1.PollEventStore(TimeSpan.FromSeconds(10), new NewThreadScheduler());
new NewThreadScheduler().Schedule(TimeSpan.FromSeconds(5),
() =>
{
Console.WriteLine("scheduling catchup2 polling");
catchup2.PollEventStore(TimeSpan.FromSeconds(10), new NewThreadScheduler());
});
var waitingOnEventId = HighestEventId + numberOfEventsToWrite;
Console.WriteLine(string.Format("waiting on event id {0} to be processed", waitingOnEventId));
catchup1.Progress.Merge(catchup2.Progress)
.FirstAsync(s => s.CurrentEventId == waitingOnEventId)
.Timeout(DefaultTimeout)
.Wait();
}
// assert
catchup1StatusReports.Count(s => !s.IsStartOfBatch)
.Should()
.Be(1, "sanity check that catchup1 polled");
catchup2StatusReports.Count(s => !s.IsStartOfBatch)
.Should()
.Be(numberOfEventsToWrite - 1);
catchup2StatusReports.Count(s => s.IsStartOfBatch)
.Should()
.Be(1, "sanity check that catchup2 polled");
var expected = Enumerable.Range((int) HighestEventId + 1, numberOfEventsToWrite);
Console.WriteLine("expected: " + expected.ToLogString());
var processed = catchup1StatusReports.Concat(catchup2StatusReports).Where(s => !s.IsStartOfBatch).Select(s => s.CurrentEventId).ToArray();
Console.WriteLine("actual: " + processed.ToLogString());
processed.ShouldBeEquivalentTo(expected);
}
[Test]
public void When_concurrent_catchups_are_all_caught_up_then_subsequent_events_are_processed_in_less_than_the_poll_time()
{
// arrange
int numberOfEventsToWrite = 50;
var lastEventId = Events.Write(numberOfEventsToWrite);
var projector1 = new Projector<IEvent>(() => new ReadModels1DbContext());
var projector2 = new Projector<IEvent>(() => new ReadModels1DbContext());
var catchup1StatusReports = new List<ReadModelCatchupStatus>();
var catchup2StatusReports = new List<ReadModelCatchupStatus>();
using (var catchup1 = CreateReadModelCatchup<ReadModels1DbContext>(projector1))
using (var catchup2 = CreateReadModelCatchup<ReadModels1DbContext>(projector2))
{
var catchup1Progress = catchup1.Progress;
var catchup2Progress = catchup2.Progress;
catchup1Progress.ForEachAsync(s =>
{
catchup1StatusReports.Add(s);
Console.WriteLine("catchup1: " + s);
});
catchup2Progress.ForEachAsync(s =>
{
catchup2StatusReports.Add(s);
Console.WriteLine("catchup2: " + s);
});
var pollInterval = TimeSpan.FromSeconds(1);
catchup1.PollEventStore(pollInterval, new NewThreadScheduler());
catchup2.PollEventStore(pollInterval, new NewThreadScheduler());
catchup1Progress.Merge(catchup2Progress)
.FirstAsync(s => s.CurrentEventId == lastEventId)
.Timeout(DefaultTimeout)
.Wait();
// act: new events should be caught up after a few idle polls
Thread.Sleep(TimeSpan.FromSeconds(3));
lastEventId = Events.Write(5);
Console.WriteLine(new { lastEventId });
catchup1Progress.Merge(catchup2Progress)
.Do(s =>
{
if (s.Latency > pollInterval)
{
Assert.Fail(string.Format("Latency ({0}) exceeded poll interval {1}\n({2})",
s.Latency.Value.TotalSeconds,
pollInterval.TotalSeconds,
s));
}
})
.FirstAsync(s => s.CurrentEventId == lastEventId)
.Timeout(TimeSpan.FromSeconds(3))
.Wait();
}
}
[Test]
public void When_a_catchup_has_been_waiting_for_several_poll_intervals_it_only_runs_once()
{
// arrange
int numberOfEventsToWrite = Any.Int(10, 20);
Events.Write(numberOfEventsToWrite);
var testScheduler = new TestScheduler();
var projector1 = new Projector<IEvent>(() => new ReadModels1DbContext())
{
OnUpdate = (work, e) =>
{
// create some delay so that catchup2 will attempt to poll multiple times
testScheduler.Sleep(1000);
}
};
var projector2 = new Projector<IEvent>(() => new ReadModels1DbContext());
var catchup1StatusReports = new List<ReadModelCatchupStatus>();
var catchup2StatusReports = new List<ReadModelCatchupStatus>();
using (var catchup1 = CreateReadModelCatchup<ReadModels1DbContext>(projector1))
using (var catchup2 = CreateReadModelCatchup<ReadModels1DbContext>(projector2))
{
bool catchup1Disposed = false;
catchup1.Progress.ForEachAsync(s =>
{
catchup1StatusReports.Add(s);
Console.WriteLine("catchup1: " + s);
// when the batch is done, dispose, which should allow catchup2 to try
if (s.IsEndOfBatch)
{
Console.WriteLine("disposing catchup1");
catchup1.Dispose();
catchup1Disposed = true;
}
});
catchup2.Progress.ForEachAsync(s =>
{
catchup2StatusReports.Add(s);
Console.WriteLine("catchup2: " + s);
});
// act
var scheduler1 = new SchedulerWatcher(testScheduler, "scheduler1");
catchup1.PollEventStore(TimeSpan.FromSeconds(1), scheduler1);
var scheduler2 = new SchedulerWatcher(testScheduler, "scheduler2");
scheduler2.Schedule(TimeSpan.FromSeconds(1.5),
() =>
{
Console.WriteLine("catchup2 polling starting");
// use a higher poll frequency so the poll timer fires many times while catchup1 is running
catchup2.PollEventStore(TimeSpan.FromSeconds(.5), scheduler2);
});
while (!catchup1Disposed)
{
testScheduler.AdvanceBy(TimeSpan.FromSeconds(.1).Ticks);
}
testScheduler.AdvanceBy(TimeSpan.FromSeconds(1).Ticks);
}
// assert
catchup2StatusReports.Count(s => s.IsStartOfBatch)
.Should()
.Be(1);
}
[Test]
public async Task Catchups_can_be_run_in_parallel_for_different_projectors()
{
var projector1Count = 0;
var projector2Count = 0;
var projector1 = Projector.Create<Order.CreditCardCharged>(e => projector1Count++);
var projector2 = Projector.Create<Order.CustomerInfoChanged>(e => projector2Count++);
Events.Write(15, i => new Order.CreditCardCharged());
Events.Write(25, i => new Order.CustomerInfoChanged());
var catchup1 = CreateReadModelCatchup(projector1);
catchup1.Name = MethodBase.GetCurrentMethod().Name + "1";
var catchup2 = CreateReadModelCatchup(projector2);
catchup2.Name = MethodBase.GetCurrentMethod().Name + "2";
catchup1.Progress.Subscribe(s => Console.WriteLine("catchup1: " + s));
catchup2.Progress.Subscribe(s => Console.WriteLine("catchup2: " + s));
using (catchup1.PollEventStore())
using (catchup2.PollEventStore())
{
await CatchupWrapper.SingleBatchAsync(catchup1, catchup2);
}
projector1Count.Should().Be(15);
projector2Count.Should().Be(25);
}
private static TimeSpan DefaultTimeout
{
get
{
if (!Debugger.IsAttached)
{
return TimeSpan.FromSeconds(60);
}
return TimeSpan.FromMinutes(60);
}
}
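// Upserts the ProductInventory row for the event's product and increments its reserved quantity.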
private static void UpdateReservedInventory(DbContext db, Order.ItemAdded e)
{
var inventoryRecord = db.Set<ProductInventory>()
.SingleOrDefault(r => r.ProductName == e.ProductName)
.IfNotNull()
.Then(r => r)
.Else(() =>
{
var r = new ProductInventory
{
ProductName = e.ProductName
};
db.Set<ProductInventory>().Add(r);
return r;
});
inventoryRecord.QuantityReserved += e.Quantity;
db.SaveChanges();
}
}
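/// <summary>
/// An IScheduler decorator used by the tests above: logs every scheduling call before delegating to the wrapped scheduler.
/// </summary>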
public class SchedulerWatcher : IScheduler
{
private readonly IScheduler inner;
private string name;
public SchedulerWatcher(IScheduler innerScheduler, string name)
{
if (innerScheduler == null)
{
throw new ArgumentNullException("innerScheduler");
}
if (name == null)
{
throw new ArgumentNullException("name");
}
this.name = name;
inner = innerScheduler;
}
public IDisposable Schedule<TState>(TState state, Func<IScheduler, TState, IDisposable> action)
{
Console.WriteLine("> " + name + " scheduling: " + new { state, action }.ToLogString());
return inner.Schedule(state, action);
}
public IDisposable Schedule<TState>(TState state, TimeSpan dueTime, Func<IScheduler, TState, IDisposable> action)
{
Console.WriteLine("> " + name + " scheduling: " + new { state, dueTime, action }.ToLogString());
return inner.Schedule(state, dueTime, action);
}
public IDisposable Schedule<TState>(TState state, DateTimeOffset dueTime, Func<IScheduler, TState, IDisposable> action)
{
Console.WriteLine("> " + name + " scheduling: " + new { state, dueTime, action }.ToLogString());
return inner.Schedule(state, dueTime, action);
}
public DateTimeOffset Now
{
get
{
return DateTime.Now;
}
}
}
}
| |
using System;
using NUnit.Framework;
using VkNet.Enums.SafetyEnums;
using VkNet.Model.Attachments;
namespace VkNet.Tests.Models
{
[TestFixture]
public class VideoModel : BaseTest
{
[Test]
public void ToString_VideoShouldHaveAccessKey()
{
var video = new Video
{
Id = 1234,
OwnerId = 1234,
AccessKey = "test"
};
var result = video.ToString();
Assert.AreEqual(result, "video1234_1234_test");
}
[Test]
public void Cans_ArePresent()
{
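// The canned JSON fixture is loaded by the BaseTest helper; GetResponse() returns the parsed response consumed below.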
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
Assert.True(video.CanComment);
Assert.True(video.CanLike);
Assert.True(video.CanRepost);
Assert.True(video.CanSubscribe);
Assert.True(video.CanAddToFaves);
Assert.True(video.CanAdd);
}
[Test]
public void OvId_IsPresent()
{
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
Assert.AreEqual("2930947729488", video.OvId);
}
[Test]
public void Files_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
var files = video.Files;
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=hJ-nQdUFlJE&ct=0&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=0"),
files.Mp4_240);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=AaOYcMVln7E&ct=0&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=1"),
files.Mp4_360);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=stO4D-MNhVA&ct=0&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=2"),
files.Mp4_480);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=6zFPxlcmMW0&ct=0&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=3"),
files.Mp4_720);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=P479kIBCu5M&ct=0&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=5"),
files.Mp4_1080);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/video.m3u8?srcIp=217.70.31.125&expires=1633249784334&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&mid=2930947729488&type=4&sig=kKC4Rp0aao4&ct=8&urls=185.226.52.190&clientType=13&cmd=videoPlayerCdn&id=1777443670608"),
files.Hls);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=stO4D-MNhVA&ct=6&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=2"),
files.DashUni);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=AaOYcMVln7E&ct=6&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=1"),
files.DashSep);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/?sig=_PmzhECbdtY&ct=6&srcIp=217.70.31.125&urls=185.226.52.190&expires=1633249784334&clientType=13&srcAg=UNKNOWN&fromCache=1&ms=45.136.22.169&appId=512000384397&id=1777443670608&type=4"),
files.DashWebm);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/srcIp/217.70.31.125/expires/1633249784334/srcAg/UNKNOWN/fromCache/1/ms/45.136.22.169/mid/2930947729488/type/2/sig/JahccvslsEY/ct/28/urls/185.226.52.190/clientType/13/id/1777443670608/ondemand/hls_1777443670608.m3u8"),
files.HlsOnDemand);
Assert.AreEqual(
new Uri(
"https://vkvd79.mycdn.me/srcIp/217.70.31.125/expires/1633249784334/srcAg/UNKNOWN/fromCache/1/ms/45.136.22.169/mid/2930947729488/type/2/sig/JahccvslsEY/ct/29/urls/185.226.52.190/clientType/13/id/1777443670608/ondemand/dash_1777443670608.mpd"),
files.DashOnDemand);
Assert.AreEqual("vkvd185.mycdn.me", files.FailOverHost);
}
[Test]
public void TimelineThumbs_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
var timelineThumbs = video.TimelineThumbs;
Assert.AreEqual(9, timelineThumbs.CountPerImage);
Assert.AreEqual(3, timelineThumbs.CountPerRow);
Assert.AreEqual(208, timelineThumbs.CountTotal);
Assert.AreEqual(180, timelineThumbs.FrameHeight);
Assert.AreEqual(320.0f, timelineThumbs.FrameWidth);
Assert.AreEqual(24, timelineThumbs.Links.Count);
Assert.IsTrue(timelineThumbs.IsUv);
Assert.AreEqual(5, timelineThumbs.Frequency);
}
[Test]
public void Ads_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
var ads = video.Ads;
Assert.AreEqual(551874, ads.SlotId);
Assert.AreEqual(1.0f, ads.Timeout);
Assert.AreEqual(1, ads.CanPlay);
Assert.IsNotNull(ads.Params);
var sections = ads.Sections;
Assert.AreEqual(3, sections.Count);
Assert.AreEqual(VideoAdsSection.Preroll, sections[0]);
Assert.AreEqual(VideoAdsSection.Midroll, sections[1]);
Assert.AreEqual(VideoAdsSection.Postroll, sections[2]);
var midrollPercents = ads.MidrollPercents;
Assert.AreEqual(2, midrollPercents.Count);
Assert.AreEqual(0.25f, midrollPercents[0]);
Assert.AreEqual(0.75f, midrollPercents[1]);
Assert.AreEqual(1, ads.AutoPlayPreroll);
}
[Test]
public void AdsParams_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_with_ads_and_timeline");
var response = GetResponse();
var video = Video.FromJson(response);
var ads = video.Ads;
var @params = ads.Params;
Assert.IsNotNull(@params);
Assert.AreEqual(12345678, @params.VkId);
Assert.AreEqual(1039, @params.Duration);
Assert.AreEqual("-136270576_456239929", @params.VideoId);
Assert.AreEqual(21469, @params.Pl);
Assert.AreEqual("-585277666870842567", @params.ContentId);
Assert.AreEqual(1, @params.Lang);
// All PuId values are essentially random, but for the provided JSON they are deserialized correctly
Assert.AreEqual("986", @params.PuId1);
Assert.AreEqual(17, @params.PuId2);
Assert.AreEqual(1, @params.PuId3);
Assert.AreEqual(1, @params.PuId4);
Assert.AreEqual(14, @params.PuId5);
Assert.AreEqual(86, @params.PuId6);
Assert.AreEqual(1, @params.PuId7);
Assert.AreEqual(9, @params.PuId8);
Assert.AreEqual(0, @params.PuId9);
Assert.AreEqual(4, @params.PuId10);
Assert.AreEqual(16, @params.PuId12);
Assert.AreEqual(2, @params.PuId13);
Assert.AreEqual(2, @params.PuId14);
Assert.AreEqual(1, @params.PuId15);
Assert.AreEqual(0, @params.PuId18);
Assert.AreEqual(2, @params.PuId21);
Assert.AreEqual("4551227ff8b9944114687581748264426f663db8", @params.Sign);
Assert.AreEqual(136270576, @params.GroupId);
Assert.AreEqual(29, @params.VkCatId);
}
[Test]
public void Video_Live_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_live");
var response = GetResponse();
var video = Video.FromJson(response);
Assert.AreEqual(0, video.Duration);
Assert.AreEqual("live", video.Type);
Assert.AreEqual("started", video.LiveStatus);
Assert.True(video.Live);
Assert.AreEqual(89, video.Spectators);
}
[Test]
public void Video_Live_Files_Contains_Live_Uris()
{
ReadJsonFile("Models", "video_live");
var response = GetResponse();
var video = Video.FromJson(response);
var files = video.Files;
Assert.AreEqual(
new Uri(
"https://vkvsd16.mycdn.me/hls/1095312673357_offset_p.m3u8/sig/OWp_G67RlXg/srcIp/217.70.31.125/expires/1633252742236/clientType/13/srcAg/UNKNOWN/fromCache/1/mid/2669706881869/id/1095312673357/video.m3u8?p"),
files.HlsLivePlayback);
Assert.AreEqual(
new Uri(
"https://vkvsd16.mycdn.me/dash/stream_1095312673357_offset_p/stream.manifest/sig/OWp_G67RlXg/srcIp/217.70.31.125/expires/1633252742236/clientType/13/srcAg/UNKNOWN/fromCache/1/mid/2669706881869/id/1095312673357/video"),
files.DashLivePlayback);
}
[Test]
public void Video_Live_LiveSettings_AllFields_ArePresent()
{
ReadJsonFile("Models", "video_live");
var response = GetResponse();
var video = Video.FromJson(response);
var liveSettings = video.LiveSettings;
Assert.AreEqual(1, liveSettings.CanRewind);
Assert.AreEqual(1, liveSettings.IsEndless);
Assert.AreEqual(7200, liveSettings.MaxDuration);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.DateTimeOffset
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// A sample API that tests datetimeoffset usage for date-time
/// </summary>
public partial class SwaggerDateTimeOffsetClient : ServiceClient<SwaggerDateTimeOffsetClient>, ISwaggerDateTimeOffsetClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Initializes a new instance of the SwaggerDateTimeOffsetClient class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SwaggerDateTimeOffsetClient(params DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the SwaggerDateTimeOffsetClient class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SwaggerDateTimeOffsetClient(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the SwaggerDateTimeOffsetClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SwaggerDateTimeOffsetClient(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the SwaggerDateTimeOffsetClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SwaggerDateTimeOffsetClient(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// An optional partial-method to perform custom initialization.
///</summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
this.BaseUri = new Uri("http://localhost:3000/api");
SerializationSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
CustomInitialize();
}
/// <summary>
/// Product Types
/// </summary>
/// <param name='responseCode'>
/// The desired returned status code
/// </param>
/// <param name='product'>
/// The only parameter
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<Product>> GetProductWithHttpMessagesAsync(string responseCode = default(string), Product product = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("responseCode", responseCode);
tracingParameters.Add("product", product);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetProduct", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datatypes").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (responseCode != null)
{
if (_httpRequest.Headers.Contains("response-code"))
{
_httpRequest.Headers.Remove("response-code");
}
_httpRequest.Headers.TryAddWithoutValidation("response-code", responseCode);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
_requestContent = SafeJsonConvert.SerializeObject(product, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Product>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<Product>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
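// Illustrative usage sketch (not part of the generated client; 'client' is a hypothetical,
// already-configured instance of this class). The desired status code is sent in the
// 'response-code' request header, and the typed body comes back on the response wrapper.
// The Put/Post/Patch variants below follow the same calling pattern.
//
//     HttpOperationResponse<Product> _getResponse =
//         await client.GetProductWithHttpMessagesAsync(responseCode: "200");
//     Product _returned = _getResponse.Body;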
/// <summary>
/// Product Types
/// </summary>
/// <param name='responseCode'>
/// The desired returned status code
/// </param>
/// <param name='product'>
/// The only parameter
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to the request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<Product>> PutProductWithHttpMessagesAsync(string responseCode = default(string), Product product = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("responseCode", responseCode);
tracingParameters.Add("product", product);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "PutProduct", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datatypes").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (responseCode != null)
{
if (_httpRequest.Headers.Contains("response-code"))
{
_httpRequest.Headers.Remove("response-code");
}
_httpRequest.Headers.TryAddWithoutValidation("response-code", responseCode);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
_requestContent = SafeJsonConvert.SerializeObject(product, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Product>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<Product>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Product Types
/// </summary>
/// <param name='responseCode'>
/// The desired returned status code
/// </param>
/// <param name='product'>
/// The only parameter
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to the request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<Product>> PostProductWithHttpMessagesAsync(string responseCode = default(string), Product product = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("responseCode", responseCode);
tracingParameters.Add("product", product);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "PostProduct", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datatypes").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (responseCode != null)
{
if (_httpRequest.Headers.Contains("response-code"))
{
_httpRequest.Headers.Remove("response-code");
}
_httpRequest.Headers.TryAddWithoutValidation("response-code", responseCode);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
_requestContent = SafeJsonConvert.SerializeObject(product, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Product>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<Product>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Product Types
/// </summary>
/// <param name='responseCode'>
/// The desired returned status code
/// </param>
/// <param name='product'>
/// The only parameter
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to the request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<Product>> PatchProductWithHttpMessagesAsync(string responseCode = default(string), Product product = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("responseCode", responseCode);
tracingParameters.Add("product", product);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "PatchProduct", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datatypes").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PATCH");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (responseCode != null)
{
if (_httpRequest.Headers.Contains("response-code"))
{
_httpRequest.Headers.Remove("response-code");
}
_httpRequest.Headers.TryAddWithoutValidation("response-code", responseCode);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
_requestContent = SafeJsonConvert.SerializeObject(product, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<Product>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<Product>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Uncomment to make asset Get requests for existing
// assets while the cache is processing/writing the asset.
// #define WAIT_ON_INPROGRESS_REQUESTS
using System;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Threading;
using System.Timers;
using log4net;
using Nini.Config;
using Mono.Addins;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Framework.Monitoring;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
//[assembly: Addin("FlotsamAssetCache", "1.1")]
//[assembly: AddinDependency("OpenSim", "0.8.1")]
namespace OpenSim.Region.CoreModules.Asset
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "FlotsamAssetCache")]
public class FlotsamAssetCache : ISharedRegionModule, IAssetCache, IAssetService
{
private static readonly ILog m_log =
LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
private bool m_Enabled;
private bool m_timerRunning;
private bool m_cleanupRunning;
private const string m_ModuleName = "FlotsamAssetCache";
private const string m_DefaultCacheDirectory = "./assetcache";
private string m_CacheDirectory = m_DefaultCacheDirectory;
private readonly List<char> m_InvalidChars = new List<char>();
private int m_LogLevel = 0;
private ulong m_HitRateDisplay = 100; // How often to display hit statistics, given in requests
private static ulong m_Requests;
private static ulong m_RequestsForInprogress;
private static ulong m_DiskHits;
private static ulong m_MemoryHits;
private static ulong m_weakRefHits;
#if WAIT_ON_INPROGRESS_REQUESTS
private Dictionary<string, ManualResetEvent> m_CurrentlyWriting = new Dictionary<string, ManualResetEvent>();
private int m_WaitOnInprogressTimeout = 3000;
#else
private HashSet<string> m_CurrentlyWriting = new HashSet<string>();
#endif
private bool m_FileCacheEnabled = true;
private ExpiringCache<string, AssetBase> m_MemoryCache;
private bool m_MemoryCacheEnabled = false;
private ExpiringCache<string, object> m_negativeCache;
private bool m_negativeCacheEnabled = true;
private bool m_negativeCacheSliding = false;
// Expiration is expressed in hours.
private double m_MemoryExpiration = 0.016;
private const double m_DefaultFileExpiration = 48;
// Negative cache is in seconds
private int m_negativeExpiration = 120;
private TimeSpan m_FileExpiration = TimeSpan.FromHours(m_DefaultFileExpiration);
private TimeSpan m_FileExpirationCleanupTimer = TimeSpan.FromHours(1.0);
private static int m_CacheDirectoryTiers = 1;
private static int m_CacheDirectoryTierLen = 3;
private static int m_CacheWarnAt = 30000;
private System.Timers.Timer m_CacheCleanTimer;
private IAssetService m_AssetService;
private List<Scene> m_Scenes = new List<Scene>();
private object timerLock = new object();
private Dictionary<string,WeakReference> weakAssetReferences = new Dictionary<string, WeakReference>();
private object weakAssetReferencesLock = new object();
private bool m_updateFileTimeOnCacheHit = false;
public FlotsamAssetCache()
{
m_InvalidChars.AddRange(Path.GetInvalidPathChars());
m_InvalidChars.AddRange(Path.GetInvalidFileNameChars());
}
public Type ReplaceableInterface
{
get { return null; }
}
public string Name
{
get { return m_ModuleName; }
}
public void Initialise(IConfigSource source)
{
IConfig moduleConfig = source.Configs["Modules"];
if (moduleConfig != null)
{
string name = moduleConfig.GetString("AssetCaching", String.Empty);
if (name == Name)
{
m_MemoryCache = new ExpiringCache<string, AssetBase>();
m_negativeCache = new ExpiringCache<string, object>();
m_Enabled = true;
m_log.InfoFormat("[FLOTSAM ASSET CACHE]: {0} enabled", this.Name);
IConfig assetConfig = source.Configs["AssetCache"];
if (assetConfig == null)
{
m_log.Debug(
"[FLOTSAM ASSET CACHE]: AssetCache section missing from config (not copied config-include/FlotsamCache.ini.example? Using defaults.");
}
else
{
m_FileCacheEnabled = assetConfig.GetBoolean("FileCacheEnabled", m_FileCacheEnabled);
m_CacheDirectory = assetConfig.GetString("CacheDirectory", m_DefaultCacheDirectory);
m_MemoryCacheEnabled = assetConfig.GetBoolean("MemoryCacheEnabled", m_MemoryCacheEnabled);
m_MemoryExpiration = assetConfig.GetDouble("MemoryCacheTimeout", m_MemoryExpiration);
m_MemoryExpiration *= 3600.0; // config in hours to seconds
m_negativeCacheEnabled = assetConfig.GetBoolean("NegativeCacheEnabled", m_negativeCacheEnabled);
m_negativeExpiration = assetConfig.GetInt("NegativeCacheTimeout", m_negativeExpiration);
m_negativeCacheSliding = assetConfig.GetBoolean("NegativeCacheSliding", m_negativeCacheSliding);
m_updateFileTimeOnCacheHit = assetConfig.GetBoolean("UpdateFileTimeOnCacheHit", m_updateFileTimeOnCacheHit);
#if WAIT_ON_INPROGRESS_REQUESTS
m_WaitOnInprogressTimeout = assetConfig.GetInt("WaitOnInprogressTimeout", 3000);
#endif
m_LogLevel = assetConfig.GetInt("LogLevel", m_LogLevel);
m_HitRateDisplay = (ulong)assetConfig.GetLong("HitRateDisplay", (long)m_HitRateDisplay);
m_FileExpiration = TimeSpan.FromHours(assetConfig.GetDouble("FileCacheTimeout", m_DefaultFileExpiration));
m_FileExpirationCleanupTimer
= TimeSpan.FromHours(
assetConfig.GetDouble("FileCleanupTimer", m_FileExpirationCleanupTimer.TotalHours));
m_CacheDirectoryTiers = assetConfig.GetInt("CacheDirectoryTiers", m_CacheDirectoryTiers);
m_CacheDirectoryTierLen = assetConfig.GetInt("CacheDirectoryTierLength", m_CacheDirectoryTierLen);
m_CacheWarnAt = assetConfig.GetInt("CacheWarnAt", m_CacheWarnAt);
}
m_log.InfoFormat("[FLOTSAM ASSET CACHE]: Cache Directory {0}", m_CacheDirectory);
if (m_CacheDirectoryTiers < 1)
{
m_CacheDirectoryTiers = 1;
}
else if (m_CacheDirectoryTiers > 3)
{
m_CacheDirectoryTiers = 3;
}
if (m_CacheDirectoryTierLen < 1)
{
m_CacheDirectoryTierLen = 1;
}
else if (m_CacheDirectoryTierLen > 4)
{
m_CacheDirectoryTierLen = 4;
}
MainConsole.Instance.Commands.AddCommand("Assets", true, "fcache status", "fcache status", "Display cache status", HandleConsoleCommand);
MainConsole.Instance.Commands.AddCommand("Assets", true, "fcache clear", "fcache clear [file] [memory]", "Remove all assets in the cache. If file or memory is specified then only this cache is cleared.", HandleConsoleCommand);
MainConsole.Instance.Commands.AddCommand("Assets", true, "fcache assets", "fcache assets", "Attempt a deep scan and cache of all assets in all scenes", HandleConsoleCommand);
MainConsole.Instance.Commands.AddCommand("Assets", true, "fcache expire", "fcache expire <datetime>", "Purge cached assets older then the specified date/time", HandleConsoleCommand);
}
}
}
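// Illustrative FlotsamCache.ini fragment (example values only; each key shown is one of the
// optional settings read above, and the values match the defaults declared in this class):
//
//     [Modules]
//     AssetCaching = "FlotsamAssetCache"
//
//     [AssetCache]
//     FileCacheEnabled = true
//     CacheDirectory = ./assetcache
//     MemoryCacheEnabled = false
//     FileCacheTimeout = 48
//     FileCleanupTimer = 1.0
//     CacheDirectoryTiers = 1
//     CacheDirectoryTierLength = 3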
public void PostInitialise()
{
}
public void Close()
{
}
public void AddRegion(Scene scene)
{
if (m_Enabled)
{
scene.RegisterModuleInterface<IAssetCache>(this);
m_Scenes.Add(scene);
}
}
public void RemoveRegion(Scene scene)
{
if (m_Enabled)
{
scene.UnregisterModuleInterface<IAssetCache>(this);
m_Scenes.Remove(scene);
lock(timerLock)
{
if(m_timerRunning && m_Scenes.Count <= 0)
{
m_timerRunning = false;
m_CacheCleanTimer.Stop();
m_CacheCleanTimer.Close();
}
}
}
}
public void RegionLoaded(Scene scene)
{
if (m_Enabled)
{
if(m_AssetService == null)
m_AssetService = scene.RequestModuleInterface<IAssetService>();
lock(timerLock)
{
if(!m_timerRunning)
{
if (m_FileCacheEnabled && (m_FileExpiration > TimeSpan.Zero) && (m_FileExpirationCleanupTimer > TimeSpan.Zero))
{
m_CacheCleanTimer = new System.Timers.Timer(m_FileExpirationCleanupTimer.TotalMilliseconds);
m_CacheCleanTimer.AutoReset = false;
m_CacheCleanTimer.Elapsed += CleanupExpiredFiles;
m_CacheCleanTimer.Start();
m_timerRunning = true;
}
}
}
if (m_MemoryCacheEnabled)
m_MemoryCache = new ExpiringCache<string, AssetBase>();
lock(weakAssetReferencesLock)
weakAssetReferences = new Dictionary<string, WeakReference>();
}
}
////////////////////////////////////////////////////////////
// IAssetCache
//
private void UpdateWeakReference(string key, AssetBase asset)
{
WeakReference aref = new WeakReference(asset);
lock(weakAssetReferencesLock)
weakAssetReferences[key] = aref;
}
private void UpdateMemoryCache(string key, AssetBase asset)
{
// NOTE DO NOT USE SLIDEEXPIRE option on current libomv
m_MemoryCache.AddOrUpdate(key, asset, m_MemoryExpiration);
}
private void UpdateFileCache(string key, AssetBase asset)
{
string filename = GetFileName(key);
try
{
// If the file is already cached, don't cache it, just touch it so access time is updated
if (File.Exists(filename))
{
UpdateFileLastAccessTime(filename);
}
else
{
// Once we start writing, make sure we flag that we're writing
// that object to the cache so that we don't try to write the
// same file multiple times.
lock (m_CurrentlyWriting)
{
#if WAIT_ON_INPROGRESS_REQUESTS
if (m_CurrentlyWriting.ContainsKey(filename))
{
return;
}
else
{
m_CurrentlyWriting.Add(filename, new ManualResetEvent(false));
}
#else
if (m_CurrentlyWriting.Contains(filename))
{
return;
}
else
{
m_CurrentlyWriting.Add(filename);
}
#endif
}
Util.FireAndForget(
delegate { WriteFileCache(filename, asset); }, null, "FlotsamAssetCache.UpdateFileCache");
}
}
catch (Exception e)
{
m_log.ErrorFormat(
"[FLOTSAM ASSET CACHE]: Failed to update cache for asset {0}. Exception {1} {2}",
asset.ID, e.Message, e.StackTrace);
}
}
public void Cache(AssetBase asset)
{
// TODO: Spawn this off to some separate thread to do the actual writing
if (asset != null)
{
//m_log.DebugFormat("[FLOTSAM ASSET CACHE]: Caching asset with id {0}", asset.ID);
UpdateWeakReference(asset.ID, asset);
if (m_MemoryCacheEnabled)
UpdateMemoryCache(asset.ID, asset);
if (m_FileCacheEnabled)
UpdateFileCache(asset.ID, asset);
}
}
public void CacheNegative(string id)
{
if (m_negativeCacheEnabled)
{
if (m_negativeCacheSliding)
m_negativeCache.AddOrUpdate(id, null, TimeSpan.FromSeconds(m_negativeExpiration));
else
m_negativeCache.AddOrUpdate(id, null, m_negativeExpiration);
}
}
/// <summary>
/// Updates the cached file with the current time.
/// </summary>
/// <param name="filename">Filename.</param>
/// <returns><c>true</c>, if the update was successful, false otherwise.</returns>
private bool UpdateFileLastAccessTime(string filename)
{
try
{
File.SetLastAccessTime(filename, DateTime.Now);
return true;
}
catch
{
return false;
}
}
private AssetBase GetFromWeakReference(string id)
{
AssetBase asset = null;
WeakReference aref;
lock(weakAssetReferencesLock)
{
if (weakAssetReferences.TryGetValue(id, out aref))
{
asset = aref.Target as AssetBase;
if(asset == null)
weakAssetReferences.Remove(id);
else
m_weakRefHits++;
}
}
return asset;
}
/// <summary>
/// Try to get an asset from the in-memory cache.
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
private AssetBase GetFromMemoryCache(string id)
{
AssetBase asset = null;
if (m_MemoryCache.TryGetValue(id, out asset))
m_MemoryHits++;
return asset;
}
private bool CheckFromMemoryCache(string id)
{
return m_MemoryCache.Contains(id);
}
/// <summary>
/// Try to get an asset from the file cache.
/// </summary>
/// <param name="id"></param>
/// <returns>An asset retrieved from the file cache. null if there was a problem retrieving an asset.</returns>
private AssetBase GetFromFileCache(string id)
{
string filename = GetFileName(id);
#if WAIT_ON_INPROGRESS_REQUESTS
// Check if we're already downloading this asset. If so, try to wait for it to
// download.
if (m_WaitOnInprogressTimeout > 0)
{
m_RequestsForInprogress++;
ManualResetEvent waitEvent;
if (m_CurrentlyWriting.TryGetValue(filename, out waitEvent))
{
waitEvent.WaitOne(m_WaitOnInprogressTimeout);
return Get(id);
}
}
#else
// Track how often we have the problem that an asset is requested while
// it is still being downloaded by a previous request.
if (m_CurrentlyWriting.Contains(filename))
{
m_RequestsForInprogress++;
return null;
}
#endif
AssetBase asset = null;
if (File.Exists(filename))
{
try
{
using (FileStream stream = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read))
{
if (stream.Length == 0) // Empty file will trigger exception below
return null;
BinaryFormatter bformatter = new BinaryFormatter();
asset = (AssetBase)bformatter.Deserialize(stream);
m_DiskHits++;
}
}
catch (System.Runtime.Serialization.SerializationException e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Failed to get file {0} for asset {1}. Exception {2} {3}",
filename, id, e.Message, e.StackTrace);
// If there was a problem deserializing the asset, the asset may
// either be corrupted OR was serialized under an old format
// {different version of AssetBase} -- we should attempt to
// delete it and re-cache
File.Delete(filename);
}
catch (Exception e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Failed to get file {0} for asset {1}. Exception {2} {3}",
filename, id, e.Message, e.StackTrace);
}
}
return asset;
}
private bool CheckFromFileCache(string id)
{
bool found = false;
string filename = GetFileName(id);
if (File.Exists(filename))
{
try
{
using (FileStream stream = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read))
{
if (stream != null)
found = true;
}
}
catch (Exception e)
{
m_log.ErrorFormat(
"[FLOTSAM ASSET CACHE]: Failed to check file {0} for asset {1}. Exception {2} {3}",
filename, id, e.Message, e.StackTrace);
}
}
return found;
}
// For IAssetService
public AssetBase Get(string id)
{
AssetBase asset;
Get(id, out asset);
return asset;
}
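// Lookup order: the negative cache short-circuits known misses, then the weak-reference map,
// the optional in-memory cache and finally the file cache are consulted; a hit in a slower
// layer repopulates the faster layers above it.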
public bool Get(string id, out AssetBase asset)
{
asset = null;
m_Requests++;
object dummy;
if (m_negativeCache.TryGetValue(id, out dummy))
{
return false;
}
asset = GetFromWeakReference(id);
if (asset != null && m_updateFileTimeOnCacheHit)
{
string filename = GetFileName(id);
UpdateFileLastAccessTime(filename);
}
if (m_MemoryCacheEnabled && asset == null)
{
asset = GetFromMemoryCache(id);
if(asset != null)
{
UpdateWeakReference(id,asset);
if (m_updateFileTimeOnCacheHit)
{
string filename = GetFileName(id);
UpdateFileLastAccessTime(filename);
}
}
}
if (asset == null && m_FileCacheEnabled)
{
asset = GetFromFileCache(id);
if(asset != null)
UpdateWeakReference(id,asset);
}
if (m_MemoryCacheEnabled && asset != null)
UpdateMemoryCache(id, asset);
if ((m_LogLevel >= 1) && (m_HitRateDisplay != 0) && (m_Requests % m_HitRateDisplay == 0))
{
m_log.InfoFormat("[FLOTSAM ASSET CACHE]: Cache Get :: {0} :: {1}", id, asset == null ? "Miss" : "Hit");
GenerateCacheHitReport().ForEach(l => m_log.InfoFormat("[FLOTSAM ASSET CACHE]: {0}", l));
}
return true;
}
public bool Check(string id)
{
if (m_MemoryCacheEnabled && CheckFromMemoryCache(id))
return true;
if (m_FileCacheEnabled && CheckFromFileCache(id))
return true;
return false;
}
public AssetBase GetCached(string id)
{
AssetBase asset;
Get(id, out asset);
return asset;
}
public void Expire(string id)
{
if (m_LogLevel >= 2)
m_log.DebugFormat("[FLOTSAM ASSET CACHE]: Expiring Asset {0}", id);
try
{
if (m_FileCacheEnabled)
{
string filename = GetFileName(id);
if (File.Exists(filename))
{
File.Delete(filename);
}
}
if (m_MemoryCacheEnabled)
m_MemoryCache.Remove(id);
lock(weakAssetReferencesLock)
weakAssetReferences.Remove(id);
}
catch (Exception e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Failed to expire cached file {0}. Exception {1} {2}",
id, e.Message, e.StackTrace);
}
}
public void Clear()
{
if (m_LogLevel >= 2)
m_log.Debug("[FLOTSAM ASSET CACHE]: Clearing caches.");
if (m_FileCacheEnabled)
{
foreach (string dir in Directory.GetDirectories(m_CacheDirectory))
{
Directory.Delete(dir);
}
}
if (m_MemoryCacheEnabled)
m_MemoryCache = new ExpiringCache<string, AssetBase>();
if (m_negativeCacheEnabled)
m_negativeCache = new ExpiringCache<string, object>();
lock(weakAssetReferencesLock)
weakAssetReferences = new Dictionary<string, WeakReference>();
}
private void CleanupExpiredFiles(object source, ElapsedEventArgs e)
{
if (m_LogLevel >= 2)
m_log.DebugFormat("[FLOTSAM ASSET CACHE]: Checking for expired files older than {0}.", m_FileExpiration);
lock(timerLock)
{
if(!m_timerRunning || m_cleanupRunning)
return;
m_cleanupRunning = true;
}
// Purge all files last accessed prior to this point
DateTime purgeLine = DateTime.Now - m_FileExpiration;
// An asset cache may contain local non-temporary assets that are not in the asset service. Therefore,
// before cleaning up expired files we must scan the objects in the scene to make sure that we retain
// such local assets if they have not been recently accessed.
TouchAllSceneAssets(false);
foreach (string dir in Directory.GetDirectories(m_CacheDirectory))
{
CleanExpiredFiles(dir, purgeLine);
}
lock(timerLock)
{
if(m_timerRunning)
m_CacheCleanTimer.Start();
m_cleanupRunning = false;
}
}
/// <summary>
/// Recurses through specified directory checking for asset files last
/// accessed prior to the specified purge line and deletes them. Also
/// removes empty tier directories.
/// </summary>
/// <param name="dir"></param>
/// <param name="purgeLine"></param>
private void CleanExpiredFiles(string dir, DateTime purgeLine)
{
try
{
foreach (string file in Directory.GetFiles(dir))
{
if (File.GetLastAccessTime(file) < purgeLine)
{
File.Delete(file);
}
}
// Recurse into lower tiers
foreach (string subdir in Directory.GetDirectories(dir))
{
CleanExpiredFiles(subdir, purgeLine);
}
// Check if a tier directory is empty, if so, delete it
int dirSize = Directory.GetFiles(dir).Length + Directory.GetDirectories(dir).Length;
if (dirSize == 0)
{
Directory.Delete(dir);
}
else if (dirSize >= m_CacheWarnAt)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Cache folder exceeded CacheWarnAt limit {0} {1}. Suggest increasing tiers, tier length, or reducing cache expiration",
dir, dirSize);
}
}
catch (DirectoryNotFoundException)
{
// If we get here, another node on the same box has
// already removed the directory. Continue with next.
}
catch (Exception e)
{
m_log.Warn(
string.Format("[FLOTSAM ASSET CACHE]: Could not complete clean of expired files in {0}, exception ", dir), e);
}
}
/// <summary>
/// Determines the filename for an AssetID stored in the file cache
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
private string GetFileName(string id)
{
// Would it be faster to just hash the darn thing?
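// Worked example (hypothetical id): with m_CacheDirectoryTiers = 1 and m_CacheDirectoryTierLen = 3,
// an asset id of "06eb3c4a-0ed2-..." maps to "<m_CacheDirectory>/06e/06eb3c4a-0ed2-...",
// i.e. one subdirectory per tier, each named with the next m_CacheDirectoryTierLen characters of the id.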
foreach (char c in m_InvalidChars)
{
id = id.Replace(c, '_');
}
string path = m_CacheDirectory;
for (int p = 1; p <= m_CacheDirectoryTiers; p++)
{
string pathPart = id.Substring((p - 1) * m_CacheDirectoryTierLen, m_CacheDirectoryTierLen);
path = Path.Combine(path, pathPart);
}
return Path.Combine(path, id);
}
/// <summary>
/// Writes a file to the file cache, creating any necessary
/// tier directories along the way
/// </summary>
/// <param name="filename"></param>
/// <param name="asset"></param>
private void WriteFileCache(string filename, AssetBase asset)
{
Stream stream = null;
// Make sure the target cache directory exists
string directory = Path.GetDirectoryName(filename);
// Write file first to a temp name, so that it doesn't look
// like it's already cached while it's still writing.
string tempname = Path.Combine(directory, Path.GetRandomFileName());
try
{
try
{
if (!Directory.Exists(directory))
{
Directory.CreateDirectory(directory);
}
stream = File.Open(tempname, FileMode.Create);
BinaryFormatter bformatter = new BinaryFormatter();
bformatter.Serialize(stream, asset);
}
catch (IOException e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Failed to write asset {0} to temporary location {1} (final {2}) on cache in {3}. Exception {4} {5}.",
asset.ID, tempname, filename, directory, e.Message, e.StackTrace);
return;
}
finally
{
if (stream != null)
stream.Close();
}
try
{
// Now that it's written, rename it so that it can be found.
//
// File.Copy(tempname, filename, true);
// File.Delete(tempname);
//
// For a brief period, this was done as a separate copy and then temporary file delete operation to
// avoid an IOException caused by move if some competing thread had already written the file.
// However, this causes exceptions on Windows when other threads attempt to read a file
// which is still being copied. So instead, go back to moving the file and swallow any IOException.
//
// This situation occurs fairly rarely anyway. We assume in this that moves are atomic on the
// filesystem.
File.Move(tempname, filename);
if (m_LogLevel >= 2)
m_log.DebugFormat("[FLOTSAM ASSET CACHE]: Cache Stored :: {0}", asset.ID);
}
catch (IOException)
{
// If we see an IOException here it's likely that some other competing thread has written the
// cache file first, so ignore. Other IOException errors (e.g. filesystem full) should be
// signalled by the earlier temporary file writing code.
}
}
finally
{
// Even if the write fails with an exception, we need to make sure
// that we release the lock on that file, otherwise it'll never get
// cached
lock (m_CurrentlyWriting)
{
#if WAIT_ON_INPROGRESS_REQUESTS
ManualResetEvent waitEvent;
if (m_CurrentlyWriting.TryGetValue(filename, out waitEvent))
{
m_CurrentlyWriting.Remove(filename);
waitEvent.Set();
}
#else
m_CurrentlyWriting.Remove(filename);
#endif
}
}
}
/// <summary>
/// Scan through the file cache, and return number of assets currently cached.
/// </summary>
/// <param name="dir"></param>
/// <returns></returns>
private int GetFileCacheCount(string dir)
{
int count = Directory.GetFiles(dir).Length;
foreach (string subdir in Directory.GetDirectories(dir))
{
count += GetFileCacheCount(subdir);
}
return count;
}
/// <summary>
/// This notes the last time the Region had a deep asset scan performed on it.
/// </summary>
/// <param name="regionID"></param>
private void StampRegionStatusFile(UUID regionID)
{
string RegionCacheStatusFile = Path.Combine(m_CacheDirectory, "RegionStatus_" + regionID.ToString() + ".fac");
try
{
if (File.Exists(RegionCacheStatusFile))
{
File.SetLastWriteTime(RegionCacheStatusFile, DateTime.Now);
}
else
{
File.WriteAllText(
RegionCacheStatusFile,
"Please do not delete this file unless you are manually clearing your Flotsam Asset Cache.");
}
}
catch (Exception e)
{
m_log.Warn(
string.Format(
"[FLOTSAM ASSET CACHE]: Could not stamp region status file for region {0}. Exception ",
regionID),
e);
}
}
/// <summary>
/// Iterates through all Scenes, doing a deep scan through assets
/// to update the access time of all assets present in the scene or referenced by assets
/// in the scene.
/// </summary>
/// <param name="storeUncached">
/// If true, then assets scanned which are not found in cache are added to the cache.
/// </param>
/// <returns>Number of distinct asset references found in the scene.</returns>
private int TouchAllSceneAssets(bool storeUncached)
{
UuidGatherer gatherer = new UuidGatherer(m_AssetService);
Dictionary<UUID, bool> assetsFound = new Dictionary<UUID, bool>();
foreach (Scene s in m_Scenes)
{
StampRegionStatusFile(s.RegionInfo.RegionID);
s.ForEachSOG(delegate(SceneObjectGroup e)
{
if(!m_timerRunning && !storeUncached)
return;
gatherer.AddForInspection(e);
gatherer.GatherAll();
if(!m_timerRunning && !storeUncached)
return;
foreach (UUID assetID in gatherer.GatheredUuids.Keys)
{
if (!assetsFound.ContainsKey(assetID))
{
string filename = GetFileName(assetID.ToString());
if (File.Exists(filename))
{
UpdateFileLastAccessTime(filename);
assetsFound[assetID] = true;
}
else if (storeUncached)
{
AssetBase cachedAsset = m_AssetService.Get(assetID.ToString());
if (cachedAsset == null && gatherer.GatheredUuids[assetID] != (sbyte)AssetType.Unknown)
assetsFound[assetID] = false;
else
assetsFound[assetID] = true;
}
}
else if (!assetsFound[assetID])
{
m_log.DebugFormat(
"[FLOTSAM ASSET CACHE]: Could not find asset {0}, type {1} referenced by object {2} at {3} in scene {4} when pre-caching all scene assets",
assetID, gatherer.GatheredUuids[assetID], e.Name, e.AbsolutePosition, s.Name);
}
}
gatherer.GatheredUuids.Clear();
if(!m_timerRunning && !storeUncached)
return;
if(!storeUncached)
Thread.Sleep(50);
});
if(!m_timerRunning && !storeUncached)
break;
}
return assetsFound.Count;
}
/// <summary>
/// Deletes all cache contents
/// </summary>
private void ClearFileCache()
{
foreach (string dir in Directory.GetDirectories(m_CacheDirectory))
{
try
{
Directory.Delete(dir, true);
}
catch (Exception e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Couldn't clear asset cache directory {0} from {1}. Exception {2} {3}",
dir, m_CacheDirectory, e.Message, e.StackTrace);
}
}
foreach (string file in Directory.GetFiles(m_CacheDirectory))
{
try
{
File.Delete(file);
}
catch (Exception e)
{
m_log.WarnFormat(
"[FLOTSAM ASSET CACHE]: Couldn't clear asset cache file {0} from {1}. Exception {1} {2}",
file, m_CacheDirectory, e.Message, e.StackTrace);
}
}
}
private List<string> GenerateCacheHitReport()
{
List<string> outputLines = new List<string>();
// Guard against a division by zero when the report is requested before any asset request has been made.
double invReq = (m_Requests > 0) ? 100.0 / m_Requests : 0.0;
double weakHitRate = m_weakRefHits * invReq;
int weakEntries = weakAssetReferences.Count;
double fileHitRate = m_DiskHits * invReq;
double TotalHitRate = weakHitRate + fileHitRate;
outputLines.Add(
string.Format("Total requests: {0}", m_Requests));
outputLines.Add(
string.Format("unCollected Hit Rate: {0}% ({1} entries)", weakHitRate.ToString("0.00"),weakEntries));
outputLines.Add(
string.Format("File Hit Rate: {0}%", fileHitRate.ToString("0.00")));
if (m_MemoryCacheEnabled)
{
double HitRate = m_MemoryHits * invReq;
outputLines.Add(
string.Format("Memory Hit Rate: {0}%", HitRate.ToString("0.00")));
TotalHitRate += HitRate;
}
outputLines.Add(
string.Format("Total Hit Rate: {0}%", TotalHitRate.ToString("0.00")));
outputLines.Add(
string.Format(
"Requests overlap during file writing: {0}", m_RequestsForInprogress));
return outputLines;
}
#region Console Commands
private void HandleConsoleCommand(string module, string[] cmdparams)
{
ICommandConsole con = MainConsole.Instance;
if (cmdparams.Length >= 2)
{
string cmd = cmdparams[1];
switch (cmd)
{
case "status":
if (m_MemoryCacheEnabled)
con.OutputFormat("Memory Cache: {0} assets", m_MemoryCache.Count);
else
con.OutputFormat("Memory cache disabled");
if (m_FileCacheEnabled)
{
int fileCount = GetFileCacheCount(m_CacheDirectory);
con.OutputFormat("File Cache: {0} assets", fileCount);
}
else
{
con.Output("File cache disabled");
}
GenerateCacheHitReport().ForEach(l => con.Output(l));
if (m_FileCacheEnabled)
{
con.Output("Deep scans have previously been performed on the following regions:");
foreach (string s in Directory.GetFiles(m_CacheDirectory, "*.fac"))
{
string RegionID = s.Remove(0, s.IndexOf("_") + 1).Replace(".fac", "");
DateTime RegionDeepScanTMStamp = File.GetLastWriteTime(s);
con.OutputFormat("Region: {0}, {1}", RegionID, RegionDeepScanTMStamp.ToString("MM/dd/yyyy hh:mm:ss"));
}
}
break;
case "clear":
if (cmdparams.Length < 2)
{
con.Output("Usage is fcache clear [file] [memory]");
break;
}
bool clearMemory = false, clearFile = false;
if (cmdparams.Length == 2)
{
clearMemory = true;
clearFile = true;
}
foreach (string s in cmdparams)
{
if (s.ToLower() == "memory")
clearMemory = true;
else if (s.ToLower() == "file")
clearFile = true;
}
if (clearMemory)
{
if (m_MemoryCacheEnabled)
{
m_MemoryCache.Clear();
con.Output("Memory cache cleared.");
}
else
{
con.Output("Memory cache not enabled.");
}
}
if (clearFile)
{
if (m_FileCacheEnabled)
{
ClearFileCache();
con.Output("File cache cleared.");
}
else
{
con.Output("File cache not enabled.");
}
}
break;
case "assets":
lock(timerLock)
{
if(m_cleanupRunning)
{
con.OutputFormat("FloatSam assets check already running");
return;
}
m_cleanupRunning = true;
}
con.Output("FloatSam Ensuring assets are cached for all scenes.");
WorkManager.RunInThread(delegate
{
bool wasRunning= false;
lock(timerLock)
{
if(m_timerRunning)
{
m_CacheCleanTimer.Stop();
m_timerRunning = false;
wasRunning = true;
Thread.Sleep(100);
}
}
int assetReferenceTotal = TouchAllSceneAssets(true);
GC.Collect();
lock(timerLock)
{
if(wasRunning)
{
m_CacheCleanTimer.Start();
m_timerRunning = true;
}
m_cleanupRunning = false;
}
con.OutputFormat("Completed check with {0} assets.", assetReferenceTotal);
}, null, "TouchAllSceneAssets");
break;
case "expire":
if (cmdparams.Length < 3)
{
con.OutputFormat("Invalid parameters for Expire, please specify a valid date & time", cmd);
break;
}
string s_expirationDate = "";
DateTime expirationDate;
if (cmdparams.Length > 3)
{
s_expirationDate = string.Join(" ", cmdparams, 2, cmdparams.Length - 2);
}
else
{
s_expirationDate = cmdparams[2];
}
if (!DateTime.TryParse(s_expirationDate, out expirationDate))
{
con.OutputFormat("{0} is not a valid date & time", cmd);
break;
}
if (m_FileCacheEnabled)
CleanExpiredFiles(m_CacheDirectory, expirationDate);
else
con.OutputFormat("File cache not active, not clearing.");
break;
default:
con.OutputFormat("Unknown command {0}", cmd);
break;
}
}
else if (cmdparams.Length == 1)
{
con.Output("fcache assets - Attempt a deep cache of all assets in all scenes");
con.Output("fcache expire <datetime> - Purge assets older then the specified date & time");
con.Output("fcache clear [file] [memory] - Remove cached assets");
con.Output("fcache status - Display cache status");
}
}
#endregion
#region IAssetService Members
public AssetMetadata GetMetadata(string id)
{
AssetBase asset;
Get(id, out asset);
return asset == null ? null : asset.Metadata;
}
public byte[] GetData(string id)
{
AssetBase asset;
Get(id, out asset);
return asset == null ? null : asset.Data;
}
public bool Get(string id, object sender, AssetRetrieved handler)
{
AssetBase asset;
if (!Get(id, out asset))
return false;
handler(id, sender, asset);
return true;
}
public bool[] AssetsExist(string[] ids)
{
bool[] exist = new bool[ids.Length];
for (int i = 0; i < ids.Length; i++)
{
exist[i] = Check(ids[i]);
}
return exist;
}
public string Store(AssetBase asset)
{
if (asset.FullID == UUID.Zero)
{
asset.FullID = UUID.Random();
}
Cache(asset);
return asset.ID;
}
public bool UpdateContent(string id, byte[] data)
{
AssetBase asset;
if (!Get(id, out asset) || asset == null)
return false;
asset.Data = data;
Cache(asset);
return true;
}
public bool Delete(string id)
{
Expire(id);
return true;
}
#endregion
}
}
| |
/********************************************************************************************
Copyright (c) Microsoft Corporation
All rights reserved.
Microsoft Public License:
This license governs use of the accompanying software. If you use the software, you
accept this license. If you do not accept the license, do not use the software.
1. Definitions
The terms "reproduce," "reproduction," "derivative works," and "distribution" have the
same meaning here as under U.S. copyright law.
A "contribution" is the original software, or any additions or changes to the software.
A "contributor" is any person that distributes its contribution under this license.
"Licensed patents" are a contributor's patent claims that read directly on its contribution.
2. Grant of Rights
(A) Copyright Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free copyright license to reproduce its contribution, prepare derivative works of
its contribution, and distribute its contribution or any derivative works that you create.
(B) Patent Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free license under its licensed patents to make, have made, use, sell, offer for
sale, import, and/or otherwise dispose of its contribution in the software or derivative
works of the contribution in the software.
3. Conditions and Limitations
(A) No Trademark License- This license does not grant you rights to use any contributors'
name, logo, or trademarks.
(B) If you bring a patent claim against any contributor over patents that you claim are
infringed by the software, your patent license from such contributor to the software ends
automatically.
(C) If you distribute any portion of the software, you must retain all copyright, patent,
trademark, and attribution notices that are present in the software.
(D) If you distribute any portion of the software in source code form, you may do so only
under this license by including a complete copy of this license with your distribution.
If you distribute any portion of the software in compiled or object code form, you may only
do so under a license that complies with this license.
(E) The software is licensed "as-is." You bear the risk of using it. The contributors give
no express warranties, guarantees or conditions. You may have additional consumer rights
under your local laws which this license cannot change. To the extent permitted under your
local laws, the contributors exclude the implied warranties of merchantability, fitness for
a particular purpose and non-infringement.
********************************************************************************************/
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using MSBuild = Microsoft.Build.Evaluation;
using MSBuildExecution = Microsoft.Build.Execution;
namespace Microsoft.VisualStudio.Project
{
/// <summary>
/// Creates projects within the solution
/// </summary>
public abstract class ProjectFactory : Microsoft.VisualStudio.Shell.Flavor.FlavoredProjectFactoryBase //, IVsAsynchronousProjectCreate
{
#region fields
private Microsoft.VisualStudio.Shell.Package package;
private System.IServiceProvider site;
private static readonly Lazy<IVsTaskSchedulerService> taskSchedulerService = new Lazy<IVsTaskSchedulerService>(() => Package.GetGlobalService(typeof(SVsTaskSchedulerService)) as IVsTaskSchedulerService);
/// <summary>
/// The msbuild engine that we are going to use.
/// </summary>
private MSBuild.ProjectCollection buildEngine;
/// <summary>
/// The msbuild project for the project file.
/// </summary>
private MSBuild.Project buildProject;
#endregion
#region properties
protected Microsoft.VisualStudio.Shell.Package Package
{
get
{
return this.package;
}
}
protected System.IServiceProvider Site
{
get
{
return this.site;
}
}
/// <summary>
/// The msbuild engine that we are going to use.
/// </summary>
protected MSBuild.ProjectCollection BuildEngine
{
get
{
return this.buildEngine;
}
}
/// <summary>
/// The msbuild project for the temporary project file.
/// </summary>
protected MSBuild.Project BuildProject
{
get
{
return this.buildProject;
}
set
{
this.buildProject = value;
}
}
#endregion
#region ctor
protected ProjectFactory(Microsoft.VisualStudio.Shell.Package package)
{
this.package = package;
this.site = package;
// Please be aware that this method requires a valid ServiceProvider, thus the ordering of calls in the ctor matters.
this.buildEngine = Utilities.InitializeMsBuildEngine(this.buildEngine, this.site);
}
#endregion
#region methods
public virtual bool CanCreateProjectAsynchronously(ref Guid rguidProjectID, string filename, uint flags)
{
return true;
}
public void OnBeforeCreateProjectAsync(ref Guid rguidProjectID, string filename, string location, string pszName, uint flags)
{
}
public virtual IVsTask CreateProjectAsync(ref Guid rguidProjectID, string filename, string location, string pszName, uint flags)
{
Guid iid = typeof(IVsHierarchy).GUID;
return VsTaskLibraryHelper.CreateAndStartTask(taskSchedulerService.Value, VsTaskRunContext.UIThreadBackgroundPriority, VsTaskLibraryHelper.CreateTaskBody(() =>
{
IntPtr project;
int cancelled;
CreateProject(filename, location, pszName, flags, ref iid, out project, out cancelled);
if (cancelled != 0)
{
throw new OperationCanceledException();
}
return Marshal.GetObjectForIUnknown(project);
}));
}
#endregion
#region abstract methods
protected abstract ProjectNode CreateProject();
#endregion
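// Minimal sketch of a derived factory (hypothetical types, for illustration only):
//
//     [Guid("00000000-0000-0000-0000-000000000000")]
//     public class MyProjectFactory : ProjectFactory
//     {
//         public MyProjectFactory(MyPackage package) : base(package) { }
//         protected override ProjectNode CreateProject()
//         {
//             return new MyProjectNode(this.Package as ProjectPackage);
//         }
//     }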
#region overridden methods
/// <summary>
/// Rather than directly creating the project, ask VS to initiate the process of
/// creating an aggregated project in case we are flavored. We will be called
/// on the IVsAggregatableProjectFactory to do the real project creation.
/// </summary>
/// <param name="fileName">Project file</param>
/// <param name="location">Path of the project</param>
/// <param name="name">Project Name</param>
/// <param name="flags">Creation flags</param>
/// <param name="projectGuid">Guid of the project</param>
/// <param name="project">Project that end up being created by this method</param>
/// <param name="canceled">Was the project creation canceled</param>
protected override void CreateProject(string fileName, string location, string name, uint flags, ref Guid projectGuid, out IntPtr project, out int canceled)
{
project = IntPtr.Zero;
canceled = 0;
// Get the list of GUIDs from the project/template
string guidsList = this.ProjectTypeGuids(fileName);
// Launch the aggregate creation process (we should be called back on our IVsAggregatableProjectFactoryCorrected implementation)
IVsCreateAggregateProject aggregateProjectFactory = (IVsCreateAggregateProject)this.Site.GetService(typeof(SVsCreateAggregateProject));
int hr = aggregateProjectFactory.CreateAggregateProject(guidsList, fileName, location, name, flags, ref projectGuid, out project);
if(hr == VSConstants.E_ABORT)
canceled = 1;
ErrorHandler.ThrowOnFailure(hr);
// This needs to be done after the aggregation is completed (to avoid creating a non-aggregated CCW) and as a result we have to go through the interface
IProjectEventsProvider eventsProvider = (IProjectEventsProvider)Marshal.GetTypedObjectForIUnknown(project, typeof(IProjectEventsProvider));
eventsProvider.ProjectEventsProvider = this.GetProjectEventsProvider();
this.buildProject = null;
}
/// <summary>
/// Instantiate the project class, but do not proceed with the
/// initialization just yet.
/// Delegate to CreateProject implemented by the derived class.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
Justification="The global property handles are instantiated here and used in the project node that will Dispose them")]
protected override object PreCreateForOuter(IntPtr outerProjectIUnknown)
{
Debug.Assert(this.buildProject != null, "The build project should have been initialized before calling PreCreateForOuter.");
// Please be very careful about what is initialized here on the ProjectNode. Normally this should only instantiate and return a project node.
// The reason to be careful about adding state to the project node here is that at this point the aggregation has not yet been created, and anything that would cause a CCW for the project to be created would cause the aggregation to fail.
// Our reasoning is that there is no other place where state on the project node can be set that is known by the Factory and has to execute before the Load method.
ProjectNode node = this.CreateProject();
Debug.Assert(node != null, "The project failed to be created");
node.BuildEngine = this.buildEngine;
node.BuildProject = this.buildProject;
node.Package = this.package as ProjectPackage;
return node;
}
/// <summary>
/// Retrieves the list of project GUIDs from the project file.
/// If you don't want your project to be flavorable, override
/// to only return your project factory Guid:
/// return this.GetType().GUID.ToString("B");
/// </summary>
/// <param name="file">Project file to look into to find the Guid list</param>
/// <returns>List of semi-colon separated GUIDs</returns>
protected override string ProjectTypeGuids(string file)
{
// Load the project so we can extract the list of GUIDs
this.buildProject = Utilities.ReinitializeMsBuildProject(this.buildEngine, file, this.buildProject);
// Retrieve the list of GUIDs; if it is not specified, fall back to our own GUID
string guids = buildProject.GetPropertyValue(ProjectFileConstants.ProjectTypeGuids);
if(String.IsNullOrEmpty(guids))
guids = this.GetType().GUID.ToString("B");
return guids;
}
#endregion
#region helpers
private IProjectEvents GetProjectEventsProvider()
{
ProjectPackage projectPackage = this.package as ProjectPackage;
Debug.Assert(projectPackage != null, "Package not inherited from framework");
if(projectPackage != null)
{
foreach(SolutionListener listener in projectPackage.SolutionListeners)
{
IProjectEvents projectEvents = listener as IProjectEvents;
if(projectEvents != null)
{
return projectEvents;
}
}
}
return null;
}
#endregion
}
}
| |
//
// Copyright (c) 2004-2018 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System.Linq;
using System.Text;
#pragma warning disable 0618
namespace NLog.UnitTests.Contexts
{
using System;
using System.Collections.Generic;
using System.Threading;
using Xunit;
public class NestedDiagnosticsContextTests
{
[Fact]
public void NDCTest1()
{
List<Exception> exceptions = new List<Exception>();
ManualResetEvent mre = new ManualResetEvent(false);
int counter = 100;
int remaining = counter;
for (int i = 0; i < counter; ++i)
{
ThreadPool.QueueUserWorkItem(
s =>
{
try
{
NestedDiagnosticsContext.Clear();
Assert.Equal(string.Empty, NestedDiagnosticsContext.TopMessage);
Assert.Equal(string.Empty, NestedDiagnosticsContext.Pop());
AssertContents(NestedDiagnosticsContext.GetAllMessages());
using (NestedDiagnosticsContext.Push("foo"))
{
Assert.Equal("foo", NestedDiagnosticsContext.TopMessage);
AssertContents(NestedDiagnosticsContext.GetAllMessages(), "foo");
using (NestedDiagnosticsContext.Push("bar"))
{
AssertContents(NestedDiagnosticsContext.GetAllMessages(), "bar", "foo");
Assert.Equal("bar", NestedDiagnosticsContext.TopMessage);
NestedDiagnosticsContext.Push("baz");
AssertContents(NestedDiagnosticsContext.GetAllMessages(), "baz", "bar", "foo");
Assert.Equal("baz", NestedDiagnosticsContext.TopMessage);
Assert.Equal("baz", NestedDiagnosticsContext.Pop());
AssertContents(NestedDiagnosticsContext.GetAllMessages(), "bar", "foo");
Assert.Equal("bar", NestedDiagnosticsContext.TopMessage);
}
AssertContents(NestedDiagnosticsContext.GetAllMessages(), "foo");
Assert.Equal("foo", NestedDiagnosticsContext.TopMessage);
}
AssertContents(NestedDiagnosticsContext.GetAllMessages());
Assert.Equal(string.Empty, NestedDiagnosticsContext.Pop());
}
catch (Exception ex)
{
lock (exceptions)
{
exceptions.Add(ex);
}
}
finally
{
if (Interlocked.Decrement(ref remaining) == 0)
{
mre.Set();
}
}
});
}
mre.WaitOne();
StringBuilder exceptionsMessage = new StringBuilder();
foreach (var ex in exceptions)
{
if (exceptionsMessage.Length > 0)
{
exceptionsMessage.Append("\r\n");
}
exceptionsMessage.Append(ex.ToString());
}
Assert.True(exceptions.Count == 0, exceptionsMessage.ToString());
}
[Fact]
public void NDCTest2()
{
List<Exception> exceptions = new List<Exception>();
ManualResetEvent mre = new ManualResetEvent(false);
int counter = 100;
int remaining = counter;
for (int i = 0; i < counter; ++i)
{
ThreadPool.QueueUserWorkItem(
s =>
{
try
{
NDC.Clear();
Assert.Equal(string.Empty, NDC.TopMessage);
Assert.Equal(string.Empty, NDC.Pop());
AssertContents(NDC.GetAllMessages());
using (NDC.Push("foo"))
{
Assert.Equal("foo", NDC.TopMessage);
AssertContents(NDC.GetAllMessages(), "foo");
using (NDC.Push("bar"))
{
AssertContents(NDC.GetAllMessages(), "bar", "foo");
Assert.Equal("bar", NDC.TopMessage);
NDC.Push("baz");
AssertContents(NDC.GetAllMessages(), "baz", "bar", "foo");
Assert.Equal("baz", NDC.TopMessage);
Assert.Equal("baz", NDC.Pop());
AssertContents(NDC.GetAllMessages(), "bar", "foo");
Assert.Equal("bar", NDC.TopMessage);
}
AssertContents(NDC.GetAllMessages(), "foo");
Assert.Equal("foo", NDC.TopMessage);
}
AssertContents(NDC.GetAllMessages());
Assert.Equal(string.Empty, NDC.Pop());
}
catch (Exception ex)
{
lock (exceptions)
{
exceptions.Add(ex);
}
}
finally
{
if (Interlocked.Decrement(ref remaining) == 0)
{
mre.Set();
}
}
});
}
mre.WaitOne();
StringBuilder exceptionsMessage = new StringBuilder();
foreach (var ex in exceptions)
{
if (exceptionsMessage.Length > 0)
{
exceptionsMessage.Append("\r\n");
}
exceptionsMessage.Append(ex.ToString());
}
Assert.True(exceptions.Count == 0, exceptionsMessage.ToString());
}
[Fact]
public void NDCTest2_object()
{
List<Exception> exceptions = new List<Exception>();
ManualResetEvent mre = new ManualResetEvent(false);
int counter = 100;
int remaining = counter;
for (int i = 0; i < counter; ++i)
{
ThreadPool.QueueUserWorkItem(
s =>
{
try
{
NDC.Clear();
Assert.Null(NDC.TopObject);
Assert.Null(NDC.PopObject());
AssertContents(NDC.GetAllMessages());
using (NDC.Push("foo"))
{
Assert.Equal("foo", NDC.TopObject);
AssertContents(NDC.GetAllObjects(), "foo");
using (NDC.Push("bar"))
{
AssertContents(NDC.GetAllObjects(), "bar", "foo");
Assert.Equal("bar", NDC.TopObject);
NDC.Push("baz");
AssertContents(NDC.GetAllObjects(), "baz", "bar", "foo");
Assert.Equal("baz", NDC.TopObject);
Assert.Equal("baz", NDC.PopObject());
AssertContents(NDC.GetAllObjects(), "bar", "foo");
Assert.Equal("bar", NDC.TopObject);
}
AssertContents(NDC.GetAllObjects(), "foo");
Assert.Equal("foo", NDC.TopObject);
}
AssertContents(NDC.GetAllMessages());
Assert.Null(NDC.PopObject());
}
catch (Exception ex)
{
lock (exceptions)
{
exceptions.Add(ex);
}
}
finally
{
if (Interlocked.Decrement(ref remaining) == 0)
{
mre.Set();
}
}
});
}
mre.WaitOne();
StringBuilder exceptionsMessage = new StringBuilder();
foreach (var ex in exceptions)
{
if (exceptionsMessage.Length > 0)
{
exceptionsMessage.Append("\r\n");
}
exceptionsMessage.Append(ex.ToString());
}
Assert.True(exceptions.Count == 0, exceptionsMessage.ToString());
}
private static void AssertContents(object[] actual, params string[] expected)
{
Assert.Equal(expected.Length, actual.Length);
for (int i = 0; i < expected.Length; ++i)
{
Assert.Equal(expected[i], actual[i]);
}
}
private static void AssertContents(string[] actual, params string[] expected)
{
Assert.Equal(expected.Length, actual.Length);
for (int i = 0; i < expected.Length; ++i)
{
Assert.Equal(expected[i], actual[i]);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Orchard.Logging;
namespace Orchard.ContentManagement.Handlers {
public abstract class ContentHandler : IContentHandler {
protected ContentHandler() {
Filters = new List<IContentFilter>();
Logger = NullLogger.Instance;
}
public List<IContentFilter> Filters { get; set; }
public ILogger Logger { get; set; }
protected void OnActivated<TPart>(Action<ActivatedContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnActivated = handler });
}
protected void OnInitializing<TPart>(Action<InitializingContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnInitializing = handler });
}
protected void OnInitialized<TPart>(Action<InitializingContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnInitialized = handler });
}
protected void OnCreating<TPart>(Action<CreateContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnCreating = handler });
}
protected void OnCreated<TPart>(Action<CreateContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnCreated = handler });
}
protected void OnLoading<TPart>(Action<LoadContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnLoading = handler });
}
protected void OnLoaded<TPart>(Action<LoadContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnLoaded = handler });
}
protected void OnUpdating<TPart>(Action<UpdateContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnUpdating = handler });
}
protected void OnUpdated<TPart>(Action<UpdateContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnUpdated = handler });
}
protected void OnVersioning<TPart>(Action<VersionContentContext, TPart, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnVersioning = handler });
}
protected void OnVersioned<TPart>(Action<VersionContentContext, TPart, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnVersioned = handler });
}
protected void OnPublishing<TPart>(Action<PublishContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnPublishing = handler });
}
protected void OnPublished<TPart>(Action<PublishContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnPublished = handler });
}
protected void OnUnpublishing<TPart>(Action<PublishContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnUnpublishing = handler });
}
protected void OnUnpublished<TPart>(Action<PublishContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnUnpublished = handler });
}
protected void OnRemoving<TPart>(Action<RemoveContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnRemoving = handler });
}
protected void OnRemoved<TPart>(Action<RemoveContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnRemoved = handler });
}
protected void OnDestroying<TPart>(Action<DestroyContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnDestroying = handler });
}
protected void OnDestroyed<TPart>(Action<DestroyContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnDestroyed = handler });
}
protected void OnIndexing<TPart>(Action<IndexContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnIndexing = handler });
}
protected void OnIndexed<TPart>(Action<IndexContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnIndexed = handler });
}
protected void OnCloning<TPart>(Action<CloneContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnCloning = handler });
}
protected void OnCloned<TPart>(Action<CloneContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnCloned = handler });
}
protected void OnImporting<TPart>(Action<ImportContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnImporting = handler });
}
protected void OnImported<TPart>(Action<ImportContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnImported = handler });
}
protected void OnImportCompleted<TPart>(Action<ImportContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnImportCompleted = handler });
}
protected void OnExporting<TPart>(Action<ExportContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnExporting = handler });
}
protected void OnExported<TPart>(Action<ExportContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnExported = handler });
}
protected void OnRestoring<TPart>(Action<RestoreContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnRestoring = handler });
}
protected void OnRestored<TPart>(Action<RestoreContentContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineStorageFilter<TPart> { OnRestored = handler });
}
protected void OnGetContentItemMetadata<TPart>(Action<GetContentItemMetadataContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineTemplateFilter<TPart> { OnGetItemMetadata = handler });
}
protected void OnGetDisplayShape<TPart>(Action<BuildDisplayContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineTemplateFilter<TPart> { OnGetDisplayShape = handler });
}
protected void OnGetEditorShape<TPart>(Action<BuildEditorContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineTemplateFilter<TPart> { OnGetEditorShape = handler });
}
protected void OnUpdateEditorShape<TPart>(Action<UpdateEditorContext, TPart> handler) where TPart : class, IContent {
Filters.Add(new InlineTemplateFilter<TPart> { OnUpdateEditorShape = handler });
}
class InlineStorageFilter<TPart> : StorageFilterBase<TPart> where TPart : class, IContent {
public Action<ActivatedContentContext, TPart> OnActivated { get; set; }
public Action<InitializingContentContext, TPart> OnInitializing { get; set; }
public Action<InitializingContentContext, TPart> OnInitialized { get; set; }
public Action<CreateContentContext, TPart> OnCreating { get; set; }
public Action<CreateContentContext, TPart> OnCreated { get; set; }
public Action<LoadContentContext, TPart> OnLoading { get; set; }
public Action<LoadContentContext, TPart> OnLoaded { get; set; }
public Action<UpdateContentContext, TPart> OnUpdating { get; set; }
public Action<UpdateContentContext, TPart> OnUpdated { get; set; }
public Action<VersionContentContext, TPart, TPart> OnVersioning { get; set; }
public Action<VersionContentContext, TPart, TPart> OnVersioned { get; set; }
public Action<PublishContentContext, TPart> OnPublishing { get; set; }
public Action<PublishContentContext, TPart> OnPublished { get; set; }
public Action<PublishContentContext, TPart> OnUnpublishing { get; set; }
public Action<PublishContentContext, TPart> OnUnpublished { get; set; }
public Action<RemoveContentContext, TPart> OnRemoving { get; set; }
public Action<RemoveContentContext, TPart> OnRemoved { get; set; }
public Action<IndexContentContext, TPart> OnIndexing { get; set; }
public Action<IndexContentContext, TPart> OnIndexed { get; set; }
public Action<CloneContentContext, TPart> OnCloning { get; set; }
public Action<CloneContentContext, TPart> OnCloned { get; set; }
public Action<ImportContentContext, TPart> OnImporting { get; set; }
public Action<ImportContentContext, TPart> OnImported { get; set; }
public Action<ImportContentContext, TPart> OnImportCompleted { get; set; }
public Action<ExportContentContext, TPart> OnExporting { get; set; }
public Action<ExportContentContext, TPart> OnExported { get; set; }
public Action<RestoreContentContext, TPart> OnRestoring { get; set; }
public Action<RestoreContentContext, TPart> OnRestored { get; set; }
public Action<DestroyContentContext, TPart> OnDestroying { get; set; }
public Action<DestroyContentContext, TPart> OnDestroyed { get; set; }
protected override void Activated(ActivatedContentContext context, TPart instance) {
if (OnActivated != null) OnActivated(context, instance);
}
protected override void Initializing(InitializingContentContext context, TPart instance) {
if (OnInitializing != null) OnInitializing(context, instance);
}
protected override void Initialized(InitializingContentContext context, TPart instance) {
if (OnInitialized != null) OnInitialized(context, instance);
}
protected override void Creating(CreateContentContext context, TPart instance) {
if (OnCreating != null) OnCreating(context, instance);
}
protected override void Created(CreateContentContext context, TPart instance) {
if (OnCreated != null) OnCreated(context, instance);
}
protected override void Loading(LoadContentContext context, TPart instance) {
if (OnLoading != null) OnLoading(context, instance);
}
protected override void Loaded(LoadContentContext context, TPart instance) {
if (OnLoaded != null) OnLoaded(context, instance);
}
protected override void Updating(UpdateContentContext context, TPart instance) {
if (OnUpdating != null) OnUpdating(context, instance);
}
protected override void Updated(UpdateContentContext context, TPart instance) {
if (OnUpdated != null) OnUpdated(context, instance);
}
protected override void Versioning(VersionContentContext context, TPart existing, TPart building) {
if (OnVersioning != null) OnVersioning(context, existing, building);
}
protected override void Versioned(VersionContentContext context, TPart existing, TPart building) {
if (OnVersioned != null) OnVersioned(context, existing, building);
}
protected override void Publishing(PublishContentContext context, TPart instance) {
if (OnPublishing != null) OnPublishing(context, instance);
}
protected override void Published(PublishContentContext context, TPart instance) {
if (OnPublished != null) OnPublished(context, instance);
}
protected override void Unpublishing(PublishContentContext context, TPart instance) {
if (OnUnpublishing != null) OnUnpublishing(context, instance);
}
protected override void Unpublished(PublishContentContext context, TPart instance) {
if (OnUnpublished != null) OnUnpublished(context, instance);
}
protected override void Removing(RemoveContentContext context, TPart instance) {
if (OnRemoving != null) OnRemoving(context, instance);
}
protected override void Removed(RemoveContentContext context, TPart instance) {
if (OnRemoved != null) OnRemoved(context, instance);
}
protected override void Indexing(IndexContentContext context, TPart instance) {
if (OnIndexing != null)
OnIndexing(context, instance);
}
protected override void Indexed(IndexContentContext context, TPart instance) {
if (OnIndexed != null)
OnIndexed(context, instance);
}
protected override void Cloning(CloneContentContext context, TPart instance) {
if (OnCloning != null)
OnCloning(context, instance);
}
protected override void Cloned(CloneContentContext context, TPart instance) {
if (OnCloned != null)
OnCloned(context, instance);
}
protected override void Importing(ImportContentContext context, TPart instance) {
if (OnImporting != null)
OnImporting(context, instance);
}
protected override void Imported(ImportContentContext context, TPart instance) {
if (OnImported != null)
OnImported(context, instance);
}
protected override void ImportCompleted(ImportContentContext context, TPart instance) {
if (OnImportCompleted != null)
OnImportCompleted(context, instance);
}
protected override void Exporting(ExportContentContext context, TPart instance) {
if (OnExporting != null)
OnExporting(context, instance);
}
protected override void Exported(ExportContentContext context, TPart instance) {
if (OnExported != null)
OnExported(context, instance);
}
protected override void Restoring(RestoreContentContext context, TPart instance) {
if (OnRestoring != null)
OnRestoring(context, instance);
}
protected override void Restored(RestoreContentContext context, TPart instance) {
if (OnRestored != null)
OnRestored(context, instance);
}
protected override void Destroying(DestroyContentContext context, TPart instance) {
if (OnDestroying != null)
OnDestroying(context, instance);
}
protected override void Destroyed(DestroyContentContext context, TPart instance) {
if (OnDestroyed != null)
OnDestroyed(context, instance);
}
}
class InlineTemplateFilter<TPart> : TemplateFilterBase<TPart> where TPart : class, IContent {
public Action<GetContentItemMetadataContext, TPart> OnGetItemMetadata { get; set; }
public Action<BuildDisplayContext, TPart> OnGetDisplayShape { get; set; }
public Action<BuildEditorContext, TPart> OnGetEditorShape { get; set; }
public Action<UpdateEditorContext, TPart> OnUpdateEditorShape { get; set; }
protected override void GetContentItemMetadata(GetContentItemMetadataContext context, TPart instance) {
if (OnGetItemMetadata != null) OnGetItemMetadata(context, instance);
}
protected override void BuildDisplayShape(BuildDisplayContext context, TPart instance) {
if (OnGetDisplayShape != null) OnGetDisplayShape(context, instance);
}
protected override void BuildEditorShape(BuildEditorContext context, TPart instance) {
if (OnGetEditorShape != null) OnGetEditorShape(context, instance);
}
protected override void UpdateEditorShape(UpdateEditorContext context, TPart instance) {
if (OnUpdateEditorShape != null) OnUpdateEditorShape(context, instance);
}
}
void IContentHandler.Activating(ActivatingContentContext context) {
foreach (var filter in Filters.OfType<IContentActivatingFilter>())
filter.Activating(context);
Activating(context);
}
void IContentHandler.Activated(ActivatedContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Activated(context);
Activated(context);
}
void IContentHandler.Initializing(InitializingContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Initializing(context);
Initializing(context);
}
void IContentHandler.Initialized(InitializingContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Initialized(context);
Initialized(context);
}
void IContentHandler.Creating(CreateContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Creating(context);
Creating(context);
}
void IContentHandler.Created(CreateContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Created(context);
Created(context);
}
void IContentHandler.Loading(LoadContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Loading(context);
Loading(context);
}
void IContentHandler.Loaded(LoadContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Loaded(context);
Loaded(context);
}
void IContentHandler.Updating(UpdateContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Updating(context);
Updating(context);
}
void IContentHandler.Updated(UpdateContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Updated(context);
Updated(context);
}
void IContentHandler.Versioning(VersionContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Versioning(context);
Versioning(context);
}
void IContentHandler.Versioned(VersionContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Versioned(context);
Versioned(context);
}
void IContentHandler.Publishing(PublishContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Publishing(context);
Publishing(context);
}
void IContentHandler.Published(PublishContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Published(context);
Published(context);
}
void IContentHandler.Unpublishing(PublishContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Unpublishing(context);
Unpublishing(context);
}
void IContentHandler.Unpublished(PublishContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Unpublished(context);
Unpublished(context);
}
void IContentHandler.Removing(RemoveContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Removing(context);
Removing(context);
}
void IContentHandler.Removed(RemoveContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Removed(context);
Removed(context);
}
void IContentHandler.Indexing(IndexContentContext context) {
foreach ( var filter in Filters.OfType<IContentStorageFilter>() )
filter.Indexing(context);
Indexing(context);
}
void IContentHandler.Indexed(IndexContentContext context) {
foreach ( var filter in Filters.OfType<IContentStorageFilter>() )
filter.Indexed(context);
Indexed(context);
}
void IContentHandler.Importing(ImportContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Importing(context);
Importing(context);
}
void IContentHandler.Cloned(CloneContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Cloned(context);
Cloned(context);
}
void IContentHandler.Cloning(CloneContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Cloning(context);
Cloning(context);
}
void IContentHandler.Imported(ImportContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Imported(context);
Imported(context);
}
void IContentHandler.ImportCompleted(ImportContentContext importContentContext) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.ImportCompleted(importContentContext);
ImportCompleted(importContentContext);
}
void IContentHandler.Exporting(ExportContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Exporting(context);
Exporting(context);
}
void IContentHandler.Exported(ExportContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Exported(context);
Exported(context);
}
void IContentHandler.Restoring(RestoreContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Restoring(context);
Restoring(context);
}
void IContentHandler.Restored(RestoreContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Restored(context);
Restored(context);
}
void IContentHandler.Destroying(DestroyContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Destroying(context);
Destroying(context);
}
void IContentHandler.Destroyed(DestroyContentContext context) {
foreach (var filter in Filters.OfType<IContentStorageFilter>())
filter.Destroyed(context);
Destroyed(context);
}
void IContentHandler.GetContentItemMetadata(GetContentItemMetadataContext context) {
foreach (var filter in Filters.OfType<IContentTemplateFilter>())
filter.GetContentItemMetadata(context);
GetItemMetadata(context);
}
void IContentHandler.BuildDisplay(BuildDisplayContext context) {
foreach (var filter in Filters.OfType<IContentTemplateFilter>())
filter.BuildDisplayShape(context);
BuildDisplayShape(context);
}
void IContentHandler.BuildEditor(BuildEditorContext context) {
foreach (var filter in Filters.OfType<IContentTemplateFilter>())
filter.BuildEditorShape(context);
BuildEditorShape(context);
}
void IContentHandler.UpdateEditor(UpdateEditorContext context) {
foreach (var filter in Filters.OfType<IContentTemplateFilter>())
filter.UpdateEditorShape(context);
UpdateEditorShape(context);
}
protected virtual void Activating(ActivatingContentContext context) { }
protected virtual void Activated(ActivatedContentContext context) { }
protected virtual void Initializing(InitializingContentContext context) { }
protected virtual void Initialized(InitializingContentContext context) { }
protected virtual void Creating(CreateContentContext context) { }
protected virtual void Created(CreateContentContext context) { }
protected virtual void Loading(LoadContentContext context) { }
protected virtual void Loaded(LoadContentContext context) { }
protected virtual void Updating(UpdateContentContext context) { }
protected virtual void Updated(UpdateContentContext context) { }
protected virtual void Versioning(VersionContentContext context) { }
protected virtual void Versioned(VersionContentContext context) { }
protected virtual void Publishing(PublishContentContext context) { }
protected virtual void Published(PublishContentContext context) { }
protected virtual void Unpublishing(PublishContentContext context) { }
protected virtual void Unpublished(PublishContentContext context) { }
protected virtual void Removing(RemoveContentContext context) { }
protected virtual void Removed(RemoveContentContext context) { }
protected virtual void Indexing(IndexContentContext context) { }
protected virtual void Indexed(IndexContentContext context) { }
protected virtual void Cloning(CloneContentContext context) { }
protected virtual void Cloned(CloneContentContext context) { }
protected virtual void Importing(ImportContentContext context) { }
protected virtual void Imported(ImportContentContext context) { }
protected virtual void ImportCompleted(ImportContentContext context) { }
protected virtual void Exporting(ExportContentContext context) { }
protected virtual void Exported(ExportContentContext context) { }
protected virtual void Restoring(RestoreContentContext context) { }
protected virtual void Restored(RestoreContentContext context) { }
protected virtual void Destroying(DestroyContentContext context) { }
protected virtual void Destroyed(DestroyContentContext context) { }
protected virtual void GetItemMetadata(GetContentItemMetadataContext context) { }
protected virtual void BuildDisplayShape(BuildDisplayContext context) { }
protected virtual void BuildEditorShape(BuildEditorContext context) { }
protected virtual void UpdateEditorShape(UpdateEditorContext context) { }
}
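// Illustrative sketch (hypothetical handler, not part of Orchard): shows how a derived handler
// wires inline storage filters through the protected On* helpers above. The handler name is an
// assumption for the example; ContentPart is used only to keep the example generic, a real
// handler would typically register a specific part type.
public class InlineFilterExampleHandler : ContentHandler {
public InlineFilterExampleHandler() {
OnPublished<ContentPart>((context, part) =>
Logger.Information("Published content item of type {0}", context.ContentItem.ContentType));
OnRemoved<ContentPart>((context, part) =>
Logger.Information("Removed content item {0}", context.ContentItem.Id));
}
}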
}
| |
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Collections;
using System.Globalization;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using Vector3 = Microsoft.Xna.Framework.Vector3;
using FlatRedBall.Input;
using FlatRedBall.ManagedSpriteGroups;
using FlatRedBall.Graphics;
using FlatRedBall.Utilities;
using System.Collections.ObjectModel;
using Color = Microsoft.Xna.Framework.Color;
using FlatRedBall.IO;
namespace FlatRedBall.Gui
{
#region Classes
public static class EnumerableExtensionMethods
{
public static bool Contains(this IEnumerable<IWindow> enumerable, IWindow item)
{
foreach (var window in enumerable)
{
if (window == item)
{
return true;
}
}
return false;
}
}
#endregion
#region Enums
public enum Sides
{
None = 0,
Top = 1,
Bottom = 2,
Left = 4,
Right = 8,
TopLeft = Top | Left,
TopRight = Top | Right,
BottomRight = Bottom | Right,
BottomLeft = Bottom | Left
}
#endregion
public delegate void WindowEvent(IWindow window);
public static partial class GuiManager
{
#region Enums
public enum GuiControl
{
Mouse = 0,
Joystick = 1
}
public enum VisibilityPreservation
{
PreserveVisibility,
OverwriteVisibility
}
#endregion
#region Fields
static bool mUIEnabled;
public const string InternalGuiContentManagerName = "FlatRedBall Internal GUI";
static WindowArray mWindowArray;
static ReadOnlyCollection<IWindow> mReadOnlyWindowArray;
// Perishable windows are windows which exist until
// the user clicks. This is common for ComboBoxes and MenuStrips.
// When a Perishable Window is removed from the GuiManager it is both
// made invisible and removed. This can help parent windows of
// Perishable Windows know when a Perishable Window has perished.
static WindowArray mPerishableArray = new WindowArray();
static ReadOnlyCollection<IWindow> mPerishableArrayReadOnly;
/// <summary>
/// A stack of Windows which demand input from the cursor.
/// </summary>
/// <remarks>
/// When a dominant window is active, the cursor will not be able to interact with other windows. If RemoveInvisibleDominantWindows
/// is set to true (the default depends on the platform; see Initialize) then the GuiManager will remove any invisible dominant windows from its
/// memory.
/// In other words a DominantWindow can be removed either through the traditional Remove methods or by
/// setting the Window's Visible property to false if RemoveInvisibleDominantWindows is true.
/// </remarks>
static List<IWindow> mDominantWindows;
static List<Cursor> mCursors;
//static bool mReceivingInputJustSet;// = false;
// This variable keeps track of the last UI element
// which had focus (was clicked on). Whenever the user
// clicks on a UI element, it is compared to the mLastWindowWithFocus.
// If the window clicked on does not match mLastWindowWithFocus, the
// mLastWindowWithFocus's callOnLosingFocus gets called.
internal static IWindow mLastWindowWithFocus;// = null;
// Controls whether the tool tip is shown
static public bool ShowingCursorTextBox;
static public System.Globalization.NumberFormatInfo nfi;
static public float TextHeight;
static public float TextSpacing;
public static List<String> renderingNotes;
private static float mOverridingFieldOfView = float.NaN;
#region XML Docs
/// <summary>
/// Sets the tool tip to show
/// </summary>
/// <remarks>
/// Some UI elements like Buttons automatically show
/// a tool tip. This property can be used to overwrite
/// what is shown, or to show tool tips when the user is
/// over a non-UI element.
/// </remarks>
#endregion
private static string mToolTipText;
static WindowArray mPerishableWindowsToSurviveClick = new WindowArray();
#endregion
#region Properties
public static bool RemoveInvisibleDominantWindows
{
get;
set;
}
public static bool DrawCursorEvenIfThereIsNoUI
{
get;
set;
}
static public bool DominantWindowActive
{
get
{
foreach (IWindow window in mDominantWindows)
{
if (window.AbsoluteVisible)
{
return true;
}
}
return false;
// July 29, 2012
// Now the GuiManager
// can handle invisible
// dominant windows. Therefore
// we have to see what is visible.
// return mDominantWindows.Count != 0;
}
}
public static Cursor Cursor
{
get
{
try
{
return mCursors[0];
}
catch
{
throw new InvalidOperationException(
"There are no Cursors created yet - has FlatRedBall been initialized?");
}
}
}
public static List<Cursor> Cursors
{
get { return mCursors; }
}
public static Camera Camera
{
get { return SpriteManager.Camera; }
}
public static bool IsUIEnabled
{
get{ return mUIEnabled;}
set{ mUIEnabled = value;}
}
public static float OverridingFieldOfView
{
get { return mOverridingFieldOfView; }
set { mOverridingFieldOfView = value; }
}
public static ReadOnlyCollection<IWindow> PerishableWindows
{
get { return mPerishableArrayReadOnly; }
}
public static IEnumerable<IWindow> DominantWindows
{
get { return mDominantWindows; }
}
public static string ToolTipText
{
get { return mToolTipText; }
set { mToolTipText = value; }
}
#if !SILVERLIGHT
public static ReadOnlyCollection<IWindow> Windows
{
get { return mReadOnlyWindowArray; }
}
#endif
public static float XEdge
{
get
{
return YEdge * Camera.AspectRatio;
}
}
public static float YEdge
{
get
{
if (!float.IsNaN(mOverridingFieldOfView))
{
return (float)(100 * System.Math.Tan(mOverridingFieldOfView / 2.0));
}
else
{
return Camera.YEdge;
}
}
}
public static bool BringsClickedWindowsToFront { get; set; }
public static float UnmodifiedXEdge
{
get
{
return UnmodifiedYEdge * 4 / 3.0f;
}
}
public static float UnmodifiedYEdge
{
get
{
return (float)(100 * System.Math.Tan((System.Math.PI / 4.0f) / 2.0));
}
}
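// Note (derived from the formulas above): UnmodifiedYEdge is the half-height of the UI plane for
// the default PI/4 field of view at a distance of 100 units, 100 * tan(PI/8), which is roughly 41.42,
// and UnmodifiedXEdge assumes a 4:3 aspect ratio, giving roughly 55.23.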
#endregion
#region Methods
#region Constructors
#if FRB_MDX
internal static void Initialize(string guiTextureToUse, System.Windows.Forms.Control form)
{
Initialize(guiTextureToUse, form, new Cursor(SpriteManager.Camera, form));
}
internal static void Initialize(string guiTextureToUse, System.Windows.Forms.Control form, Cursor cursor)
#else
// made public for unit tests
public static void Initialize(Texture2D guiTextureToUse, Cursor cursor)
#endif
{
#if FRB_MDX || XNA3_1
RemoveInvisibleDominantWindows = true;
#else
RemoveInvisibleDominantWindows = false;
#endif
mPerishableArrayReadOnly = new ReadOnlyCollection<IWindow>(mPerishableArray);
// Currently FRB XNA defaults to not using the UI, while FRB MDX defaults to true
TextHeight = 2;
TextSpacing = 1;
mUIEnabled = true;
// sr.WriteLine("Inside the GuiManager constructor");
// sr.Close();
mCursors = new List<Cursor>();
mCursors.Add(cursor);
mWindowArray = new WindowArray();
mReadOnlyWindowArray = new ReadOnlyCollection<IWindow>(mWindowArray);
mDominantWindows = new List<IWindow>();
#if !MONOGAME && !SILVERLIGHT && !UNIT_TESTS && !XNA4
RenderingBasedInitializize();
#endif
BringsClickedWindowsToFront = true;
try
{
#if FRB_MDX
if (System.IO.File.Exists(FlatRedBall.IO.FileManager.RelativeDirectory + "Assets/Textures/upDirectory.bmp"))
{
mUpDirectory = FlatRedBallServices.Load<Texture2D>(
FlatRedBall.IO.FileManager.RelativeDirectory + "Assets/Textures/upDirectory.bmp",
InternalGuiContentManagerName);
}
if (System.IO.File.Exists(FlatRedBall.IO.FileManager.RelativeDirectory + "Assets/Textures/cursorTextBox.bmp"))
{
mCursorTextBox = FlatRedBallServices.Load<Texture2D>(
FlatRedBall.IO.FileManager.RelativeDirectory + "Assets/Textures/cursorTextBox.bmp",
InternalGuiContentManagerName);
}
if (guiTextureToUse != null && guiTextureToUse != "")
{
guiTexture = FlatRedBallServices.Load<Texture2D>(
guiTextureToUse, InternalGuiContentManagerName);
RefreshTextSize();
}
#elif SUPPORTS_FRB_DRAWN_GUI
guiTexture = guiTextureToUse;
RefreshTextSize();
#endif
}
catch(Exception e)
{
throw e;
}
try
{
nfi = new System.Globalization.NumberFormatInfo();
// Replaced the above line with the one below to use streamed images.
ShowingCursorTextBox = true;
renderingNotes = new List<String>();
}
catch(Exception e)
{
throw e;
}
#if SUPPORTS_FRB_DRAWN_GUI
SetPropertyGridTypeAssociations();
#endif
// Let's do some updates because we want to make sure our "last" values are set to the current value
// so we don't have any movement on the cursor initially:
cursor.Update(TimeManager.CurrentTime);
cursor.Update(TimeManager.CurrentTime);
}
#endregion
#region Public Methods
#region adding gui component methods
public static void AddWindow(IWindow windowToAdd)
{
#if DEBUG
if (windowToAdd == null)
{
throw new ArgumentException("Argument Window can't be null");
}
if (mWindowArray.Contains(windowToAdd))
{
int index = mWindowArray.IndexOf(windowToAdd);
throw new ArgumentException("This window has already been added to the GuiManager. It is at index " + index);
}
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Windows can only be added on the primary thread");
}
#endif
mWindowArray.Add(windowToAdd);
if (BringsClickedWindowsToFront == false)
{
InsertionSort(mWindowArray, WindowComparisonForSorting);
}
}
#endregion
internal static float GetYOffsetForModifiedAspectRatio()
{
// make 0,0 the top-left
float unmodifiedYEdge = UnmodifiedYEdge;
float offset = unmodifiedYEdge - GuiManager.YEdge;
return offset;
}
internal static float GetXOffsetForModifiedAspectRatio()
{
float unmodifiedXEdge = UnmodifiedXEdge;
return unmodifiedXEdge - GuiManager.XEdge;
}
#if !XBOX360 && !SILVERLIGHT && !WINDOWS_PHONE && !MONOGAME && !WINDOWS_8
static public Cursor AddCursor(Camera camera, System.Windows.Forms.Form form)
{
#if FRB_MDX
Cursor cursorToAdd = new Cursor(camera, form);
#else
Cursor cursorToAdd = new Cursor(camera);
#endif
cursorToAdd.SetCursor(
FlatRedBallServices.Load<Texture2D>("Assets/Textures/cursor1.bmp", InternalGuiContentManagerName), -.5f, 1);
mCursors.Add(cursorToAdd);
return cursorToAdd;
}
#endif
#region XML Docs
/// <summary>
/// Adds a window as a Dominant Window. If the window is a regular Window
/// already managed by the GuiManager it will be removed from the regularly-managed
/// windows.
/// </summary>
/// <param name="windowToSet">The window to add to the Dominant Window stack.</param>
#endregion
static public void AddDominantWindow(IWindow window)
{
#if DEBUG
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Dominant windows can only be added or modified on the primary thread");
}
#endif
// Let's make these tolerant
if (mWindowArray.Contains(window))
{
mWindowArray.Remove(window);
}
if (!mDominantWindows.Contains(window))
{
mDominantWindows.Add(window);
}
}
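// Illustrative usage (hypothetical window variable): a modal confirmation dialog can be made
// dominant so it blocks interaction with other windows, and, when RemoveInvisibleDominantWindows
// is true, it is removed automatically on the next Control() pass once it is hidden:
//   GuiManager.AddDominantWindow(confirmationDialog);
//   ...
//   confirmationDialog.Visible = false;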
#if !SILVERLIGHT
public static void BringToFront(IWindow windowToBringToFront)
{
if (windowToBringToFront.Parent == null)
{
if (mWindowArray.Contains(windowToBringToFront) == false)
return;
mWindowArray.Remove(windowToBringToFront); mWindowArray.Add(windowToBringToFront);
}
else
{
#if SUPPORTS_FRB_DRAWN_GUI
Window parentwindow = (Window)windowToBringToFront.Parent;
parentwindow.BringToFront(windowToBringToFront);
#endif
}
}
public static void SendToBack(IWindow windowToSendToBack)
{
if (windowToSendToBack.Parent == null)
{
if (mWindowArray.Contains(windowToSendToBack) == true)
{
mWindowArray.Remove(windowToSendToBack);
mWindowArray.Insert(0, windowToSendToBack);
}
}
else
{
throw new NotImplementedException("Send to back not implemented for parents at this time.");
}
}
#endif
static public void ElementActivity()
{
foreach (IWindow w in mWindowArray)
{
w.Activity(Camera);
}
foreach (IWindow w in mPerishableArray)
{
w.Activity(Camera);
}
foreach (IWindow w in mDominantWindows)
{
w.Activity(Camera);
}
}
#if !SILVERLIGHT && !WINDOWS_PHONE && !MONODROID
static public Cursor GetCursorNum(int index)
{
if (index > -1 && index < mCursors.Count)
return mCursors[index];
else
return null;
}
#endif
#if !SILVERLIGHT && !WINDOWS_PHONE && !MONOGAME
static public void LoadSettingsFromText(string settingsTextFile)
{
TextReader tr = new StreamReader(settingsTextFile);
string buffer = tr.ReadToEnd();
float cursorSensitivity = StringFunctions.GetFloatAfter("Mouse Sensitivity: ", buffer);
if (!float.IsNaN(cursorSensitivity))
Cursor.sensitivity = cursorSensitivity;
FileManager.Close(tr);
}
#endif
[Obsolete("Use AddDominantWindow instead - this method will go away soon")]
public static void MakeDominantWindow(IWindow window)
{
AddDominantWindow(window);
}
public static void MakeRegularWindow(IWindow window)
{
#if DEBUG
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Windows can only be added or modified on the primary thread");
}
#endif
if (mDominantWindows.Contains(window))
{
mDominantWindows.Remove(window);
}
if (!mWindowArray.Contains(window))
{
mWindowArray.Add(window);
}
}
/// <summary>
/// Sorts all contained IWindows according to their
/// Z values and Layers. This will usually result in
/// clicks being received in the same order that objects
/// are drawn, which is what the user will usually expect
/// </summary>
public static void SortZAndLayerBased()
{
// This is not a stable sort. We need it to be:
//mWindowArray.Sort(SortZAndLayerBased);
InsertionSort(mWindowArray, WindowComparisonForSorting);
}
public static void InsertionSort<T>(IList<T> list, Comparison<T> comparison)
{
if (list == null)
throw new ArgumentNullException("list");
if (comparison == null)
throw new ArgumentNullException("comparison");
int count = list.Count;
for (int j = 1; j < count; j++)
{
T key = list[j];
int i = j - 1;
for (; i >= 0 && comparison(list[i], key) > 0; i--)
{
list[i + 1] = list[i];
}
list[i + 1] = key;
}
}
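// Illustrative sketch (hypothetical helper, not part of the original source): shows the stable
// InsertionSort above on plain integers. A stable sort keeps elements that compare equal in
// their original order, which is what SortZAndLayerBased relies on for window ordering.
static void InsertionSortUsageExample()
{
List<int> values = new List<int> { 3, 1, 2 };
InsertionSort(values, (a, b) => a.CompareTo(b));
// values is now { 1, 2, 3 }; equal elements would keep their relative order.
}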
static int WindowComparisonForSorting(IWindow first, IWindow second)
{
#if DEBUG
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Objects can only be added on the primary thread");
}
#endif
if (first.Layer == second.Layer)
{
if (first.Z == second.Z)
{
return mWindowArray.IndexOf(first).CompareTo(mWindowArray.IndexOf(second));
}
else
{
return first.Z.CompareTo(second.Z);
}
}
else
{
int firstLayerIndex = SpriteManager.LayersWriteable.IndexOf(first.Layer);
int secondLayerIndex = SpriteManager.LayersWriteable.IndexOf(second.Layer);
return firstLayerIndex.CompareTo(secondLayerIndex);
}
}
#if !SILVERLIGHT && !WINDOWS_PHONE && !MONODROID
public static new string ToString()
{
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.Append("Number of regular Windows ").Append(mWindowArray.Count);
return stringBuilder.ToString();
}
#endif
static public void UpdateDependencies()
{
// This thing is a linked list, so we gotta foreach it
foreach (IWindow w in mDominantWindows)
w.UpdateDependencies();
for (int i = 0; i < mWindowArray.Count; i++)
mWindowArray[i].UpdateDependencies();
for (int i = 0; i < mPerishableArray.Count; i++)
mPerishableArray[i].UpdateDependencies();
}
public static void WorldToUi(float worldX, float worldY, float worldZ, out float uiX, out float uiY)
{
uiX = worldX;
uiY = worldY;
uiX = uiX - SpriteManager.Camera.X;
uiY -= SpriteManager.Camera.Y;
uiX *= SpriteManager.Camera.XEdge / SpriteManager.Camera.RelativeXEdgeAt(worldZ);
uiY *= -SpriteManager.Camera.YEdge / SpriteManager.Camera.RelativeYEdgeAt(worldZ);
uiX += SpriteManager.Camera.XEdge;
uiY += SpriteManager.Camera.YEdge;
}
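// Illustrative usage (hypothetical object): project a world-space position onto the GUI plane,
// for example to anchor a tool tip to an object in the world:
//   float uiX, uiY;
//   GuiManager.WorldToUi(someObject.X, someObject.Y, someObject.Z, out uiX, out uiY);
// The resulting coordinates are measured from the top-left of the screen, with Y increasing downward.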
#endregion
#region Internal
public static void Control()
{
#if PROFILE
TimeManager.TimeSection("Object Display Manager Activity");
#endif
InputManager.ReceivingInputJustSet = false;
mToolTipText = "";
#region Update cursors
foreach (Cursor c in mCursors)
{
#region SET CURSOR WINDOWPUSHED and WindowMiddleButtonPushed TO NULL if we push or click on something that is not a window
// Do this before the activity so on a click the frame with the click will still have a valid
// WindowPushed.
if ((c.PrimaryPush && c.WindowOver == null) || c.PrimaryClick)
{
if (c.WindowPushed != null)
{
c.WindowPushed = null;
}
}
#endregion
c.Update(TimeManager.CurrentTime);
c.WindowOver = null;
c.WindowClosing = null;
}
#endregion
UpdateDependencies();
// now we find which button we are over with each cursor
ElementActivity();
#region Loop through cursors and perform collision and action vs. Windows
foreach (Cursor c in mCursors)
{
if (c.Active)
{
#region looping through all perishable windows
for (int i = mPerishableArray.Count - 1; i > -1; i--)
{
IWindow window = mPerishableArray[i];
if (c.IsOn(window))
{
window.TestCollision(c);
}
else if (c.PrimaryClick || c.SecondaryClick)
{
if (mPerishableWindowsToSurviveClick.Contains(window))
{
mPerishableWindowsToSurviveClick.Remove(window);
}
else
{
window.Visible = false;
mPerishableArray.RemoveAt(i);
}
}
}
#endregion
#region if we have a dominant window
if (DominantWindowActive && c.WindowOver == null)
{
for(int i = mDominantWindows.Count - 1; i > -1; i--)
{
IWindow dominantWindow = mDominantWindows[i];
if (dominantWindow.Visible &&
c.IsOnWindowOrFloatingChildren(dominantWindow))
{
dominantWindow.TestCollision(c);
}
}
// If there are any dominant windows, we shouldn't perform any other collision tests
continue;
}
#endregion
#region looping through all regular windows
// First check all floating windows
else if (c.WindowOver == null)
{
for (int i = mWindowArray.Count - 1; i > -1; i--)
{
if (!mWindowArray[i].GuiManagerDrawn || mWindowArray[i].Visible == false || !mWindowArray[i].Enabled)
continue;
IWindow windowOver = c.GetDeepestFloatingChildWindowOver(mWindowArray[i]);
IWindow tempWindow = mWindowArray[i];
if (windowOver != null)
{
windowOver.TestCollision(c);
if (c.PrimaryPush && i < mWindowArray.Count)
{// we pushed a button, so let's bring it to the front
mWindowArray.Remove(tempWindow);
mWindowArray.Add(tempWindow);
}
break;
}
}
}
if (c.WindowOver == null)
{
for (int i = mWindowArray.Count - 1; i > -1; i--)
{
// Code in this loop can call
// events. Events may destroy
// entire groups of Windows, so we
// need to make sure we're still valid:
if (i < mWindowArray.Count)
{
var window = mWindowArray[i];
if (window.Visible == false || !window.Enabled)
continue;
if (window.GuiManagerDrawn)
{
if (c.IsOn(window))
{
window.TestCollision(c);
if (c.PrimaryPush && i < mWindowArray.Count)
{// we pushed a button, so let's bring it to the front
// Man, what a bug. It's possible
// that clicking one window will bring
// another to the foreground. If so, then
// the index is no longer valid. Typical unnecessary
// optimization causing all kinds of problems.
//mWindowArray.RemoveAt(i);
mWindowArray.Remove(window);
mWindowArray.Add(window);
}
break;
}
}
else if (!window.IgnoredByCursor && window.HasCursorOver(c))
{
window.TestCollision(c);
// I think we should use the cursor's WindowOver which may be a child of Window
c.LastWindowOver = c.WindowOver;
if (Cursor.PrimaryPush && i < mWindowArray.Count && BringsClickedWindowsToFront == true)
{// we pushed a button, so let's bring it to the front
mWindowArray.Remove(window); mWindowArray.Add(window);
}
break;
}
}
}
}
#endregion
}
}
#endregion
#region call onLosingFocus
if (Cursor.PrimaryPush)// && guiResult.windowResult != null)
{
// Should this be on a per-cursor basis?
if (mLastWindowWithFocus != null && mLastWindowWithFocus != Cursor.WindowOver)
{
mLastWindowWithFocus.OnLosingFocus();
#if SUPPORTS_FRB_DRAWN_GUI
UpDownReactToCursorPush();
#endif
}
mLastWindowWithFocus = Cursor.WindowOver;
}
#endregion
#region If not on anything, set cursor.LastWindowOver to null
// Should this be on a per-cursor basis?
if (Cursor.WindowOver == null)
Cursor.LastWindowOver = null;
#endregion
#region if receivingInput setting and resetting logic
foreach (Cursor c in mCursors)
{
if (c.Active && c.PrimaryClick == true)
{
FlatRedBall.Gui.IInputReceiver objectClickedOn = c.WindowOver as IInputReceiver;
#if SUPPORTS_FRB_DRAWN_GUI
UpDownReactToPrimaryClick(c, ref objectClickedOn);
#endif
#region Check for ReceivingInput being set to null
if (InputManager.ReceivingInputJustSet == false && objectClickedOn == null)
{
#if SUPPORTS_FRB_DRAWN_GUI
bool shouldLoseInput = true;
if (InputManager.ReceivingInput != null && InputManager.ReceivingInput is TextBox &&
c.WindowPushed == InputManager.ReceivingInput)
{
shouldLoseInput = false;
}
if (shouldLoseInput)
{
InputManager.ReceivingInput = null;
}
#endif
}
#endregion
else if (objectClickedOn != null && ((IWindow)objectClickedOn).Visible == true &&
((IInputReceiver)objectClickedOn).TakingInput)
{
InputManager.InputReceiver = objectClickedOn;
}
}
}
#endregion
#if SUPPORTS_FRB_DRAWN_GUI
LoseInputOnTextBox(tempTextBox);
#endif
#region letting go of any grabbed window
if (Cursor.PrimaryClick)
{
Cursor.mWindowGrabbed = null;
Cursor.mSidesGrabbed = Sides.None;
}
#endregion
#region regulate button and toggle button up/down states depending on if the cursor is still over the button pushed
if (Cursor.WindowPushed != null && Cursor.WindowOver != Cursor.WindowPushed)
{
#if SUPPORTS_FRB_DRAWN_GUI
ButtonReactToPush();
#endif
}
#endregion
#region Remove invisible Dominant Windows
while (mDominantWindows.Count > 0 && mDominantWindows[mDominantWindows.Count - 1].Visible == false &&
RemoveInvisibleDominantWindows)
{
mDominantWindows.RemoveAt(mDominantWindows.Count - 1);
}
#endregion
#region Clear the mPerishableWindowsToSurviveClick
mPerishableWindowsToSurviveClick.Clear();
#endregion
}
#endregion
#region Private Methods
#endregion
#region Remove methods
static public void RemoveCursor(Cursor cursorToRemove)
{
#if DEBUG
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Cursors can only be removed on the primary thread");
}
#endif
mCursors.Remove(cursorToRemove);
}
public static void RemoveParentOfWindow(IWindow childWindow)
{
RemoveWindow(childWindow.Parent);
}
public static void RemoveWindow(IWindow windowToRemove)
{
RemoveWindow(windowToRemove, false);
}
public static void RemoveWindow(IWindow windowToRemove, bool keepEvents)
{
#if DEBUG
if (!FlatRedBallServices.IsThreadPrimary())
{
throw new InvalidOperationException("Windows can only be removed on the primary thread");
}
#endif
if (mWindowArray.Contains(windowToRemove))
{
mWindowArray.Remove(windowToRemove);
}
else if (mPerishableArray.Contains(windowToRemove))
{
windowToRemove.Visible = false;
mPerishableArray.Remove(windowToRemove);
}
// If an IWindow is made dominant, then it will be removed from the regular (mWindowArray) window list.
else if (mDominantWindows.Contains(windowToRemove))
{
mDominantWindows.Remove(windowToRemove);
}
if (InputManager.InputReceiver == windowToRemove)
InputManager.InputReceiver = null;
}
static public void RemoveWindow(WindowArray windowsToRemove)
{
for (int i = windowsToRemove.Count - 1; i > -1; i--)
{
RemoveWindow(windowsToRemove[i]);
}
}
#endregion
#endregion
}// end of class GuiManager
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Management.Resources.Models;
namespace Microsoft.Azure.Management.Resources
{
public partial class ResourceManagementClient : ServiceClient<ResourceManagementClient>, IResourceManagementClient
{
private string _apiVersion;
/// <summary>
/// Gets the API version.
/// </summary>
public string ApiVersion
{
get { return this._apiVersion; }
}
private Uri _baseUri;
/// <summary>
/// Gets the URI used as the base for all cloud service requests.
/// </summary>
public Uri BaseUri
{
get { return this._baseUri; }
}
private SubscriptionCloudCredentials _credentials;
/// <summary>
/// Gets subscription credentials which uniquely identify Microsoft
/// Azure subscription. The subscription ID forms part of the URI for
/// every service call.
/// </summary>
public SubscriptionCloudCredentials Credentials
{
get { return this._credentials; }
}
private int _longRunningOperationInitialTimeout;
/// <summary>
/// Gets or sets the initial timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationInitialTimeout
{
get { return this._longRunningOperationInitialTimeout; }
set { this._longRunningOperationInitialTimeout = value; }
}
private int _longRunningOperationRetryTimeout;
/// <summary>
/// Gets or sets the retry timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationRetryTimeout
{
get { return this._longRunningOperationRetryTimeout; }
set { this._longRunningOperationRetryTimeout = value; }
}
private IDeploymentOperationOperations _deploymentOperations;
/// <summary>
/// Operations for managing deployment operations.
/// </summary>
public virtual IDeploymentOperationOperations DeploymentOperations
{
get { return this._deploymentOperations; }
}
private IDeploymentOperations _deployments;
/// <summary>
/// Operations for managing deployments.
/// </summary>
public virtual IDeploymentOperations Deployments
{
get { return this._deployments; }
}
private IProviderOperations _providers;
/// <summary>
/// Operations for managing providers.
/// </summary>
public virtual IProviderOperations Providers
{
get { return this._providers; }
}
private IProviderOperationsMetadataOperations _providerOperationsMetadata;
/// <summary>
/// Operations for getting provider operations metadata.
/// </summary>
public virtual IProviderOperationsMetadataOperations ProviderOperationsMetadata
{
get { return this._providerOperationsMetadata; }
}
private IResourceGroupOperations _resourceGroups;
/// <summary>
/// Operations for managing resource groups.
/// </summary>
public virtual IResourceGroupOperations ResourceGroups
{
get { return this._resourceGroups; }
}
private IResourceOperations _resources;
/// <summary>
/// Operations for managing resources.
/// </summary>
public virtual IResourceOperations Resources
{
get { return this._resources; }
}
private IResourceProviderOperationDetailsOperations _resourceProviderOperationDetails;
/// <summary>
/// Operations for managing Resource provider operations.
/// </summary>
public virtual IResourceProviderOperationDetailsOperations ResourceProviderOperationDetails
{
get { return this._resourceProviderOperationDetails; }
}
private ITagOperations _tags;
/// <summary>
/// Operations for managing tags.
/// </summary>
public virtual ITagOperations Tags
{
get { return this._tags; }
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
public ResourceManagementClient()
: base()
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(HttpClient httpClient)
: base(httpClient)
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Clones properties from current instance to another
/// ResourceManagementClient instance
/// </summary>
/// <param name='client'>
/// Instance of ResourceManagementClient to clone to
/// </param>
protected override void Clone(ServiceClient<ResourceManagementClient> client)
{
base.Clone(client);
if (client is ResourceManagementClient)
{
ResourceManagementClient clonedClient = ((ResourceManagementClient)client);
clonedClient._credentials = this._credentials;
clonedClient._baseUri = this._baseUri;
clonedClient._apiVersion = this._apiVersion;
clonedClient._longRunningOperationInitialTimeout = this._longRunningOperationInitialTimeout;
clonedClient._longRunningOperationRetryTimeout = this._longRunningOperationRetryTimeout;
clonedClient.Credentials.InitializeServiceClient(clonedClient);
}
}
/// <summary>
/// The Get Operation Status operation returns the status of the
/// specified operation. After calling an asynchronous operation, you
/// can call Get Operation Status to determine whether the operation
/// has succeeded, failed, or is still in progress.
/// </summary>
/// <param name='operationStatusLink'>
/// Required. Location value returned by the Begin operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response for long running operations.
/// </returns>
public async Task<LongRunningOperationResponse> GetLongRunningOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken)
{
// Validate
if (operationStatusLink == null)
{
throw new ArgumentNullException("operationStatusLink");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("operationStatusLink", operationStatusLink);
TracingAdapter.Enter(invocationId, this, "GetLongRunningOperationStatusAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + operationStatusLink;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2014-04-01-preview");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted && statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
LongRunningOperationResponse result = null;
// Deserialize Response
result = new LongRunningOperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("Retry-After"))
{
result.RetryAfter = int.Parse(httpResponse.Headers.GetValues("Retry-After").FirstOrDefault(), CultureInfo.InvariantCulture);
}
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (statusCode == HttpStatusCode.Conflict)
{
result.Status = OperationStatus.Failed;
}
if (statusCode == HttpStatusCode.BadRequest)
{
result.Status = OperationStatus.Failed;
}
if (statusCode == HttpStatusCode.Accepted)
{
result.Status = OperationStatus.InProgress;
}
if (statusCode == HttpStatusCode.OK)
{
result.Status = OperationStatus.Succeeded;
}
if (statusCode == HttpStatusCode.NoContent)
{
result.Status = OperationStatus.Succeeded;
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
}
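// A minimal polling sketch, not part of the generated client above: it shows how the
// LongRunningOperationResponse returned by GetLongRunningOperationStatusAsync is typically consumed.
// The using directives below (Microsoft.Azure, Microsoft.Azure.Management.Resources) and the
// 30-second fallback delay are assumptions; Retry-After, when present, is surfaced via RetryAfter as above.
namespace ResourceManagementClientUsageSketch
{
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Resources;
internal static class LongRunningOperationPolling
{
// Polls the status link returned by a Begin* call until the operation leaves the InProgress state.
internal static async Task<LongRunningOperationResponse> WaitForCompletionAsync(
ResourceManagementClient client, string operationStatusLink, CancellationToken cancellationToken)
{
LongRunningOperationResponse response =
await client.GetLongRunningOperationStatusAsync(operationStatusLink, cancellationToken);
while (response.Status == OperationStatus.InProgress)
{
// Honor the Retry-After header when present; otherwise wait a conservative 30 seconds (assumption).
int delaySeconds = response.RetryAfter > 0 ? response.RetryAfter : 30;
await Task.Delay(TimeSpan.FromSeconds(delaySeconds), cancellationToken);
response = await client.GetLongRunningOperationStatusAsync(operationStatusLink, cancellationToken);
}
return response;
}
}
}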
| |
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using ServiceStack.DataAnnotations;
using ServiceStack.DesignPatterns.Model;
namespace ServiceStack.Text.Tests.Support
{
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class UserPublicView
{
/// <summary>
/// I'm naming this 'Id' instead of 'UserId' as this DTO is
/// meant to be cached and we may want to handle all caches generically at some point.
/// </summary>
/// <value>The id.</value>
[DataMember]
public Guid Id { get; set; }
[DataMember]
public UserPublicProfile Profile { get; set; }
[DataMember]
public ArrayOfPost Posts { get; set; }
}
[Serializable]
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class UserPublicProfile
{
public UserPublicProfile()
{
this.FollowerUsers = new List<UserSearchResult>();
this.FollowingUsers = new List<UserSearchResult>();
this.UserFileTypes = new ArrayOfString();
}
[DataMember]
public Guid Id
{
get;
set;
}
[DataMember]
public string UserType
{
get;
set;
}
[DataMember]
public string UserName
{
get;
set;
}
[DataMember]
public string FullName
{
get;
set;
}
[DataMember]
public string Country
{
get;
set;
}
[DataMember]
public string LanguageCode
{
get;
set;
}
[DataMember]
public DateTime? DateOfBirth
{
get;
set;
}
[DataMember]
public DateTime? LastLoginDate
{
get;
set;
}
[DataMember]
public long FlowPostCount
{
get;
set;
}
[DataMember]
public int BuyCount
{
get;
set;
}
[DataMember]
public int ClientTracksCount
{
get;
set;
}
[DataMember]
public int ViewCount
{
get;
set;
}
[DataMember]
public List<UserSearchResult> FollowerUsers
{
get;
set;
}
[DataMember]
public List<UserSearchResult> FollowingUsers
{
get;
set;
}
/// ArrayOfString causes a translation error
[DataMember]
public ArrayOfString UserFileTypes
{
get;
set;
}
[DataMember]
public string OriginalProfileBase64Hash
{
get;
set;
}
[DataMember]
public string AboutMe
{
get;
set;
}
}
[Serializable]
[CollectionDataContract(Namespace = "http://schemas.ddnglobal.com/types/", ItemName = "String")]
public class ArrayOfString : List<string>
{
public ArrayOfString() { }
public ArrayOfString(IEnumerable<string> collection) : base(collection) { }
//TODO: allow params[] constructor, fails on:
//Profile = user.TranslateTo<UserPrivateProfile>()
public static ArrayOfString New(params string[] ids) { return new ArrayOfString(ids); }
//public ArrayOfString(params string[] ids) : base(ids) { }
}
[Serializable]
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class UserSearchResult
: IHasId<Guid>
{
[DataMember]
public Guid Id { get; set; }
[DataMember(EmitDefaultValue = false)]
public string UserType { get; set; }
[DataMember]
public string UserName { get; set; }
[DataMember(EmitDefaultValue = false)]
public string FullName { get; set; }
[DataMember(EmitDefaultValue = false)]
public string FirstName { get; set; }
[DataMember(EmitDefaultValue = false)]
public string LastName { get; set; }
[DataMember(EmitDefaultValue = false)]
public string LanguageCode { get; set; }
[DataMember(EmitDefaultValue = false)]
public int FlowPostCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public int ClientTracksCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public int FollowingCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public int FollowersCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public int ViewCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public DateTime ActivationDate { get; set; }
}
[Serializable]
[CollectionDataContract(Namespace = "http://schemas.ddnglobal.com/types/", ItemName = "Post")]
public class ArrayOfPost : List<Post>
{
public ArrayOfPost() { }
public ArrayOfPost(IEnumerable<Post> collection) : base(collection) { }
public static ArrayOfPost New(params Post[] ids) { return new ArrayOfPost(ids); }
}
[Serializable]
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class Post
: IHasStringId
{
public Post()
{
this.TrackUrns = new ArrayOfStringId();
}
public string Id
{
get { return this.Urn; }
}
[DataMember]
public string Urn
{
get;
set;
}
[DataMember]
public DateTime DateAdded
{
get;
set;
}
[DataMember]
public bool CanPreviewFullLength
{
get;
set;
}
[DataMember]
public Guid OriginUserId
{
get;
set;
}
[DataMember]
public string OriginUserName
{
get;
set;
}
[DataMember]
public Guid SourceUserId
{
get;
set;
}
[DataMember]
public string SourceUserName
{
get;
set;
}
[DataMember]
public string SubjectUrn
{
get;
set;
}
[DataMember]
public string ContentUrn
{
get;
set;
}
[DataMember]
public ArrayOfStringId TrackUrns
{
get;
set;
}
[DataMember]
public string Caption
{
get;
set;
}
[DataMember]
public Guid CaptionUserId
{
get;
set;
}
[DataMember]
public string CaptionUserName
{
get;
set;
}
[DataMember]
public string PostType
{
get;
set;
}
[DataMember]
public Guid? OnBehalfOfUserId
{
get;
set;
}
}
[CollectionDataContract(Namespace = "http://schemas.ddnglobal.com/types/", ItemName = "Id")]
public class ArrayOfStringId : List<string>
{
public ArrayOfStringId() { }
public ArrayOfStringId(IEnumerable<string> collection) : base(collection) { }
//TODO: allow params[] constructor, fails on: o.TranslateTo<ArrayOfStringId>()
public static ArrayOfStringId New(params string[] ids) { return new ArrayOfStringId(ids); }
//public ArrayOfStringId(params string[] ids) : base(ids) { }
}
public enum FlowPostType
{
Content,
Text,
Promo,
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class Property
{
public Property()
{
}
public Property(string name, string value)
{
this.Name = name;
this.Value = value;
}
[DataMember]
public string Name
{
get;
set;
}
[DataMember]
public string Value
{
get;
set;
}
public override string ToString()
{
return this.Name + "," + this.Value;
}
}
[CollectionDataContract(Namespace = "http://schemas.ddnglobal.com/types/", ItemName = "Property")]
public class Properties
: List<Property>
{
public Properties()
{
}
public Properties(IEnumerable<Property> collection)
: base(collection)
{
}
public string GetPropertyValue(string name)
{
foreach (var property in this)
{
if (string.CompareOrdinal(property.Name, name) == 0)
{
return property.Value;
}
}
return null;
}
public Dictionary<string, string> ToDictionary()
{
var propertyDict = new Dictionary<string, string>();
foreach (var property in this)
{
propertyDict[property.Name] = property.Value;
}
return propertyDict;
}
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class ResponseStatus
{
/// <summary>
/// Initializes a new instance of the <see cref="ResponseStatus"/> class.
///
/// A response status without an errorcode == success
/// </summary>
public ResponseStatus()
{
this.Errors = new List<ResponseError>();
}
[DataMember]
public string ErrorCode { get; set; }
[DataMember]
public string Message { get; set; }
[DataMember]
public string StackTrace { get; set; }
[DataMember]
public List<ResponseError> Errors { get; set; }
public bool IsSuccess
{
get { return this.ErrorCode == null; }
}
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class ResponseError
{
[DataMember]
public string ErrorCode { get; set; }
[DataMember]
public string FieldName { get; set; }
[DataMember]
public string Message { get; set; }
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class GetContentStatsResponse
: IExtensibleDataObject
{
public GetContentStatsResponse()
{
this.Version = 100;
this.ResponseStatus = new ResponseStatus();
this.TopRecommenders = new List<UserSearchResult>();
this.LatestPosts = new List<Post>();
}
[DataMember]
public DateTime CreatedDate { get; set; }
[DataMember]
public List<UserSearchResult> TopRecommenders { get; set; }
[DataMember]
public List<Post> LatestPosts { get; set; }
#region Standard Response Properties
[DataMember]
public int Version
{
get;
set;
}
[DataMember]
public Properties Properties
{
get;
set;
}
public ExtensionDataObject ExtensionData
{
get;
set;
}
[DataMember]
public ResponseStatus ResponseStatus
{
get;
set;
}
#endregion
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class ProUserPublicProfile
{
public ProUserPublicProfile()
{
this.SocialLinks = new List<SocialLinkUrl>();
this.ArtistImages = new List<ImageAsset>();
this.Genres = new List<string>();
this.Posts = new ArrayOfPost();
this.FollowerUsers = new List<UserSearchResult>();
this.FollowingUsers = new List<UserSearchResult>();
}
[DataMember]
public Guid Id { get; set; }
[DataMember]
public string Alias { get; set; }
[DataMember]
public string RefUrn { get; set; }
[DataMember]
public string ProUserType { get; set; }
[DataMember]
public string ProUserSalesType { get; set; }
#region Header
[DataMember]
public TextLink ProUserLink { get; set; }
/// <summary>
/// Same as above but in an [A] HTML link
/// </summary>
[DataMember]
public string ProUserLinkHtml { get; set; }
/// <summary>
/// For the twitter and facebook icons
/// </summary>
[DataMember]
public List<SocialLinkUrl> SocialLinks { get; set; }
#endregion
#region Theme
[DataMember]
public ImageAsset BannerImage { get; set; }
[DataMember]
public string BannerImageBackgroundColor { get; set; }
[DataMember]
public List<string> UserFileTypes { get; set; }
[DataMember]
public string OriginalProfileBase64Hash { get; set; }
#endregion
#region Music
[DataMember]
public List<ImageAsset> ArtistImages { get; set; }
[DataMember]
public List<string> Genres { get; set; }
#endregion
#region Biography
[DataMember]
public string BiographyPageHtml { get; set; }
#endregion
#region Outbox
[DataMember]
public ArrayOfPost Posts { get; set; }
[DataMember]
public List<UserSearchResult> FollowerUsers { get; set; }
[DataMember]
public int FollowerUsersCount { get; set; }
[DataMember]
public List<UserSearchResult> FollowingUsers { get; set; }
[DataMember]
public int FollowingUsersCount { get; set; }
#endregion
}
public enum SocialLink
{
iTunes = 0,
Bebo = 1,
Blogger = 2,
Delicious = 3,
Digg = 4,
Email = 5,
EverNote = 6,
Facebook = 7,
Flickr = 8,
FriendFeed = 9,
GoogleWave = 10,
GroveShark = 11,
iLike = 12,
LastFm = 13,
Mix = 14,
MySpace = 15,
Posterous = 16,
Reddit = 17,
Rss = 18,
StumbleUpon = 19,
Twitter = 20,
Vimeo = 21,
Wikipedia = 22,
WordPress = 23,
Yahoo = 24,
YahooBuzz = 25,
YouTube = 26,
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class SocialLinkUrl
{
[References(typeof(SocialLink))]
[DataMember(EmitDefaultValue = false)]
public string Name
{
get;
set;
}
[DataMember]
public string LinkUrl
{
get;
set;
}
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
[Serializable]
public class ImageAsset
{
[DataMember(EmitDefaultValue = false)]
public string RelativePath { get; set; }
[DataMember(EmitDefaultValue = false)]
public string AbsoluteUrl { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Hash { get; set; }
[DataMember(EmitDefaultValue = false)]
public long? SizeBytes { get; set; }
[DataMember(EmitDefaultValue = false)]
public int? Width { get; set; }
[DataMember(EmitDefaultValue = false)]
public int? Height { get; set; }
[DataMember(EmitDefaultValue = false)]
public string BackgroundColorHex { get; set; }
}
[DataContract(Namespace = "http://schemas.ddnglobal.com/types/")]
public class TextLink
{
[DataMember(EmitDefaultValue = false)]
public string Label
{
get;
set;
}
[DataMember]
public string LinkUrl
{
get;
set;
}
}
}
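// A small usage sketch, not part of the DTOs above: it shows the ordinal lookup done by
// Properties.GetPropertyValue, the dictionary projection, and the "no ErrorCode == success"
// convention on ResponseStatus. The namespace and the sample values here are illustrative only.
namespace ServiceStack.Text.Tests.Support.UsageSketch
{
using System;
internal static class PropertiesUsage
{
internal static void Run()
{
var properties = new Properties(new[]
{
new Property("Region", "EU"),
new Property("Tier", "Pro"),
});
Console.WriteLine(properties.GetPropertyValue("Tier"));    // "Pro"
Console.WriteLine(properties.GetPropertyValue("Missing")); // null (no ordinal match)
// ToDictionary keeps the last value seen for a duplicated name.
var asDictionary = properties.ToDictionary();
Console.WriteLine(asDictionary["Region"]);                 // "EU"
// A ResponseStatus with a null ErrorCode is treated as success.
var status = new ResponseStatus();
Console.WriteLine(status.IsSuccess);                       // True
}
}
}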
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Nimrod.Events.Api.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when a factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage; it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample was found, try to generate one using the formatter and the sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>The serialized sample, or an <see cref="InvalidSample"/> describing why it could not be generated.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
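// A registration sketch, not part of the generator above: it shows the override/fallback pattern
// described in the SampleObjectFactories remarks (Insert(0, ...) takes precedence over the default
// ObjectGenerator-based factory, Add(...) runs last). The factory below and its DateTime special
// case are purely illustrative.
namespace Nimrod.Events.Api.Areas.HelpPage.UsageSketch
{
using System;
internal static class SampleObjectFactoryRegistration
{
internal static HelpPageSampleGenerator Create()
{
var generator = new HelpPageSampleGenerator();
// Consulted first; returning null hands the type off to the next factory in the list.
generator.SampleObjectFactories.Insert(0, (gen, type) =>
type == typeof(DateTime) ? (object)new DateTime(2014, 1, 1) : null);
// Last-chance fallback, only reached if every earlier factory returned null.
generator.SampleObjectFactories.Add((gen, type) => null);
return generator;
}
}
}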
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Net.Security;
using System.Reflection;
namespace System.ServiceModel.Description
{
[DebuggerDisplay("Name={name}, IsInitiating={isInitiating}, IsTerminating={isTerminating}")]
public class OperationDescription
{
internal const string SessionOpenedAction = Channels.WebSocketTransportSettings.ConnectionOpenedAction;
private XmlName _name;
private bool _isInitiating;
private bool _isTerminating;
private bool _isSessionOpenNotificationEnabled;
private ContractDescription _declaringContract;
private FaultDescriptionCollection _faults;
private MessageDescriptionCollection _messages;
private KeyedByTypeCollection<IOperationBehavior> _behaviors;
private Collection<Type> _knownTypes;
private MethodInfo _beginMethod;
private MethodInfo _endMethod;
private MethodInfo _syncMethod;
private MethodInfo _taskMethod;
private ProtectionLevel _protectionLevel;
private bool _hasProtectionLevel;
private bool _validateRpcWrapperName = true;
private bool _hasNoDisposableParameters;
public OperationDescription(string name, ContractDescription declaringContract)
{
if (name == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("name");
}
if (name.Length == 0)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
new ArgumentOutOfRangeException("name", SRServiceModel.SFxOperationDescriptionNameCannotBeEmpty));
}
_name = new XmlName(name, true /*isEncoded*/);
if (declaringContract == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("declaringContract");
}
_declaringContract = declaringContract;
_isInitiating = true;
_isTerminating = true;
_faults = new FaultDescriptionCollection();
_messages = new MessageDescriptionCollection();
_behaviors = new KeyedByTypeCollection<IOperationBehavior>();
_knownTypes = new Collection<Type>();
}
internal OperationDescription(string name, ContractDescription declaringContract, bool validateRpcWrapperName)
: this(name, declaringContract)
{
_validateRpcWrapperName = validateRpcWrapperName;
}
public KeyedCollection<Type, IOperationBehavior> OperationBehaviors
{
get { return this.Behaviors; }
}
[EditorBrowsable(EditorBrowsableState.Never)]
public KeyedByTypeCollection<IOperationBehavior> Behaviors
{
get { return _behaviors; }
}
public ProtectionLevel ProtectionLevel
{
get { return _protectionLevel; }
set
{
if (!(value == ProtectionLevel.None || value == ProtectionLevel.Sign || value == ProtectionLevel.EncryptAndSign))
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("value"));
_protectionLevel = value;
_hasProtectionLevel = true;
}
}
public bool HasProtectionLevel
{
get { return _hasProtectionLevel; }
}
// Not serializable on purpose, metadata import/export cannot
// produce it, only available when binding to runtime
public MethodInfo TaskMethod
{
get { return _taskMethod; }
set { _taskMethod = value; }
}
// Not serializable on purpose, metadata import/export cannot
// produce it, only available when binding to runtime
public MethodInfo SyncMethod
{
get { return _syncMethod; }
set { _syncMethod = value; }
}
// Not serializable on purpose, metadata import/export cannot
// produce it, only available when binding to runtime
public MethodInfo BeginMethod
{
get { return _beginMethod; }
set { _beginMethod = value; }
}
internal MethodInfo OperationMethod
{
get
{
if (this.SyncMethod == null)
{
return this.TaskMethod ?? this.BeginMethod;
}
else
{
return this.SyncMethod;
}
}
}
internal bool HasNoDisposableParameters
{
get { return _hasNoDisposableParameters; }
set { _hasNoDisposableParameters = value; }
}
// Not serializable on purpose, metadata import/export cannot
// produce it, only available when binding to runtime
public MethodInfo EndMethod
{
get { return _endMethod; }
set { _endMethod = value; }
}
public ContractDescription DeclaringContract
{
get { return _declaringContract; }
set
{
if (value == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("DeclaringContract");
}
else
{
_declaringContract = value;
}
}
}
public FaultDescriptionCollection Faults
{
get { return _faults; }
}
public bool IsOneWay
{
get { return this.Messages.Count == 1; }
}
public bool IsInitiating
{
get { return _isInitiating; }
set { _isInitiating = value; }
}
internal bool IsServerInitiated()
{
EnsureInvariants();
return Messages[0].Direction == MessageDirection.Output;
}
public bool IsTerminating
{
get { return _isTerminating; }
set { _isTerminating = value; }
}
public Collection<Type> KnownTypes
{
get { return _knownTypes; }
}
// Messages[0] is the 'request' (first of MEP), and for non-oneway MEPs, Messages[1] is the 'response' (second of MEP)
public MessageDescriptionCollection Messages
{
get { return _messages; }
}
internal XmlName XmlName
{
get { return _name; }
}
internal string CodeName
{
get { return _name.DecodedName; }
}
public string Name
{
get { return _name.EncodedName; }
}
internal bool IsValidateRpcWrapperName { get { return _validateRpcWrapperName; } }
internal Type TaskTResult
{
get;
set;
}
internal bool HasOutputParameters
{
get
{
// For non-oneway operations, Messages[1] is the 'response'
return (this.Messages.Count > 1) &&
(this.Messages[1].Body.Parts.Count > 0);
}
}
internal bool IsSessionOpenNotificationEnabled
{
get { return _isSessionOpenNotificationEnabled; }
set { _isSessionOpenNotificationEnabled = value; }
}
internal void EnsureInvariants()
{
if (this.Messages.Count != 1 && this.Messages.Count != 2)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new System.InvalidOperationException(string.Format(SRServiceModel.SFxOperationMustHaveOneOrTwoMessages, this.Name)));
}
}
internal void ResetProtectionLevel()
{
_protectionLevel = ProtectionLevel.None;
_hasProtectionLevel = false;
}
}
}
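// A construction sketch, not part of the class above: an operation with a single Input message is
// one-way (IsOneWay == true); adding a second, Output-direction message makes it request/response.
// The contract and action names are placeholders.
namespace System.ServiceModel.Description.UsageSketch
{
internal static class OperationDescriptionSketch
{
internal static OperationDescription BuildOneWayOperation()
{
ContractDescription contract = new ContractDescription("ISampleContract");
OperationDescription operation = new OperationDescription("Ping", contract);
// Messages[0] is the request (first of the MEP); with no reply message the operation is one-way.
operation.Messages.Add(new MessageDescription("urn:sample/Ping", MessageDirection.Input));
return operation;
}
}
}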
| |
//---------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Description: ResourceTextReader class
// It reads values from a CSV file or tab-separated TXT file
//
//---------------------------------------------------------------------------
using System;
using System.IO;
using System.Text;
using System.Resources;
using System.Collections;
using System.Globalization;
using System.Diagnostics;
namespace BamlLocalization
{
/// <summary>
/// Reader that reads value from a CSV file or Tab-separated TXT file
/// </summary>
internal class ResourceTextReader : IDisposable
{
internal ResourceTextReader(FileType fileType, Stream stream)
{
_delimiter = LocBamlConst.GetDelimiter(fileType);
if(stream == null)
throw new ArgumentNullException("stream");
_reader = new StreamReader(stream, Encoding.UTF8);
}
internal bool ReadRow()
{
// currentChar is the first char after newlines
int currentChar = SkipAllNewLine();
if (currentChar < 0)
{
// nothing else to read
return false;
}
ReadState currentState = ReadState.TokenStart;
_columns = new ArrayList();
StringBuilder buffer = new StringBuilder();
while (currentState != ReadState.LineEnd)
{
switch(currentState)
{
// start of a token
case ReadState.TokenStart:
{
if (currentChar == _delimiter)
{
// it is the end of the token when we see a delimiter
// Store the token, reset the state, and ignore this char
StoreTokenAndResetState(ref buffer, ref currentState);
}
else if (currentChar == '\"')
{
// jump to QuotedContent if the token starts with a quote,
// and also ignore this quote
currentState = ReadState.QuotedContent;
}
else if (currentChar == '\n' ||
(currentChar == '\r' && _reader.Peek() == '\n'))
{
// we see a '\n' or '\r\n' sequence. Go to LineEnd
// ignore these chars
currentState = ReadState.LineEnd;
}
else
{
// safe to say that this is part of unquoted content
buffer.Append((Char) currentChar);
currentState = ReadState.UnQuotedContent;
}
break;
}
// inside of an unquoted content
case ReadState.UnQuotedContent :
{
if (currentChar == _delimiter)
{
// It is the end of a token.
// Store the token value and reset state
// ignore this char as well
StoreTokenAndResetState(ref buffer, ref currentState);
}
else if (currentChar == '\n' ||
(currentChar == '\r' && _reader.Peek() == '\n'))
{
// see a new line
// ignore these chars and jump to LineEnd
currentState = ReadState.LineEnd;
}
else
{
// we are good. store this char
// notice: even if we see a '\"', we will just treat it like
// a normal char
buffer.Append((Char) currentChar);
}
break;
}
// inside of a quoted content
case ReadState.QuotedContent :
{
if (currentChar == '\"')
{
// now it depends on whether the next char is also a quote
if (_reader.Peek() == '\"')
{
// doubled quote inside a quoted value: consume the second quote and append a single quote char
currentChar = _reader.Read();
buffer.Append( (Char) currentChar);
}
else
{ // we have a single quote. We fall back to the unquoted content state
// and ignore the current quote
currentState = ReadState.UnQuotedContent;
}
}
else
{
// we are still inside of a quote, anything is accepted
buffer.Append((Char) currentChar);
}
break;
}
}
// read in the next char
currentChar = _reader.Read();
if (currentChar < 0)
{
// break out of the state machine if we reach the end of the file
break;
}
}
// we got to here either we are at LineEnd, or we are end of file
if (buffer.Length > 0)
{
_columns.Add(buffer.ToString());
}
return true;
}
internal string GetColumn(int index)
{
if (_columns!= null && index < _columns.Count && index >= 0)
{
return (string) _columns[index];
}
else
{
return null;
}
}
internal void Close()
{
if (_reader != null)
{
_reader.Close();
}
}
void IDisposable.Dispose()
{
Close();
}
//---------------------------------
// private functions
//---------------------------------
private void StoreTokenAndResetState(ref StringBuilder buffer, ref ReadState currentState)
{
// add the token into buffer. The token can be empty
_columns.Add(buffer.ToString());
// create a new buffer for the next token.
buffer = new StringBuilder();
// we continue in the TokenStart state
currentState = ReadState.TokenStart;
}
// skip all new line and return the first char after newlines.
// newline means '\r\n' or '\n'
private int SkipAllNewLine()
{
int _char;
while ((_char = _reader.Read())>=0)
{
if (_char == '\n')
{
continue; // continue if it is '\n'
}
else if (_char == '\r' && _reader.Peek() == '\n')
{
// skip the '\n' in the next position
_reader.Read();
// and continue
continue;
}
else
{
// stop here
break;
}
}
return _char;
}
private TextReader _reader; // internal text reader
private int _delimiter; // delimiter
private ArrayList _columns; // An arraylist storing all the columns of a row
/// <summary>
/// Enum representing internal states of the reader when reading
/// the CSV or tab-separated TXT file
/// </summary>
private enum ReadState
{
/// <summary>
/// State in which the reader is at the start of a column
/// </summary>
TokenStart,
/// <summary>
/// State in which the reader is reading contents that are quoted
/// </summary>
QuotedContent,
/// <summary>
/// State in which the reader is reading contents not in quotes
/// </summary>
UnQuotedContent,
/// <summary>
/// State in which the end of a line is reached
/// </summary>
LineEnd,
}
}
}
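// A usage sketch, not part of LocBaml itself: drive the row/column state machine above over a
// CSV stream. FileType.CSV is an assumption about the FileType enum consumed by LocBamlConst.GetDelimiter.
namespace BamlLocalization.UsageSketch
{
using System;
using System.IO;
internal static class ResourceTextReaderUsage
{
internal static void DumpRows(Stream csvStream)
{
// The using statement calls the explicit IDisposable.Dispose, which closes the underlying reader.
using (ResourceTextReader reader = new ResourceTextReader(FileType.CSV, csvStream))
{
while (reader.ReadRow())
{
// GetColumn returns null when the index is out of range for the current row.
Console.WriteLine("{0} = {1}", reader.GetColumn(0), reader.GetColumn(1));
}
}
}
}
}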
| |
using System;
using UnityEngine;
using Cinemachine.Utility;
using UnityEngine.Serialization;
namespace Cinemachine
{
/// <summary>
/// This is a CinemachineComponent in the Body section of the component pipeline.
/// Its job is to position the camera in a variable relationship to the vcam's
/// Follow target object, with offsets and damping.
///
/// This component is typically used to implement a camera that follows its target.
/// It can accept player input from an input device, which allows the player to
/// dynamically control the relationship between the camera and the target,
/// for example with a joystick.
///
/// The OrbitalTransposer introduces the concept of __Heading__, which is the direction
/// in which the target is moving, and the OrbitalTransposer will attempt to position
/// the camera in relationship to the heading, which is by default directly behind the target.
/// You can control the default relationship by adjusting the Heading Bias setting.
///
/// If you attach an input controller to the OrbitalTransposer, then the player can also
/// control the way the camera positions itself in relation to the target heading. This allows
/// the camera to move to any spot on an orbit around the target.
/// </summary>
[DocumentationSorting(6, DocumentationSortingAttribute.Level.UserRef)]
[AddComponentMenu("")] // Don't display in add component menu
[RequireComponent(typeof(CinemachinePipeline))]
[SaveDuringPlay]
public class CinemachineOrbitalTransposer : MonoBehaviour, ICinemachineComponent
{
/// <summary>The distance which the transposer will attempt to maintain from the target</summary>
[Tooltip("The distance which the transposer will attempt to maintain from the Follow target")]
public float m_Radius = 10f;
/// <summary>The vertical offset from the target</summary>
[Tooltip("The vertical offset from the target.")]
public float m_HeightOffset = 0f;
/// <summary>How aggressively the camera tries to maintain the offset in the X-axis.
/// Small numbers are more responsive, rapidly translating the camera to keep the target's x-axis
/// offset. Larger numbers give a more heavy slowly responding camera. Using different settings
/// per axis can yield a wide range of camera behaviors.</summary>
[Range(0f, 20f)]
[Tooltip("How aggressively the camera tries to maintain the offset in the X-axis. Small numbers are more responsive, rapidly translating the camera to keep the target's x-axis offset. Larger numbers give a more heavy slowly responding camera. Using different settings per axis can yield a wide range of camera behaviors.")]
public float m_XDamping = 1f;
/// <summary>How aggressively the camera tries to maintain the offset in the Y-axis.
/// Small numbers are more responsive, rapidly translating the camera to keep the target's y-axis
/// offset. Larger numbers give a more heavy slowly responding camera. Using different settings
/// per axis can yield a wide range of camera behaviors.</summary>
[Range(0f, 20f)]
[Tooltip("How aggressively the camera tries to maintain the offset in the Z-axis. Small numbers are more responsive, rapidly translating the camera to keep the target's y-axis offset. Larger numbers give a more heavy slowly responding camera. Using different settings per axis can yield a wide range of camera behaviors.")]
public float m_YDamping = 1f;
/// <summary>How aggressively the camera tries to maintain the offset in the Z-axis.
/// Small numbers are more responsive, rapidly translating the camera to keep the target's z-axis
/// offset. Larger numbers give a more heavy slowly responding camera. Using different settings
/// per axis can yield a wide range of camera behaviors.</summary>
[Range(0f, 20f)]
[Tooltip("How aggressively the camera tries to maintain the offset in the Z-axis. Small numbers are more responsive, rapidly translating the camera to keep the target's z-axis offset. Larger numbers give a more heavy slowly responding camera. Using different settings per axis can yield a wide range of camera behaviors.")]
public float m_ZDamping = 1f;
/// <summary>How the damping values will be interpreted</summary>
[DocumentationSorting(6.01f, DocumentationSortingAttribute.Level.UserRef)]
public enum DampingStyle
{
/// <summary>X, Y, and Z are along the local axes of the target</summary>
Cartesian,
/// <summary>X and Y are rotations around the target in the horizontal
/// and vertical directions, respectively. Z is the distance from the target.</summary>
Polar
}
[Tooltip("How the damping values will be interpreted. Polar will attempt to preserve a constant distance from the target, subject to Z damping")]
public DampingStyle m_DampingStyle = DampingStyle.Polar;
/// <summary>Additional Y rotation applied to the target heading.
/// When this value is 0, the camera will be placed behind the target</summary>
[Range(-180f, 180f)]
[Tooltip("Where the camera is placed when the X-axis value is zero. This is a rotation in degrees around the Y axis. When this value is 0, the camera will be placed behind the target. Nonzero offsets will rotate the zero position around the target.")]
public float m_HeadingBias = 0;
/// <summary>
/// Axis state for defining how
/// this CinemachineOrbitalTransposer reacts to player input.
/// The settings here control the responsiveness of the axis to player input.
/// </summary>
[DocumentationSorting(6.1f, DocumentationSortingAttribute.Level.UserRef)]
[Serializable]
public struct AxisState
{
/// <summary>The current position on the axis</summary>
[NoSaveDuringPlay]
[Tooltip("The current value of the axis.")]
public float Value;
/// <summary>How fast the axis value can travel. Increasing this number
/// makes the behaviour more responsive to joystick input</summary>
[Tooltip("The maximum speed of this axis in units/second")]
public float m_MaxSpeed;
/// <summary>The amount of time in seconds it takes to accelerate to
/// MaxSpeed with the supplied Axis at its maximum value</summary>
[Tooltip("The amount of time in seconds it takes to accelerate to MaxSpeed with the supplied Axis at its maximum value")]
public float m_AccelTime;
/// <summary>The amount of time in seconds it takes to decelerate
/// the axis to zero if the supplied axis is in a neutral position</summary>
[Tooltip("The amount of time in seconds it takes to decelerate the axis to zero if the supplied axis is in a neutral position")]
public float m_DecelTime;
/// <summary>The name of this axis as specified in Unity Input manager.
/// Setting to an empty string will disable the automatic updating of this axis</summary>
[FormerlySerializedAs("m_AxisName")]
[Tooltip("The name of this axis as specified in Unity Input manager. Setting to an empty string will disable the automatic updating of this axis")]
public string m_InputAxisName;
/// <summary>The value of the input axis. A value of 0 means no input.
/// You can drive this directly from a
/// custom input system, or you can set the Axis Name and have the value
/// driven by the internal Input Manager</summary>
[NoSaveDuringPlay]
[Tooltip("The value of the input axis. A value of 0 means no input. You can drive this directly from a custom input system, or you can set the Axis Name and have the value driven by the internal Input Manager")]
public float m_InputAxisValue;
private float mCurrentSpeed;
private float mMinValue;
private float mMaxValue;
private bool mWrapAround;
/// <summary>Constructor with specific values</summary>
public AxisState(float maxSpeed, float accelTime, float decelTime, float val, string name)
{
m_MaxSpeed = maxSpeed;
m_AccelTime = accelTime;
m_DecelTime = decelTime;
Value = val;
m_InputAxisName = name;
m_InputAxisValue = 0;
mCurrentSpeed = 0f;
mMinValue = 0f;
mMaxValue = 0f;
mWrapAround = false;
}
/// <summary>
/// Sets the constraints by which this axis will operate on
/// </summary>
/// <param name="minValue">The lowest value this axis can achieve</param>
/// <param name="maxValue">The highest value this axis can achieve</param>
/// <param name="wrapAround">If <b>TRUE</b>, values commanded greater
/// than mMaxValue or less than mMinValue will wrap around.
/// If <b>FALSE</b>, the value will be clamped within the range.</param>
public void SetThresholds(float minValue, float maxValue, bool wrapAround)
{
mMinValue = minValue;
mMaxValue = maxValue;
mWrapAround = wrapAround;
}
/// <summary>
/// Updates the state of this axis based on the axis defined
/// by AxisState.m_AxisName
/// </summary>
/// <param name="dt">Delta time in seconds</param>
/// <param name="invertAxisInput">If <b>TRUE</b>, inverts the value of the axis.
/// Otherwise, the value is not modified</param>
/// <returns>Returns <b>TRUE</b> if this axis' input was non-zero this Update,
/// <b>FALSE</b> otherwise</returns>
public bool Update(float dt, bool invertAxisInput)
{
if (!string.IsNullOrEmpty(m_InputAxisName))
{
try
{
m_InputAxisValue = CinemachineCore.GetInputAxis(m_InputAxisName);
}
catch (ArgumentException e)
{
Debug.LogError(e.ToString());
}
}
float input = m_InputAxisValue;
if (invertAxisInput)
input *= -1f;
float absInput = Mathf.Abs(input);
bool axisNonZero = absInput > UnityVectorExtensions.Epsilon;
// Test to see if we're commanding a speed faster than we are going
float accelTime = Mathf.Max(0.001f, m_AccelTime);
if (axisNonZero && (absInput >= Mathf.Abs(mCurrentSpeed / m_MaxSpeed)))
{
if (m_MaxSpeed > UnityVectorExtensions.Epsilon)
mCurrentSpeed += ((m_MaxSpeed / accelTime) * input) * dt;
}
else
{
// Otherwise brake
// TODO: Can the fluctuation between these two cause nasty behaviour? Must monitor..
float decelTime = Mathf.Max(0.001f, m_DecelTime);
float reduction = Mathf.Sign(mCurrentSpeed) * (m_MaxSpeed / decelTime) * dt;
mCurrentSpeed = (Mathf.Abs(reduction) >= Mathf.Abs(mCurrentSpeed))
? 0f : (mCurrentSpeed - reduction);
}
// Clamp our max speeds so we don't go crazy
float maxSpeed = GetMaxSpeed();
mCurrentSpeed = Mathf.Clamp(mCurrentSpeed, -maxSpeed, maxSpeed);
Value += mCurrentSpeed * dt;
bool isOutOfRange = (Value > mMaxValue) || (Value < mMinValue);
if (isOutOfRange)
{
if (mWrapAround)
{
if (Value > mMaxValue)
Value = mMinValue + (Value - mMaxValue);
else
Value = mMaxValue + (Value - mMinValue);
}
else
{
Value = Mathf.Clamp(Value, mMinValue, mMaxValue);
mCurrentSpeed = 0f;
}
}
return axisNonZero;
}
// MaxSpeed may be limited as we approach the range ends, in order
// to prevent a hard bump
private float GetMaxSpeed()
{
float range = mMaxValue - mMinValue;
if (!mWrapAround && range > 0)
{
float threshold = range / 10f;
if (mCurrentSpeed > 0 && (mMaxValue - Value) < threshold)
{
float t = (mMaxValue - Value) / threshold;
return Mathf.Lerp(0, m_MaxSpeed, t);
}
else if (mCurrentSpeed < 0 && (Value - mMinValue) < threshold)
{
float t = (Value - mMinValue) / threshold;
return Mathf.Lerp(0, m_MaxSpeed, t);
}
}
return m_MaxSpeed;
}
}
/// <summary>Axis representing the current heading. Value is in degrees
/// and represents a rotation about the up vector</summary>
[Tooltip("Heading Control. The settings here control the behaviour of the camera in response to the player's input.")]
public AxisState m_XAxis = new AxisState(3000f, 2f, 1f, 0f, "Mouse X");
/// <summary>Controls how automatic orbit recentering occurs</summary>
[DocumentationSorting(6.2f, DocumentationSortingAttribute.Level.UserRef)]
[Serializable]
public struct Recentering
{
/// <summary>If checked, will enable automatic recentering of the
/// camera based on the heading calculation mode. If FALSE, recentering is disabled.</summary>
[Tooltip("If checked, will enable automatic recentering of the camera based on the heading definition. If unchecked, recentering is disabled.")]
public bool m_enabled;
/// <summary>If no input has been detected, the camera will wait
/// this long in seconds before moving its heading to the default heading.</summary>
[Tooltip("If no input has been detected, the camera will wait this long in seconds before moving its heading to the zero position.")]
public float m_RecenterWaitTime;
/// <summary>Maximum angular speed of recentering. Will accelerate into and decelerate out of this</summary>
[Tooltip("Maximum angular speed of recentering. Will accelerate into and decelerate out of this.")]
public float m_RecenteringTime;
/// <summary>
/// Sets the algorithm for determining the target's heading for purposes
/// of re-centering the camera
/// </summary>
[DocumentationSorting(6.21f, DocumentationSortingAttribute.Level.UserRef)]
public enum HeadingDerivationMode
{
/// <summary>
/// Target heading calculated from the difference between its position on
/// the last update and current frame.
/// </summary>
PositionDelta,
/// <summary>
/// Target heading calculated from its <b>Rigidbody</b>'s velocity.
/// If no <b>Rigidbody</b> exists, it will fall back
/// to HeadingDerivationMode.PositionDelta
/// </summary>
Velocity,
/// <summary>
/// Target heading calculated from the Target <b>Transform</b>'s euler Y angle
/// </summary>
TargetForward,
/// <summary>
/// Default heading is a constant world space heading.
/// </summary>
WorldForward,
}
/// <summary>The method by which the 'default heading' is calculated if
/// recentering to target heading is enabled</summary>
[Tooltip("How 'forward' is defined. The camera will be placed by default behind the target. PositionDelta will consider 'forward' to be the direction in which the target is moving.")]
[FormerlySerializedAs("m_HeadingDerivationMode")]
public HeadingDerivationMode m_HeadingDefinition;
/// <summary>Size of the velocity sampling window for target heading filter.
/// Used only if deriving heading from target's movement</summary>
[Range(0, 10)]
[Tooltip("Size of the velocity sampling window for target heading filter. This filters out irregularities in the target's movement. Used only if deriving heading from target's movement (PositionDelta or Velocity)")]
public int m_VelocityFilterStrength;
/// <summary>Constructor with specific field values</summary>
public Recentering(
bool enabled, float recenterWaitTime, float recenteringSpeed,
HeadingDerivationMode headingDerivationMode,
int velocityFilterStrength)
{
m_enabled = enabled;
m_RecenterWaitTime = recenterWaitTime;
m_RecenteringTime = recenteringSpeed;
m_HeadingDefinition = headingDerivationMode;
m_VelocityFilterStrength = velocityFilterStrength;
}
};
/// <summary>Parameters that control Automating Heading Recentering</summary>
[Tooltip("Automatic heading recentering. The settings here defines how the camera will reposition itself in the absence of player input.")]
public Recentering m_RecenterToTargetHeading
= new Recentering(true, 1, 2, Recentering.HeadingDerivationMode.TargetForward, 4);
/// <summary>
/// Damping speeds for each of the 3 axes of the offset from target
/// </summary>
public Vector3 TrackingSpeeds
{
get
{
return new Vector3(m_XDamping, m_YDamping, m_ZDamping)
* kHumanReadableTrackingSpeedScalar;
}
}
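// Note: the inspector damping values are scaled by kHumanReadableTrackingSpeedScalar (0.1),
// so, for example, an m_XDamping of 5 corresponds to a tracking speed of 0.5 on that axis.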
/// <summary>
/// Drive the x-axis setting programmatically.
/// Automatic heading updating will be disabled.
/// </summary>
[HideInInspector, NoSaveDuringPlay]
public bool m_HeadingIsSlave = false;
/// <summary>
/// In Slave mode, set the heading by calling this.
/// </summary>
public void SetXAxisState(AxisState state)
{
m_XAxis = state;
}
/// <summary>
/// When not in slave mode, this should be called once and only
/// once every frame to update the heading.
/// </summary>
public void UpdateHeading(float deltaTime, Vector3 up, bool invertAxisInput)
{
if (deltaTime <= 0)
{
mHeadingRecenteringVelocity = 0;
if (m_RecenterToTargetHeading.m_enabled)
m_XAxis.Value = GetTargetHeading(m_XAxis.Value, up, deltaTime);
}
else
{
// Only read joystick when game is playing
bool xAxisInput = m_XAxis.Update(deltaTime, invertAxisInput);
if (xAxisInput)
{
mLastHeadingAxisInputTime = Time.time;
mHeadingRecenteringVelocity = 0;
}
// Recentering!
if (m_RecenterToTargetHeading.m_enabled
&& (Time.time > (mLastHeadingAxisInputTime
+ m_RecenterToTargetHeading.m_RecenterWaitTime)))
{
// Scale value determined heuristically, to account for accel/decel
float recenterTime = m_RecenterToTargetHeading.m_RecenteringTime / 3f;
float targetHeading = GetTargetHeading(m_XAxis.Value, up, deltaTime);
if (recenterTime <= deltaTime)
m_XAxis.Value = targetHeading;
else
{
float headingError = Mathf.DeltaAngle(m_XAxis.Value, targetHeading);
float absHeadingError = Mathf.Abs(headingError);
float scale = deltaTime / recenterTime;
float desiredVelocity = Mathf.Sign(headingError)
* Mathf.Min(absHeadingError, absHeadingError * scale);
// Accelerate to the desired velocity
float accel = desiredVelocity - mHeadingRecenteringVelocity;
if ((desiredVelocity < 0 && accel < 0) || (desiredVelocity > 0 && accel > 0))
desiredVelocity = mHeadingRecenteringVelocity + desiredVelocity * scale;
m_XAxis.Value += desiredVelocity;
mHeadingRecenteringVelocity = desiredVelocity;
}
}
}
}
/// <summary>Internal API for FreeLook, so that it can interpolate radius</summary>
public bool UseOffsetOverride { get; set; }
/// <summary>Internal API for FreeLook, so that it can interpolate radius</summary>
public Vector3 OffsetOverride { get; set; }
Vector3 EffectiveOffset(Vector3 up)
{
if (UseOffsetOverride)
return OffsetOverride;
return up * m_HeightOffset + GetBackVector(up) * m_Radius;
}
/// <summary>True if component is enabled and has a valid Follow target</summary>
public bool IsValid
{ get { return enabled && VirtualCamera.Follow != null; } }
/// <summary>Get the Cinemachine Virtual Camera affected by this component</summary>
public ICinemachineCamera VirtualCamera
{ get { return gameObject.transform.parent.gameObject.GetComponent<ICinemachineCamera>(); } }
/// <summary>Get the Cinemachine Pipeline stage that this component implements.
/// Always returns the Body stage</summary>
public CinemachineCore.Stage Stage { get { return CinemachineCore.Stage.Body; } }
/// <summary>Positions the virtual camera according to the transposer rules.</summary>
/// <param name="curState">The current camera state</param>
/// <param name="statePrevFrame">The camera state on the previous frame (unused)</param>
/// <param name="deltaTime">Used for damping. If 0 or less, no damping is done.</param>
/// <returns>curState with new RawPosition</returns>
public CameraState MutateCameraState(
CameraState curState, CameraState statePrevFrame, float deltaTime)
{
if (!IsValid)
return curState;
CameraState newState = curState;
newState.RawPosition = DoTracking(statePrevFrame.RawPosition, newState.ReferenceUp, deltaTime);
return newState;
}
private void OnEnable()
{
m_XAxis.SetThresholds(0f, 360f, true);
PreviousTarget = null;
mLastTargetPosition = Vector3.zero;
}
private const float kHumanReadableTrackingSpeedScalar = 0.1f;
private float mLastHeadingAxisInputTime = 0f;
private float mHeadingRecenteringVelocity = 0f;
private Vector3 mLastTargetPosition = Vector3.zero;
HeadingTracker mHeadingTracker;
private Rigidbody mTargetRigidBody = null;
private Transform PreviousTarget { get; set; }
Vector3 DoTracking(Vector3 currentPosition, Vector3 up, float deltaTime)
{
if (VirtualCamera.Follow == null)
return currentPosition;
if (VirtualCamera.Follow != PreviousTarget)
{
PreviousTarget = VirtualCamera.Follow;
mTargetRigidBody = VirtualCamera.Follow.GetComponent<Rigidbody>();
mLastTargetPosition = VirtualCamera.Follow.position;
mHeadingTracker = null;
}
// Heading
if (!m_HeadingIsSlave)
UpdateHeading(deltaTime, up, true);
mLastTargetPosition = VirtualCamera.Follow.position;
// Where to put the camera
Vector3 localTarget = EffectiveOffset(up);
localTarget = Quaternion.AngleAxis(m_XAxis.Value + m_HeadingBias, up) * localTarget;
// Adjust for damping, which is done in local coords
if (deltaTime > 0)
{
if (m_DampingStyle == DampingStyle.Polar)
{
// Get the offset in polar
Vector3 localCurrent = currentPosition - VirtualCamera.Follow.position;
Vector3 currentOnPlane = localCurrent.ProjectOntoPlane(up);
Vector3 currentPerpPlane = localCurrent - currentOnPlane;
Vector3 targetOnPlane = localTarget.ProjectOntoPlane(up);
Vector3 targetPerpPlane = localTarget - targetOnPlane;
Vector3 delta = new Vector3(
UnityVectorExtensions.SignedAngle(currentOnPlane, targetOnPlane, up),
Vector3.Dot(targetPerpPlane - currentPerpPlane, up),
(targetOnPlane.magnitude - currentOnPlane.magnitude));
// Apply damping
Vector3 trackingSpeeds = TrackingSpeeds;
for (int i = 0; i < 3; ++i)
delta[i] *= deltaTime / Mathf.Max(trackingSpeeds[i], deltaTime);
localTarget = currentOnPlane;
localTarget += (localTarget.normalized * delta.z);
localTarget += currentPerpPlane + (delta.y * up);
localTarget = Quaternion.AngleAxis(delta.x, up) * localTarget;
}
else
{
Vector3 worldOffset = currentPosition - (VirtualCamera.Follow.position + localTarget);
Quaternion localToWorldTransform = Quaternion.LookRotation(
VirtualCamera.Follow.rotation * Vector3.forward, up);
Vector3 localOffset = Quaternion.Inverse(localToWorldTransform) * worldOffset;
Vector3 trackingSpeeds = TrackingSpeeds;
for (int i = 0; i < 3; ++i)
localOffset[i] *= deltaTime / Mathf.Max(trackingSpeeds[i], deltaTime);
return currentPosition - (localToWorldTransform * localOffset);
}
}
// Return the adjusted rig position
return VirtualCamera.Follow.position + localTarget;
}
Vector3 GetBackVector(Vector3 up)
{
Vector3 fwd = Vector3.Cross(Vector3.up, up);
if (fwd.AlmostZero())
return Vector3.back;
return Vector3.Cross(up, fwd).normalized;
}
static string GetFullName(GameObject current)
{
if (current == null)
return "";
if (current.transform.parent == null)
return "/" + current.name;
return GetFullName(current.transform.parent.gameObject) + "/" + current.name;
}
private float GetTargetHeading(float currentHeading, Vector3 up, float deltaTime)
{
if (VirtualCamera.Follow == null)
return currentHeading;
if (m_RecenterToTargetHeading.m_HeadingDefinition
== Recentering.HeadingDerivationMode.Velocity
&& mTargetRigidBody == null)
{
Debug.Log(string.Format(
"Attempted to use HeadingDerivationMode.Velocity to calculate heading for {0}. No RigidBody was present on '{1}'. Defaulting to position delta",
GetFullName(VirtualCamera.VirtualCameraGameObject), VirtualCamera.Follow));
m_RecenterToTargetHeading.m_HeadingDefinition = Recentering.HeadingDerivationMode.PositionDelta;
}
Vector3 velocity = Vector3.zero;
switch (m_RecenterToTargetHeading.m_HeadingDefinition)
{
case Recentering.HeadingDerivationMode.PositionDelta:
velocity = VirtualCamera.Follow.position - mLastTargetPosition;
break;
case Recentering.HeadingDerivationMode.Velocity:
velocity = mTargetRigidBody.velocity;
break;
default:
case Recentering.HeadingDerivationMode.TargetForward:
return VirtualCamera.Follow.rotation.eulerAngles.y;
case Recentering.HeadingDerivationMode.WorldForward:
return 0;
}
// Process the velocity and derive the heading from it.
int filterSize = m_RecenterToTargetHeading.m_VelocityFilterStrength * 5;
if (mHeadingTracker == null || mHeadingTracker.FilterSize != filterSize)
mHeadingTracker = new HeadingTracker(filterSize);
mHeadingTracker.DecayHistory();
velocity = velocity.ProjectOntoPlane(up);
if (!velocity.AlmostZero())
mHeadingTracker.Add(velocity);
velocity = mHeadingTracker.GetReliableHeading();
if (!velocity.AlmostZero())
{
Vector3 fwd = (-GetBackVector(up)).ProjectOntoPlane(up);
return UnityVectorExtensions.SignedAngle(fwd, velocity, up);
}
// If no reliable heading, then stay where we are.
return currentHeading;
}
class HeadingTracker
{
struct Item
{
public Vector3 velocity;
public float weight;
public float time;
};
Item[] mHistory;
int mTop;
int mBottom;
int mCount;
Vector3 mHeadingSum;
float mWeightSum = 0;
float mWeightTime = 0;
Vector3 mLastGoodHeading = Vector3.zero;
public HeadingTracker(int filterSize)
{
mHistory = new Item[filterSize];
float historyHalfLife = filterSize / 5f; // chosen somewhat arbitrarily
mDecayExponent = -Mathf.Log(2f) / historyHalfLife;
ClearHistory();
}
public int FilterSize { get { return mHistory.Length; } }
void ClearHistory()
{
mTop = mBottom = mCount = 0;
mWeightSum = 0;
mHeadingSum = Vector3.zero;
}
static float mDecayExponent;
static float Decay(float time) { return Mathf.Exp(time * mDecayExponent); }
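// Worked example of the decay curve: a filter size of 20 gives a half-life of 20/5 = 4,
// so Decay(4) = exp(-ln(2)) = 0.5 and a sample's weight halves for every 4 seconds of age
// (the time arguments come from Time.time, which is in seconds).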
public void Add(Vector3 velocity)
{
if (FilterSize == 0)
{
mLastGoodHeading = velocity;
return;
}
float weight = velocity.magnitude;
if (weight > UnityVectorExtensions.Epsilon)
{
Item item = new Item();
item.velocity = velocity;
item.weight = weight;
item.time = Time.time;
if (mCount == FilterSize)
PopBottom();
++mCount;
mHistory[mTop] = item;
if (++mTop == FilterSize)
mTop = 0;
mWeightSum *= Decay(item.time - mWeightTime);
mWeightTime = item.time;
mWeightSum += weight;
mHeadingSum += item.velocity;
}
}
void PopBottom()
{
if (mCount > 0)
{
float time = Time.time;
Item item = mHistory[mBottom];
if (++mBottom == FilterSize)
mBottom = 0;
--mCount;
float decay = Decay(time - item.time);
mWeightSum -= item.weight * decay;
mHeadingSum -= item.velocity * decay;
if (mWeightSum <= UnityVectorExtensions.Epsilon || mCount == 0)
ClearHistory();
}
}
public void DecayHistory()
{
float time = Time.time;
float decay = Decay(time - mWeightTime);
mWeightSum *= decay;
mWeightTime = time;
if (mWeightSum < UnityVectorExtensions.Epsilon)
ClearHistory();
else
mHeadingSum = mHeadingSum * decay;
}
public Vector3 GetReliableHeading()
{
// Update Last Good Heading
if (mWeightSum > UnityVectorExtensions.Epsilon
&& (mCount == mHistory.Length || mLastGoodHeading.AlmostZero()))
{
Vector3 h = mHeadingSum / mWeightSum;
if (!h.AlmostZero())
mLastGoodHeading = h.normalized;
}
return mLastGoodHeading;
}
}
}
}
| |
// Copyright 1998-2015 Epic Games, Inc. All Rights Reserved.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
#if !__MonoCS__
using System.Deployment.Application;
#endif
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Text;
using System.Threading;
using System.Windows.Forms;
using System.Runtime.InteropServices;
using System.Xml;
using System.Xml.Serialization;
using AgentInterface;
using UnrealControls;
namespace Agent
{
public partial class SwarmAgentWindow : Form
{
public enum DialogFont
{
Consolas,
Tahoma
}
/**
* Container class for a prioritised line of text
*/
public class LogLine
{
public EVerbosityLevel Verbosity;
public ELogColour Colour;
public string Line;
public LogLine( EVerbosityLevel InVerbosity, ELogColour InColour, string InLine )
{
Verbosity = InVerbosity;
Colour = InColour;
Line = InLine;
}
}
/**
* Link up to the advanced (and quick) text box
*/
private OutputWindowDocument MainLogDoc = new OutputWindowDocument();
/*
* The container class for progression data
*/
private Progressions ProgressionData = null;
/**
* The main GUI window
*/
public SwarmAgentWindow()
{
InitializeComponent();
this.BarToolTip = new System.Windows.Forms.ToolTip();
this.BarToolTip.AutoPopDelay = 60000;
this.BarToolTip.InitialDelay = 500;
this.BarToolTip.ReshowDelay = 0;
this.BarToolTip.ShowAlways = true; // Force the ToolTip text to be displayed whether or not the form is active.
AgentApplication.Options = ReadOptions<SettableOptions>( "SwarmAgent.Options.xml" );
AgentApplication.Options.PostLoad();
AgentApplication.DeveloperOptions = ReadOptions<SettableDeveloperOptions>( "SwarmAgent.DeveloperOptions.xml" );
AgentApplication.DeveloperOptions.PostLoad();
CreateBarColours( this );
LogOutputWindowView.Document = MainLogDoc;
SettingsPropertyGrid.SelectedObject = AgentApplication.Options;
DeveloperSettingsPropertyGrid.SelectedObject = AgentApplication.DeveloperOptions;
UpdateWindowState();
// Set the title bar to include the name of the machine and the group
TopLevelControl.Text = "Swarm Agent running on " + Environment.MachineName;
LogOutputWindowView.Refresh();
}
public void SelectVisualizerTab()
{
AgentTabs.SelectedTab = VisualiserTab;
}
public void SaveOptions()
{
SaveWindowState();
AgentApplication.Options.PreSave();
WriteOptions<SettableOptions>( AgentApplication.Options, "SwarmAgent.Options.xml" );
AgentApplication.DeveloperOptions.PreSave();
WriteOptions<SettableDeveloperOptions>( AgentApplication.DeveloperOptions, "SwarmAgent.DeveloperOptions.xml" );
}
public void Destroy()
{
SaveOptions();
Dispose();
}
/**
* Default logger that output strings to the console and debug stream
* with the timestamp added to the beginning of every message
*/
delegate void DelegateLog( LogLine Line );
public void Log (LogLine Line)
{
if (Line == null) {
return;
}
// if we need to, invoke the delegate
if (InvokeRequired) {
Invoke (new DelegateLog (Log), new object[] { Line });
return;
}
DateTime Now = DateTime.Now;
string FullLine = Now.ToLongTimeString () + ": " + Line.Line;
// translate the colour specified into an actual Color
Color col;
switch (Line.Colour)
{
case ELogColour.Blue:
col = Color.DarkBlue;
break;
case ELogColour.Orange:
col = Color.Orange;
break;
case ELogColour.Red:
col = Color.Red;
break;
default:
col = Color.DarkGreen;
break;
}
MainLogDoc.AppendLine( col, FullLine );
Debug.WriteLineIf( System.Diagnostics.Debugger.IsAttached, FullLine );
}
delegate void DelegateClearLog();
public void ClearLog()
{
if (IsHandleCreated)
{
if (InvokeRequired)
{
Invoke(new DelegateClearLog(ClearLog));
}
else
{
MainLogDoc.Clear();
}
}
}
/**
* Process a timing event received from Swarm
*/
delegate void DelegateProcessProgressionEvent( ProgressionEvent Event );
public void ProcessProgressionEvent( ProgressionEvent Event )
{
if( Event == null )
{
return;
}
// if we need to, invoke the delegate
if( InvokeRequired )
{
Invoke( new DelegateProcessProgressionEvent( ProcessProgressionEvent ), new object[] { Event } );
return;
}
if( Event.State == EProgressionState.InstigatorConnected )
{
ProgressionData = new Progressions();
OverallProgressBar.Invalidate();
}
if( ProgressionData != null )
{
if( ProgressionData.ProcessEvent( Event ) )
{
VisualiserGridViewResized = true;
VisualiserGridView.Invalidate();
OverallProgressBar.Invalidate();
}
}
}
public void Tick()
{
if( ProgressionData != null )
{
PopulateGridView();
if( ProgressionData.Tick() )
{
VisualiserGridView.Invalidate();
}
}
}
protected void XmlSerializer_UnknownAttribute( object sender, XmlAttributeEventArgs e )
{
}
protected void XmlSerializer_UnknownNode( object sender, XmlNodeEventArgs e )
{
}
private T ReadOptions<T>( string OptionsFileName ) where T : new()
{
T Instance = new T();
Stream XmlStream = null;
try
{
string BaseDirectory;
#if !__MonoCS__
if( ApplicationDeployment.IsNetworkDeployed )
{
ApplicationDeployment Deploy = ApplicationDeployment.CurrentDeployment;
BaseDirectory = Deploy.DataDirectory;
}
else
#endif
if (AgentApplication.OptionsFolder.Length > 0)
{
// An options folder was specified on the command line
BaseDirectory = AgentApplication.OptionsFolder;
}
else
{
BaseDirectory = Application.StartupPath;
}
string FullPath = Path.Combine( BaseDirectory, OptionsFileName );
// Get the XML data stream to read from
XmlStream = new FileStream( FullPath, FileMode.Open, FileAccess.Read, FileShare.None, 256 * 1024, false );
// Creates an instance of the XmlSerializer class so we can read the settings object
XmlSerializer ObjSer = new XmlSerializer( typeof( T ) );
// Add our callbacks for a busted XML file
ObjSer.UnknownNode += new XmlNodeEventHandler( XmlSerializer_UnknownNode );
ObjSer.UnknownAttribute += new XmlAttributeEventHandler( XmlSerializer_UnknownAttribute );
// Create an object graph from the XML data
Instance = ( T )ObjSer.Deserialize( XmlStream );
}
catch( Exception E )
{
Debug.WriteLineIf( System.Diagnostics.Debugger.IsAttached, E.Message );
}
finally
{
if( XmlStream != null )
{
// Done with the file so close it
XmlStream.Close();
}
}
return ( Instance );
}
private void WriteOptions<T>( T Data, string OptionsFileName )
{
#if !__MonoCS__ // @todo Mac
lock( Data )
#endif
{
Stream XmlStream = null;
try
{
string BaseDirectory;
#if !__MonoCS__
if( ApplicationDeployment.IsNetworkDeployed )
{
ApplicationDeployment Deploy = ApplicationDeployment.CurrentDeployment;
BaseDirectory = Deploy.DataDirectory;
}
else
#endif
if (AgentApplication.OptionsFolder.Length > 0)
{
// An options folder was specified on the command line
BaseDirectory = AgentApplication.OptionsFolder;
}
else
{
BaseDirectory = Application.StartupPath;
}
string FullPath = Path.Combine( BaseDirectory, OptionsFileName );
XmlStream = new FileStream( FullPath, FileMode.Create, FileAccess.Write, FileShare.None, 256 * 1024, false );
XmlSerializer ObjSer = new XmlSerializer( typeof( T ) );
// Add our callbacks for a busted XML file
ObjSer.UnknownNode += new XmlNodeEventHandler( XmlSerializer_UnknownNode );
ObjSer.UnknownAttribute += new XmlAttributeEventHandler( XmlSerializer_UnknownAttribute );
ObjSer.Serialize( XmlStream, Data );
}
catch( Exception E )
{
Debug.WriteLineIf( System.Diagnostics.Debugger.IsAttached, E.Message );
}
finally
{
if( XmlStream != null )
{
// Done with the file so close it
XmlStream.Close();
}
}
}
}
private void UpdateFonts()
{
// Set the requested font
LogOutputWindowView.Font = new Font( AgentApplication.Options.TextFont.ToString(), 9F );
VisualiserGridView.Font = LogOutputWindowView.Font;
SettingsPropertyGrid.Font = LogOutputWindowView.Font;
LogOutputWindowView.Refresh();
}
private void SaveWindowState()
{
AgentApplication.Options.WindowLocation = Location;
AgentApplication.Options.WindowSize = Size;
AgentApplication.Options.AgentTabIndex = AgentTabs.SelectedIndex;
}
private void DeveloperMenuItemVisibilityChanged( Object Sender, EventArgs Args )
{
if( DeveloperMenuItem.Visible )
{
AgentTabs.TabPages.Insert( DeveloperSettingsTab.TabIndex, DeveloperSettingsTab );
}
else
{
AgentTabs.TabPages.RemoveAt( DeveloperSettingsTab.TabIndex );
}
}
private void UpdateWindowState()
{
if( AgentApplication.Options.WindowLocation != new Point( 0, 0 ) )
{
Location = AgentApplication.Options.WindowLocation;
}
if( AgentApplication.Options.WindowSize != new Size( 0, 0 ) )
{
Size = AgentApplication.Options.WindowSize;
}
// Adjust the window location and size, if off-screen
Rectangle VirtualScreenBounds = SystemInformation.VirtualScreen;
Point WindowTL = AgentApplication.Options.WindowLocation;
Point WindowBR = AgentApplication.Options.WindowLocation + AgentApplication.Options.WindowSize;
WindowBR.X = Math.Min( WindowBR.X, VirtualScreenBounds.Right );
WindowBR.Y = Math.Min( WindowBR.Y, VirtualScreenBounds.Bottom );
WindowTL.X = Math.Max( WindowBR.X - AgentApplication.Options.WindowSize.Width, VirtualScreenBounds.Left );
WindowTL.Y = Math.Max( WindowBR.Y - AgentApplication.Options.WindowSize.Height, VirtualScreenBounds.Top );
Location = WindowTL;
Size = new Size( WindowBR.X - WindowTL.X, WindowBR.Y - WindowTL.Y );
// If the hidden developer menu should be shown, show it
DeveloperMenuItem.VisibleChanged += new EventHandler( DeveloperMenuItemVisibilityChanged );
DeveloperMenuItem.Visible = AgentApplication.Options.ShowDeveloperMenu;
AgentTabs.SelectedIndex = AgentApplication.Options.AgentTabIndex;
UpdateFonts();
}
private void ClickExitMenu( object sender, EventArgs e )
{
Hide();
AgentApplication.RequestQuit();
}
private void ShowSwarmAgentWindow( object sender, MouseEventArgs e )
{
AgentApplication.ShowWindow = true;
}
private void SwarmAgentWindowClosing( object sender, FormClosingEventArgs e )
{
if (e.CloseReason == CloseReason.UserClosing)
{
e.Cancel = true;
Hide();
}
else
{
AgentApplication.RequestQuit();
}
}
private void CancelButtonClick( object sender, EventArgs e )
{
Hide();
}
private void EditClearClick( object sender, EventArgs e )
{
MainLogDoc.Clear();
}
private void MenuAboutClick( object sender, EventArgs e )
{
using( UnrealAboutBox About = new UnrealAboutBox( this.Icon, null ) )
{
#if DEBUG
About.Text = "About Swarm Agent (Debug Build)";
#else
About.Text = "About Swarm Agent";
#endif
About.ShowDialog( this );
}
}
private void OptionsValueChanged( object s, PropertyValueChangedEventArgs e )
{
GridItem ChangedProperty = e.ChangedItem;
if( ChangedProperty != null )
{
Type T = ChangedProperty.Value.GetType();
if( T == typeof( SwarmAgentWindow.DialogFont ) )
{
UpdateFonts();
}
else if( T == typeof( Color ) )
{
CreateBarColours( this );
}
else if( ( ChangedProperty.Label == "EnableStandaloneMode" ) ||
( ChangedProperty.Label == "AgentGroupName" ) )
{
AgentApplication.RequestPingCoordinator();
}
else if( ChangedProperty.Label == "CoordinatorRemotingHost" )
{
AgentApplication.RequestInitCoordinator();
}
else if( ChangedProperty.Label == "ShowDeveloperMenu" )
{
DeveloperMenuItem.Visible = ( bool )ChangedProperty.Value;
}
else if( ChangedProperty.Label == "CacheFolder" )
{
AgentApplication.RequestCacheRelocation();
}
// Always write out the latest options when anything changes
SaveOptions();
}
}
private void CacheClearClick( object sender, EventArgs e )
{
AgentApplication.RequestCacheClear();
}
private void CacheValidateClick( object sender, EventArgs e )
{
AgentApplication.RequestCacheValidation();
}
private void NetworkPingCoordinatorMenuItem_Click( object sender, EventArgs e )
{
if( AgentApplication.Options.EnableStandaloneMode )
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Not pinging coordinator, standalone mode enabled" );
}
else
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Pinging Coordinator..." );
if( AgentApplication.RequestPingCoordinator() )
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Coordinator has responded normally" );
}
else
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Orange, "[Network] Coordinator has failed to respond" );
}
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Coordinator ping complete" );
}
}
private void NetworkPingRemoteAgentsMenuItem_Click( object sender, EventArgs e )
{
if( AgentApplication.Options.EnableStandaloneMode )
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Not pinging remote agents, standalone mode enabled" );
}
else
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Pinging remote agents..." );
AgentApplication.RequestPingRemoteAgents();
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Remote Agent ping complete" );
}
}
private void DeveloperRestartQAAgentsMenuItem_Click( object sender, EventArgs e )
{
if( AgentApplication.Options.EnableStandaloneMode )
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Not restarting QA agents, standalone mode enabled" );
}
else
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Restarting QA agents..." );
AgentApplication.RequestRestartQAAgents();
}
}
private void DeveloperRestartWorkerAgentsMenuItem_Click( object sender, EventArgs e )
{
if( AgentApplication.Options.EnableStandaloneMode )
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Not restarting worker agents, standalone mode enabled" );
}
else
{
AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Restarting worker agents..." );
AgentApplication.RequestRestartWorkerAgents();
}
}
}
static partial class AgentApplication
{
#if !__MonoCS__
[DllImport( "user32.dll" )]
private static extern bool SetForegroundWindow( IntPtr hWnd );
#endif
/**
* The class containing all the client settable options
*
* Serialised in on construction of the window, out on destruction
*/
public static SettableOptions Options = null;
public static SettableDeveloperOptions DeveloperOptions = null;
/**
* Thread used to update the GUI
*/
private static Thread ProcessGUIThread = null;
/*
* Thread safe way of caching log lines until the GUI thread is ready for them
*/
private static ReaderWriterQueue<SwarmAgentWindow.LogLine> LogLines = null;
/*
* Thread safe way of caching progression events until the GUI thread is ready for them
*/
private static ReaderWriterQueue<SwarmAgentWindow.ProgressionEvent> ProgressionEvents = null;
/*
* Event to let the main thread know that the GUI thread is ready
*/
private static ManualResetEvent GUIInit = null;
/*
* Set to false when an exit is requested
*/
private static bool Ticking = false;
/*
* Set to true when the cache location has been modified
*/
private static bool CacheRelocationRequested = false;
/*
* Set to true when a cache clear is requested
*/
private static bool CacheClearRequested = false;
/*
* Set to true when a cache validate is requested
*/
private static bool CacheValidateRequested = false;
/*
* Set to true when you want the window to pop up front and center
*/
public static bool ShowWindow = false;
/*
* The folder that contains options for swarm. If empty, options will be located next to the executable.
*/
public static string OptionsFolder = "";
/*
* Variables private to the GUI thread
*/
private static SwarmAgentWindow MainWindow = null;
/*
* The current log file
*/
private static StreamWriter LogFile = null;
/**
* A synchronization object for the log file.
*/
private static Object LogFileLock = new Object();
public static void StartNewLogFile()
{
lock (LogFileLock)
{
// Close any existing stream first
if (LogFile != null)
{
LogFile.Close();
LogFile = null;
}
// Open a new log file marked by the UTC time
string LogDirectory = Path.Combine(AgentApplication.Options.CacheFolder, "Logs");
try
{
if (!Directory.Exists(LogDirectory))
{
// Create the directory
Directory.CreateDirectory(LogDirectory);
}
}
catch (Exception Ex)
{
Log(EVerbosityLevel.Verbose, ELogColour.Red, "[StartNewLogFile] Error: " + Ex.Message);
}
if (Directory.Exists(LogDirectory))
{
string LogFileName = Path.Combine(LogDirectory, "AgentLog_" + DateTime.UtcNow.ToFileTimeUtc().ToString() + ".log");
LogFile = new StreamWriter(LogFileName);
LogFile.AutoFlush = true;
}
}
}
public static void ClearLogWindow()
{
if (MainWindow.InvokeRequired)
{
MainWindow.Invoke((MethodInvoker)(() => MainWindow.ClearLog()));
}
else
{
MainWindow.ClearLog();
}
}
private static void ProcessThreadQueues( TimeSpan MaximumExecutionTime )
{
DateTime TimeLimit = DateTime.UtcNow + MaximumExecutionTime;
while (DateTime.UtcNow < TimeLimit)
{
// Just do one at a time
if( ProgressionEvents.Count > 0 )
{
MainWindow.ProcessProgressionEvent( ProgressionEvents.Dequeue() );
}
// Handle all progression messages first, then just do one at a time
else if( LogLines.Count > 0 )
{
MainWindow.Log( LogLines.Dequeue() );
}
else
{
break;
}
}
MainWindow.Tick();
}
/**
* Main GUI update thread
*/
private static void ProcessGUIThreadProc()
{
MainWindow = new SwarmAgentWindow();
LogLines = new ReaderWriterQueue<SwarmAgentWindow.LogLine>();
ProgressionEvents = new ReaderWriterQueue<SwarmAgentWindow.ProgressionEvent>();
StartNewLogFile();
Ticking = true;
GUIInit.Set();
TimeSpan LoopIterationTime = TimeSpan.FromMilliseconds(100);
while( Ticking )
{
Stopwatch SleepTimer = Stopwatch.StartNew();
ProcessThreadQueues( LoopIterationTime );
if( ShowWindow )
{
#if !__MonoCS__
SetForegroundWindow( MainWindow.Handle );
#endif
MainWindow.SelectVisualizerTab();
MainWindow.Show();
ShowWindow = false;
}
Application.DoEvents();
TimeSpan SleepTime = LoopIterationTime - SleepTimer.Elapsed;
if( SleepTime.TotalMilliseconds > 0 )
{
Thread.Sleep( SleepTime );
}
}
MainWindow.Destroy();
}
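/**
 * Filters a log line by verbosity, writes it to the log file if one is open, and queues it
 * for the GUI thread. Illustrative call (this exact form appears in the menu handlers above):
 *   AgentApplication.Log( EVerbosityLevel.Informative, ELogColour.Green, "[Network] Pinging Coordinator..." );
 */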
public static void Log( EVerbosityLevel Verbosity, ELogColour TextColour, string Line )
{
// Only consider the line if it's not the highest level of verbosity
// unless the highest level of verbosity is what is being asked for
if( ( EVerbosityLevel.SuperVerbose != Verbosity ) ||
( EVerbosityLevel.SuperVerbose == AgentApplication.Options.Verbosity ) )
{
bool bShouldLogToConsole = Verbosity <= AgentApplication.Options.Verbosity;
lock (LogFileLock)
{
// Log the line out to the file, if it exists
if (LogFile != null)
{
LogFile.WriteLine(Line);
}
else
{
// If the file doesn't exist, always log to the console
bShouldLogToConsole = true;
}
}
if( bShouldLogToConsole )
{
LogLines.Enqueue( new SwarmAgentWindow.LogLine( Verbosity, TextColour, Line ) );
}
}
}
public static void UpdateMachineState( string Machine, int ThreadNum, EProgressionState NewState )
{
ProgressionEvents.Enqueue( new SwarmAgentWindow.ProgressionEvent( Machine, ThreadNum, NewState ) );
}
public static void RequestQuit()
{
Ticking = false;
}
public static void RequestCacheRelocation()
{
CacheRelocationRequested = true;
}
public static void RequestCacheClear()
{
CacheClearRequested = true;
}
public static void RequestCacheValidation()
{
CacheValidateRequested = true;
}
public static bool RequestPingCoordinator()
{
return LocalAgent.PingCoordinator( true );
}
public static void RequestInitCoordinator()
{
LocalAgent.InitCoordinator();
}
public static void RequestPingRemoteAgents()
{
LocalAgent.PingRemoteAgents( AgentApplication.Options.AllowedRemoteAgentGroup );
}
public static void RequestRestartQAAgents()
{
LocalAgent.RestartAgentGroup( "QATestGroup" );
}
public static void RequestRestartWorkerAgents()
{
LocalAgent.RestartAgentGroup( "DefaultDeployed" );
}
private static void ParseArgs(string[] args)
{
foreach (string arg in args)
{
if ( arg.StartsWith("-OptionsFolder=") )
{
OptionsFolder = arg.Substring("-OptionsFolder=".Length);
}
}
}
private static void InitGUIThread()
{
GUIInit = new ManualResetEvent( false );
ThreadStart ThreadStartProcessGUI = new ThreadStart( ProcessGUIThreadProc );
ProcessGUIThread = new Thread( ThreadStartProcessGUI );
ProcessGUIThread.Name = "ProcessGUIThread";
ProcessGUIThread.SetApartmentState( ApartmentState.STA );
ProcessGUIThread.Start();
GUIInit.WaitOne();
}
}
}
| |
//
// LogicalTerm.cs
//
// Author:
// Stephane Delcroix <[email protected]>
//
// Copyright (C) 2008 Novell, Inc.
// Copyright (C) 2008 Stephane Delcroix
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using Hyena;
using FSpot.Core;
namespace FSpot.Query
{
public abstract class LogicalTerm : IQueryCondition
{
public abstract string SqlClause ();
}
public class TagTerm : LogicalTerm, IDisposable
{
Tag tag;
public Tag Tag {
get { return tag; }
}
public TagTerm (Tag tag)
{
this.tag = tag;
}
public override string SqlClause ()
{
return SqlClause (this);
}
internal static string SqlClause (params TagTerm [] tags)
{
List<string> list = new List<string> (tags.Length);
foreach (TagTerm tag in tags)
list.Add (tag.Tag.Id.ToString ());
return SqlClause (list.ToArray ());
}
private static string SqlClause (string [] tagids)
{
if (tagids.Length == 0)
return null;
if (tagids.Length == 1)
return String.Format (" (photos.id IN (SELECT photo_id FROM photo_tags WHERE tag_id = {0})) ", tagids[0]);
else
return String.Format (" (photos.id IN (SELECT photo_id FROM photo_tags WHERE tag_id IN ({0}))) ", String.Join (", ", tagids));
}
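// Example of the generated clause (illustrative tag ids): for ids 3 and 7 this yields
//  (photos.id IN (SELECT photo_id FROM photo_tags WHERE tag_id IN (3, 7)))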
public void Dispose ()
{
if (tag != null)
tag.Dispose ();
System.GC.SuppressFinalize (this);
}
~TagTerm ()
{
Log.DebugFormat ("Finalizer called on {0}. Should be Disposed", GetType ());
if (tag != null)
tag.Dispose ();
}
}
public class TextTerm : LogicalTerm
{
string text;
public string Text {
get { return text; }
}
string field;
public string Field {
get { return field; }
}
public TextTerm (string text, string field)
{
this.text = text;
this.field = field;
}
public static OrTerm SearchMultiple (string text, params string[] fields)
{
List<TextTerm> terms = new List<TextTerm> (fields.Length);
foreach (string field in fields)
terms.Add (new TextTerm (text, field));
return new OrTerm (terms.ToArray ());
}
public override string SqlClause ()
{
return String.Format (" {0} LIKE '%{1}%' ", field, text);
}
}
public class NotTerm : LogicalTerm
{
LogicalTerm term;
public LogicalTerm Term {
get { return term; }
}
public NotTerm (LogicalTerm term)
{
this.term = term;
}
public override string SqlClause ()
{
return String.Format (" NOT ({0}) ", term.SqlClause ());
}
}
public abstract class NAryOperator : LogicalTerm
{
protected List<LogicalTerm> terms;
public LogicalTerm[] Terms {
get { return terms.ToArray (); }
}
protected string [] ToStringArray ()
{
List<string> ls = new List<string> (terms.Count);
foreach (LogicalTerm term in terms)
ls.Add (term.SqlClause ());
return ls.ToArray ();
}
public static string SqlClause (string op, string[] items)
{
if (items.Length == 1)
return items [0];
else
return " (" + String.Join (String.Format (" {0} ", op), items) + ") ";
}
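// For example, SqlClause ("OR", new [] {"a", "b", "c"}) returns " (a OR b OR c) ",
// while a single item is returned unwrapped.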
}
public class OrTerm : NAryOperator
{
public OrTerm (params LogicalTerm[] terms)
{
this.terms = new List<LogicalTerm> (terms.Length);
foreach (LogicalTerm term in terms)
Add (term);
}
private void Add (LogicalTerm term)
{
if (term is OrTerm)
foreach (LogicalTerm t in (term as OrTerm).terms)
Add (t);
else
terms.Add (term);
}
public override string SqlClause ()
{
List<TagTerm> tagterms = new List<TagTerm> ();
List<string> otherterms = new List<string> ();
foreach (LogicalTerm term in terms)
if (term is TagTerm)
tagterms.Add (term as TagTerm);
else
otherterms.Add (term.SqlClause ());
string tagClause = TagTerm.SqlClause (tagterms.ToArray ());
if (tagClause != null)
otherterms.Insert (0, tagClause);
return SqlClause ("OR", otherterms.ToArray ());
}
}
public class AndTerm : NAryOperator
{
public AndTerm (params LogicalTerm[] terms)
{
this.terms = new List<LogicalTerm> (terms.Length);
foreach (LogicalTerm term in terms)
Add (term);
}
private void Add (LogicalTerm term)
{
if (term is AndTerm)
foreach (LogicalTerm t in (term as AndTerm).terms)
Add (t);
else
terms.Add (term);
}
public override string SqlClause ()
{
return SqlClause ("AND", ToStringArray ());
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Table;
using Microsoft.Extensions.Logging;
using Orleans.AzureUtils.Utilities;
using Orleans.Reminders.AzureStorage;
using Orleans.Internal;
using Orleans.Configuration;
namespace Orleans.Runtime.ReminderService
{
internal class ReminderTableEntry : TableEntity
{
public string GrainReference { get; set; } // Part of RowKey
public string ReminderName { get; set; } // Part of RowKey
public string ServiceId { get; set; } // Part of PartitionKey
public string DeploymentId { get; set; }
public string StartAt { get; set; }
public string Period { get; set; }
public string GrainRefConsistentHash { get; set; } // Part of PartitionKey
public static string ConstructRowKey(GrainReference grainRef, string reminderName)
{
var key = string.Format("{0}-{1}", grainRef.ToKeyString(), reminderName);
return AzureTableUtils.SanitizeTableProperty(key);
}
public static (string LowerBound, string UpperBound) ConstructRowKeyBounds(GrainReference grainRef)
{
var baseKey = AzureTableUtils.SanitizeTableProperty(grainRef.ToKeyString());
return (baseKey + '-', baseKey + (char)('-' + 1));
}
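// Example (illustrative key string, assuming no characters need sanitizing): for a grain key
// of "abc" the bounds are ("abc-", "abc."), since '.' is the character immediately after '-',
// so every row key of the form "abc-<reminderName>" sorts strictly between them.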
public static string ConstructPartitionKey(string serviceId, GrainReference grainRef)
{
return ConstructPartitionKey(serviceId, grainRef.GetUniformHashCode());
}
public static string ConstructPartitionKey(string serviceId, uint number)
{
// IMPORTANT NOTE: Other code using this return data is very sensitive to format changes,
// so take great care when making any changes here!!!
// this format of partition key makes sure that the comparisons in FindReminderEntries(begin, end) work correctly
// the idea is that when converting to string, negative numbers start with 0, and positive start with 1. Now,
// when comparisons will be done on strings, this will ensure that positive numbers are always greater than negative
// string grainHash = number < 0 ? string.Format("0{0}", number.ToString("X")) : string.Format("1{0:d16}", number);
return AzureTableUtils.SanitizeTableProperty($"{serviceId}_{number:X8}");
}
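// Example of the resulting key format (assuming the service id needs no sanitizing):
// serviceId "svc" with grain hash 0x0000002A produces the partition key "svc_0000002A".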
public static (string LowerBound, string UpperBound) ConstructPartitionKeyBounds(string serviceId)
{
var baseKey = AzureTableUtils.SanitizeTableProperty(serviceId);
return (baseKey + '_', baseKey + (char)('_' + 1));
}
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("Reminder [");
sb.Append(" PartitionKey=").Append(PartitionKey);
sb.Append(" RowKey=").Append(RowKey);
sb.Append(" GrainReference=").Append(GrainReference);
sb.Append(" ReminderName=").Append(ReminderName);
sb.Append(" Deployment=").Append(DeploymentId);
sb.Append(" ServiceId=").Append(ServiceId);
sb.Append(" StartAt=").Append(StartAt);
sb.Append(" Period=").Append(Period);
sb.Append(" GrainRefConsistentHash=").Append(GrainRefConsistentHash);
sb.Append("]");
return sb.ToString();
}
}
internal class RemindersTableManager : AzureTableDataManager<ReminderTableEntry>
{
public string ServiceId { get; private set; }
public string ClusterId { get; private set; }
public static async Task<RemindersTableManager> GetManager(string serviceId, string clusterId, ILoggerFactory loggerFactory, AzureStorageOperationOptions options)
{
var singleton = new RemindersTableManager(serviceId, clusterId, options, loggerFactory);
try
{
singleton.Logger.Info("Creating RemindersTableManager for service id {0} and clusterId {1}.", serviceId, clusterId);
await singleton.InitTableAsync();
}
catch (Exception ex)
{
string errorMsg = $"Exception trying to create or connect to the Azure table: {ex.Message}";
singleton.Logger.Error((int)AzureReminderErrorCode.AzureTable_39, errorMsg, ex);
throw new OrleansException(errorMsg, ex);
}
return singleton;
}
private RemindersTableManager(
string serviceId,
string clusterId,
AzureStorageOperationOptions options,
ILoggerFactory loggerFactory)
: base(options, loggerFactory.CreateLogger<RemindersTableManager>())
{
ClusterId = clusterId;
ServiceId = serviceId;
}
internal async Task<List<Tuple<ReminderTableEntry, string>>> FindReminderEntries(uint begin, uint end)
{
// TODO: Determine whether or not a single query could be used here while avoiding a table scan
string sBegin = ReminderTableEntry.ConstructPartitionKey(ServiceId, begin);
string sEnd = ReminderTableEntry.ConstructPartitionKey(ServiceId, end);
var (partitionKeyLowerBound, partitionKeyUpperBound) = ReminderTableEntry.ConstructPartitionKeyBounds(ServiceId);
string filterOnServiceIdStr = TableQuery.CombineFilters(
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.GreaterThan, partitionKeyLowerBound),
TableOperators.And,
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.LessThan, partitionKeyUpperBound));
if (begin < end)
{
string filterBetweenBeginAndEnd = TableQuery.CombineFilters(
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.GreaterThan, sBegin),
TableOperators.And,
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.LessThanOrEqual,
sEnd));
string query = TableQuery.CombineFilters(filterOnServiceIdStr, TableOperators.And, filterBetweenBeginAndEnd);
var queryResults = await ReadTableEntriesAndEtagsAsync(query);
return queryResults.ToList();
}
if (begin == end)
{
var queryResults = await ReadTableEntriesAndEtagsAsync(filterOnServiceIdStr);
return queryResults.ToList();
}
// (begin > end)
string queryOnSBegin = TableQuery.CombineFilters(
filterOnServiceIdStr,
TableOperators.And,
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.GreaterThan, sBegin));
string queryOnSEnd = TableQuery.CombineFilters(
filterOnServiceIdStr,
TableOperators.And,
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.LessThanOrEqual, sEnd));
var resultsOnSBeginQuery = ReadTableEntriesAndEtagsAsync(queryOnSBegin);
var resultsOnSEndQuery = ReadTableEntriesAndEtagsAsync(queryOnSEnd);
IEnumerable<Tuple<ReminderTableEntry, string>>[] results = await Task.WhenAll(resultsOnSBeginQuery, resultsOnSEndQuery);
return results[0].Concat(results[1]).ToList();
}
internal async Task<List<Tuple<ReminderTableEntry, string>>> FindReminderEntries(GrainReference grainRef)
{
var partitionKey = ReminderTableEntry.ConstructPartitionKey(ServiceId, grainRef);
var (rowKeyLowerBound, rowKeyUpperBound) = ReminderTableEntry.ConstructRowKeyBounds(grainRef);
string filter = TableQuery.CombineFilters(
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.RowKey), QueryComparisons.GreaterThan, rowKeyLowerBound),
TableOperators.And,
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.RowKey), QueryComparisons.LessThan, rowKeyUpperBound));
string query =
TableQuery.CombineFilters(
TableQuery.GenerateFilterCondition(nameof(ReminderTableEntry.PartitionKey), QueryComparisons.Equal, partitionKey),
TableOperators.And,
filter);
var queryResults = await ReadTableEntriesAndEtagsAsync(query);
return queryResults.ToList();
}
internal async Task<Tuple<ReminderTableEntry, string>> FindReminderEntry(GrainReference grainRef, string reminderName)
{
string partitionKey = ReminderTableEntry.ConstructPartitionKey(ServiceId, grainRef);
string rowKey = ReminderTableEntry.ConstructRowKey(grainRef, reminderName);
return await ReadSingleTableEntryAsync(partitionKey, rowKey);
}
private Task<List<Tuple<ReminderTableEntry, string>>> FindAllReminderEntries()
{
return FindReminderEntries(0, 0);
}
internal async Task<string> UpsertRow(ReminderTableEntry reminderEntry)
{
try
{
return await UpsertTableEntryAsync(reminderEntry);
}
catch(Exception exc)
{
HttpStatusCode httpStatusCode;
string restStatus;
if (AzureTableUtils.EvaluateException(exc, out httpStatusCode, out restStatus))
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("UpsertRow failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
if (AzureTableUtils.IsContentionError(httpStatusCode)) return null; // false;
}
throw;
}
}
internal async Task<bool> DeleteReminderEntryConditionally(ReminderTableEntry reminderEntry, string eTag)
{
try
{
await DeleteTableEntryAsync(reminderEntry, eTag);
return true;
}catch(Exception exc)
{
HttpStatusCode httpStatusCode;
string restStatus;
if (AzureTableUtils.EvaluateException(exc, out httpStatusCode, out restStatus))
{
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("DeleteReminderEntryConditionally failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
if (AzureTableUtils.IsContentionError(httpStatusCode)) return false;
}
throw;
}
}
internal async Task DeleteTableEntries()
{
List<Tuple<ReminderTableEntry, string>> entries = await FindAllReminderEntries();
// return manager.DeleteTableEntries(entries); // this doesn't work as entries can be across partitions, which is not allowed
// group by grain hashcode so each query goes to different partition
var tasks = new List<Task>();
var groupedByHash = entries
.Where(tuple => tuple.Item1.ServiceId.Equals(ServiceId))
.Where(tuple => tuple.Item1.DeploymentId.Equals(ClusterId)) // delete only entries that belong to our DeploymentId.
.GroupBy(x => x.Item1.GrainRefConsistentHash).ToDictionary(g => g.Key, g => g.ToList());
foreach (var entriesPerPartition in groupedByHash.Values)
{
foreach (var batch in entriesPerPartition.BatchIEnumerable(this.StoragePolicyOptions.MaxBulkUpdateRows))
{
tasks.Add(DeleteTableEntriesAsync(batch));
}
}
await Task.WhenAll(tasks);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using OpenMetaverse;
using OpenMetaverse.Packets;
using System;
using System.Collections.Generic;
using System.Reflection;
namespace OpenSim.Region.ClientStack.LindenUDP
{
public sealed class PacketPool
{
private static readonly PacketPool instance = new PacketPool();
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private static Dictionary<Type, Stack<Object>> DataBlocks = new Dictionary<Type, Stack<Object>>();
/// <summary>
/// Pool of packets available for reuse.
/// </summary>
private readonly Dictionary<PacketType, Stack<Packet>> pool = new Dictionary<PacketType, Stack<Packet>>();
private PacketPool()
{
// defaults
RecyclePackets = true;
RecycleDataBlocks = true;
}
public static PacketPool Instance
{
get { return instance; }
}
/// <summary>
/// The number of blocks pooled.
/// </summary>
public int BlocksPooled
{
get
{
lock (DataBlocks)
return DataBlocks.Count;
}
}
/// <summary>
/// Number of packet blocks requested.
/// </summary>
public long BlocksRequested { get; private set; }
/// <summary>
/// Number of packet blocks reused.
/// </summary>
public long BlocksReused { get; private set; }
/// <summary>
/// The number of packets pooled
/// </summary>
public int PacketsPooled
{
get
{
lock (pool)
return pool.Count;
}
}
/// <summary>
/// Number of packets requested.
/// </summary>
public long PacketsRequested { get; private set; }
/// <summary>
/// Number of packets reused.
/// </summary>
public long PacketsReused { get; private set; }
public bool RecycleDataBlocks { get; set; }
public bool RecyclePackets { get; set; }
public T GetDataBlock<T>() where T : new()
{
lock (DataBlocks)
{
BlocksRequested++;
Stack<Object> s;
if (DataBlocks.TryGetValue(typeof(T), out s))
{
if (s.Count > 0)
{
BlocksReused++;
return (T)s.Pop();
}
}
else
{
DataBlocks[typeof(T)] = new Stack<Object>();
}
return new T();
}
}
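// Illustrative usage of the data block pool (the block type below is one of the types
// recycled in ReturnPacket):
//   ObjectUpdatePacket.ObjectDataBlock block = PacketPool.Instance.GetDataBlock<ObjectUpdatePacket.ObjectDataBlock>();
//   ... populate the block and build the packet ...
//   PacketPool.Instance.ReturnDataBlock(block);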
/// <summary>
/// Gets a packet of the given type.
/// </summary>
/// <param name='type'></param>
/// <returns>Guaranteed to always return a packet, whether from the pool or newly constructed.</returns>
public Packet GetPacket(PacketType type)
{
PacketsRequested++;
Packet packet;
if (!RecyclePackets)
return Packet.BuildPacket(type);
lock (pool)
{
if (!pool.ContainsKey(type) || pool[type] == null || (pool[type]).Count == 0)
{
// m_log.DebugFormat("[PACKETPOOL]: Building {0} packet", type);
// Create a new packet if we cannot reuse an old packet
packet = Packet.BuildPacket(type);
}
else
{
// m_log.DebugFormat("[PACKETPOOL]: Pulling {0} packet", type);
// Recycle old packets
PacketsReused++;
packet = pool[type].Pop();
}
}
return packet;
}
public Packet GetPacket(byte[] bytes, ref int packetEnd, byte[] zeroBuffer)
{
PacketType type = GetType(bytes);
// Array.Clear(zeroBuffer, 0, zeroBuffer.Length);
int i = 0;
Packet packet = GetPacket(type);
if (packet == null)
m_log.WarnFormat("[PACKETPOOL]: Failed to get packet of type {0}", type);
else
packet.FromBytes(bytes, ref i, ref packetEnd, zeroBuffer);
return packet;
}
public void ReturnDataBlock<T>(T block) where T : new()
{
if (block == null)
return;
lock (DataBlocks)
{
if (!DataBlocks.ContainsKey(typeof(T)))
DataBlocks[typeof(T)] = new Stack<Object>();
if (DataBlocks[typeof(T)].Count < 50)
DataBlocks[typeof(T)].Push(block);
}
}
/// <summary>
/// Return a packet to the packet pool
/// </summary>
/// <param name="packet"></param>
public void ReturnPacket(Packet packet)
{
if (RecycleDataBlocks)
{
switch (packet.Type)
{
case PacketType.ObjectUpdate:
ObjectUpdatePacket oup = (ObjectUpdatePacket)packet;
foreach (ObjectUpdatePacket.ObjectDataBlock oupod in oup.ObjectData)
ReturnDataBlock<ObjectUpdatePacket.ObjectDataBlock>(oupod);
oup.ObjectData = null;
break;
case PacketType.ImprovedTerseObjectUpdate:
ImprovedTerseObjectUpdatePacket itoup = (ImprovedTerseObjectUpdatePacket)packet;
foreach (ImprovedTerseObjectUpdatePacket.ObjectDataBlock itoupod in itoup.ObjectData)
ReturnDataBlock<ImprovedTerseObjectUpdatePacket.ObjectDataBlock>(itoupod);
itoup.ObjectData = null;
break;
}
}
if (RecyclePackets)
{
switch (packet.Type)
{
// List pooling packets here
case PacketType.AgentUpdate:
case PacketType.PacketAck:
case PacketType.ObjectUpdate:
case PacketType.ImprovedTerseObjectUpdate:
lock (pool)
{
PacketType type = packet.Type;
if (!pool.ContainsKey(type))
{
pool[type] = new Stack<Packet>();
}
if ((pool[type]).Count < 50)
{
// m_log.DebugFormat("[PACKETPOOL]: Pushing {0} packet", type);
pool[type].Push(packet);
}
}
break;
// Other packet types are not pooled
default:
return;
}
}
}
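/// <summary>
/// Determine the packet type from the message-number bytes of a raw LLUDP packet, as read by
/// the code below: a 0xFF 0xFF prefix at offset 6 marks a Low-frequency packet with a 16-bit id,
/// a single 0xFF marks Medium with an 8-bit id, and anything else is High with an 8-bit id.
/// Byte 0 carries the zero-coding flag that affects how the Low-frequency id is read.
/// </summary>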
private static PacketType GetType(byte[] bytes)
{
ushort id;
PacketFrequency freq;
bool isZeroCoded = (bytes[0] & Helpers.MSG_ZEROCODED) != 0;
if (bytes[6] == 0xFF)
{
if (bytes[7] == 0xFF)
{
freq = PacketFrequency.Low;
if (isZeroCoded && bytes[8] == 0)
id = bytes[10];
else
id = (ushort)((bytes[8] << 8) + bytes[9]);
}
else
{
freq = PacketFrequency.Medium;
id = bytes[7];
}
}
else
{
freq = PacketFrequency.High;
id = bytes[6];
}
return Packet.GetType(id, freq);
}
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Symbology.dll
// Description: Contains the business logic for symbology layers and symbol categories.
// ********************************************************************************************************
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 10/11/2009 11:43:53 AM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using DotSpatial.Data;
using DotSpatial.Serialization;
namespace DotSpatial.Symbology
{
/// <summary>
/// Scheme with colors support.
/// </summary>
[Serializable]
public class ColorScheme : Scheme, IColorScheme
{
#region Private Variables
private ColorCategoryCollection _categories;
private float _opacity;
#endregion
#region Constructors
/// <summary>
/// Creates a new instance of ColorScheme
/// </summary>
public ColorScheme()
{
Configure();
}
/// <summary>
/// Creates a new instance of a color scheme using a predefined color scheme and the minimum and maximum specified
/// from the raster itself
/// </summary>
/// <param name="schemeType">The predefined scheme to use</param>
/// <param name="raster">The raster to obtain the minimum and maximum settings from</param>
public ColorScheme(ColorSchemeType schemeType, IRaster raster)
{
Configure();
ApplyScheme(schemeType, raster);
}
/// <summary>
/// This creates a new scheme, applying the specified color scheme, and using the minimum and maximum values indicated.
/// </summary>
/// <param name="schemeType">The predefined color scheme</param>
/// <param name="min">The minimum</param>
/// <param name="max">The maximum</param>
public ColorScheme(ColorSchemeType schemeType, double min, double max)
{
Configure();
ApplyScheme(schemeType, min, max);
}
private void Configure()
{
_categories = new ColorCategoryCollection(this);
_opacity = 1;
EditorSettings = new RasterEditorSettings();
}
#endregion
#region Methods
/// <summary>
/// Applies the specified color scheme and uses the specified raster to define the
/// minimum and maximum to use for the scheme.
/// </summary>
/// <param name="schemeType"></param>
/// <param name="raster"></param>
public void ApplyScheme(ColorSchemeType schemeType, IRaster raster)
{
double min, max;
if (!raster.IsInRam)
{
GetValues(raster);
min = Statistics.Minimum;
max = Statistics.Maximum;
}
else
{
min = raster.Minimum;
max = raster.Maximum;
}
ApplyScheme(schemeType, min, max);
}
/// <summary>
/// Creates a new category using the specified fill color
/// </summary>
/// <param name="fillColor">The base color to use for creating the category</param>
/// <param name="size">For points this is the larger dimension, for lines this is the largest width</param>
/// <returns>A new IFeatureCategory that matches the type of this scheme</returns>
public override ICategory CreateNewCategory(Color fillColor, double size)
{
return new ColorCategory(null, null, fillColor, fillColor);
}
/// <summary>
/// Creates the categories for this scheme based on statistics and values
/// sampled from the specified raster.
/// </summary>
/// <param name="raster">The raster to use when creating categories</param>
public void CreateCategories(IRaster raster)
{
GetValues(raster);
CreateBreakCategories();
OnItemChanged(this);
}
/// <summary>
/// Gets the values from the raster. If MaxSampleCount is less than the
/// number of cells, then it randomly samples the raster with MaxSampleCount
/// values. Otherwise it gets all the values in the raster.
/// </summary>
/// <param name="raster">The raster to sample</param>
public void GetValues(IRaster raster)
{
Values = raster.GetRandomValues(EditorSettings.MaxSampleCount);
var keepers = Values.Where(val => val != raster.NoDataValue).ToList();
Values = keepers;
Statistics.Calculate(Values, raster.Minimum, raster.Maximum);
}
/// <summary>
/// Applies the specified color scheme and uses the specified raster to define the
/// minimum and maximum to use for the scheme.
/// </summary>
/// <param name="schemeType">ColorSchemeType</param>
/// <param name="min">THe minimum value to use for the scheme</param>
/// <param name="max">THe maximum value to use for the scheme</param>
public void ApplyScheme(ColorSchemeType schemeType, double min, double max)
{
if (Categories == null)
{
Categories = new ColorCategoryCollection(this);
}
else
{
Categories.Clear();
}
IColorCategory eqCat = null, low = null, high = null;
if (min == max)
{
// Create one category
eqCat = new ColorCategory(min, max) {Range = {MaxIsInclusive = true, MinIsInclusive = true}};
eqCat.ApplyMinMax(EditorSettings);
Categories.Add(eqCat);
}
else
{
// Create two categories
low = new ColorCategory(min, (min + max) / 2) {Range = {MaxIsInclusive = true}};
high = new ColorCategory((min + max) / 2, max) {Range = {MaxIsInclusive = true}};
low.ApplyMinMax(EditorSettings);
high.ApplyMinMax(EditorSettings);
Categories.Add(low);
Categories.Add(high);
}
Color lowColor, midColor, highColor;
int alpha = Utils.ByteRange(Convert.ToInt32(_opacity * 255F));
switch (schemeType)
{
case ColorSchemeType.Summer_Mountains:
lowColor = Color.FromArgb(alpha, 10, 100, 10);
midColor = Color.FromArgb(alpha, 153, 125, 25);
highColor = Color.FromArgb(alpha, 255, 255, 255);
break;
case ColorSchemeType.FallLeaves:
lowColor = Color.FromArgb(alpha, 10, 100, 10);
midColor = Color.FromArgb(alpha, 199, 130, 61);
highColor = Color.FromArgb(alpha, 241, 220, 133);
break;
case ColorSchemeType.Desert:
lowColor = Color.FromArgb(alpha, 211, 206, 97);
midColor = Color.FromArgb(alpha, 139, 120, 112);
highColor = Color.FromArgb(alpha, 255, 255, 255);
break;
case ColorSchemeType.Glaciers:
lowColor = Color.FromArgb(alpha, 105, 171, 224);
midColor = Color.FromArgb(alpha, 162, 234, 240);
highColor = Color.FromArgb(alpha, 255, 255, 255);
break;
case ColorSchemeType.Meadow:
lowColor = Color.FromArgb(alpha, 68, 128, 71);
midColor = Color.FromArgb(alpha, 43, 91, 30);
highColor = Color.FromArgb(alpha, 167, 220, 168);
break;
case ColorSchemeType.Valley_Fires:
lowColor = Color.FromArgb(alpha, 164, 0, 0);
midColor = Color.FromArgb(alpha, 255, 128, 64);
highColor = Color.FromArgb(alpha, 255, 255, 191);
break;
case ColorSchemeType.DeadSea:
lowColor = Color.FromArgb(alpha, 51, 137, 208);
midColor = Color.FromArgb(alpha, 226, 227, 166);
highColor = Color.FromArgb(alpha, 151, 146, 117);
break;
case ColorSchemeType.Highway:
lowColor = Color.FromArgb(alpha, 51, 137, 208);
midColor = Color.FromArgb(alpha, 214, 207, 124);
highColor = Color.FromArgb(alpha, 54, 152, 69);
break;
default:
lowColor = midColor = highColor = Color.Transparent;
break;
}
if (eqCat != null)
{
eqCat.LowColor = eqCat.HighColor = lowColor;
}
else
{
Debug.Assert(low != null);
Debug.Assert(high != null);
low.LowColor = lowColor;
low.HighColor = midColor;
high.LowColor = midColor;
high.HighColor = highColor;
}
OnItemChanged(this);
}
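// Illustrative usage sketch (assumption, not part of this class): building a scheme for
// a raster layer. The raster variable is hypothetical; ApplyScheme splits [min, max]
// into two categories (or one when min == max) and colors them from the chosen ramp.
//
//   IRaster raster = ...;                                  // obtained elsewhere
//   ColorScheme scheme = new ColorScheme(ColorSchemeType.Desert, raster);
//   scheme.Opacity = 0.8f;                                 // alpha applied to ramp colors
//   scheme.ApplyScheme(ColorSchemeType.Glaciers, raster);  // switch to another preset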
#endregion
#region Properties
/// <summary>
/// Gets or sets the floating point value for the opacity
/// </summary>
[Serialize("Opacity")]
public float Opacity
{
get { return _opacity; }
set { _opacity = value; }
}
/// <summary>
/// Gets or sets the raster categories
/// </summary>
[Serialize("Categories")]
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public ColorCategoryCollection Categories
{
get { return _categories; }
set
{
if (_categories != null) _categories.Scheme = null;
_categories = value;
if (_categories != null) _categories.Scheme = this;
}
}
/// <summary>
/// Gets or sets the raster editor settings associated with this scheme.
/// </summary>
[Serialize("EditorSettings")]
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public new RasterEditorSettings EditorSettings
{
get { return base.EditorSettings as RasterEditorSettings; }
set { base.EditorSettings = value; }
}
/// <summary>
/// Uses the settings on this scheme to create a random category.
/// </summary>
/// <returns>A new IFeatureCategory</returns>
public override ICategory CreateRandomCategory()
{
Random rnd = new Random(DateTime.Now.Millisecond);
return CreateNewCategory(CreateRandomColor(rnd), 20);
}
/// <summary>
/// Occurs when setting the parent item and updates the parent item pointers
/// </summary>
/// <param name="value"></param>
protected override void OnSetParentItem(ILegendItem value)
{
base.OnSetParentItem(value);
_categories.UpdateItemParentPointers();
}
#endregion
#region IColorScheme Members
/// <summary>
/// Draws the category in the specified location.
/// </summary>
/// <param name="index"></param>
/// <param name="g"></param>
/// <param name="bounds"></param>
public override void DrawCategory(int index, Graphics g, Rectangle bounds)
{
_categories[index].LegendSymbol_Painted(g, bounds);
}
/// <summary>
/// Adds the specified category
/// </summary>
/// <param name="category"></param>
public override void AddCategory(ICategory category)
{
IColorCategory cc = category as IColorCategory;
if (cc != null) _categories.Add(cc);
}
/// <summary>
/// Attempts to decrease the index value of the specified category, and returns
/// true if the move was successful.
/// </summary>
/// <param name="category">The category to decrease the index of</param>
/// <returns></returns>
public override bool DecreaseCategoryIndex(ICategory category)
{
IColorCategory cc = category as IColorCategory;
return cc != null && _categories.DecreaseIndex(cc);
}
/// <summary>
/// Removes the specified category
/// </summary>
/// <param name="category"></param>
public override void RemoveCategory(ICategory category)
{
IColorCategory cc = category as IColorCategory;
if (cc != null) _categories.Remove(cc);
}
/// <summary>
/// Inserts the item at the specified index
/// </summary>
/// <param name="index"></param>
/// <param name="category"></param>
public override void InsertCategory(int index, ICategory category)
{
IColorCategory cc = category as IColorCategory;
if (cc != null) _categories.Insert(index, cc);
}
/// <summary>
/// Attempts to increase the position of the specified category, and returns true
/// if the index increase was successful.
/// </summary>
/// <param name="category">The category to increase the position of</param>
/// <returns>Boolean, true if the item's position was increased</returns>
public override bool IncreaseCategoryIndex(ICategory category)
{
IColorCategory cc = category as IColorCategory;
return cc != null && _categories.IncreaseIndex(cc);
}
/// <summary>
/// Suspends the change item event from firing as the list is being changed
/// </summary>
public override void SuspendEvents()
{
_categories.SuspendEvents();
}
/// <summary>
/// Allows the ChangeItem event to get passed on when changes are made
/// </summary>
public override void ResumeEvents()
{
_categories.ResumeEvents();
}
/// <summary>
/// Clears the categories
/// </summary>
public override void ClearCategories()
{
_categories.Clear();
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
namespace System.ComponentModel
{
/// <summary>
/// <para>Provides a type converter to convert <see cref='System.Enum'/>
/// objects to and from various
/// other representations.</para>
/// </summary>
public class EnumConverter : TypeConverter
{
/// <summary>
/// <para>
/// Provides a <see cref='System.ComponentModel.TypeConverter.StandardValuesCollection'/> that specifies the
/// possible values for the enumeration.
/// </para>
/// </summary>
private StandardValuesCollection _values;
/// <summary>
/// <para>
/// Specifies the type of the enumerator this converter is associated with.
/// </para>
/// </summary>
private readonly Type _type;
/// <summary>
/// <para>
/// Initializes a new instance of the <see cref='System.ComponentModel.EnumConverter'/> class for the given
/// type.
/// </para>
/// </summary>
public EnumConverter(Type type)
{
_type = type;
}
/// <summary>
/// <para>[To be supplied.]</para>
/// </summary>
protected Type EnumType
{
get
{
return _type;
}
}
/// <summary>
/// <para>[To be supplied.]</para>
/// </summary>
protected StandardValuesCollection Values
{
get
{
return _values;
}
set
{
_values = value;
}
}
/// <internalonly/>
/// <summary>
/// <para>Gets a value indicating whether this converter
/// can convert an object in the given source type to an enumeration object using
/// the specified context.</para>
/// </summary>
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(string) || sourceType == typeof(Enum[]))
{
return true;
}
return base.CanConvertFrom(context, sourceType);
}
/// <summary>
/// <para>Gets a value indicating whether this converter can
/// convert an object to the given destination type using the context.</para>
/// </summary>
public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
{
if (destinationType == typeof(Enum[]))
{
return true;
}
return base.CanConvertTo(context, destinationType);
}
#if !NETSTANDARD10
/// <summary>
/// <para>
/// Gets an <see cref='System.Collections.IComparer'/> interface that can
/// be used to sort the values of the enumerator.
/// </para>
/// </summary>
protected virtual IComparer Comparer
{
get
{
return InvariantComparer.Default;
}
}
#endif // !NETSTANDARD10
/// <internalonly/>
/// <summary>
/// <para>Converts the specified value object to an enumeration object.</para>
/// </summary>
public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
{
string strValue = value as string;
if (strValue != null)
{
try
{
if (strValue.IndexOf(',') != -1)
{
long convertedValue = 0;
string[] values = strValue.Split(new char[] { ',' });
foreach (string v in values)
{
convertedValue |= Convert.ToInt64((Enum)Enum.Parse(_type, v, true), culture);
}
return Enum.ToObject(_type, convertedValue);
}
else
{
return Enum.Parse(_type, strValue, true);
}
}
catch (Exception e)
{
throw new FormatException(SR.Format(SR.ConvertInvalidPrimitive, (string)value, _type.Name), e);
}
}
else if (value is Enum[])
{
long finalValue = 0;
foreach (Enum e in (Enum[])value)
{
finalValue |= Convert.ToInt64(e, culture);
}
return Enum.ToObject(_type, finalValue);
}
return base.ConvertFrom(context, culture, value);
}
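// Illustrative example (not from the original source): ConvertFrom accepts a single enum
// name, a comma-separated list of names (OR-ed together, which is how [Flags] values
// round-trip), or an Enum[] array. A sketch using a hypothetical flags enum:
//
//   [Flags] enum Access { None = 0, Read = 1, Write = 2 }
//   var conv = new EnumConverter(typeof(Access));
//   object v = conv.ConvertFrom(null, CultureInfo.InvariantCulture, "Read, Write");
//   // v is Access.Read | Access.Write (numeric value 3)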
/// <internalonly/>
/// <summary>
/// <para>Converts the given value object to the specified destination type.</para>
/// </summary>
public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
{
if (destinationType == null)
{
throw new ArgumentNullException(nameof(destinationType));
}
if (destinationType == typeof(string) && value != null)
{
// Raise an argument exception if the value isn't defined and if
// the enum isn't a flags style.
//
if (!_type.GetTypeInfo().IsDefined(typeof(FlagsAttribute), false) && !Enum.IsDefined(_type, value))
{
throw new ArgumentException(SR.Format(SR.EnumConverterInvalidValue, value.ToString(), _type.Name));
}
return Enum.Format(_type, value, "G");
}
if (destinationType == typeof(Enum[]) && value != null)
{
if (_type.GetTypeInfo().IsDefined(typeof(FlagsAttribute), false))
{
List<Enum> flagValues = new List<Enum>();
Array objValues = Enum.GetValues(_type);
long[] ulValues = new long[objValues.Length];
for (int idx = 0; idx < objValues.Length; idx++)
{
ulValues[idx] = Convert.ToInt64((Enum)objValues.GetValue(idx), culture);
}
long longValue = Convert.ToInt64((Enum)value, culture);
bool valueFound = true;
while (valueFound)
{
valueFound = false;
foreach (long ul in ulValues)
{
if ((ul != 0 && (ul & longValue) == ul) || ul == longValue)
{
flagValues.Add((Enum)Enum.ToObject(_type, ul));
valueFound = true;
longValue &= ~ul;
break;
}
}
if (longValue == 0)
{
break;
}
}
if (!valueFound && longValue != 0)
{
flagValues.Add((Enum)Enum.ToObject(_type, longValue));
}
return flagValues.ToArray();
}
else
{
return new Enum[] { (Enum)Enum.ToObject(_type, value) };
}
}
return base.ConvertTo(context, culture, value, destinationType);
}
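// Illustrative example (not from the original source), continuing the hypothetical
// Access enum above: for destinationType typeof(Enum[]) a [Flags] value is decomposed
// into its set bits, and for typeof(string) Enum.Format with the "G" format is used.
//
//   object combined = Enum.ToObject(typeof(Access), 3);
//   Enum[] parts = (Enum[])conv.ConvertTo(null, CultureInfo.InvariantCulture,
//                                         combined, typeof(Enum[]));   // Read, Write
//   string text = (string)conv.ConvertTo(null, CultureInfo.InvariantCulture,
//                                        combined, typeof(string));    // "Read, Write"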
#if !NETSTANDARD10
/// <internalonly/>
/// <summary>
/// <para>Gets a collection of standard values for the data type this validator is
/// designed for.</para>
/// </summary>
public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
{
if (_values == null)
{
// We need to get the enum values in this rather round-about way so we can filter
// out fields marked Browsable(false). Note that if multiple fields have the same value,
// the behavior is undefined, since what we return are just enum values, not names.
Type reflectType = TypeDescriptor.GetReflectionType(_type);
if (reflectType == null)
{
reflectType = _type;
}
FieldInfo[] fields = reflectType.GetFields(BindingFlags.Public | BindingFlags.Static);
ArrayList objValues = null;
if (fields != null && fields.Length > 0)
{
objValues = new ArrayList(fields.Length);
}
if (objValues != null)
{
foreach (FieldInfo field in fields)
{
BrowsableAttribute browsableAttr = null;
foreach (Attribute attr in field.GetCustomAttributes(typeof(BrowsableAttribute), false))
{
browsableAttr = attr as BrowsableAttribute;
}
if (browsableAttr == null || browsableAttr.Browsable)
{
object value = null;
try
{
if (field.Name != null)
{
value = Enum.Parse(_type, field.Name);
}
}
catch (ArgumentException)
{
// Hmm, for some reason, the parse threw. Let us ignore this value.
}
if (value != null)
{
objValues.Add(value);
}
}
}
IComparer comparer = Comparer;
if (comparer != null)
{
objValues.Sort(comparer);
}
}
Array arr = (objValues != null) ? objValues.ToArray() : null;
_values = new StandardValuesCollection(arr);
}
return _values;
}
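// Illustrative note (hypothetical enum, for clarity): because of the Browsable filter
// above, a member marked [Browsable(false)] is left out of the standard values offered
// to designers and property grids.
//
//   enum Mode
//   {
//       Fast,
//       Safe,
//       [Browsable(false)] Hidden   // excluded from GetStandardValues
//   }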
/// <internalonly/>
/// <summary>
/// <para>Gets a value indicating whether the list of standard values returned from
/// <see cref='System.ComponentModel.TypeConverter.GetStandardValues'/>
/// is an exclusive list using the specified context.</para>
/// </summary>
public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
{
return !_type.GetTypeInfo().IsDefined(typeof(FlagsAttribute), false);
}
/// <internalonly/>
/// <summary>
/// <para>Gets a value indicating
/// whether this object
/// supports a standard set of values that can be picked
/// from a list using the specified context.</para>
/// </summary>
public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
{
return true;
}
/// <internalonly/>
/// <summary>
/// <para>Gets a value indicating whether the given object value is valid for this type.</para>
/// </summary>
public override bool IsValid(ITypeDescriptorContext context, object value)
{
return Enum.IsDefined(_type, value);
}
#endif // !NETSTANDARD10
}
}
| |
//---------------------------------------------------------------------------
//
// <copyright file="WindowsListViewSubItem.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
//
// Description: Win32 ListViewSubItem proxy
//
// History:
// Jean-Francois Peyroux, alexsn - Created (in DotNet)
// 2003/08/12 - alexsn Updated for WCP
//
//---------------------------------------------------------------------------
using System;
using System.ComponentModel;
using System.Windows.Automation;
using System.Windows.Automation.Provider;
using System.Windows;
using MS.Win32;
using Accessibility;
namespace MS.Internal.AutomationProxies
{
internal class ListViewSubItem: ProxySimple, IGridItemProvider, ITableItemProvider, IValueProvider
{
// ------------------------------------------------------
//
// Constructors
//
//------------------------------------------------------
#region Constructors
internal ListViewSubItem (IntPtr hwnd, ProxyFragment parent, int item, int itemParent)
: base (hwnd, parent, item)
{
// Is used to discriminate between items in a collection.
_itemParent = itemParent;
_cControlType = WindowsListView.ListViewEditable(hwnd) ? ControlType.Edit : ControlType.Text;
}
#endregion Constructors
//------------------------------------------------------
//
// Patterns Implementation
//
//------------------------------------------------------
#region ProxySimple Interface
// Returns a pattern interface if supported.
internal override object GetPatternProvider (AutomationPattern iid)
{
if (iid == GridItemPattern.Pattern)
{
return this;
}
// Only the first element is editable
if (iid == ValuePattern.Pattern && _item == 0 && WindowsListView.ListViewEditable (_hwnd))
{
return this;
}
if (iid == TableItemPattern.Pattern)
{
return this;
}
return null;
}
// Gets the bounding rectangle for this element
internal override Rect BoundingRectangle
{
get
{
NativeMethods.Win32Rect itemRectangle;
// NOTE: the 1st (column 0) subitem is a special one, since it is fake; Win32's LV does not
// have a subitem 0 in report mode
int lvir = (_item == 0) ? NativeMethods.LVIR_SELECTBOUNDS : NativeMethods.LVIR_BOUNDS;
if (!WindowsListView.GetSubItemRect (_hwnd, _itemParent, _item, lvir, out itemRectangle))
{
return Rect.Empty;
}
// Special case: LV is full row select, with more than 1 column and we are looking at the first item.
// Only IconViews and DetailViews are processed here. TileViews will be processed as a
// ListViewItem. The DetailView is the only view that is in a row/column layout with its data.
if (WindowsListView.FullRowSelect(_hwnd) && !WindowsListView.HasJustifyColumnsExStyle(_hwnd) &&
!WindowsListView.IsIconView(_hwnd) && _item == 0 && 1 < ListViewItem.GetSubItemCount(_hwnd))
{
NativeMethods.Win32Rect itemRectangle1;
if (!WindowsListView.GetSubItemRect(_hwnd, _itemParent, 1, NativeMethods.LVIR_BOUNDS, out itemRectangle1))
{
return Rect.Empty;
}
// Derived values from the adjacent subitems are conditional based on RTL
if (Misc.IsControlRTL(_hwnd))
{
itemRectangle.left = itemRectangle1.right;
}
else
{
itemRectangle.right = itemRectangle1.left;
}
// take checkbox into account
if (ListViewItem.IsItemWithCheckbox (_hwnd, _itemParent))
{
NativeMethods.Win32Rect checkboxRectangle = ListViewItemCheckbox.ListViewCheckBoxRect (_hwnd, _itemParent);
// Derived values from the adjacent subitems are conditional based on RTL
if (Misc.IsControlRTL(_hwnd))
{
itemRectangle.right -= (checkboxRectangle.right - checkboxRectangle.left);
}
else
{
itemRectangle.left += (checkboxRectangle.right - checkboxRectangle.left);
}
}
}
// Don't need to normalize, GetSubItemRect returns absolute coordinates.
return itemRectangle.ToRect(false);
}
}
// Process all the Logical and Raw Element Properties
internal override object GetElementProperty(AutomationProperty idProp)
{
if (idProp == AutomationElement.IsOffscreenProperty)
{
Rect parentRect = GetParent().GetParent().BoundingRectangle;
Rect itemRect = BoundingRectangle;
if (itemRect.IsEmpty || parentRect.IsEmpty)
{
return true;
}
// Need to check if this item is visible on the whole control not just its immediate parent.
if (!Misc.IsItemVisible(ref parentRect, ref itemRect))
{
return true;
}
}
else if (idProp == AutomationElement.HasKeyboardFocusProperty)
{
IAccessible acc = AccessibleObject;
// The items are zero based, i.e. the first listview item is item 0. The
// zero item in MSAA is self, so need to add one to the item to get the
// correct Accessible child.
AccessibleRole role = Accessible.GetRole(acc, _itemParent + 1);
// The ListView IAccessible knows when it is really a menu item
if (role == AccessibleRole.MenuItem)
{
// Use the IsFocused of the SubItem instead of the one in ProxySimple
// When ListViews are used for menus they don't get focus
// so the check for "does this hwnd have focus" fails
return IsFocused ();
}
// If we are in a SysListView32 and that list view is in the Start Menu search column
// real focus can stay on the edit box, while a virtual focus navigates the list
// If this is the case, only check IsFocused, don't do the GetGUIThreadInfo check.
IntPtr ancestor = _hwnd;
IntPtr desktop = UnsafeNativeMethods.GetDesktopWindow();
while (ancestor != IntPtr.Zero && ancestor != desktop)
{
if (Misc.GetClassName(ancestor) == "Desktop Search Open View")
{
return IsFocused();
}
ancestor = Misc.GetParent(ancestor);
}
}
return base.GetElementProperty(idProp);
}
// Gets the control's help text
internal override string HelpText
{
get
{
return WindowsListView.GetItemToolTipText(_hwnd);
}
}
// Gets the localized name
internal override string LocalizedName
{
get
{
string name = ListViewItem.GetText(_hwnd, _itemParent, _item);
return name.Length < Misc.MaxLengthNameProperty ? name : name.Substring(0, Misc.MaxLengthNameProperty);
}
}
// Sets the focus to this item.
internal override bool SetFocus()
{
// Set the item's state to focused.
return WindowsListView.SetItemFocused (_hwnd, this._itemParent);
}
#endregion ProxySimple Interface
#region Value Pattern
void IValueProvider.SetValue (string val)
{
// Make sure that the control is enabled
if (!SafeNativeMethods.IsWindowEnabled(_hwnd))
{
throw new ElementNotEnabledException();
}
ListViewItem.SetValue (val, _hwnd, _itemParent);
}
// Request to get the value that this UI element is representing as a string
string IValueProvider.Value
{
get
{
return ListViewItem.GetText (_hwnd, _itemParent, _item);
}
}
bool IValueProvider.IsReadOnly
{
get
{
return !WindowsListView.ListViewEditable (_hwnd);
}
}
#endregion ValuePattern
#region GridItemPattern
int IGridItemProvider.Row
{
get
{
if (!WindowsListView.IsGroupViewEnabled (_hwnd))
{
return _itemParent;
}
// We're in group mode:
// to detect the item's row, find the location of this item in the array of
// group items; that location indicates the row.
int groupID = ListViewItem.GetGroupID (_hwnd, _itemParent);
if (groupID != -1)
{
GroupManager.GroupInfo groupInfo = WindowsListViewGroup.GetGroupInfo (_hwnd, groupID);
if (groupInfo)
{
int row = groupInfo.IndexOf (_itemParent);
if (row >= 0)
{
return row;
}
}
}
return -1;
}
}
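// Worked example (illustration only): in group mode the row is the item's position
// within its group rather than its absolute listview index. If group 5 contains the
// items { 7, 9, 12 } and _itemParent is 9, groupInfo.IndexOf(9) returns 1, so
// IGridItemProvider.Row reports 1.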
int IGridItemProvider.Column
{
get
{
return _item;
}
}
int IGridItemProvider.RowSpan
{
get
{
return 1;
}
}
int IGridItemProvider.ColumnSpan
{
get
{
return 1;
}
}
IRawElementProviderSimple IGridItemProvider.ContainingGrid
{
get
{
// ContainingGrid would be either Group or the ListView
// For both cases we need to skip our immediate parent
// which is ListViewItem => meaning ContainingGrid is defined as parent of the parent
return _parent._parent;
}
}
#endregion GridItemPattern
#region TableItemPattern
IRawElementProviderSimple [] ITableItemProvider.GetRowHeaderItems ()
{
return null;
}
IRawElementProviderSimple [] ITableItemProvider.GetColumnHeaderItems ()
{
IntPtr hwndHeader = WindowsListView.ListViewGetHeader (_hwnd);
if (SafeNativeMethods.IsWindowVisible (hwndHeader))
{
WindowsSysHeader header = (WindowsSysHeader) WindowsSysHeader.Create (hwndHeader, 0);
return new IRawElementProviderSimple [] { new WindowsSysHeader.HeaderItem (hwndHeader, header, _item) };
}
return null;
}
#endregion TableItemPattern
//------------------------------------------------------
//
// Internal Methods
//
//------------------------------------------------------
#region Internal Methods
internal static ProxySimple ElementProviderFromPoint (IntPtr hwnd, ProxyFragment parent, int item, int x, int y)
{
NativeMethods.LVHITTESTINFO_INTERNAL hitTest = WindowsListView.SubitemHitTest (hwnd, item, new NativeMethods.Win32Point (x, y));
if (hitTest.iSubItem >= 0)
{
return new ListViewSubItem (hwnd, parent, hitTest.iSubItem, item);
}
// subitems do not exist
return parent;
}
#endregion Internal Methods
//------------------------------------------------------
//
// Protected Methods
//
//------------------------------------------------------
#region Protected Methods
// This routine is only called on elements belonging to an hwnd that has the focus.
protected override bool IsFocused()
{
if (Misc.IsComctrlV6OnOsVerV6orHigher(_hwnd))
{
int column = (int)Misc.ProxySendMessage(_hwnd, NativeMethods.LVM_GETFOCUSEDCOLUMN, IntPtr.Zero, IntPtr.Zero);
return column == _item;
}
return WindowsListView.IsItemFocused (_hwnd, _itemParent);
}
#endregion
//------------------------------------------------------
//
// Private Fields
//
//------------------------------------------------------
#region Private Fields
// The item in the listview. _item is the SubItem
private int _itemParent;
#endregion Private Fields
}
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Windows.Forms;
using System.Text;
using System.Xml;
using fyiReporting.RDL;
namespace fyiReporting.RdlDesign
{
/// <summary>
/// Summary description for DialogNewTable.
/// </summary>
internal partial class DialogNewTable
{
internal DialogNewTable(DesignXmlDraw dxDraw, XmlNode container)
{
_Draw = dxDraw;
//
// Required for Windows Form Designer support
//
InitializeComponent();
InitValues(container);
}
private void InitValues(XmlNode container)
{
this.bOK.Enabled = false;
rbHorz.Checked = true;
//
// Obtain the existing DataSets info
//
object[] datasets = _Draw.DataSetNames;
if (datasets == null)
return; // not much to do if no DataSets
if (_Draw.IsDataRegion(container))
{
string s = _Draw.GetDataSetNameValue(container);
if (s == null)
return;
this.cbDataSets.Items.Add(s);
this.cbDataSets.Enabled = false;
}
else
this.cbDataSets.Items.AddRange(datasets);
cbDataSets.SelectedIndex = 0;
}
internal string TableXml
{
get
{
return rbHorz.Checked ? TableXmlHorz : TableXmlVert;
}
}
private string TableXmlHorz
{
get
{
StringBuilder table = new StringBuilder("<Table>");
table.AppendFormat("<DataSetName>{0}</DataSetName>", this.cbDataSets.Text);
table.Append("<NoRows>Query returned no rows!</NoRows><Style>" +
"<BorderStyle><Default>Solid</Default></BorderStyle></Style>");
StringBuilder tablecolumns = new StringBuilder("<TableColumns>");
StringBuilder headercolumns =
new StringBuilder("<Header><TableRows><TableRow><Height>12 pt</Height><TableCells>");
StringBuilder detailcolumns =
new StringBuilder("<Details><TableRows><TableRow><Height>12 pt</Height><TableCells>");
StringBuilder tablegroups = null;
StringBuilder footergroup = null;
string gname = this.cbGroupColumn.Text;
if (gname != null && gname.Trim() != "")
{
gname = gname.Trim();
tablegroups =
new StringBuilder("<TableGroups><TableGroup><Grouping><GroupExpressions><GroupExpression>");
tablegroups.AppendFormat("=Fields!{0}.Value</GroupExpression></GroupExpressions></Grouping>", gname);
tablegroups.Append("<Header><TableRows><TableRow><Height>12 pt</Height><TableCells>");
footergroup =
new StringBuilder("<Footer><TableRows><TableRow><Height>12 pt</Height><TableCells>");
}
else
gname = null;
StringBuilder footercolumns = null;
if (this.chkGrandTotals.Checked)
footercolumns =
new StringBuilder("<Footer><TableRows><TableRow><Height>12 pt</Height><TableCells>");
bool bHaveFooter = false; // indicates one or more columns have been checked for subtotaling
foreach (string colname in this.lbTableColumns.Items)
{
tablecolumns.Append("<TableColumn><Width>1in</Width></TableColumn>");
headercolumns.AppendFormat("<TableCell><ReportItems><Textbox><Value>{0}</Value>" +
"<Style><TextAlign>Center</TextAlign><BorderStyle><Default>Solid</Default></BorderStyle>" +
"<FontWeight>Bold</FontWeight></Style>" +
"</Textbox></ReportItems></TableCell>", colname);
string dcol;
string gcol;
if (gname == colname)
{
dcol = "";
gcol = string.Format("=Fields!{0}.Value", colname);
}
else
{
gcol = "";
dcol = string.Format("=Fields!{0}.Value", colname);
}
int iChecked = this.lbTableColumns.CheckedItems.IndexOf(colname);
string fcol = "";
if (iChecked >= 0)
{
bHaveFooter = true;
fcol = string.Format("=Sum(Fields!{0}.Value)", colname);
}
if (tablegroups != null)
{
tablegroups.AppendFormat("<TableCell><ReportItems><Textbox>" +
"<Value>{0}</Value><CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>Solid</Default></BorderStyle>" +
"</Style></Textbox></ReportItems></TableCell>", gcol);
footergroup.AppendFormat("<TableCell><ReportItems><Textbox>" +
"<Value>{0}</Value><CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>Solid</Default></BorderStyle>" +
"</Style></Textbox></ReportItems></TableCell>", fcol);
}
detailcolumns.AppendFormat("<TableCell><ReportItems><Textbox>" +
"<Value>{0}</Value><CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>Solid</Default></BorderStyle>" +
"</Style></Textbox></ReportItems></TableCell>", dcol);
if (footercolumns != null)
footercolumns.AppendFormat("<TableCell><ReportItems><Textbox>" +
"<Value>{0}</Value><CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>Solid</Default></BorderStyle>" +
"</Style></Textbox></ReportItems></TableCell>", fcol);
}
tablecolumns.Append("</TableColumns>");
table.Append(tablecolumns.ToString());
headercolumns.Append("</TableCells></TableRow></TableRows>" +
"<RepeatOnNewPage>true</RepeatOnNewPage></Header>");
table.Append(headercolumns.ToString());
detailcolumns.Append("</TableCells></TableRow></TableRows>" +
"</Details>");
table.Append(detailcolumns.ToString());
if (footercolumns != null)
{
footercolumns.Append("</TableCells></TableRow></TableRows>" +
"</Footer>");
table.Append(footercolumns.ToString());
}
if (tablegroups != null)
{
tablegroups.Append("</TableCells></TableRow></TableRows>" +
"</Header>");
if (bHaveFooter)
{
footergroup.Append("</TableCells></TableRow></TableRows>" +
"</Footer>");
tablegroups.Append(footergroup.ToString());
}
tablegroups.Append("</TableGroup></TableGroups>");
table.Append(tablegroups);
}
table.Append("</Table>");
return table.ToString();
}
}
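// Illustrative sketch (assumption; the real output is a single unformatted string): for
// a data set with the two columns Name and Amount, no grouping and no grand totals,
// TableXmlHorz produces RDL shaped roughly like this, with whitespace added here for
// readability:
//
//   <Table>
//     <DataSetName>MyDataSet</DataSetName>
//     <NoRows>Query returned no rows!</NoRows>
//     <TableColumns>... one 1in TableColumn per column ...</TableColumns>
//     <Header>... one bold, centered Textbox per column name ...</Header>
//     <Details>... one Textbox per column bound to =Fields!Name.Value etc. ...</Details>
//   </Table>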
private string TableXmlVert
{
get
{
StringBuilder table = new StringBuilder("<Table>");
table.AppendFormat("<DataSetName>{0}</DataSetName>", this.cbDataSets.Text);
table.Append("<NoRows>Query returned no rows!</NoRows><Style>" +
"<BorderStyle><Default>Solid</Default></BorderStyle></Style>");
table.Append("<TableColumns><TableColumn><Width>5in</Width></TableColumn></TableColumns>");
table.Append("<Details><TableRows>" + Environment.NewLine);
foreach (string colname in this.lbTableColumns.Items)
{
string dcol = string.Format("Fields!{0}.Value", colname);
if (this.rbVertComp.Checked)
{
string val = String.Format("<Value>=\"<span style='color:Crimson;'>{0}:&nbsp;&nbsp;</span>\" & {1}</Value>", colname, dcol);
table.AppendFormat(
"<TableRow><Height>12 pt</Height>" +
"<Visibility><Hidden>=Iif({1} = Nothing, true, false)</Hidden></Visibility>" +
"<TableCells><TableCell><ReportItems><Textbox>" +
"{0}" +
"<CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>None</Default></BorderStyle>" +
"<Format>html</Format>" +
"</Style></Textbox></ReportItems></TableCell>" +
"</TableCells></TableRow>" +
Environment.NewLine, val, dcol);
}
else
{
table.AppendFormat(
"<TableRow><Height>12 pt</Height><TableCells>" +
"<TableCell><ReportItems><Textbox>" +
"<Value>{0}</Value>" +
"<Style><BorderStyle><Default>None</Default></BorderStyle>" +
"<FontWeight>Bold</FontWeight>" +
"<Color>Crimson</Color>" +
"</Style></Textbox></ReportItems></TableCell>" +
"</TableCells></TableRow>", colname);
table.AppendFormat(
"<TableRow><Height>12 pt</Height><TableCells>" +
"<TableCell><ReportItems><Textbox>" +
"<Value>={0}</Value><CanGrow>true</CanGrow>" +
"<Style><BorderStyle><Default>None</Default></BorderStyle>" +
"</Style></Textbox></ReportItems></TableCell>" +
"</TableCells></TableRow>", dcol);
}
}
table.Append("</TableRows></Details></Table>");
return table.ToString();
}
}
public void Apply()
{
//
}
private void bOK_Click(object sender, System.EventArgs e)
{
// apply the result
Apply();
DialogResult = DialogResult.OK;
}
private void cbDataSets_SelectedIndexChanged(object sender, System.EventArgs e)
{
this.lbTableColumns.Items.Clear();
bOK.Enabled = false;
this.lbFields.Items.Clear();
string[] fields = _Draw.GetFields(cbDataSets.Text, false);
if (fields != null)
lbFields.Items.AddRange(fields);
}
private void bRight_Click(object sender, System.EventArgs e)
{
ListBox.SelectedIndexCollection sic = lbFields.SelectedIndices;
int count = sic.Count;
foreach (int i in sic)
{
string fname = (string)lbFields.Items[i];
lbTableColumns.Items.Add(fname);
}
// Need to remove backwards
ArrayList ar = new ArrayList(sic);
ar.Reverse();
foreach (int i in ar)
{
lbFields.Items.RemoveAt(i);
}
bOK.Enabled = lbTableColumns.Items.Count > 0;
if (count > 0 && lbFields.Items.Count > 0)
lbFields.SelectedIndex = 0;
}
private void bLeft_Click(object sender, System.EventArgs e)
{
ICollection sic = lbTableColumns.SelectedIndices;
int count = sic.Count;
foreach (int i in sic)
{
string fname = (string)lbTableColumns.Items[i];
lbFields.Items.Add(fname);
if (fname == this.cbGroupColumn.Text)
this.cbGroupColumn.Text = "";
}
// Need to remove backwards
ArrayList ar = new ArrayList(sic);
ar.Reverse();
foreach (int i in ar)
{
lbTableColumns.Items.RemoveAt(i);
}
bOK.Enabled = lbTableColumns.Items.Count > 0;
if (count > 0 && lbTableColumns.Items.Count > 0)
lbTableColumns.SelectedIndex = 0;
}
private void bAllRight_Click(object sender, System.EventArgs e)
{
foreach (object fname in lbFields.Items)
{
lbTableColumns.Items.Add(fname);
}
lbFields.Items.Clear();
bOK.Enabled = lbTableColumns.Items.Count > 0;
}
private void bAllLeft_Click(object sender, System.EventArgs e)
{
foreach (object fname in lbTableColumns.Items)
{
lbFields.Items.Add(fname);
}
lbTableColumns.Items.Clear();
this.cbGroupColumn.Text = "";
bOK.Enabled = false;
}
private void bUp_Click(object sender, System.EventArgs e)
{
int index = lbTableColumns.SelectedIndex;
if (index <= 0)
return;
string prename = (string)lbTableColumns.Items[index - 1];
lbTableColumns.Items.RemoveAt(index - 1);
lbTableColumns.Items.Insert(index, prename);
}
private void bDown_Click(object sender, System.EventArgs e)
{
int index = lbTableColumns.SelectedIndex;
if (index < 0 || index + 1 == lbTableColumns.Items.Count)
return;
string postname = (string)lbTableColumns.Items[index + 1];
lbTableColumns.Items.RemoveAt(index + 1);
lbTableColumns.Items.Insert(index, postname);
}
private void cbGroupColumn_Enter(object sender, System.EventArgs e)
{
cbGroupColumn.Items.Clear();
cbGroupColumn.Items.Add("");
if (lbTableColumns.Items.Count > 0)
{
object[] names = new object[lbTableColumns.Items.Count];
lbTableColumns.Items.CopyTo(names, 0);
cbGroupColumn.Items.AddRange(names);
}
}
private void rbHorz_CheckedChanged(object sender, System.EventArgs e)
{
// only standard column report supports grouping and totals
this.cbGroupColumn.Enabled = this.chkGrandTotals.Enabled = rbHorz.Checked;
}
}
}
| |
//---------------------------------------------------------------------
// <copyright file="EntityPropertyMappingInfo.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Manages the mapping information for EntityPropertyMappingAttributes
// on a ResourceType.
// </summary>
//
// @owner [....]
//---------------------------------------------------------------------
namespace System.Data.Services.Common
{
using System.Diagnostics;
#if ASTORIA_CLIENT
using System.Data.Services.Client;
using System.Reflection;
using ClientTypeOrResourceType_Alias = System.Data.Services.Client.ClientType;
using TypeOrResourceType_Alias = System.Type;
#else
using System.Data.Services.Providers;
using ClientTypeOrResourceType_Alias = System.Data.Services.Providers.ResourceType;
using TypeOrResourceType_Alias = System.Data.Services.Providers.ResourceType;
#endif
/// <summary>
/// Holds information needed during content serialization/deserialization for
/// each EntityPropertyMappingAttribute
/// </summary>
[DebuggerDisplay("EntityPropertyMappingInfo {DefiningType}")]
internal sealed class EntityPropertyMappingInfo
{
/// <summary>
/// Private field backing Attribute property.
/// </summary>
private readonly EntityPropertyMappingAttribute attribute;
/// <summary>
/// Private field backing DefiningType property
/// </summary>
private readonly TypeOrResourceType_Alias definingType;
/// <summary>
/// Source property path in segmented form. Stored so that we do not have to call attribute.SourcePath.Split('/') each time we want to read the property value.
/// </summary>
private readonly string[] segmentedSourcePath;
/// <summary>
/// Type whose property is to be read. This property is of ClientType type on the client and of ResourceType type on the server.
/// </summary>
private readonly ClientTypeOrResourceType_Alias actualPropertyType;
#if !ASTORIA_CLIENT
/// <summary>
/// Private field backing IsEFProvider property
/// </summary>
private readonly bool isEFProvider;
/// <summary>
/// Creates instance of EntityPropertyMappingInfo class.
/// </summary>
/// <param name="attribute">The <see cref="EntityPropertyMappingAttribute"/> corresponding to this object</param>
/// <param name="definingType">Type the <see cref="EntityPropertyMappingAttribute"/> was defined on.</param>
/// <param name="actualPropertyType">Type whose property is to be read. This can be different from defining type when inheritance is involved.</param>
/// <param name="isEFProvider">Whether the current data source is an EF provider. Needed for error reporting.</param>
public EntityPropertyMappingInfo(EntityPropertyMappingAttribute attribute, ResourceType definingType, ResourceType actualPropertyType, bool isEFProvider)
{
this.isEFProvider = isEFProvider;
#else
/// <summary>
/// Creates instance of EntityPropertyMappingInfo class.
/// </summary>
/// <param name="attribute">The <see cref="EntityPropertyMappingAttribute"/> corresponding to this object</param>
/// <param name="definingType">Type the <see cref="EntityPropertyMappingAttribute"/> was defined on.</param>
/// <param name="actualPropertyType">ClientType whose property is to be read.</param>
public EntityPropertyMappingInfo(EntityPropertyMappingAttribute attribute, Type definingType, ClientType actualPropertyType)
{
#endif
Debug.Assert(attribute != null, "attribute != null");
Debug.Assert(definingType != null, "definingType != null");
Debug.Assert(actualPropertyType != null, "actualPropertyType != null");
this.attribute = attribute;
this.definingType = definingType;
this.actualPropertyType = actualPropertyType;
Debug.Assert(!string.IsNullOrEmpty(attribute.SourcePath), "Invalid source path");
this.segmentedSourcePath = attribute.SourcePath.Split('/');
}
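// Illustrative example (hypothetical mapping, not taken from product code): for an
// attribute whose SourcePath is "Address/City", segmentedSourcePath becomes
// { "Address", "City" } and ReadPropertyValue below walks element.Address.City,
// verifying that intermediate segments are complex properties and that the final
// segment is a primitive property.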
/// <summary>
/// The <see cref="EntityPropertyMappingAttribute"/> corresponding to this object
/// </summary>
public EntityPropertyMappingAttribute Attribute
{
get { return this.attribute; }
}
/// <summary>
/// Type that has the <see cref="EntityPropertyMappingAttribute"/>
/// </summary>
public TypeOrResourceType_Alias DefiningType
{
get { return this.definingType; }
}
#if ASTORIA_CLIENT
/// <summary>
/// Given a source property path reads the property value from the resource type instance
/// </summary>
/// <param name="element">Client type instance.</param>
/// <returns>Property value read from the client type instance. Possibly null.</returns>
internal object ReadPropertyValue(object element)
{
return ReadPropertyValue(element, this.actualPropertyType, this.segmentedSourcePath, 0);
}
/// <summary>
/// Given a source property path in segmented form, reads the property value from the resource type instance
/// </summary>
/// <param name="element">Client type instance.</param>
/// <param name="resourceType">Client type whose property is to be read</param>
/// <param name="srcPathSegments">Segmented source property path.</param>
/// <param name="currentSegment">Index of current property name in <paramref name="srcPathSegments"/></param>
/// <returns>Property value read from the client type instance. Possibly null.</returns>
private static object ReadPropertyValue(object element, ClientType resourceType, string[] srcPathSegments, int currentSegment)
{
if (element == null || currentSegment == srcPathSegments.Length)
{
return element;
}
else
{
String srcPathPart = srcPathSegments[currentSegment];
ClientType.ClientProperty resourceProperty = resourceType.GetProperty(srcPathPart, true);
if (resourceProperty == null)
{
throw Error.InvalidOperation(Strings.EpmSourceTree_InaccessiblePropertyOnType(srcPathPart, resourceType.ElementTypeName));
}
// If this is the last part of the path, then it has to be a primitive type; otherwise it should be a complex type
if (resourceProperty.IsKnownType ^ (currentSegment == srcPathSegments.Length - 1))
{
throw Error.InvalidOperation(!resourceProperty.IsKnownType ? Strings.EpmClientType_PropertyIsComplex(resourceProperty.PropertyName) :
Strings.EpmClientType_PropertyIsPrimitive(resourceProperty.PropertyName));
}
// o.Prop
PropertyInfo pi = element.GetType().GetProperty(srcPathPart, BindingFlags.Instance | BindingFlags.Public);
Debug.Assert(pi != null, "Cannot find property " + srcPathPart + " on type " + element.GetType().Name);
return ReadPropertyValue(
pi.GetValue(element, null),
resourceProperty.IsKnownType ? null : ClientType.Create(resourceProperty.PropertyType),
srcPathSegments,
++currentSegment);
}
}
#else
/// <summary>Is the current data source an EF provider</summary>
public bool IsEFProvider
{
get { return this.isEFProvider; }
}
/// <summary>
/// Given a source property path reads the property value from the resource type instance.
/// </summary>
/// <param name="element">Resource type instance.</param>
/// <param name="provider">Underlying data provider.</param>
/// <returns>Property value read from the resource type instance. Possibly null.</returns>
internal object ReadPropertyValue(object element, DataServiceProviderWrapper provider)
{
return ReadPropertyValue(element, provider, this.actualPropertyType, this.segmentedSourcePath, 0);
}
/// <summary>
/// Given a source property path in the segmented form reads the property value from the resource type instance.
/// </summary>
/// <param name="element">Resource type instance.</param>
/// <param name="provider">Underlying data provider.</param>
/// <param name="resourceType">Resource type whose property is to be read.</param>
/// <param name="srcPathSegments">Segmented source property path.</param>
/// <param name="currentSegment">Index of current property name in <paramref name="srcPathSegments"/></param>
/// <returns>Property value read from the resource type instance. Possibly null.</returns>
private static object ReadPropertyValue(object element, DataServiceProviderWrapper provider, ResourceType resourceType, string[] srcPathSegments, int currentSegment)
{
if (element == null || currentSegment == srcPathSegments.Length)
{
return element;
}
else
{
String propertyName = srcPathSegments[currentSegment];
ResourceProperty resourceProperty = resourceType != null ? resourceType.TryResolvePropertyName(propertyName) : null;
if (resourceProperty != null)
{
// If this is the last part of the path, then it has to be a primitive type; otherwise it should be a complex type
if (!resourceProperty.IsOfKind(currentSegment == srcPathSegments.Length - 1 ? ResourcePropertyKind.Primitive : ResourcePropertyKind.ComplexType))
{
throw new InvalidOperationException(Strings.EpmSourceTree_EndsWithNonPrimitiveType(propertyName));
}
}
else
{
if (!(resourceType == null || resourceType.IsOpenType))
{
throw new InvalidOperationException(Strings.EpmSourceTree_InaccessiblePropertyOnType(propertyName, resourceType.Name));
}
// This is an open type; resolve resourceType and try resolving resourceProperty
resourceType = WebUtil.GetNonPrimitiveResourceType(provider, element);
resourceProperty = resourceType.TryResolvePropertyName(propertyName);
}
Debug.Assert(resourceType != null, "resourceType != null");
object propertyValue = WebUtil.GetPropertyValue(provider, element, resourceType, resourceProperty, resourceProperty == null ? propertyName : null);
return ReadPropertyValue(
propertyValue,
provider,
resourceProperty != null ? resourceProperty.ResourceType : null,
srcPathSegments,
currentSegment + 1);
}
}
#endif
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Net;
using System.Reflection;
using System.Threading;
using log4net;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Framework.Client;
namespace OpenSim.Tests.Common
{
public class TestClient : IClientAPI, IClientCore
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
EventWaitHandle wh = new EventWaitHandle (false, EventResetMode.AutoReset, "Crossing");
private Scene m_scene;
// Properties so that we can get at received data for test purposes
public List<uint> ReceivedKills { get; private set; }
public List<UUID> ReceivedOfflineNotifications { get; private set; }
public List<UUID> ReceivedOnlineNotifications { get; private set; }
public List<UUID> ReceivedFriendshipTerminations { get; private set; }
public List<ImageDataPacket> SentImageDataPackets { get; private set; }
public List<ImagePacketPacket> SentImagePacketPackets { get; private set; }
public List<ImageNotInDatabasePacket> SentImageNotInDatabasePackets { get; private set; }
// Test client specific events - for use by tests to implement some IClientAPI behaviour.
public event Action<RegionInfo, Vector3, Vector3> OnReceivedMoveAgentIntoRegion;
public event Action<ulong, IPEndPoint> OnTestClientInformClientOfNeighbour;
public event TestClientOnSendRegionTeleportDelegate OnTestClientSendRegionTeleport;
public event Action<ISceneEntity, PrimUpdateFlags> OnReceivedEntityUpdate;
public event OnReceivedChatMessageDelegate OnReceivedChatMessage;
public event Action<GridInstantMessage> OnReceivedInstantMessage;
public event Action<UUID> OnReceivedSendRebakeAvatarTextures;
public delegate void TestClientOnSendRegionTeleportDelegate(
ulong regionHandle, byte simAccess, IPEndPoint regionExternalEndPoint,
uint locationID, uint flags, string capsURL);
public delegate void OnReceivedChatMessageDelegate(
string message, byte type, Vector3 fromPos, string fromName,
UUID fromAgentID, UUID ownerID, byte source, byte audible);
// disable warning: public events, part of the public API
#pragma warning disable 67
public event Action<IClientAPI> OnLogout;
public event ObjectPermissions OnObjectPermissions;
public event MoneyTransferRequest OnMoneyTransferRequest;
public event ParcelBuy OnParcelBuy;
public event Action<IClientAPI> OnConnectionClosed;
public event MoveItemsAndLeaveCopy OnMoveItemsAndLeaveCopy;
public event ImprovedInstantMessage OnInstantMessage;
public event ChatMessage OnChatFromClient;
public event TextureRequest OnRequestTexture;
public event RezObject OnRezObject;
public event ModifyTerrain OnModifyTerrain;
public event BakeTerrain OnBakeTerrain;
public event SetAppearance OnSetAppearance;
public event AvatarNowWearing OnAvatarNowWearing;
public event RezSingleAttachmentFromInv OnRezSingleAttachmentFromInv;
public event RezMultipleAttachmentsFromInv OnRezMultipleAttachmentsFromInv;
public event UUIDNameRequest OnDetachAttachmentIntoInv;
public event ObjectAttach OnObjectAttach;
public event ObjectDeselect OnObjectDetach;
public event ObjectDrop OnObjectDrop;
public event StartAnim OnStartAnim;
public event StopAnim OnStopAnim;
public event ChangeAnim OnChangeAnim;
public event LinkObjects OnLinkObjects;
public event DelinkObjects OnDelinkObjects;
public event RequestMapBlocks OnRequestMapBlocks;
public event RequestMapName OnMapNameRequest;
public event TeleportLocationRequest OnTeleportLocationRequest;
public event TeleportLandmarkRequest OnTeleportLandmarkRequest;
public event TeleportCancel OnTeleportCancel;
public event DisconnectUser OnDisconnectUser;
public event RequestAvatarProperties OnRequestAvatarProperties;
public event SetAlwaysRun OnSetAlwaysRun;
public event DeRezObject OnDeRezObject;
public event Action<IClientAPI> OnRegionHandShakeReply;
public event GenericCall1 OnRequestWearables;
public event Action<IClientAPI, bool> OnCompleteMovementToRegion;
public event UpdateAgent OnPreAgentUpdate;
public event UpdateAgent OnAgentUpdate;
public event UpdateAgent OnAgentCameraUpdate;
public event AgentRequestSit OnAgentRequestSit;
public event AgentSit OnAgentSit;
public event AvatarPickerRequest OnAvatarPickerRequest;
public event Action<IClientAPI> OnRequestAvatarsData;
public event AddNewPrim OnAddPrim;
public event RequestGodlikePowers OnRequestGodlikePowers;
public event GodKickUser OnGodKickUser;
public event ObjectDuplicate OnObjectDuplicate;
public event GrabObject OnGrabObject;
public event DeGrabObject OnDeGrabObject;
public event MoveObject OnGrabUpdate;
public event SpinStart OnSpinStart;
public event SpinObject OnSpinUpdate;
public event SpinStop OnSpinStop;
public event ViewerEffectEventHandler OnViewerEffect;
public event FetchInventory OnAgentDataUpdateRequest;
public event TeleportLocationRequest OnSetStartLocationRequest;
public event UpdateShape OnUpdatePrimShape;
public event ObjectExtraParams OnUpdateExtraParams;
public event RequestObjectPropertiesFamily OnRequestObjectPropertiesFamily;
public event ObjectSelect OnObjectSelect;
public event ObjectRequest OnObjectRequest;
public event GenericCall7 OnObjectDescription;
public event GenericCall7 OnObjectName;
public event GenericCall7 OnObjectClickAction;
public event GenericCall7 OnObjectMaterial;
public event UpdatePrimFlags OnUpdatePrimFlags;
public event UpdatePrimTexture OnUpdatePrimTexture;
public event ClientChangeObject onClientChangeObject;
public event UpdateVector OnUpdatePrimGroupPosition;
public event UpdateVector OnUpdatePrimSinglePosition;
public event UpdatePrimRotation OnUpdatePrimGroupRotation;
public event UpdatePrimSingleRotation OnUpdatePrimSingleRotation;
public event UpdatePrimSingleRotationPosition OnUpdatePrimSingleRotationPosition;
public event UpdatePrimGroupRotation OnUpdatePrimGroupMouseRotation;
public event UpdateVector OnUpdatePrimScale;
public event UpdateVector OnUpdatePrimGroupScale;
public event StatusChange OnChildAgentStatus;
public event GenericCall2 OnStopMovement;
public event Action<UUID> OnRemoveAvatar;
public event CreateNewInventoryItem OnCreateNewInventoryItem;
public event LinkInventoryItem OnLinkInventoryItem;
public event CreateInventoryFolder OnCreateNewInventoryFolder;
public event UpdateInventoryFolder OnUpdateInventoryFolder;
public event MoveInventoryFolder OnMoveInventoryFolder;
public event RemoveInventoryFolder OnRemoveInventoryFolder;
public event RemoveInventoryItem OnRemoveInventoryItem;
public event FetchInventoryDescendents OnFetchInventoryDescendents;
public event PurgeInventoryDescendents OnPurgeInventoryDescendents;
public event FetchInventory OnFetchInventory;
public event RequestTaskInventory OnRequestTaskInventory;
public event UpdateInventoryItem OnUpdateInventoryItem;
public event CopyInventoryItem OnCopyInventoryItem;
public event MoveInventoryItem OnMoveInventoryItem;
public event UDPAssetUploadRequest OnAssetUploadRequest;
public event RequestTerrain OnRequestTerrain;
public event RequestTerrain OnUploadTerrain;
public event XferReceive OnXferReceive;
public event RequestXfer OnRequestXfer;
public event ConfirmXfer OnConfirmXfer;
public event AbortXfer OnAbortXfer;
public event RezScript OnRezScript;
public event UpdateTaskInventory OnUpdateTaskInventory;
public event MoveTaskInventory OnMoveTaskItem;
public event RemoveTaskInventory OnRemoveTaskItem;
public event RequestAsset OnRequestAsset;
public event GenericMessage OnGenericMessage;
public event UUIDNameRequest OnNameFromUUIDRequest;
public event UUIDNameRequest OnUUIDGroupNameRequest;
public event ParcelPropertiesRequest OnParcelPropertiesRequest;
public event ParcelDivideRequest OnParcelDivideRequest;
public event ParcelJoinRequest OnParcelJoinRequest;
public event ParcelPropertiesUpdateRequest OnParcelPropertiesUpdateRequest;
public event ParcelAbandonRequest OnParcelAbandonRequest;
public event ParcelGodForceOwner OnParcelGodForceOwner;
public event ParcelReclaim OnParcelReclaim;
public event ParcelReturnObjectsRequest OnParcelReturnObjectsRequest;
public event ParcelAccessListRequest OnParcelAccessListRequest;
public event ParcelAccessListUpdateRequest OnParcelAccessListUpdateRequest;
public event ParcelSelectObjects OnParcelSelectObjects;
public event ParcelObjectOwnerRequest OnParcelObjectOwnerRequest;
public event ParcelDeedToGroup OnParcelDeedToGroup;
public event ObjectDeselect OnObjectDeselect;
public event RegionInfoRequest OnRegionInfoRequest;
public event EstateCovenantRequest OnEstateCovenantRequest;
public event EstateChangeInfo OnEstateChangeInfo;
public event EstateManageTelehub OnEstateManageTelehub;
public event CachedTextureRequest OnCachedTextureRequest;
public event ObjectDuplicateOnRay OnObjectDuplicateOnRay;
public event FriendActionDelegate OnApproveFriendRequest;
public event FriendActionDelegate OnDenyFriendRequest;
public event FriendshipTermination OnTerminateFriendship;
public event GrantUserFriendRights OnGrantUserRights;
public event EconomyDataRequest OnEconomyDataRequest;
public event MoneyBalanceRequest OnMoneyBalanceRequest;
public event UpdateAvatarProperties OnUpdateAvatarProperties;
public event ObjectIncludeInSearch OnObjectIncludeInSearch;
public event UUIDNameRequest OnTeleportHomeRequest;
public event ScriptAnswer OnScriptAnswer;
public event RequestPayPrice OnRequestPayPrice;
public event ObjectSaleInfo OnObjectSaleInfo;
public event ObjectBuy OnObjectBuy;
public event BuyObjectInventory OnBuyObjectInventory;
public event AgentSit OnUndo;
public event AgentSit OnRedo;
public event LandUndo OnLandUndo;
public event ForceReleaseControls OnForceReleaseControls;
public event GodLandStatRequest OnLandStatRequest;
public event RequestObjectPropertiesFamily OnObjectGroupRequest;
public event DetailedEstateDataRequest OnDetailedEstateDataRequest;
public event SetEstateFlagsRequest OnSetEstateFlagsRequest;
public event SetEstateTerrainBaseTexture OnSetEstateTerrainBaseTexture;
public event SetEstateTerrainDetailTexture OnSetEstateTerrainDetailTexture;
public event SetEstateTerrainTextureHeights OnSetEstateTerrainTextureHeights;
public event CommitEstateTerrainTextureRequest OnCommitEstateTerrainTextureRequest;
public event SetRegionTerrainSettings OnSetRegionTerrainSettings;
public event EstateRestartSimRequest OnEstateRestartSimRequest;
public event EstateChangeCovenantRequest OnEstateChangeCovenantRequest;
public event UpdateEstateAccessDeltaRequest OnUpdateEstateAccessDeltaRequest;
public event SimulatorBlueBoxMessageRequest OnSimulatorBlueBoxMessageRequest;
public event EstateBlueBoxMessageRequest OnEstateBlueBoxMessageRequest;
public event EstateDebugRegionRequest OnEstateDebugRegionRequest;
public event EstateTeleportOneUserHomeRequest OnEstateTeleportOneUserHomeRequest;
public event EstateTeleportAllUsersHomeRequest OnEstateTeleportAllUsersHomeRequest;
public event ScriptReset OnScriptReset;
public event GetScriptRunning OnGetScriptRunning;
public event SetScriptRunning OnSetScriptRunning;
public event Action<Vector3, bool, bool> OnAutoPilotGo;
public event TerrainUnacked OnUnackedTerrain;
public event RegionHandleRequest OnRegionHandleRequest;
public event ParcelInfoRequest OnParcelInfoRequest;
public event ActivateGesture OnActivateGesture;
public event DeactivateGesture OnDeactivateGesture;
public event ObjectOwner OnObjectOwner;
public event DirPlacesQuery OnDirPlacesQuery;
public event DirFindQuery OnDirFindQuery;
public event DirLandQuery OnDirLandQuery;
public event DirPopularQuery OnDirPopularQuery;
public event DirClassifiedQuery OnDirClassifiedQuery;
public event EventInfoRequest OnEventInfoRequest;
public event ParcelSetOtherCleanTime OnParcelSetOtherCleanTime;
public event MapItemRequest OnMapItemRequest;
public event OfferCallingCard OnOfferCallingCard;
public event AcceptCallingCard OnAcceptCallingCard;
public event DeclineCallingCard OnDeclineCallingCard;
public event SoundTrigger OnSoundTrigger;
public event StartLure OnStartLure;
public event TeleportLureRequest OnTeleportLureRequest;
public event NetworkStats OnNetworkStatsUpdate;
public event ClassifiedInfoRequest OnClassifiedInfoRequest;
public event ClassifiedInfoUpdate OnClassifiedInfoUpdate;
public event ClassifiedDelete OnClassifiedDelete;
public event ClassifiedGodDelete OnClassifiedGodDelete;
public event EventNotificationAddRequest OnEventNotificationAddRequest;
public event EventNotificationRemoveRequest OnEventNotificationRemoveRequest;
public event EventGodDelete OnEventGodDelete;
public event ParcelDwellRequest OnParcelDwellRequest;
public event UserInfoRequest OnUserInfoRequest;
public event UpdateUserInfo OnUpdateUserInfo;
public event RetrieveInstantMessages OnRetrieveInstantMessages;
public event PickDelete OnPickDelete;
public event PickGodDelete OnPickGodDelete;
public event PickInfoUpdate OnPickInfoUpdate;
public event AvatarNotesUpdate OnAvatarNotesUpdate;
public event MuteListRequest OnMuteListRequest;
public event AvatarInterestUpdate OnAvatarInterestUpdate;
public event PlacesQuery OnPlacesQuery;
public event FindAgentUpdate OnFindAgent;
public event TrackAgentUpdate OnTrackAgent;
public event NewUserReport OnUserReport;
public event SaveStateHandler OnSaveState;
public event GroupAccountSummaryRequest OnGroupAccountSummaryRequest;
public event GroupAccountDetailsRequest OnGroupAccountDetailsRequest;
public event GroupAccountTransactionsRequest OnGroupAccountTransactionsRequest;
public event FreezeUserUpdate OnParcelFreezeUser;
public event EjectUserUpdate OnParcelEjectUser;
public event ParcelBuyPass OnParcelBuyPass;
public event ParcelGodMark OnParcelGodMark;
public event GroupActiveProposalsRequest OnGroupActiveProposalsRequest;
public event GroupVoteHistoryRequest OnGroupVoteHistoryRequest;
public event SimWideDeletesDelegate OnSimWideDeletes;
public event SendPostcard OnSendPostcard;
public event ChangeInventoryItemFlags OnChangeInventoryItemFlags;
public event MuteListEntryUpdate OnUpdateMuteListEntry;
public event MuteListEntryRemove OnRemoveMuteListEntry;
public event GodlikeMessage onGodlikeMessage;
public event GodUpdateRegionInfoUpdate OnGodUpdateRegionInfoUpdate;
public event GenericCall2 OnUpdateThrottles;
#pragma warning restore 67
/// <value>
/// This agent's UUID
/// </value>
private UUID m_agentId;
public ISceneAgent SceneAgent { get; set; }
/// <value>
/// The last caps seed url that this client was given.
/// </value>
public string CapsSeedUrl;
private Vector3 startPos = new Vector3(((int)Constants.RegionSize * 0.5f), ((int)Constants.RegionSize * 0.5f), 2);
public virtual Vector3 StartPos
{
get { return startPos; }
set { }
}
public virtual UUID AgentId
{
get { return m_agentId; }
}
public UUID SessionId { get; set; }
public UUID SecureSessionId { get; set; }
public virtual string FirstName
{
get { return m_firstName; }
}
private string m_firstName;
public virtual string LastName
{
get { return m_lastName; }
}
private string m_lastName;
public virtual String Name
{
get { return FirstName + " " + LastName; }
}
public int PingTimeMS { get { return 0; } }
public bool IsActive
{
get { return true; }
set { }
}
public bool IsLoggingOut { get; set; }
public UUID ActiveGroupId
{
get { return UUID.Zero; }
}
public string ActiveGroupName
{
get { return String.Empty; }
}
public ulong ActiveGroupPowers
{
get { return 0; }
}
public bool IsGroupMember(UUID groupID)
{
return false;
}
public ulong GetGroupPowers(UUID groupID)
{
return 0;
}
public virtual int NextAnimationSequenceNumber
{
get { return 1; }
}
public IScene Scene
{
get { return m_scene; }
}
public bool SendLogoutPacketWhenClosing
{
set { }
}
private uint m_circuitCode;
public uint CircuitCode
{
get { return m_circuitCode; }
set { m_circuitCode = value; }
}
public IPEndPoint RemoteEndPoint
{
get { return new IPEndPoint(IPAddress.Loopback, (ushort)m_circuitCode); }
}
public List<uint> SelectedObjects {get; private set;}
/// <summary>
/// Constructor
/// </summary>
/// <param name="agentData"></param>
/// <param name="scene"></param>
/// <param name="sceneManager"></param>
public TestClient(AgentCircuitData agentData, Scene scene)
{
m_agentId = agentData.AgentID;
m_firstName = agentData.firstname;
m_lastName = agentData.lastname;
m_circuitCode = agentData.circuitcode;
m_scene = scene;
SessionId = agentData.SessionID;
SecureSessionId = agentData.SecureSessionID;
CapsSeedUrl = agentData.CapsPath;
ReceivedKills = new List<uint>();
ReceivedOfflineNotifications = new List<UUID>();
ReceivedOnlineNotifications = new List<UUID>();
ReceivedFriendshipTerminations = new List<UUID>();
SentImageDataPackets = new List<ImageDataPacket>();
SentImagePacketPackets = new List<ImagePacketPacket>();
SentImageNotInDatabasePackets = new List<ImageNotInDatabasePacket>();
}
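// A minimal construction sketch (added for illustration, not part of the original file).
// It only sets the AgentCircuitData members this constructor actually reads; anything else a
// real test needs (scene wiring, presence creation) is assumed to happen elsewhere.
//
// AgentCircuitData acd = new AgentCircuitData();
// acd.AgentID = UUID.Random();
// acd.firstname = "Test";
// acd.lastname = "User";
// acd.circuitcode = 123;
// acd.SessionID = UUID.Random();
// acd.SecureSessionID = UUID.Random();
// acd.CapsPath = string.Empty;
// TestClient client = new TestClient(acd, scene);   // 'scene' is an existing test Scene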
/// <summary>
/// Trigger chat coming from this connection.
/// </summary>
/// <param name="channel"></param>
/// <param name="type"></param>
/// <param name="message"></param>
public bool Chat(int channel, ChatTypeEnum type, string message)
{
ChatMessage handlerChatFromClient = OnChatFromClient;
if (handlerChatFromClient != null)
{
OSChatMessage args = new OSChatMessage();
args.Channel = channel;
args.From = Name;
args.Message = message;
args.Type = type;
args.Scene = Scene;
args.Sender = this;
args.SenderUUID = AgentId;
handlerChatFromClient(this, args);
}
return true;
}
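// Usage sketch (added comment, illustrative only): a test can observe chat sent through this
// client by subscribing to OnChatFromClient before calling Chat(); the handler receives the
// OSChatMessage built above.
//
// client.OnChatFromClient += (sender, msg) =>
//     Console.WriteLine("{0}: {1}", msg.From, msg.Message);
// client.Chat(0, ChatTypeEnum.Say, "hello region");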
/// <summary>
/// Attempt a teleport to the given region.
/// </summary>
/// <param name="regionHandle"></param>
/// <param name="position"></param>
/// <param name="lookAt"></param>
public void Teleport(ulong regionHandle, Vector3 position, Vector3 lookAt)
{
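// Note (added): this assumes a handler is attached to OnTeleportLocationRequest (normally the
// scene subscribes when the client is added); with no subscriber the invocation below would
// throw a NullReferenceException.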
OnTeleportLocationRequest(this, regionHandle, position, lookAt, 16);
}
public void CompleteMovement()
{
if (OnCompleteMovementToRegion != null)
OnCompleteMovementToRegion(this, true);
}
/// <summary>
/// Emulate sending an IM from the viewer to the simulator.
/// </summary>
/// <param name='im'></param>
public void HandleImprovedInstantMessage(GridInstantMessage im)
{
ImprovedInstantMessage handlerInstantMessage = OnInstantMessage;
if (handlerInstantMessage != null)
handlerInstantMessage(this, im);
}
public virtual void ActivateGesture(UUID assetId, UUID gestureId)
{
}
public virtual void SendWearables(AvatarWearable[] wearables, int serial)
{
}
public virtual void SendAppearance(UUID agentID, byte[] visualParams, byte[] textureEntry)
{
}
public void SendCachedTextureResponse(ISceneEntity avatar, int serial, List<CachedTextureResponseArg> cachedTextures)
{
}
public virtual void Kick(string message)
{
}
public virtual void SendStartPingCheck(byte seq)
{
}
public virtual void SendAvatarPickerReply(AvatarPickerReplyAgentDataArgs AgentData, List<AvatarPickerReplyDataArgs> Data)
{
}
public virtual void SendAgentDataUpdate(UUID agentid, UUID activegroupid, string firstname, string lastname, ulong grouppowers, string groupname, string grouptitle)
{
}
public virtual void SendKillObject(List<uint> localID)
{
ReceivedKills.AddRange(localID);
}
public void SendPartFullUpdate(ISceneEntity ent, uint? parentID)
{
}
public virtual void SetChildAgentThrottle(byte[] throttle)
{
}
public virtual void SetChildAgentThrottle(byte[] throttle, float factor)
{
}
public void SetAgentThrottleSilent(int throttle, int setting)
{
}
public int GetAgentThrottleSilent(int throttle)
{
return 0;
}
public byte[] GetThrottlesPacked(float multiplier)
{
return new byte[0];
}
public virtual void SendAnimations(UUID[] animations, int[] seqs, UUID sourceAgentId, UUID[] objectIDs)
{
}
public virtual void SendChatMessage(
string message, byte type, Vector3 fromPos, string fromName,
UUID fromAgentID, UUID ownerID, byte source, byte audible)
{
// Console.WriteLine("mmm {0} {1} {2}", message, Name, AgentId);
if (OnReceivedChatMessage != null)
OnReceivedChatMessage(message, type, fromPos, fromName, fromAgentID, ownerID, source, audible);
}
public void SendInstantMessage(GridInstantMessage im)
{
if (OnReceivedInstantMessage != null)
OnReceivedInstantMessage(im);
}
public void SendGenericMessage(string method, UUID invoice, List<string> message)
{
}
public void SendGenericMessage(string method, UUID invoice, List<byte[]> message)
{
}
public virtual bool CanSendLayerData()
{
return false;
}
public virtual void SendLayerData(float[] map)
{
}
public virtual void SendLayerData(int px, int py, float[] map)
{
}
public virtual void SendLayerData(int px, int py, float[] map, bool track)
{
}
public virtual void SendWindData(Vector2[] windSpeeds) { }
public virtual void SendCloudData(float[] cloudCover) { }
public virtual void MoveAgentIntoRegion(RegionInfo regInfo, Vector3 pos, Vector3 look)
{
if (OnReceivedMoveAgentIntoRegion != null)
OnReceivedMoveAgentIntoRegion(regInfo, pos, look);
}
public virtual AgentCircuitData RequestClientInfo()
{
AgentCircuitData agentData = new AgentCircuitData();
agentData.AgentID = AgentId;
agentData.SessionID = SessionId;
agentData.SecureSessionID = UUID.Zero;
agentData.circuitcode = m_circuitCode;
agentData.child = false;
agentData.firstname = m_firstName;
agentData.lastname = m_lastName;
ICapabilitiesModule capsModule = m_scene.RequestModuleInterface<ICapabilitiesModule>();
if (capsModule != null)
{
agentData.CapsPath = capsModule.GetCapsPath(m_agentId);
agentData.ChildrenCapSeeds = new Dictionary<ulong, string>(capsModule.GetChildrenSeeds(m_agentId));
}
return agentData;
}
public virtual void InformClientOfNeighbour(ulong neighbourHandle, IPEndPoint neighbourExternalEndPoint)
{
if (OnTestClientInformClientOfNeighbour != null)
OnTestClientInformClientOfNeighbour(neighbourHandle, neighbourExternalEndPoint);
}
public virtual void SendRegionTeleport(
ulong regionHandle, byte simAccess, IPEndPoint regionExternalEndPoint,
uint locationID, uint flags, string capsURL)
{
m_log.DebugFormat(
"[TEST CLIENT]: Received SendRegionTeleport for {0} {1} on {2}", m_firstName, m_lastName, m_scene.Name);
CapsSeedUrl = capsURL;
if (OnTestClientSendRegionTeleport != null)
OnTestClientSendRegionTeleport(
regionHandle, simAccess, regionExternalEndPoint, locationID, flags, capsURL);
}
public virtual void SendTeleportFailed(string reason)
{
m_log.DebugFormat(
"[TEST CLIENT]: Teleport failed for {0} {1} on {2} with reason {3}",
m_firstName, m_lastName, m_scene.Name, reason);
}
public virtual void CrossRegion(ulong newRegionHandle, Vector3 pos, Vector3 lookAt,
IPEndPoint newRegionExternalEndPoint, string capsURL)
{
// This is supposed to send a packet to the client telling it that it is ready to start region crossing.
// Instead I just signal that I'm ready here, mimicking the communication behavior.
// It's ugly, but it avoids needless communication setup. This is used in ScenePresenceTests.cs.
// Arthur V.
wh.Set();
}
public virtual void SendMapBlock(List<MapBlockData> mapBlocks, uint flag)
{
}
public virtual void SendLocalTeleport(Vector3 position, Vector3 lookAt, uint flags)
{
}
public virtual void SendTeleportStart(uint flags)
{
}
public void SendTeleportProgress(uint flags, string message)
{
}
public virtual void SendMoneyBalance(UUID transaction, bool success, byte[] description, int balance, int transactionType, UUID sourceID, bool sourceIsGroup, UUID destID, bool destIsGroup, int amount, string item)
{
}
public virtual void SendPayPrice(UUID objectID, int[] payPrice)
{
}
public virtual void SendCoarseLocationUpdate(List<UUID> users, List<Vector3> CoarseLocations)
{
}
public virtual void SendDialog(string objectname, UUID objectID, UUID ownerID, string ownerFirstName, string ownerLastName, string msg, UUID textureID, int ch, string[] buttonlabels)
{
}
public void SendAvatarDataImmediate(ISceneEntity avatar)
{
}
public void SendEntityUpdate(ISceneEntity entity, PrimUpdateFlags updateFlags)
{
if (OnReceivedEntityUpdate != null)
OnReceivedEntityUpdate(entity, updateFlags);
}
public void ReprioritizeUpdates()
{
}
public void FlushPrimUpdates()
{
}
public virtual void SendInventoryFolderDetails(UUID ownerID, UUID folderID,
List<InventoryItemBase> items,
List<InventoryFolderBase> folders,
int version,
bool fetchFolders,
bool fetchItems)
{
}
public virtual void SendInventoryItemDetails(UUID ownerID, InventoryItemBase item)
{
}
public virtual void SendInventoryItemCreateUpdate(InventoryItemBase Item, uint callbackID)
{
}
public void SendInventoryItemCreateUpdate(InventoryItemBase Item, UUID transactionID, uint callbackId)
{
}
public virtual void SendRemoveInventoryItem(UUID itemID)
{
}
public virtual void SendBulkUpdateInventory(InventoryNodeBase node)
{
}
public void SendTakeControls(int controls, bool passToAgent, bool TakeControls)
{
}
public virtual void SendTaskInventory(UUID taskID, short serial, byte[] fileName)
{
}
public virtual void SendXferPacket(ulong xferID, uint packet, byte[] data, bool isTaskInventory)
{
}
public virtual void SendAbortXferPacket(ulong xferID)
{
}
public virtual void SendEconomyData(float EnergyEfficiency, int ObjectCapacity, int ObjectCount, int PriceEnergyUnit,
int PriceGroupCreate, int PriceObjectClaim, float PriceObjectRent, float PriceObjectScaleFactor,
int PriceParcelClaim, float PriceParcelClaimFactor, int PriceParcelRent, int PricePublicObjectDecay,
int PricePublicObjectDelete, int PriceRentLight, int PriceUpload, int TeleportMinPrice, float TeleportPriceExponent)
{
}
public virtual void SendNameReply(UUID profileId, string firstname, string lastname)
{
}
public virtual void SendPreLoadSound(UUID objectID, UUID ownerID, UUID soundID)
{
}
public virtual void SendPlayAttachedSound(UUID soundID, UUID objectID, UUID ownerID, float gain,
byte flags)
{
}
public void SendTriggeredSound(UUID soundID, UUID ownerID, UUID objectID, UUID parentID, ulong handle, Vector3 position, float gain)
{
}
public void SendAttachedSoundGainChange(UUID objectID, float gain)
{
}
public void SendAlertMessage(string message)
{
}
public void SendAgentAlertMessage(string message, bool modal)
{
}
public void SendSystemAlertMessage(string message)
{
}
public void SendLoadURL(string objectname, UUID objectID, UUID ownerID, bool groupOwned, string message,
string url)
{
}
public virtual void SendRegionHandshake(RegionInfo regionInfo, RegionHandshakeArgs args)
{
if (OnRegionHandShakeReply != null)
{
OnRegionHandShakeReply(this);
}
}
public void SendAssetUploadCompleteMessage(sbyte AssetType, bool Success, UUID AssetFullID)
{
}
public void SendConfirmXfer(ulong xferID, uint PacketID)
{
}
public void SendXferRequest(ulong XferID, short AssetType, UUID vFileID, byte FilePath, byte[] FileName)
{
}
public void SendInitiateDownload(string simFileName, string clientFileName)
{
}
public void SendImageFirstPart(ushort numParts, UUID ImageUUID, uint ImageSize, byte[] ImageData, byte imageCodec)
{
ImageDataPacket im = new ImageDataPacket();
im.Header.Reliable = false;
im.ImageID.Packets = numParts;
im.ImageID.ID = ImageUUID;
if (ImageSize > 0)
im.ImageID.Size = ImageSize;
im.ImageData.Data = ImageData;
im.ImageID.Codec = imageCodec;
im.Header.Zerocoded = true;
SentImageDataPackets.Add(im);
}
public void SendImageNextPart(ushort partNumber, UUID imageUuid, byte[] imageData)
{
ImagePacketPacket im = new ImagePacketPacket();
im.Header.Reliable = false;
im.ImageID.Packet = partNumber;
im.ImageID.ID = imageUuid;
im.ImageData.Data = imageData;
SentImagePacketPackets.Add(im);
}
public void SendImageNotFound(UUID imageid)
{
ImageNotInDatabasePacket p = new ImageNotInDatabasePacket();
p.ImageID.ID = imageid;
SentImageNotInDatabasePackets.Add(p);
}
public void SendShutdownConnectionNotice()
{
}
public void SendSimStats(SimStats stats)
{
}
public void SendObjectPropertiesFamilyData(ISceneEntity Entity, uint RequestFlags)
{
}
public void SendObjectPropertiesReply(ISceneEntity entity)
{
}
public void SendAgentOffline(UUID[] agentIDs)
{
ReceivedOfflineNotifications.AddRange(agentIDs);
}
public void SendAgentOnline(UUID[] agentIDs)
{
ReceivedOnlineNotifications.AddRange(agentIDs);
}
public void SendFindAgent(UUID HunterID, UUID PreyID, double GlobalX, double GlobalY)
{
}
public void SendSitResponse(UUID TargetID, Vector3 OffsetPos,
Quaternion SitOrientation, bool autopilot,
Vector3 CameraAtOffset, Vector3 CameraEyeOffset, bool ForceMouseLook)
{
}
public void SendAdminResponse(UUID Token, uint AdminLevel)
{
}
public void SendGroupMembership(GroupMembershipData[] GroupMembership)
{
}
public void SendSunPos(Vector3 sunPos, Vector3 sunVel, ulong time, uint dlen, uint ylen, float phase)
{
}
public void SendViewerEffect(ViewerEffectPacket.EffectBlock[] effectBlocks)
{
}
public void SendViewerTime(int phase)
{
}
public void SendAvatarProperties(UUID avatarID, string aboutText, string bornOn, Byte[] charterMember,
string flAbout, uint flags, UUID flImageID, UUID imageID, string profileURL,
UUID partnerID)
{
}
public int DebugPacketLevel { get; set; }
public void InPacket(object NewPack)
{
}
public void ProcessInPacket(Packet NewPack)
{
}
/// <summary>
/// This is a TestClient only method to do shutdown tasks that are normally carried out by LLUDPServer.RemoveClient()
/// </summary>
public void Logout()
{
// We must set this here so that the presence is removed from the PresenceService by the PresenceDetector
IsLoggingOut = true;
Close();
}
public void Close()
{
Close(true, false);
}
public void Close(bool sendStop, bool force)
{
// Fire the callback for this connection closing
// This is necessary to get the presence detector to notice that a client has logged out.
if (OnConnectionClosed != null)
OnConnectionClosed(this);
m_scene.RemoveClient(AgentId, true);
}
public void Start()
{
throw new NotImplementedException();
}
public void Stop()
{
}
public void SendBlueBoxMessage(UUID FromAvatarID, String FromAvatarName, String Message)
{
}
public void SendLogoutPacket()
{
}
public void Terminate()
{
}
public ClientInfo GetClientInfo()
{
return null;
}
public void SetClientInfo(ClientInfo info)
{
}
public void SendScriptQuestion(UUID objectID, string taskName, string ownerName, UUID itemID, int question)
{
}
public void SendHealth(float health)
{
}
public void SendTelehubInfo(UUID ObjectID, string ObjectName, Vector3 ObjectPos, Quaternion ObjectRot, List<Vector3> SpawnPoint)
{
}
public void SendEstateList(UUID invoice, int code, UUID[] Data, uint estateID)
{
}
public void SendBannedUserList(UUID invoice, EstateBan[] banlist, uint estateID)
{
}
public void SendRegionInfoToEstateMenu(RegionInfoForEstateMenuArgs args)
{
}
public void SendEstateCovenantInformation(UUID covenant)
{
}
public void SendDetailedEstateData(UUID invoice, string estateName, uint estateID, uint parentEstate, uint estateFlags, uint sunPosition, UUID covenant, uint covenantChanged, string abuseEmail, UUID estateOwner)
{
}
public void SendLandProperties(int sequence_id, bool snap_selection, int request_result, ILandObject lo, float simObjectBonusFactor, int parcelObjectCapacity, int simObjectCapacity, uint regionFlags)
{
}
public void SendLandAccessListData(List<LandAccessEntry> accessList, uint accessFlag, int localLandID)
{
}
public void SendForceClientSelectObjects(List<uint> objectIDs)
{
}
public void SendCameraConstraint(Vector4 ConstraintPlane)
{
}
public void SendLandObjectOwners(LandData land, List<UUID> groups, Dictionary<UUID, int> ownersAndCount)
{
}
public void SendLandParcelOverlay(byte[] data, int sequence_id)
{
}
public void SendParcelMediaCommand(uint flags, ParcelMediaCommandEnum command, float time)
{
}
public void SendParcelMediaUpdate(string mediaUrl, UUID mediaTextureID, byte autoScale, string mediaType,
string mediaDesc, int mediaWidth, int mediaHeight, byte mediaLoop)
{
}
public void SendGroupNameReply(UUID groupLLUID, string GroupName)
{
}
public void SendLandStatReply(uint reportType, uint requestFlags, uint resultCount, LandStatReportItem[] lsrpia)
{
}
public void SendScriptRunningReply(UUID objectID, UUID itemID, bool running)
{
}
public void SendAsset(AssetRequestToClient req)
{
}
public void SendTexture(AssetBase TextureAsset)
{
}
public void SendSetFollowCamProperties (UUID objectID, SortedDictionary<int, float> parameters)
{
}
public void SendClearFollowCamProperties (UUID objectID)
{
}
public void SendRegionHandle (UUID regionID, ulong handle)
{
}
public void SendParcelInfo (RegionInfo info, LandData land, UUID parcelID, uint x, uint y)
{
}
public void SetClientOption(string option, string value)
{
}
public string GetClientOption(string option)
{
return string.Empty;
}
public void SendScriptTeleportRequest(string objName, string simName, Vector3 pos, Vector3 lookAt)
{
}
public void SendDirPlacesReply(UUID queryID, DirPlacesReplyData[] data)
{
}
public void SendDirPeopleReply(UUID queryID, DirPeopleReplyData[] data)
{
}
public void SendDirEventsReply(UUID queryID, DirEventsReplyData[] data)
{
}
public void SendDirGroupsReply(UUID queryID, DirGroupsReplyData[] data)
{
}
public void SendDirClassifiedReply(UUID queryID, DirClassifiedReplyData[] data)
{
}
public void SendDirLandReply(UUID queryID, DirLandReplyData[] data)
{
}
public void SendDirPopularReply(UUID queryID, DirPopularReplyData[] data)
{
}
public void SendMapItemReply(mapItemReply[] replies, uint mapitemtype, uint flags)
{
}
public void SendEventInfoReply (EventData info)
{
}
public void SendOfferCallingCard (UUID destID, UUID transactionID)
{
}
public void SendAcceptCallingCard (UUID transactionID)
{
}
public void SendDeclineCallingCard (UUID transactionID)
{
}
public void SendAvatarGroupsReply(UUID avatarID, GroupMembershipData[] data)
{
}
public void SendAgentGroupDataUpdate(UUID avatarID, GroupMembershipData[] data)
{
}
public void SendJoinGroupReply(UUID groupID, bool success)
{
}
public void SendEjectGroupMemberReply(UUID agentID, UUID groupID, bool success)
{
}
public void SendLeaveGroupReply(UUID groupID, bool success)
{
}
public void SendTerminateFriend(UUID exFriendID)
{
ReceivedFriendshipTerminations.Add(exFriendID);
}
public bool AddGenericPacketHandler(string MethodName, GenericMessage handler)
{
//throw new NotImplementedException();
return false;
}
public void SendAvatarClassifiedReply(UUID targetID, UUID[] classifiedID, string[] name)
{
}
public void SendClassifiedInfoReply(UUID classifiedID, UUID creatorID, uint creationDate, uint expirationDate, uint category, string name, string description, UUID parcelID, uint parentEstate, UUID snapshotID, string simName, Vector3 globalPos, string parcelName, byte classifiedFlags, int price)
{
}
public void SendAgentDropGroup(UUID groupID)
{
}
public void SendAvatarNotesReply(UUID targetID, string text)
{
}
public void SendAvatarPicksReply(UUID targetID, Dictionary<UUID, string> picks)
{
}
public void SendAvatarClassifiedReply(UUID targetID, Dictionary<UUID, string> classifieds)
{
}
public void SendParcelDwellReply(int localID, UUID parcelID, float dwell)
{
}
public void SendUserInfoReply(bool imViaEmail, bool visible, string email)
{
}
public void SendCreateGroupReply(UUID groupID, bool success, string message)
{
}
public void RefreshGroupMembership()
{
}
public void SendUseCachedMuteList()
{
}
public void SendMuteListUpdate(string filename)
{
}
public void SendPickInfoReply(UUID pickID,UUID creatorID, bool topPick, UUID parcelID, string name, string desc, UUID snapshotID, string user, string originalName, string simName, Vector3 posGlobal, int sortOrder, bool enabled)
{
}
public bool TryGet<T>(out T iface)
{
iface = default(T);
return false;
}
public T Get<T>()
{
return default(T);
}
public void Disconnect(string reason)
{
}
public void Disconnect()
{
}
public void SendRebakeAvatarTextures(UUID textureID)
{
if (OnReceivedSendRebakeAvatarTextures != null)
OnReceivedSendRebakeAvatarTextures(textureID);
}
public void SendAvatarInterestsReply(UUID avatarID, uint wantMask, string wantText, uint skillsMask, string skillsText, string languages)
{
}
public void SendGroupAccountingDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID, int amt)
{
}
public void SendGroupAccountingSummary(IClientAPI sender,UUID groupID, uint moneyAmt, int totalTier, int usedTier)
{
}
public void SendGroupTransactionsSummaryDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID,int amt)
{
}
public void SendGroupVoteHistory(UUID groupID, UUID transactionID, GroupVoteHistory[] Votes)
{
}
public void SendGroupActiveProposals(UUID groupID, UUID transactionID, GroupActiveProposals[] Proposals)
{
}
public void SendChangeUserRights(UUID agentID, UUID friendID, int rights)
{
}
public void SendTextBoxRequest(string message, int chatChannel, string objectname, UUID ownerID, string ownerFirstName, string ownerLastName, UUID objectId)
{
}
public void SendAgentTerseUpdate(ISceneEntity presence)
{
}
public void SendPlacesReply(UUID queryID, UUID transactionID, PlacesReplyData[] data)
{
}
public void SendPartPhysicsProprieties(ISceneEntity entity)
{
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System.Buffers
{
/// <summary>
/// Represents a standard formatting string without using an actual String. A StandardFormat consists of a character (such as 'G', 'D' or 'X')
/// and an optional precision ranging from 0..99, or the special value NoPrecision.
/// </summary>
public readonly struct StandardFormat : IEquatable<StandardFormat>
{
/// <summary>
/// Precision values for format that don't use a precision, or for when the precision is to be unspecified.
/// </summary>
public const byte NoPrecision = byte.MaxValue;
/// <summary>
/// The maximum valid precision value.
/// </summary>
public const byte MaxPrecision = 99;
private readonly byte _format;
private readonly byte _precision;
/// <summary>
/// The character component of the format.
/// </summary>
public char Symbol => (char)_format;
/// <summary>
/// The precision component of the format. Ranges from 0..99 or the special value NoPrecision.
/// </summary>
public byte Precision => _precision;
/// <summary>
/// true if Precision is a value other than NoPrecision
/// </summary>
public bool HasPrecision => _precision != NoPrecision;
/// <summary>
/// true if the StandardFormat == default(StandardFormat)
/// </summary>
public bool IsDefault => _format == 0 && _precision == 0;
/// <summary>
/// Create a StandardFormat.
/// </summary>
/// <param name="symbol">A type-specific formatting character such as 'G', 'D' or 'X'</param>
/// <param name="precision">An optional precision ranging from 0..9 or the special value NoPrecision (the default)</param>
public StandardFormat(char symbol, byte precision = NoPrecision)
{
if (precision != NoPrecision && precision > MaxPrecision)
ThrowHelper.ThrowArgumentOutOfRangeException_PrecisionTooLarge();
if (symbol != (byte)symbol)
ThrowHelper.ThrowArgumentOutOfRangeException_SymbolDoesNotFit();
_format = (byte)symbol;
_precision = precision;
}
/// <summary>
/// Converts a character to a StandardFormat using the NoPrecision precision.
/// </summary>
public static implicit operator StandardFormat(char symbol) => new StandardFormat(symbol);
/// <summary>
/// Converts a <see cref="ReadOnlySpan{Char}"/> into a StandardFormat
/// </summary>
public static StandardFormat Parse(ReadOnlySpan<char> format)
{
ParseHelper(format, out StandardFormat standardFormat, throws: true);
return standardFormat;
}
/// <summary>
/// Converts a classic .NET format string into a StandardFormat
/// </summary>
public static StandardFormat Parse(string? format) => format == null ? default : Parse(format.AsSpan());
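// Illustrative expectations based on the parsing logic below (added comment, not in the
// original source):
//
// StandardFormat a = StandardFormat.Parse("D2");   // a.Symbol == 'D', a.Precision == 2
// StandardFormat b = StandardFormat.Parse("G");    // b.HasPrecision == false
// StandardFormat c = 'X';                          // implicit char conversion, c.Symbol == 'X'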
/// <summary>
/// Tries to convert a <see cref="ReadOnlySpan{Char}"/> into a StandardFormat. A return value indicates whether the conversion succeeded or failed.
/// </summary>
public static bool TryParse(ReadOnlySpan<char> format, out StandardFormat result)
{
return ParseHelper(format, out result);
}
private static bool ParseHelper(ReadOnlySpan<char> format, out StandardFormat standardFormat, bool throws = false)
{
standardFormat = default;
if (format.Length == 0)
return true;
char symbol = format[0];
byte precision;
if (format.Length == 1)
{
precision = NoPrecision;
}
else
{
uint parsedPrecision = 0;
for (int srcIndex = 1; srcIndex < format.Length; srcIndex++)
{
uint digit = format[srcIndex] - 48u; // '0'
if (digit > 9)
{
return throws ? throw new FormatException(SR.Format(SR.Argument_CannotParsePrecision, MaxPrecision)) : false;
}
parsedPrecision = parsedPrecision * 10 + digit;
if (parsedPrecision > MaxPrecision)
{
return throws ? throw new FormatException(SR.Format(SR.Argument_PrecisionTooLarge, MaxPrecision)) : false;
}
}
precision = (byte)parsedPrecision;
}
standardFormat = new StandardFormat(symbol, precision);
return true;
}
/// <summary>
/// Returns true if both the Symbol and Precision are equal.
/// </summary>
public override bool Equals(object? obj) => obj is StandardFormat other && Equals(other);
/// <summary>
/// Compute a hash code.
/// </summary>
public override int GetHashCode() => _format.GetHashCode() ^ _precision.GetHashCode();
/// <summary>
/// Returns true if both the Symbol and Precision are equal.
/// </summary>
public bool Equals(StandardFormat other) => _format == other._format && _precision == other._precision;
/// <summary>
/// Returns the format in classic .NET format.
/// </summary>
public override string ToString()
{
Span<char> buffer = stackalloc char[FormatStringLength];
int charsWritten = Format(buffer);
return new string(buffer.Slice(0, charsWritten));
}
/// <summary>The exact buffer length required by <see cref="Format"/>.</summary>
internal const int FormatStringLength = 3;
/// <summary>
/// Formats the format in classic .NET format.
/// </summary>
internal int Format(Span<char> destination)
{
Debug.Assert(destination.Length == FormatStringLength);
int count = 0;
char symbol = Symbol;
if (symbol != default &&
(uint)destination.Length == FormatStringLength) // to eliminate bounds checks
{
destination[0] = symbol;
count = 1;
uint precision = Precision;
if (precision != NoPrecision)
{
// Note that Precision is stored as a byte, so in theory it could contain
// values > MaxPrecision (99). But all supported mechanisms for creating a
// StandardFormat limit values to being <= MaxPrecision, so the only way a value
// could be larger than that is if unsafe code or the equivalent were used
// to force a larger invalid value in, in which case we don't need to
// guarantee such an invalid value is properly roundtripped through here;
// we just need to make sure things aren't corrupted further.
if (precision >= 10)
{
uint div = Math.DivRem(precision, 10, out precision);
destination[1] = (char)('0' + div % 10);
count = 2;
}
destination[count] = (char)('0' + precision);
count++;
}
}
return count;
}
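// Illustrative round trip (added comment): new StandardFormat('x', 10).ToString() writes the
// symbol, then the tens digit, then the ones digit, yielding "x10"; new StandardFormat('G')
// has NoPrecision and yields just "G".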
/// <summary>
/// Returns true if both the Symbol and Precision are equal.
/// </summary>
public static bool operator ==(StandardFormat left, StandardFormat right) => left.Equals(right);
/// <summary>
/// Returns false if both the Symbol and Precision are equal.
/// </summary>
public static bool operator !=(StandardFormat left, StandardFormat right) => !left.Equals(right);
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void SignSByte()
{
var test = new SimpleBinaryOpTest__SignSByte();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local works
test.RunLclFldScenario();
// Validates passing an instance member works
test.RunFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__SignSByte
{
private const int VectorSize = 16;
private const int Op1ElementCount = VectorSize / sizeof(SByte);
private const int Op2ElementCount = VectorSize / sizeof(SByte);
private const int RetElementCount = VectorSize / sizeof(SByte);
private static SByte[] _data1 = new SByte[Op1ElementCount];
private static SByte[] _data2 = new SByte[Op2ElementCount];
private static Vector128<SByte> _clsVar1;
private static Vector128<SByte> _clsVar2;
private Vector128<SByte> _fld1;
private Vector128<SByte> _fld2;
private SimpleBinaryOpTest__DataTable<SByte, SByte, SByte> _dataTable;
static SimpleBinaryOpTest__SignSByte()
{
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue + 1, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _clsVar2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize);
}
public SimpleBinaryOpTest__SignSByte()
{
Succeeded = true;
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue + 1, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<SByte>, byte>(ref _fld2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize);
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue + 1, sbyte.MaxValue)); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
_dataTable = new SimpleBinaryOpTest__DataTable<SByte, SByte, SByte>(_data1, _data2, new SByte[RetElementCount], VectorSize);
}
public bool IsSupported => Ssse3.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
var result = Ssse3.Sign(
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
var result = Ssse3.Sign(
Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
var result = Ssse3.Sign(
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.Sign), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.Sign), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) })
.Invoke(null, new object[] {
Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
var result = typeof(Ssse3).GetMethod(nameof(Ssse3.Sign), new Type[] { typeof(Vector128<SByte>), typeof(Vector128<SByte>) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<SByte>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
var result = Ssse3.Sign(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
var left = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector128<SByte>>(_dataTable.inArray2Ptr);
var result = Ssse3.Sign(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
var left = Sse2.LoadVector128((SByte*)(_dataTable.inArray1Ptr));
var right = Sse2.LoadVector128((SByte*)(_dataTable.inArray2Ptr));
var result = Ssse3.Sign(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
var left = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray1Ptr));
var right = Sse2.LoadAlignedVector128((SByte*)(_dataTable.inArray2Ptr));
var result = Ssse3.Sign(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
var test = new SimpleBinaryOpTest__SignSByte();
var result = Ssse3.Sign(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
var result = Ssse3.Sign(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
private void ValidateResult(Vector128<SByte> left, Vector128<SByte> right, void* result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
SByte[] outArray = new SByte[RetElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
SByte[] outArray = new SByte[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(SByte[] left, SByte[] right, SByte[] result, [CallerMemberName] string method = "")
{
if (result[0] != (right[0] < 0 ? (sbyte)(-left[0]) : (right[0] > 0 ? left[0] : 0)))
{
Succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (right[i] < 0 ? (sbyte)(-left[i]) : (right[i] > 0 ? left[i] : 0)))
{
Succeeded = false;
break;
}
}
}
if (!Succeeded)
{
Console.WriteLine($"{nameof(Ssse3)}.{nameof(Ssse3.Sign)}<SByte>(Vector128<SByte>, Vector128<SByte>): {method} failed:");
Console.WriteLine($" left: ({string.Join(", ", left)})");
Console.WriteLine($" right: ({string.Join(", ", right)})");
Console.WriteLine($" result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
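// Scalar reference for the per-element check above (added comment, not in the generated test);
// this mirrors the Ssse3.Sign semantics the validation expression encodes:
//
// static sbyte SignScalar(sbyte left, sbyte right) =>
//     right < 0 ? (sbyte)(-left) : (right > 0 ? left : (sbyte)0);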
}
}
| |
//
// DbBoundType.cs
//
// Author:
// Scott Peterson <[email protected]>
//
// Copyright (C) 2008 Scott Peterson
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Hyena.Data.Sqlite;
namespace Hyena.Data.Sqlite.Tests
{
internal enum IntEnum : int
{
Zero,
One,
Two,
Three
}
internal enum LongEnum : long
{
Cero,
Uno,
Dos,
Tres
}
internal class ModelProvider : SqliteModelProvider<DbBoundType>
{
public ModelProvider (HyenaSqliteConnection connection) : base (connection)
{
Init ();
}
public override string TableName {
get { return "TestTable"; }
}
protected override int ModelVersion {
get { return 1; }
}
protected override int DatabaseVersion {
get { return 1; }
}
protected override void MigrateTable (int old_version)
{
}
protected override void MigrateDatabase (int old_version)
{
}
protected override DbBoundType MakeNewObject ()
{
return new DbBoundType ();
}
}
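// Rough usage sketch (added comment; assumes SqliteModelProvider exposes Save/FetchSingle as
// in other Hyena-based code -- adjust to the actual API if it differs):
//
// var connection = new HyenaSqliteConnection ("test.db");
// var provider = new ModelProvider (connection);
// var item = new DbBoundType { PublicIntField = 42, PublicStringField = "hello" };
// provider.Save (item);
// DbBoundType loaded = provider.FetchSingle (item.PrimaryKey);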
internal class DbBoundType
{
[DatabaseColumn ("PrimaryKey", Constraints = DatabaseColumnConstraints.PrimaryKey)]
public long PrimaryKey;
[DatabaseColumn ("PublicIntField")]
public int PublicIntField;
[DatabaseColumn ("PublicLongField")]
public long PublicLongField;
[DatabaseColumn ("PublicStringField")]
public string PublicStringField;
[DatabaseColumn ("PublicDateTimeField")]
public DateTime PublicDateTimeField;
[DatabaseColumn ("PublicTimeSpanField")]
public TimeSpan PublicTimeSpanField;
[DatabaseColumn ("PublicIntEnumField")]
public IntEnum PublicIntEnumField;
[DatabaseColumn ("PublicLongEnumField")]
public LongEnum PublicLongEnumField;
private int public_int_property_field;
[DatabaseColumn ("PublicIntProperty")]
public int PublicIntProperty {
get { return public_int_property_field; }
set { public_int_property_field = value; }
}
private long public_long_property_field;
[DatabaseColumn ("PublicLongProperty")]
public long PublicLongProperty {
get { return public_long_property_field; }
set { public_long_property_field = value; }
}
private string public_string_property_field;
[DatabaseColumn ("PublicStringProperty")]
public string PublicStringProperty {
get { return public_string_property_field; }
set { public_string_property_field = value; }
}
private DateTime public_date_time_property_field;
[DatabaseColumn ("PublicDateTimeProperty")]
public DateTime PublicDateTimeProperty {
get { return public_date_time_property_field; }
set { public_date_time_property_field = value; }
}
private TimeSpan public_time_span_property_field;
[DatabaseColumn ("PublicTimeSpanProperty")]
public TimeSpan PublicTimeSpanProperty {
get { return public_time_span_property_field; }
set { public_time_span_property_field = value; }
}
private IntEnum public_int_enum_property_field;
[DatabaseColumn ("PublicIntEnumProperty")]
public IntEnum PublicIntEnumProperty {
get { return public_int_enum_property_field; }
set { public_int_enum_property_field = value; }
}
private LongEnum public_long_enum_property_field;
[DatabaseColumn ("PublicLongEnumProperty")]
public LongEnum PublicLongEnumProperty {
get { return public_long_enum_property_field; }
set { public_long_enum_property_field = value; }
}
[DatabaseColumn ("PrivateIntField")]
private int private_int_field;
[DatabaseColumn ("PrivateLongField")]
private long private_long_field;
[DatabaseColumn ("PrivateStringField")]
private string private_string_field;
[DatabaseColumn ("PrivateDateTimeField")]
private DateTime private_date_time_field;
[DatabaseColumn ("PrivateTimeSpanField")]
private TimeSpan private_time_span_field;
[DatabaseColumn ("PrivateIntEnumField")]
private IntEnum private_int_enum_field;
[DatabaseColumn ("PrivateLongEnumField")]
private LongEnum private_long_enum_field;
public int GetPrivateIntField ()
{
return private_int_field;
}
public void SetPrivateIntField (int value)
{
private_int_field = value;
}
public long GetPrivateLongField ()
{
return private_long_field;
}
public void SetPrivateLongField (long value)
{
private_long_field = value;
}
public string GetPrivateStringField ()
{
return private_string_field;
}
public void SetPrivateStringField (string value)
{
private_string_field = value;
}
public DateTime GetPrivateDateTimeField ()
{
return private_date_time_field;
}
public void SetPrivateDateTimeField (DateTime value)
{
private_date_time_field = value;
}
public TimeSpan GetPrivateTimeSpanField ()
{
return private_time_span_field;
}
public void SetPrivateTimeSpanField (TimeSpan value)
{
private_time_span_field = value;
}
public IntEnum GetPrivateIntEnumField ()
{
return private_int_enum_field;
}
public void SetPrivateIntEnumField (IntEnum value)
{
private_int_enum_field = value;
}
public LongEnum GetPrivateLongEnumField ()
{
return private_long_enum_field;
}
public void SetPrivateLongEnumField (LongEnum value)
{
private_long_enum_field = value;
}
private int private_int_property_field;
[DatabaseColumn ("PrivateIntProperty")]
private int private_int_property {
get { return private_int_property_field; }
set { private_int_property_field = value; }
}
private long private_long_property_field;
[DatabaseColumn ("PrivateLongProperty")]
private long private_long_property {
get { return private_long_property_field; }
set { private_long_property_field = value; }
}
private string private_string_property_field;
[DatabaseColumn ("PrivateStringProperty")]
private string private_string_property {
get { return private_string_property_field; }
set { private_string_property_field = value; }
}
private DateTime private_date_time_property_field;
[DatabaseColumn ("PrivateDateTimeProperty")]
private DateTime private_date_time_property {
get { return private_date_time_property_field; }
set { private_date_time_property_field = value; }
}
private TimeSpan private_time_span_property_field;
[DatabaseColumn ("PrivateTimeSpanProperty")]
private TimeSpan private_time_span_property {
get { return private_time_span_property_field; }
set { private_time_span_property_field = value; }
}
private IntEnum private_int_enum_property_field;
[DatabaseColumn ("PrivateIntEnumProperty")]
private IntEnum private_int_enum_property {
get { return private_int_enum_property_field; }
set { private_int_enum_property_field = value; }
}
private LongEnum private_long_enum_property_field;
[DatabaseColumn ("PrivateLongEnumProperty")]
private LongEnum private_long_enum_property {
get { return private_long_enum_property_field; }
set { private_long_enum_property_field = value; }
}
public int GetPrivateIntProperty ()
{
return private_int_property;
}
public void SetPrivateIntProperty (int value)
{
private_int_property = value;
}
public long GetPrivateLongProperty ()
{
return private_long_property;
}
public void SetPrivateLongProperty (long value)
{
private_long_property = value;
}
public string GetPrivateStringProperty ()
{
return private_string_property;
}
public void SetPrivateStringProperty (string value)
{
private_string_property = value;
}
public DateTime GetPrivateDateTimeProperty ()
{
return private_date_time_property;
}
public void SetPrivateDateTimeProperty (DateTime value)
{
private_date_time_property = value;
}
public TimeSpan GetPrivateTimeSpanProperty ()
{
return private_time_span_property;
}
public void SetPrivateTimeSpanProperty (TimeSpan value)
{
private_time_span_property = value;
}
public IntEnum GetPrivateIntEnumProperty ()
{
return private_int_enum_property;
}
public void SetPrivateIntEnumProperty (IntEnum value)
{
private_int_enum_property = value;
}
public LongEnum GetPrivateLongEnumProperty ()
{
return private_long_enum_property;
}
public void SetPrivateLongEnumProperty (LongEnum value)
{
private_long_enum_property = value;
}
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Globalization;
using log4net.Config;
using log4net.Core;
using log4net.Layout;
using log4net.Repository;
using log4net.Tests.Appender;
using log4net.Tests.Layout;
using NUnit.Framework;
namespace log4net.Tests.Core
{
/// <summary>
/// Used for internal unit testing of the string format methods on the <see cref="ILog"/> interface.
/// </summary>
/// <remarks>
/// Used for internal unit testing of the string format methods on the <see cref="ILog"/> interface.
/// </remarks>
[TestFixture]
public class StringFormatTest
{
[Test]
public void TestFormatString()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestFormatString");
// ***
log1.Info("TestMessage");
Assert.AreEqual("TestMessage", stringAppender.GetString(), "Test simple INFO event");
stringAppender.Reset();
// ***
log1.DebugFormat("Before {0} After", "Middle");
Assert.AreEqual("Before Middle After", stringAppender.GetString(), "Test simple formatted DEBUG event");
stringAppender.Reset();
// ***
log1.InfoFormat("Before {0} After", "Middle");
Assert.AreEqual("Before Middle After", stringAppender.GetString(), "Test simple formatted INFO event");
stringAppender.Reset();
// ***
log1.WarnFormat("Before {0} After", "Middle");
Assert.AreEqual("Before Middle After", stringAppender.GetString(), "Test simple formatted WARN event");
stringAppender.Reset();
// ***
log1.ErrorFormat("Before {0} After", "Middle");
Assert.AreEqual("Before Middle After", stringAppender.GetString(), "Test simple formatted ERROR event");
stringAppender.Reset();
// ***
log1.FatalFormat("Before {0} After", "Middle");
Assert.AreEqual("Before Middle After", stringAppender.GetString(), "Test simple formatted FATAL event");
stringAppender.Reset();
// ***
log1.InfoFormat("Before {0} After {1}", "Middle", "End");
Assert.AreEqual("Before Middle After End", stringAppender.GetString(), "Test simple formatted INFO event 2");
stringAppender.Reset();
// ***
log1.InfoFormat("IGNORE THIS WARNING - EXCEPTION EXPECTED Before {0} After {1} {2}", "Middle", "End");
Assert.AreEqual(STRING_FORMAT_ERROR, stringAppender.GetString(), "Test formatting error");
stringAppender.Reset();
}
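// Note (added): as the last assertion above shows, a bad format string passed to the xxxFormat
// methods does not throw to the caller; formatting happens later in the logging pipeline, and
// the failure surfaces as the STRING_FORMAT_ERROR text rendered by the appender.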
private const string STRING_FORMAT_ERROR = "<log4net.Error>Exception during StringFormat: Index (zero based) must be greater than or equal to zero and less than the size of the argument list. <format>IGNORE THIS WARNING - EXCEPTION EXPECTED Before {0} After {1} {2}</format><args>{Middle, End}</args></log4net.Error>";
[Test]
public void TestLogFormatApi_Debug()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Debug");
// ***
log1.Debug("TestMessage");
Assert.AreEqual("DEBUG:TestMessage", stringAppender.GetString(), "Test simple DEBUG event 1");
stringAppender.Reset();
// ***
log1.Debug("TestMessage", null);
Assert.AreEqual("DEBUG:TestMessage", stringAppender.GetString(), "Test simple DEBUG event 2");
stringAppender.Reset();
// ***
log1.Debug("TestMessage", new Exception("Exception message"));
Assert.AreEqual("DEBUG:TestMessageSystem.Exception: Exception message" + Environment.NewLine, stringAppender.GetString(), "Test simple DEBUG event 3");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}", "1");
Assert.AreEqual("DEBUG:a1", stringAppender.GetString(), "Test formatted DEBUG event with 1 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("DEBUG:a1b2", stringAppender.GetString(), "Test formatted DEBUG event with 2 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("DEBUG:a1b2c3", stringAppender.GetString(), "Test formatted DEBUG event with 3 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("DEBUG:aQbWcEdReTf", stringAppender.GetString(), "Test formatted DEBUG event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.DebugFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("DEBUG:Before Middle After End", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.DebugFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("DEBUG:Before Middle After End", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_NoDebug()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Threshold = Level.Info;
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Debug");
// ***
log1.Debug("TestMessage");
Assert.AreEqual("", stringAppender.GetString(), "Test simple DEBUG event 1");
stringAppender.Reset();
// ***
log1.Debug("TestMessage", null);
Assert.AreEqual("", stringAppender.GetString(), "Test simple DEBUG event 2");
stringAppender.Reset();
// ***
log1.Debug("TestMessage", new Exception("Exception message"));
Assert.AreEqual("", stringAppender.GetString(), "Test simple DEBUG event 3");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}", "1");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted DEBUG event with 1 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted DEBUG event with 2 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted DEBUG event with 3 parm");
stringAppender.Reset();
// ***
log1.DebugFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted DEBUG event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.DebugFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.DebugFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_Info()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Info");
// ***
log1.Info("TestMessage");
Assert.AreEqual("INFO:TestMessage", stringAppender.GetString(), "Test simple INFO event 1");
stringAppender.Reset();
// ***
log1.Info("TestMessage", null);
Assert.AreEqual("INFO:TestMessage", stringAppender.GetString(), "Test simple INFO event 2");
stringAppender.Reset();
// ***
log1.Info("TestMessage", new Exception("Exception message"));
Assert.AreEqual("INFO:TestMessageSystem.Exception: Exception message" + Environment.NewLine, stringAppender.GetString(), "Test simple INFO event 3");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}", "1");
Assert.AreEqual("INFO:a1", stringAppender.GetString(), "Test formatted INFO event with 1 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("INFO:a1b2", stringAppender.GetString(), "Test formatted INFO event with 2 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("INFO:a1b2c3", stringAppender.GetString(), "Test formatted INFO event with 3 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("INFO:aQbWcEdReTf", stringAppender.GetString(), "Test formatted INFO event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.InfoFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("INFO:Before Middle After End", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.InfoFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("INFO:Before Middle After End", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_NoInfo()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Threshold = Level.Warn;
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Info");
// ***
log1.Info("TestMessage");
Assert.AreEqual("", stringAppender.GetString(), "Test simple INFO event 1");
stringAppender.Reset();
// ***
log1.Info("TestMessage", null);
Assert.AreEqual("", stringAppender.GetString(), "Test simple INFO event 2");
stringAppender.Reset();
// ***
log1.Info("TestMessage", new Exception("Exception message"));
Assert.AreEqual("", stringAppender.GetString(), "Test simple INFO event 3");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}", "1");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted INFO event with 1 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted INFO event with 2 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted INFO event with 3 parm");
stringAppender.Reset();
// ***
log1.InfoFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted INFO event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.InfoFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.InfoFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_Warn()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Warn");
// ***
log1.Warn("TestMessage");
Assert.AreEqual("WARN:TestMessage", stringAppender.GetString(), "Test simple WARN event 1");
stringAppender.Reset();
// ***
log1.Warn("TestMessage", null);
Assert.AreEqual("WARN:TestMessage", stringAppender.GetString(), "Test simple WARN event 2");
stringAppender.Reset();
// ***
log1.Warn("TestMessage", new Exception("Exception message"));
Assert.AreEqual("WARN:TestMessageSystem.Exception: Exception message" + Environment.NewLine, stringAppender.GetString(), "Test simple WARN event 3");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}", "1");
Assert.AreEqual("WARN:a1", stringAppender.GetString(), "Test formatted WARN event with 1 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("WARN:a1b2", stringAppender.GetString(), "Test formatted WARN event with 2 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("WARN:a1b2c3", stringAppender.GetString(), "Test formatted WARN event with 3 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("WARN:aQbWcEdReTf", stringAppender.GetString(), "Test formatted WARN event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.WarnFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("WARN:Before Middle After End", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.WarnFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("WARN:Before Middle After End", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_NoWarn()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Threshold = Level.Error;
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Warn");
// ***
log1.Warn("TestMessage");
Assert.AreEqual("", stringAppender.GetString(), "Test simple WARN event 1");
stringAppender.Reset();
// ***
log1.Warn("TestMessage", null);
Assert.AreEqual("", stringAppender.GetString(), "Test simple WARN event 2");
stringAppender.Reset();
// ***
log1.Warn("TestMessage", new Exception("Exception message"));
Assert.AreEqual("", stringAppender.GetString(), "Test simple WARN event 3");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}", "1");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted WARN event with 1 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted WARN event with 2 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted WARN event with 3 parm");
stringAppender.Reset();
// ***
log1.WarnFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted WARN event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.WarnFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.WarnFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_Error()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Error");
// ***
log1.Error("TestMessage");
Assert.AreEqual("ERROR:TestMessage", stringAppender.GetString(), "Test simple ERROR event 1");
stringAppender.Reset();
// ***
log1.Error("TestMessage", null);
Assert.AreEqual("ERROR:TestMessage", stringAppender.GetString(), "Test simple ERROR event 2");
stringAppender.Reset();
// ***
log1.Error("TestMessage", new Exception("Exception message"));
Assert.AreEqual("ERROR:TestMessageSystem.Exception: Exception message" + Environment.NewLine, stringAppender.GetString(), "Test simple ERROR event 3");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}", "1");
Assert.AreEqual("ERROR:a1", stringAppender.GetString(), "Test formatted ERROR event with 1 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("ERROR:a1b2", stringAppender.GetString(), "Test formatted ERROR event with 2 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("ERROR:a1b2c3", stringAppender.GetString(), "Test formatted ERROR event with 3 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("ERROR:aQbWcEdReTf", stringAppender.GetString(), "Test formatted ERROR event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.ErrorFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("ERROR:Before Middle After End", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.ErrorFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("ERROR:Before Middle After End", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_NoError()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Threshold = Level.Fatal;
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Error");
// ***
log1.Error("TestMessage");
Assert.AreEqual("", stringAppender.GetString(), "Test simple ERROR event 1");
stringAppender.Reset();
// ***
log1.Error("TestMessage", null);
Assert.AreEqual("", stringAppender.GetString(), "Test simple ERROR event 2");
stringAppender.Reset();
// ***
log1.Error("TestMessage", new Exception("Exception message"));
Assert.AreEqual("", stringAppender.GetString(), "Test simple ERROR event 3");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}", "1");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted ERROR event with 1 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted ERROR event with 2 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted ERROR event with 3 parm");
stringAppender.Reset();
// ***
log1.ErrorFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted ERROR event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.ErrorFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.ErrorFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_Fatal()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Fatal");
// ***
log1.Fatal("TestMessage");
Assert.AreEqual("FATAL:TestMessage", stringAppender.GetString(), "Test simple FATAL event 1");
stringAppender.Reset();
// ***
log1.Fatal("TestMessage", null);
Assert.AreEqual("FATAL:TestMessage", stringAppender.GetString(), "Test simple FATAL event 2");
stringAppender.Reset();
// ***
log1.Fatal("TestMessage", new Exception("Exception message"));
Assert.AreEqual("FATAL:TestMessageSystem.Exception: Exception message" + Environment.NewLine, stringAppender.GetString(), "Test simple FATAL event 3");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}", "1");
Assert.AreEqual("FATAL:a1", stringAppender.GetString(), "Test formatted FATAL event with 1 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("FATAL:a1b2", stringAppender.GetString(), "Test formatted FATAL event with 2 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("FATAL:a1b2c3", stringAppender.GetString(), "Test formatted FATAL event with 3 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("FATAL:aQbWcEdReTf", stringAppender.GetString(), "Test formatted FATAL event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.FatalFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("FATAL:Before Middle After End", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.FatalFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("FATAL:Before Middle After End", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
[Test]
public void TestLogFormatApi_NoFatal()
{
StringAppender stringAppender = new StringAppender();
stringAppender.Threshold = Level.Off;
stringAppender.Layout = new PatternLayout("%level:%message");
ILoggerRepository rep = LogManager.CreateRepository(Guid.NewGuid().ToString());
BasicConfigurator.Configure(rep, stringAppender);
ILog log1 = LogManager.GetLogger(rep.Name, "TestLogFormatApi_Fatal");
// ***
log1.Fatal("TestMessage");
Assert.AreEqual("", stringAppender.GetString(), "Test simple FATAL event 1");
stringAppender.Reset();
// ***
log1.Fatal("TestMessage", null);
Assert.AreEqual("", stringAppender.GetString(), "Test simple FATAL event 2");
stringAppender.Reset();
// ***
log1.Fatal("TestMessage", new Exception("Exception message"));
Assert.AreEqual("", stringAppender.GetString(), "Test simple FATAL event 3");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}", "1");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted FATAL event with 1 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}", "1", "2");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted FATAL event with 2 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}c{2}", "1", "2", "3");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted FATAL event with 3 parm");
stringAppender.Reset();
// ***
log1.FatalFormat("a{0}b{1}c{2}d{3}e{4}f", "Q", "W", "E", "R", "T", "Y");
Assert.AreEqual("", stringAppender.GetString(), "Test formatted FATAL event with 5 parms (only 4 used)");
stringAppender.Reset();
// ***
log1.FatalFormat(null, "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with null provider");
stringAppender.Reset();
// ***
log1.FatalFormat(new CultureInfo("en"), "Before {0} After {1}", "Middle", "End");
Assert.AreEqual("", stringAppender.GetString(), "Test formatting with 'en' provider");
stringAppender.Reset();
}
}
}
--- /dev/null 2016-03-07 09:18:53.000000000 -0500
+++ src/System.Xml.ReaderWriter/src/SR.cs 2016-03-07 09:19:12.223114000 -0500
@@ -0,0 +1,1374 @@
+using System;
+using System.Resources;
+
+namespace FxResources.System.Xml.ReaderWriter
+{
+ internal static class SR
+ {
+
+ }
+}
+
+namespace System
+{
+ internal static class SR
+ {
+ private static ResourceManager s_resourceManager;
+
+ private const String s_resourcesName = "FxResources.System.Xml.ReaderWriter.SR";
+
+ internal static String Argument_ConversionOverflow
+ {
+ get
+ {
+ return SR.GetResourceString("Argument_ConversionOverflow", null);
+ }
+ }
+
+ internal static String Argument_EncodingConversionOverflowBytes
+ {
+ get
+ {
+ return SR.GetResourceString("Argument_EncodingConversionOverflowBytes", null);
+ }
+ }
+
+ internal static String Argument_InvalidCharSequenceNoIndex
+ {
+ get
+ {
+ return SR.GetResourceString("Argument_InvalidCharSequenceNoIndex", null);
+ }
+ }
+
+ internal static String Argument_InvalidCodePageConversionIndex
+ {
+ get
+ {
+ return SR.GetResourceString("Argument_InvalidCodePageConversionIndex", null);
+ }
+ }
+
+ internal static String ArgumentNull_Array
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentNull_Array", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_Index
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_Index", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_IndexCount
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_IndexCount", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_IndexCountBuffer
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_IndexCountBuffer", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_NeedNonNegNum
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_NeedNonNegNum", null);
+ }
+ }
+
+ internal static String ArgumentOutOfRange_Range
+ {
+ get
+ {
+ return SR.GetResourceString("ArgumentOutOfRange_Range", null);
+ }
+ }
+
+ internal static String Enc_InvalidByteInEncoding
+ {
+ get
+ {
+ return SR.GetResourceString("Enc_InvalidByteInEncoding", null);
+ }
+ }
+
+ private static ResourceManager ResourceManager
+ {
+ get
+ {
+ if (SR.s_resourceManager == null)
+ {
+ SR.s_resourceManager = new ResourceManager(SR.ResourceType);
+ }
+ return SR.s_resourceManager;
+ }
+ }
+
+ internal static Type ResourceType
+ {
+ get
+ {
+ return typeof(FxResources.System.Xml.ReaderWriter.SR);
+ }
+ }
+
+ internal static String Sch_InvalidDateTimeOption
+ {
+ get
+ {
+ return SR.GetResourceString("Sch_InvalidDateTimeOption", null);
+ }
+ }
+
+ internal static String Sch_ParEntityRefNesting
+ {
+ get
+ {
+ return SR.GetResourceString("Sch_ParEntityRefNesting", null);
+ }
+ }
+
+ internal static String Xml_AsyncIsRunningException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_AsyncIsRunningException", null);
+ }
+ }
+
+ internal static String Xml_BadAttributeChar
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadAttributeChar", null);
+ }
+ }
+
+ internal static String Xml_BadDecimalEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadDecimalEntity", null);
+ }
+ }
+
+ internal static String Xml_BadDTDLocation
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadDTDLocation", null);
+ }
+ }
+
+ internal static String Xml_BadHexEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadHexEntity", null);
+ }
+ }
+
+ internal static String Xml_BadNameChar
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadNameChar", null);
+ }
+ }
+
+ internal static String Xml_BadNamespaceDecl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadNamespaceDecl", null);
+ }
+ }
+
+ internal static String Xml_BadStartNameChar
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_BadStartNameChar", null);
+ }
+ }
+
+ internal static String Xml_CanNotBindToReservedNamespace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CanNotBindToReservedNamespace", null);
+ }
+ }
+
+ internal static String Xml_CannotResolveEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotResolveEntity", null);
+ }
+ }
+
+ internal static String Xml_CannotResolveEntityDtdIgnored
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotResolveEntityDtdIgnored", null);
+ }
+ }
+
+ internal static String Xml_CannotResolveExternalSubset
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotResolveExternalSubset", null);
+ }
+ }
+
+ internal static String Xml_CannotResolveUrl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotResolveUrl", null);
+ }
+ }
+
+ internal static String Xml_CannotStartDocumentOnFragment
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotStartDocumentOnFragment", null);
+ }
+ }
+
+ internal static String Xml_CannotWriteXmlDecl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CannotWriteXmlDecl", null);
+ }
+ }
+
+ internal static String Xml_CDATAEndInText
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CDATAEndInText", null);
+ }
+ }
+
+ internal static String Xml_CharEntityOverflow
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_CharEntityOverflow", null);
+ }
+ }
+
+ internal static String Xml_ClosedOrError
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ClosedOrError", null);
+ }
+ }
+
+ internal static String Xml_ClosedOrErrorReader
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ClosedOrErrorReader", null);
+ }
+ }
+
+ internal static String Xml_ColonInLocalName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ColonInLocalName", null);
+ }
+ }
+
+ internal static String Xml_ConformanceLevelFragment
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ConformanceLevelFragment", null);
+ }
+ }
+
+ internal static String Xml_DefaultException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DefaultException", null);
+ }
+ }
+
+ internal static String Xml_DoubleBaseUri
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DoubleBaseUri", null);
+ }
+ }
+
+ internal static String Xml_DtdAfterRootElement
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DtdAfterRootElement", null);
+ }
+ }
+
+ internal static String Xml_DtdAlreadyWritten
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DtdAlreadyWritten", null);
+ }
+ }
+
+ internal static String Xml_DtdIsProhibited
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DtdIsProhibited", null);
+ }
+ }
+
+ internal static String Xml_DtdIsProhibitedEx
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DtdIsProhibitedEx", null);
+ }
+ }
+
+ internal static String Xml_DtdNotAllowedInFragment
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DtdNotAllowedInFragment", null);
+ }
+ }
+
+ internal static String Xml_DupAttributeName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DupAttributeName", null);
+ }
+ }
+
+ internal static String Xml_DupXmlDecl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_DupXmlDecl", null);
+ }
+ }
+
+ internal static String Xml_ElementNotFound
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ElementNotFound", null);
+ }
+ }
+
+ internal static String Xml_ElementNotFoundNs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ElementNotFoundNs", null);
+ }
+ }
+
+ internal static String Xml_EmptyLocalName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_EmptyLocalName", null);
+ }
+ }
+
+ internal static String Xml_EmptyName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_EmptyName", null);
+ }
+ }
+
+ internal static String Xml_EntityRefNesting
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_EntityRefNesting", null);
+ }
+ }
+
+ internal static String Xml_EnumerationRequired
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_EnumerationRequired", null);
+ }
+ }
+
+ internal static String Xml_ErrorOpeningExternalDtd
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ErrorOpeningExternalDtd", null);
+ }
+ }
+
+ internal static String Xml_ErrorOpeningExternalEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ErrorOpeningExternalEntity", null);
+ }
+ }
+
+ internal static String Xml_ErrorParsingEntityName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ErrorParsingEntityName", null);
+ }
+ }
+
+ internal static String Xml_ErrorPosition
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ErrorPosition", null);
+ }
+ }
+
+ internal static String Xml_ExpectAttType
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectAttType", null);
+ }
+ }
+
+ internal static String Xml_ExpectDtdMarkup
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectDtdMarkup", null);
+ }
+ }
+
+ internal static String Xml_ExpectExternalIdOrEntityValue
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectExternalIdOrEntityValue", null);
+ }
+ }
+
+ internal static String Xml_ExpectExternalOrClose
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectExternalOrClose", null);
+ }
+ }
+
+ internal static String Xml_ExpectExternalOrPublicId
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectExternalOrPublicId", null);
+ }
+ }
+
+ internal static String Xml_ExpectIgnoreOrInclude
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectIgnoreOrInclude", null);
+ }
+ }
+
+ internal static String Xml_ExpectingWhiteSpace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectingWhiteSpace", null);
+ }
+ }
+
+ internal static String Xml_ExpectNoWhitespace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectNoWhitespace", null);
+ }
+ }
+
+ internal static String Xml_ExpectOp
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectOp", null);
+ }
+ }
+
+ internal static String Xml_ExpectPcData
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectPcData", null);
+ }
+ }
+
+ internal static String Xml_ExpectSubOrClose
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExpectSubOrClose", null);
+ }
+ }
+
+ internal static String Xml_ExternalEntityInAttValue
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExternalEntityInAttValue", null);
+ }
+ }
+
+ internal static String Xml_ExternalEntityInStandAloneDocument
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ExternalEntityInStandAloneDocument", null);
+ }
+ }
+
+ internal static String Xml_FragmentId
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_FragmentId", null);
+ }
+ }
+
+ internal static String Xml_IncompatibleConformanceLevel
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_IncompatibleConformanceLevel", null);
+ }
+ }
+
+ internal static String Xml_IncompleteDtdContent
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_IncompleteDtdContent", null);
+ }
+ }
+
+ internal static String Xml_IncompleteEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_IncompleteEntity", null);
+ }
+ }
+
+ internal static String Xml_IndentCharsNotWhitespace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_IndentCharsNotWhitespace", null);
+ }
+ }
+
+ internal static String Xml_InternalError
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InternalError", null);
+ }
+ }
+
+ internal static String Xml_InvalidAttributeType
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidAttributeType", null);
+ }
+ }
+
+ internal static String Xml_InvalidBase64Value
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidBase64Value", null);
+ }
+ }
+
+ internal static String Xml_InvalidBinHexValue
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidBinHexValue", null);
+ }
+ }
+
+ internal static String Xml_InvalidBinHexValueOddCount
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidBinHexValueOddCount", null);
+ }
+ }
+
+ internal static String Xml_InvalidCharacter
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidCharacter", null);
+ }
+ }
+
+ internal static String Xml_InvalidCharInThisEncoding
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidCharInThisEncoding", null);
+ }
+ }
+
+ internal static String Xml_InvalidCharsInIndent
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidCharsInIndent", null);
+ }
+ }
+
+ internal static String Xml_InvalidCommentChars
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidCommentChars", null);
+ }
+ }
+
+ internal static String Xml_InvalidConditionalSection
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidConditionalSection", null);
+ }
+ }
+
+ internal static String Xml_InvalidContentModel
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidContentModel", null);
+ }
+ }
+
+ internal static String Xml_InvalidNameCharsDetail
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidNameCharsDetail", null);
+ }
+ }
+
+ internal static String Xml_InvalidNmToken
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidNmToken", null);
+ }
+ }
+
+ internal static String Xml_InvalidNodeType
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidNodeType", null);
+ }
+ }
+
+ internal static String Xml_InvalidOperation
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidOperation", null);
+ }
+ }
+
+ internal static String Xml_InvalidParEntityRef
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidParEntityRef", null);
+ }
+ }
+
+ internal static String Xml_InvalidPIName
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidPIName", null);
+ }
+ }
+
+ internal static String Xml_InvalidPosition
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidPosition", null);
+ }
+ }
+
+ internal static String Xml_InvalidReadContentAs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidReadContentAs", null);
+ }
+ }
+
+ internal static String Xml_InvalidReadElementContentAs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidReadElementContentAs", null);
+ }
+ }
+
+ internal static String Xml_InvalidReadValueChunk
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidReadValueChunk", null);
+ }
+ }
+
+ internal static String Xml_InvalidRootData
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidRootData", null);
+ }
+ }
+
+ internal static String Xml_InvalidSurrogateHighChar
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidSurrogateHighChar", null);
+ }
+ }
+
+ internal static String Xml_InvalidSurrogateMissingLowChar
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidSurrogateMissingLowChar", null);
+ }
+ }
+
+ internal static String Xml_InvalidSurrogatePairWithArgs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidSurrogatePairWithArgs", null);
+ }
+ }
+
+ internal static String Xml_InvalidTextDecl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidTextDecl", null);
+ }
+ }
+
+ internal static String Xml_InvalidVersionNumber
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidVersionNumber", null);
+ }
+ }
+
+ internal static String Xml_InvalidWhitespaceCharacter
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidWhitespaceCharacter", null);
+ }
+ }
+
+ internal static String Xml_InvalidXmlDecl
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidXmlDecl", null);
+ }
+ }
+
+ internal static String Xml_InvalidXmlSpace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_InvalidXmlSpace", null);
+ }
+ }
+
+ internal static String Xml_LimitExceeded
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_LimitExceeded", null);
+ }
+ }
+
+ internal static String Xml_MessageWithErrorPosition
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MessageWithErrorPosition", null);
+ }
+ }
+
+ internal static String Xml_MissingByteOrderMark
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MissingByteOrderMark", null);
+ }
+ }
+
+ internal static String Xml_MissingRoot
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MissingRoot", null);
+ }
+ }
+
+ internal static String Xml_MixedReadElementContentAs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MixedReadElementContentAs", null);
+ }
+ }
+
+ internal static String Xml_MixingBinaryContentMethods
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MixingBinaryContentMethods", null);
+ }
+ }
+
+ internal static String Xml_MixingReadValueChunkWithBinary
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MixingReadValueChunkWithBinary", null);
+ }
+ }
+
+ internal static String Xml_MixingV1StreamingWithV2Binary
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MixingV1StreamingWithV2Binary", null);
+ }
+ }
+
+ internal static String Xml_MultipleDTDsProvided
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MultipleDTDsProvided", null);
+ }
+ }
+
+ internal static String Xml_MultipleRoots
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_MultipleRoots", null);
+ }
+ }
+
+ internal static String Xml_NamespaceDeclXmlXmlns
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NamespaceDeclXmlXmlns", null);
+ }
+ }
+
+ internal static String Xml_NametableMismatch
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NametableMismatch", null);
+ }
+ }
+
+ internal static String Xml_NonWhitespace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NonWhitespace", null);
+ }
+ }
+
+ internal static String Xml_NoRoot
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NoRoot", null);
+ }
+ }
+
+ internal static String Xml_NoStartTag
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NoStartTag", null);
+ }
+ }
+
+ internal static String Xml_NotEnoughSpaceForSurrogatePair
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NotEnoughSpaceForSurrogatePair", null);
+ }
+ }
+
+ internal static String Xml_NotSameNametable
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_NotSameNametable", null);
+ }
+ }
+
+ internal static String Xml_PrefixForEmptyNs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_PrefixForEmptyNs", null);
+ }
+ }
+
+ internal static String Xml_ReadBinaryContentNotSupported
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReadBinaryContentNotSupported", null);
+ }
+ }
+
+ internal static String Xml_ReadContentAsFormatException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReadContentAsFormatException", null);
+ }
+ }
+
+ internal static String Xml_ReaderAsyncNotSetException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReaderAsyncNotSetException", null);
+ }
+ }
+
+ internal static String Xml_ReadOnlyProperty
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReadOnlyProperty", null);
+ }
+ }
+
+ internal static String Xml_ReadSubtreeNotOnElement
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReadSubtreeNotOnElement", null);
+ }
+ }
+
+ internal static String Xml_ReadValueChunkNotSupported
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_ReadValueChunkNotSupported", null);
+ }
+ }
+
+ internal static String Xml_RecursiveGenEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_RecursiveGenEntity", null);
+ }
+ }
+
+ internal static String Xml_RecursiveParEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_RecursiveParEntity", null);
+ }
+ }
+
+ internal static String Xml_RedefinePrefix
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_RedefinePrefix", null);
+ }
+ }
+
+ internal static String Xml_SurrogatePairSplit
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_SurrogatePairSplit", null);
+ }
+ }
+
+ internal static String Xml_SystemPathResolverCannotOpenUri
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_SystemPathResolverCannotOpenUri", null);
+ }
+ }
+
+ internal static String Xml_TagMismatchEx
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_TagMismatchEx", null);
+ }
+ }
+
+ internal static String Xml_UnclosedConditionalSection
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnclosedConditionalSection", null);
+ }
+ }
+
+ internal static String Xml_UnclosedQuote
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnclosedQuote", null);
+ }
+ }
+
+ internal static String Xml_UndeclaredEntity
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UndeclaredEntity", null);
+ }
+ }
+
+ internal static String Xml_UndefNamespace
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UndefNamespace", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedCDataEnd
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedCDataEnd", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedEndTag
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedEndTag", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedEOF
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedEOF", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedEOF1
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedEOF1", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedEOFInElementContent
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedEOFInElementContent", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedTokenEx
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedTokenEx", null);
+ }
+ }
+
+ internal static String Xml_UnexpectedTokens2
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnexpectedTokens2", null);
+ }
+ }
+
+ internal static String Xml_UnknownEncoding
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnknownEncoding", null);
+ }
+ }
+
+ internal static String Xml_UnknownNs
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnknownNs", null);
+ }
+ }
+
+ internal static String Xml_UnparsedEntityRef
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnparsedEntityRef", null);
+ }
+ }
+
+ internal static String Xml_UnsupportedClass
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UnsupportedClass", null);
+ }
+ }
+
+ internal static String Xml_UserException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_UserException", null);
+ }
+ }
+
+ internal static String Xml_WriterAsyncNotSetException
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_WriterAsyncNotSetException", null);
+ }
+ }
+
+ internal static String Xml_WrongToken
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_WrongToken", null);
+ }
+ }
+
+ internal static String Xml_XmlDeclNotFirst
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_XmlDeclNotFirst", null);
+ }
+ }
+
+ internal static String Xml_XmlnsPrefix
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_XmlnsPrefix", null);
+ }
+ }
+
+ internal static String Xml_XmlPrefix
+ {
+ get
+ {
+ return SR.GetResourceString("Xml_XmlPrefix", null);
+ }
+ }
+
+ internal static String XmlConvert_BadFormat
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_BadFormat", null);
+ }
+ }
+
+ internal static String XmlConvert_BadUri
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_BadUri", null);
+ }
+ }
+
+ internal static String XmlConvert_NotOneCharString
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_NotOneCharString", null);
+ }
+ }
+
+ internal static String XmlConvert_Overflow
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_Overflow", null);
+ }
+ }
+
+ internal static String XmlConvert_TypeListBadMapping2
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_TypeListBadMapping2", null);
+ }
+ }
+
+ internal static String XmlConvert_TypeNoNamespace
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_TypeNoNamespace", null);
+ }
+ }
+
+ internal static String XmlConvert_TypeNoPrefix
+ {
+ get
+ {
+ return SR.GetResourceString("XmlConvert_TypeNoPrefix", null);
+ }
+ }
+
+ internal static String Format(String resourceFormat, params Object[] args)
+ {
+ if (args == null)
+ {
+ return resourceFormat;
+ }
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, args);
+ }
+ return String.Concat(resourceFormat, String.Join(", ", args));
+ }
+
+ internal static String Format(String resourceFormat, Object p1)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1 });
+ }
+
+ internal static String Format(String resourceFormat, Object p1, Object p2)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1, p2);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1, p2 });
+ }
+
+ internal static String Format(String resourceFormat, Object p1, Object p2, Object p3)
+ {
+ if (!SR.UsingResourceKeys())
+ {
+ return String.Format(resourceFormat, p1, p2, p3);
+ }
+ return String.Join(", ", new Object[] { resourceFormat, p1, p2, p3 });
+ }
+
+ internal static String GetResourceString(String resourceKey, String defaultString)
+ {
+ String str = null;
+ try
+ {
+ str = SR.ResourceManager.GetString(resourceKey);
+ }
+            catch (MissingManifestResourceException)
+            {
+                // Resource lookup failed; leave str null so the fallback handling below applies.
+            }
+ if (defaultString != null && resourceKey.Equals(str))
+ {
+ return defaultString;
+ }
+ return str;
+ }
+
+ private static Boolean UsingResourceKeys()
+ {
+ return false;
+ }
+ }
+}
#region File Description
//-----------------------------------------------------------------------------
// InstancedModelSampleGame.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion
#region Using Statements
using System;
using System.Collections.Generic;
using System.Globalization;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
#endregion
namespace InstancedModelSample
{
/// <summary>
/// Enum describes the various possible techniques
/// that can be chosen to implement instancing.
/// </summary>
public enum InstancingTechnique
{
HardwareInstancing,
NoInstancing,
NoInstancingOrStateBatching
}
/// <summary>
/// Sample showing how to efficiently render many copies of a model, using
/// hardware instancing to draw more than one copy in a single GPU batch.
/// </summary>
public class InstancedModelSampleGame : Microsoft.Xna.Framework.Game
{
#region Fields
GraphicsDeviceManager graphics;
SpriteBatch spriteBatch;
SpriteFont spriteFont;
// Instanced model rendering.
InstancingTechnique instancingTechnique = InstancingTechnique.HardwareInstancing;
const int InitialInstanceCount = 1000;
List<SpinningInstance> instances;
Matrix[] instanceTransforms;
Model instancedModel;
Matrix[] instancedModelBones;
DynamicVertexBuffer instanceVertexBuffer;
// To store instance transform matrices in a vertex buffer, we use this custom
// vertex type which encodes 4x4 matrices as a set of four Vector4 values.
static VertexDeclaration instanceVertexDeclaration = new VertexDeclaration
(
new VertexElement(0, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 0),
new VertexElement(16, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 1),
new VertexElement(32, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 2),
new VertexElement(48, VertexElementFormat.Vector4, VertexElementUsage.BlendWeight, 3)
);
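// Illustrative sketch (an assumption, not part of this file): on the shader side the four
// BLENDWEIGHT Vector4 elements declared above are typically read back as a single
// float4x4 per-instance world transform. The effect file name and vertex-shader
// signature below are assumed for illustration only.
//
//     // InstancedModel.fx (HLSL sketch, assumed)
//     VertexShaderOutput HardwareInstancingVS(VertexShaderInput input,
//                                             float4x4 instanceTransform : BLENDWEIGHT)
//     {
//         // The matrix arrives as four row vectors, so transpose it before use.
//         return CommonVS(input, mul(World, transpose(instanceTransform)));
//     }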
// Measure the framerate.
int frameRate;
int frameCounter;
TimeSpan elapsedTime;
// Input handling.
KeyboardState lastKeyboardState;
GamePadState lastGamePadState;
KeyboardState currentKeyboardState;
GamePadState currentGamePadState;
#endregion
#region Initialization
public InstancedModelSampleGame()
{
graphics = new GraphicsDeviceManager(this);
Content.RootDirectory = "Content";
// Most games will want to leave both these values set to true to ensure
// smoother updates, but when you are doing performance work it can be
// useful to set them to false in order to get more accurate measurements.
IsFixedTimeStep = false;
graphics.SynchronizeWithVerticalRetrace = false;
// Initialize the list of instances.
instances = new List<SpinningInstance>();
for (int i = 0; i < InitialInstanceCount; i++)
instances.Add(new SpinningInstance());
}
/// <summary>
/// Load your graphics content.
/// </summary>
protected override void LoadContent()
{
spriteBatch = new SpriteBatch(graphics.GraphicsDevice);
spriteFont = Content.Load<SpriteFont>("Font");
instancedModel = Content.Load<Model>("Cats");
instancedModelBones = new Matrix[instancedModel.Bones.Count];
instancedModel.CopyAbsoluteBoneTransformsTo(instancedModelBones);
}
#endregion
#region Update and Draw
/// <summary>
/// Allows the game to run logic.
/// </summary>
protected override void Update(GameTime gameTime)
{
HandleInput();
// Update the position of each spinning instance.
foreach (SpinningInstance instance in instances)
{
instance.Update(gameTime);
}
// Measure our framerate.
elapsedTime += gameTime.ElapsedGameTime;
if (elapsedTime > TimeSpan.FromSeconds(1))
{
elapsedTime -= TimeSpan.FromSeconds(1);
frameRate = frameCounter;
frameCounter = 0;
}
base.Update(gameTime);
}
/// <summary>
/// This is called when the game should draw itself.
/// </summary>
protected override void Draw(GameTime gameTime)
{
GraphicsDevice device = graphics.GraphicsDevice;
device.Clear(Color.CornflowerBlue);
// Calculate camera matrices.
Matrix view = Matrix.CreateLookAt(new Vector3(0, 0, 15),
Vector3.Zero, Vector3.Up);
Matrix projection = Matrix.CreatePerspectiveFieldOfView(MathHelper.PiOver4,
device.Viewport.AspectRatio,
1,
100);
// Set renderstates for drawing 3D models.
device.BlendState = BlendState.Opaque;
device.DepthStencilState = DepthStencilState.Default;
// Gather instance transform matrices into a single array.
Array.Resize(ref instanceTransforms, instances.Count);
for (int i = 0; i < instances.Count; i++)
{
instanceTransforms[i] = instances[i].Transform;
}
// Draw all the instances, using the currently selected rendering technique.
switch (instancingTechnique)
{
case InstancingTechnique.HardwareInstancing:
DrawModelHardwareInstancing(instancedModel, instancedModelBones,
instanceTransforms, view, projection);
break;
case InstancingTechnique.NoInstancing:
DrawModelNoInstancing(instancedModel, instancedModelBones,
instanceTransforms, view, projection);
break;
case InstancingTechnique.NoInstancingOrStateBatching:
DrawModelNoInstancingOrStateBatching(instancedModel, instancedModelBones,
instanceTransforms, view, projection);
break;
}
DrawOverlayText();
// Measure our framerate.
frameCounter++;
base.Draw(gameTime);
}
/// <summary>
/// Efficiently draws several copies of a piece of geometry using hardware instancing.
/// </summary>
void DrawModelHardwareInstancing(Model model, Matrix[] modelBones,
Matrix[] instances, Matrix view, Matrix projection)
{
if (instances.Length == 0)
return;
// If we have more instances than room in our vertex buffer, grow it to the necessary size.
if ((instanceVertexBuffer == null) ||
(instances.Length > instanceVertexBuffer.VertexCount))
{
if (instanceVertexBuffer != null)
instanceVertexBuffer.Dispose();
instanceVertexBuffer = new DynamicVertexBuffer(GraphicsDevice, instanceVertexDeclaration,
instances.Length, BufferUsage.WriteOnly);
}
// Transfer the latest instance transform matrices into the instanceVertexBuffer.
instanceVertexBuffer.SetData(instances, 0, instances.Length, SetDataOptions.Discard);
foreach (ModelMesh mesh in model.Meshes)
{
foreach (ModelMeshPart meshPart in mesh.MeshParts)
{
// Tell the GPU to read from both the model vertex buffer plus our instanceVertexBuffer.
GraphicsDevice.SetVertexBuffers(
new VertexBufferBinding(meshPart.VertexBuffer, meshPart.VertexOffset, 0),
new VertexBufferBinding(instanceVertexBuffer, 0, 1)
);
GraphicsDevice.Indices = meshPart.IndexBuffer;
// Set up the instance rendering effect.
Effect effect = meshPart.Effect;
effect.CurrentTechnique = effect.Techniques["HardwareInstancing"];
effect.Parameters["World"].SetValue(modelBones[mesh.ParentBone.Index]);
effect.Parameters["View"].SetValue(view);
effect.Parameters["Projection"].SetValue(projection);
// Draw all the instance copies in a single call.
foreach (EffectPass pass in effect.CurrentTechnique.Passes)
{
pass.Apply();
GraphicsDevice.DrawInstancedPrimitives(PrimitiveType.TriangleList, 0, 0,
meshPart.NumVertices, meshPart.StartIndex,
meshPart.PrimitiveCount, instances.Length);
}
}
}
}
/// <summary>
/// Draws several copies of a piece of geometry without using any
/// special GPU instancing techniques at all. This just does a
/// regular loop and issues several draw calls one after another.
/// </summary>
void DrawModelNoInstancing(Model model, Matrix[] modelBones,
Matrix[] instances, Matrix view, Matrix projection)
{
foreach (ModelMesh mesh in model.Meshes)
{
foreach (ModelMeshPart meshPart in mesh.MeshParts)
{
GraphicsDevice.SetVertexBuffer(meshPart.VertexBuffer, meshPart.VertexOffset);
GraphicsDevice.Indices = meshPart.IndexBuffer;
// Set up the rendering effect.
Effect effect = meshPart.Effect;
effect.CurrentTechnique = effect.Techniques["NoInstancing"];
effect.Parameters["View"].SetValue(view);
effect.Parameters["Projection"].SetValue(projection);
EffectParameter transformParameter = effect.Parameters["World"];
// Draw a single instance copy each time around this loop.
for (int i = 0; i < instances.Length; i++)
{
transformParameter.SetValue(modelBones[mesh.ParentBone.Index] * instances[i]);
foreach (EffectPass pass in effect.CurrentTechnique.Passes)
{
pass.Apply();
GraphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0,
meshPart.NumVertices, meshPart.StartIndex,
meshPart.PrimitiveCount);
}
}
}
}
}
/// <summary>
/// This technique is NOT a good idea! It is only included in the sample
/// for comparison purposes, so you can compare its performance with the
/// other more sensible approaches. This uses the exact same shader code
/// as the preceding NoInstancing technique, but with a key difference.
/// Where the NoInstancing technique worked like this:
///
/// SetRenderStates()
/// foreach instance
/// {
/// Update effect with per-instance transform matrix
/// DrawIndexedPrimitives()
/// }
///
/// NoInstancingOrStateBatching works like so:
///
/// foreach instance
/// {
/// Set per-instance transform matrix into the effect
/// SetRenderStates()
/// DrawIndexedPrimitives()
/// }
///
/// As you can see, this is repeatedly setting the same renderstates.
/// Not so efficient.
///
/// In other words, the built-in Model.Draw method is pretty inefficient when
/// it comes to drawing more than one instance! Even without using any fancy
/// shader techniques, you can get a significant speed boost just by rearranging
/// your drawing code to work more like the earlier NoInstancing technique.
/// </summary>
void DrawModelNoInstancingOrStateBatching(Model model, Matrix[] modelBones,
Matrix[] instances, Matrix view, Matrix projection)
{
for (int i = 0; i < instances.Length; i++)
{
foreach (ModelMesh mesh in model.Meshes)
{
foreach (Effect effect in mesh.Effects)
{
effect.CurrentTechnique = effect.Techniques["NoInstancing"];
effect.Parameters["World"].SetValue(modelBones[mesh.ParentBone.Index] * instances[i]);
effect.Parameters["View"].SetValue(view);
effect.Parameters["Projection"].SetValue(projection);
}
mesh.Draw();
}
}
}
/// <summary>
/// Helper for drawing the help text overlay.
/// </summary>
void DrawOverlayText()
{
string text = string.Format(CultureInfo.CurrentCulture,
"Frames per second: {0}\n" +
"Instances: {1}\n" +
"Technique: {2}\n\n" +
"A = Change technique\n" +
"X = Add instances\n" +
"Y = Remove instances\n",
frameRate,
instances.Count,
instancingTechnique);
spriteBatch.Begin();
spriteBatch.DrawString(spriteFont, text, new Vector2(65, 65), Color.Black);
spriteBatch.DrawString(spriteFont, text, new Vector2(64, 64), Color.White);
spriteBatch.End();
}
#endregion
#region Handle Input
/// <summary>
/// Handles input for quitting or changing settings.
/// </summary>
void HandleInput()
{
lastKeyboardState = currentKeyboardState;
lastGamePadState = currentGamePadState;
currentKeyboardState = Keyboard.GetState();
currentGamePadState = GamePad.GetState(PlayerIndex.One);
// Check for exit.
if (currentKeyboardState.IsKeyDown(Keys.Escape) ||
currentGamePadState.Buttons.Back == ButtonState.Pressed)
{
Exit();
}
// Change the number of instances more quickly if there are
// already lots of them. This avoids you having to sit there
// for hours with your finger on the "increase" button!
int instanceChangeRate = Math.Max(instances.Count / 100, 1);
// Increase the number of instances?
if (currentKeyboardState.IsKeyDown(Keys.X) ||
currentGamePadState.Buttons.X == ButtonState.Pressed)
{
for (int i = 0; i < instanceChangeRate; i++)
{
instances.Add(new SpinningInstance());
}
}
// Decrease the number of instances?
if (currentKeyboardState.IsKeyDown(Keys.Y) ||
currentGamePadState.Buttons.Y == ButtonState.Pressed)
{
for (int i = 0; i < instanceChangeRate; i++)
{
if (instances.Count == 0)
break;
instances.RemoveAt(instances.Count - 1);
}
}
// Change which instancing technique we are using?
if ((currentKeyboardState.IsKeyDown(Keys.A) &&
lastKeyboardState.IsKeyUp(Keys.A)) ||
(currentGamePadState.Buttons.A == ButtonState.Pressed &&
lastGamePadState.Buttons.A == ButtonState.Released))
{
instancingTechnique++;
// Wrap if we reach the end of the possible techniques.
if (instancingTechnique > InstancingTechnique.NoInstancingOrStateBatching)
instancingTechnique = 0;
}
}
#endregion
}
#region Entry Point
/// <summary>
/// The main entry point for the application.
/// </summary>
static class Program
{
static void Main()
{
using (InstancedModelSampleGame game = new InstancedModelSampleGame())
{
game.Run();
}
}
}
#endregion
}
#region
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Linq;
using System.Threading.Tasks;
using GTR.Core.Engine;
using GTR.Core.Marshalling;
using GTR.Core.Marshalling.DTO;
using GTR.Core.Model;
using GTR.Core.Moves;
using GTR.Core.Services;
using GTR.Core.Util;
using MoveEventArgs = GTR.Core.Moves.MoveEventArgs;
#endregion
namespace GTR.Core.Game
{
public class GameEngine
{
private readonly Dictionary<Player, PlayerEngine> playerEngines;
private GameOptions _gameOptions;
private bool _isGameOver;
private CompletedGame completedGame;
private IPlayerInputService playerInputService;
private MoveMaker moveMaker;
public GameEngine(
Model.Game gameModel,
IMessageProvider messageProvider,
IPlayerInputService playerInputService)
{
Game = gameModel;
MessageProvider = messageProvider;
this.playerInputService = playerInputService;
WireEvents();
playerEngines = new Dictionary<Player, PlayerEngine>();
moveMaker = new MoveMaker();
foreach (var player in GameTable.Players)
{
var playerEngine = new PlayerEngine(player, playerInputService, gameModel.GameTable, moveMaker);
playerEngines.Add(player, playerEngine);
}
}
public MoveHistory MoveHistory { get { return moveMaker.History; } }
public Model.Game Game { get; }
public IMessageProvider MessageProvider { get; set; }
private GameTable GameTable
{
get { return Game?.GameTable; }
}
internal event GameOverHandler GameOver = delegate { };
private void WireEvents()
{
GameTable.OrderDeck.CollectionChanged += OrderDeckOnCollectionChanged;
foreach (var siteDeck in GameTable.SiteDecks)
{
siteDeck.CollectionChanged += SiteDeckOnCollectionChanged;
}
GameOver += OnGameOver;
}
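/// <summary>
/// Runs the main game loop: deals the starting hands, then alternates lead and
/// action phases each turn until a game-over event produces the completed result.
/// </summary>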
public async Task<CompletedGame> PlayGame()
{
DealCards();
Game.LeadPlayer = DetermineGoesFirst(GameTable);
Game.TurnNumber = 1;
while (!_isGameOver)
{
await HandleTurn();
Game.TurnNumber++;
}
return completedGame;
}
#region game start
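// Deals the opening hands: each player draws order cards up to the hand's refill
// capacity and takes a single jack card from the jack deck.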
private void DealCards()
{
foreach (var player in GameTable.Players)
{
for (int i = 0; i < player.Hand.RefillCapacity; i++)
{
var topDeckCard = GameTable.OrderDeck.Top;
moveMaker.MakeMove(topDeckCard, GameTable.OrderDeck, player.Hand.OrderCards, null);
}
var topJackCard = GameTable.JackDeck.ElementAt(0);
moveMaker.MakeMove(topJackCard, GameTable.JackDeck, player.Hand.JackCards, null);
}
}
private Player DetermineGoesFirst(GameTable playingField)
{
var players = playingField.Players;
var orderDeck = playingField.OrderDeck;
var pool = playingField.Pool;
orderDeck.Shuffle();
Dictionary<Player, OrderCardModel> playerDraws = new Dictionary<Player, OrderCardModel>();
foreach (var player in players)
{
var drawnCard = orderDeck.Draw();
pool.Add(drawnCard);
playerDraws.Add(player, drawnCard);
}
// TODO: implement once buildings are done
return players.ElementAt(0);
}
#endregion
#region game mid
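// A turn runs the lead/follow phase, then the action phase for each player, and
// finally clears the play areas and passes the lead to the player on the right.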
private async Task HandleTurn()
{
MessageProvider.Display(string.Format("Turn {0} lead phase", Game.TurnNumber));
await HandleLeadFollowAsync();
MessageProvider.Display(string.Format("Turn {0} action phase", Game.TurnNumber));
await HandleActionsAsync();
CompleteTurn();
MessageProvider.Display(string.Format("Turn {0} complete", Game.TurnNumber));
}
private async Task HandleLeadFollowAsync()
{
Game.ActionPlayer = Game.LeadPlayer;
var leaderEngine = playerEngines[Game.LeadPlayer];
var lead = await leaderEngine.LeadAsync();
if (lead == null)
{
return;
}
var leadRole = (RoleType) lead;
for (int playerNumber = 0; playerNumber < Game.GameTable.Players.Count - 1; playerNumber++)
{
Game.ActionPlayer = Game.ActionPlayer.PlayerToRight;
var actionEngine = playerEngines[Game.ActionPlayer];
await actionEngine.FollowAsync(leadRole);
}
}
private async Task HandleActionsAsync()
{
Game.ActionPlayer = Game.LeadPlayer;
for (int playerNumber = 0; playerNumber < Game.GameTable.Players.Count - 1; playerNumber++)
{
var actionEngine = playerEngines[Game.ActionPlayer];
await actionEngine.TakeActionsAsync();
Game.ActionPlayer = Game.ActionPlayer.PlayerToRight;
}
}
private void CompleteTurn()
{
foreach (Player player in Game.GameTable.Players)
{
var playerEngine = playerEngines[player];
playerEngine.ClearPlayArea();
}
Game.LeadPlayer = Game.LeadPlayer.PlayerToRight;
}
#endregion
#region end game
private void SiteDeckOnCollectionChanged(object sender,
NotifyCollectionChangedEventArgs notifyCollectionChangedEventArgs)
{
if (notifyCollectionChangedEventArgs.Action != NotifyCollectionChangedAction.Remove)
{
return;
}
bool areAvailableSites = GameTable.SiteDecks.Any(deck => deck.Top.SiteType == SiteType.InsideRome);
if (areAvailableSites)
{
return;
}
if (GameOver != null)
{
GameOverEventArgs gameOverEventArgs = new GameOverEventArgs {Reason = Messages.ZeroSitesGameOver};
GameOver(this, gameOverEventArgs);
}
}
private void OrderDeckOnCollectionChanged(object sender,
NotifyCollectionChangedEventArgs notifyCollectionChangedEventArgs)
{
if (notifyCollectionChangedEventArgs.Action != NotifyCollectionChangedAction.Remove)
{
return;
}
if (GameTable.OrderDeck.Count != 0)
{
return;
}
if (GameOver != null)
{
GameOverEventArgs gameOverEventArgs = new GameOverEventArgs {Reason = Messages.DeckEmptyGameOver};
GameOver(this, gameOverEventArgs);
}
}
private void OnGameOver(object sender, GameOverEventArgs args)
{
var gameScore = GameScorer.Score(GameTable.Players);
var winners = GameScorer.CalculateWinners(gameScore);
completedGame = new CompletedGame
{
Winners = winners,
GameScore = gameScore
};
_isGameOver = true;
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using SimpleContainer.Helpers;
using SimpleContainer.Infection;
using SimpleContainer.Interface;
namespace SimpleContainer.Configuration
{
public abstract class AbstractConfigurationBuilder<TSelf>
where TSelf : AbstractConfigurationBuilder<TSelf>
{
internal ConfigurationRegistry.Builder RegistryBuilder { get; private set; }
protected readonly List<string> contracts;
internal AbstractConfigurationBuilder(ConfigurationRegistry.Builder registryBuilder, List<string> contracts)
{
RegistryBuilder = registryBuilder;
this.contracts = contracts;
}
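// Illustrative fluent usage (IService/ServiceImpl and "connectionString" are hypothetical):
//   builder.Bind<IService, ServiceImpl>()
//          .BindDependency<ServiceImpl>("connectionString", "Data Source=...");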
public TSelf Bind<TInterface, TImplementation>(bool clearOld = false)
where TImplementation : TInterface
{
GetServiceBuilder(typeof (TInterface)).Bind(typeof (TInterface), typeof (TImplementation), clearOld);
return Self;
}
public TSelf Bind(Type interfaceType, Type implementationType, bool clearOld)
{
GetServiceBuilder(interfaceType).Bind(interfaceType, implementationType, clearOld);
return Self;
}
public TSelf Bind(Type interfaceType, Type implementationType)
{
GetServiceBuilder(interfaceType).Bind(interfaceType, implementationType, false);
return Self;
}
public TSelf Bind<T>(object value, bool containerOwnsInstance = true)
{
GetServiceBuilder(typeof (T)).Bind(typeof (T), value, containerOwnsInstance);
return Self;
}
public TSelf Bind(Type interfaceType, object value, bool containerOwnsInstance = true)
{
GetServiceBuilder(interfaceType).Bind(interfaceType, value, containerOwnsInstance);
return Self;
}
public TSelf WithInstanceFilter<T>(Func<T, bool> filter)
{
GetServiceBuilder(typeof (T)).WithInstanceFilter(filter);
return Self;
}
public TSelf WithImplicitDependency<T>(ServiceName name)
{
GetServiceBuilder(typeof(T)).WithImplicitDependency(name);
return Self;
}
public TSelf WithComment<T>(string comment)
{
GetServiceBuilder(typeof(T)).SetComment(comment);
return Self;
}
public TSelf Bind<T>(Func<IContainer, T> creator, bool containerOwnsInstance = true)
{
GetServiceBuilder(typeof (T)).Bind(c => creator(c), containerOwnsInstance);
return Self;
}
public TSelf Bind(Type type, Func<IContainer, object> creator, bool containerOwnsInstance = true)
{
GetServiceBuilder(type).Bind(creator, containerOwnsInstance);
return Self;
}
public TSelf Bind<T>(Func<IContainer, Type, T> creator, bool containerOwnsInstance = true)
{
GetServiceBuilder(typeof (T)).Bind((c, t) => creator(c, t), containerOwnsInstance);
return Self;
}
public TSelf Bind(Type type, Func<IContainer, Type, object> creator, bool containerOwnsInstance = true)
{
GetServiceBuilder(type).Bind(creator, containerOwnsInstance);
return Self;
}
public TSelf BindDependency<T>(string dependencyName, object value)
{
GetServiceBuilder(typeof (T)).BindDependency(dependencyName, value);
return Self;
}
public TSelf BindDependency(Type type, string dependencyName, object value)
{
GetServiceBuilder(type).BindDependency(dependencyName, value);
return Self;
}
public TSelf BindDependency<T, TDependency>(TDependency value)
{
GetServiceBuilder(typeof (T)).BindDependency<T, TDependency>(value);
return Self;
}
public TSelf BindDependency<T, TDependency>(object value)
{
GetServiceBuilder(typeof (T)).BindDependency<T, TDependency>(value);
return Self;
}
public TSelf BindDependency<T, TDependency, TDependencyValue>()
where TDependencyValue : TDependency
{
GetServiceBuilder(typeof (T)).BindDependency<TDependency, TDependencyValue>();
return Self;
}
public TSelf BindDependency(Type type, Type dependencyType, Func<IContainer, object> creator)
{
GetServiceBuilder(type).BindDependency(dependencyType, creator);
return Self;
}
public TSelf BindDependencyFactory<T>(string dependencyName, Func<IContainer, object> creator)
{
GetServiceBuilder(typeof (T)).BindDependencyFactory(dependencyName, creator);
return Self;
}
public TSelf BindDependencyImplementation<T, TDependencyValue>(string dependencyName)
{
GetServiceBuilder(typeof (T)).BindDependencyImplementation<TDependencyValue>(dependencyName);
return Self;
}
public TSelf BindDependencyImplementation<T, TDependencyInterface, TDependencyImplementation>()
{
GetServiceBuilder(typeof (T)).BindDependencyImplementation<TDependencyInterface, TDependencyImplementation>();
return Self;
}
public TSelf BindDependencies<T>(object dependencies)
{
GetServiceBuilder(typeof (T)).BindDependencies(dependencies);
return Self;
}
public TSelf BindDependencies<T>(IParametersSource parameters)
{
GetServiceBuilder(typeof (T)).BindDependencies(parameters);
return Self;
}
public TSelf BindDependencyValue(Type type, Type dependencyType, object value)
{
GetServiceBuilder(type).BindDependencyValue(dependencyType, value);
return Self;
}
public TSelf DontUse(Type t)
{
GetServiceBuilder(t).DontUse();
return Self;
}
public TSelf DontUse<T>()
{
GetServiceBuilder(typeof (T)).DontUse();
return Self;
}
public TSelf IgnoreImplementation(Type t)
{
GetServiceBuilder(t).IgnoreImplementation();
return Self;
}
public TSelf IgnoreImplementation<T>()
{
GetServiceBuilder(typeof (T)).IgnoreImplementation();
return Self;
}
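// Resolves the configuration set for the given type and returns a builder scoped
// to the current contract chain.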
private ServiceConfiguration.Builder GetServiceBuilder(Type type)
{
return RegistryBuilder.GetConfigurationSet(type).GetBuilder(contracts);
}
private TSelf Self
{
get { return (TSelf) this; }
}
public ContractConfigurationBuilder Contract(params string[] newContracts)
{
return new ContractConfigurationBuilder(RegistryBuilder, contracts.Concat(newContracts.ToList()));
}
public ContractConfigurationBuilder Contract<T>()
where T : RequireContractAttribute, new()
{
return Contract(InternalHelpers.NameOf<T>());
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Abp.Events.Bus.Factories;
using Abp.Events.Bus.Factories.Internals;
using Abp.Events.Bus.Handlers;
using Abp.Events.Bus.Handlers.Internals;
using Abp.Threading.Extensions;
using Castle.Core.Logging;
namespace Abp.Events.Bus
{
/// <summary>
/// Implements EventBus as Singleton pattern.
/// </summary>
public class EventBus : IEventBus
{
/// <summary>
/// Gets the default <see cref="EventBus"/> instance.
/// </summary>
public static EventBus Default { get { return DefaultInstance; } }
private static readonly EventBus DefaultInstance = new EventBus();
/// <summary>
/// Reference to the Logger.
/// </summary>
public ILogger Logger { get; set; }
/// <summary>
/// All registered handler factories.
/// Key: Type of the event
/// Value: List of handler factories
/// </summary>
private readonly ConcurrentDictionary<Type, List<IEventHandlerFactory>> _handlerFactories;
/// <summary>
/// Creates a new <see cref="EventBus"/> instance.
/// Instead of creating a new instance, you can use <see cref="Default"/> to use the global <see cref="EventBus"/>.
/// </summary>
public EventBus()
{
_handlerFactories = new ConcurrentDictionary<Type, List<IEventHandlerFactory>>();
Logger = NullLogger.Instance;
}
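// Illustrative usage sketch (MyEventData is a hypothetical type implementing IEventData):
//   EventBus.Default.Register<MyEventData>(data => Console.WriteLine(data));
//   EventBus.Default.Trigger(new MyEventData());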
/// <inheritdoc/>
public IDisposable Register<TEventData>(Action<TEventData> action) where TEventData : IEventData
{
return Register(typeof(TEventData), new ActionEventHandler<TEventData>(action));
}
/// <inheritdoc/>
public IDisposable Register<TEventData>(IEventHandler<TEventData> handler) where TEventData : IEventData
{
return Register(typeof(TEventData), handler);
}
/// <inheritdoc/>
public IDisposable Register<TEventData, THandler>()
where TEventData : IEventData
where THandler : IEventHandler<TEventData>, new()
{
return Register(typeof(TEventData), new TransientEventHandlerFactory<THandler>());
}
/// <inheritdoc/>
public IDisposable Register(Type eventType, IEventHandler handler)
{
return Register(eventType, new SingleInstanceHandlerFactory(handler));
}
/// <inheritdoc/>
public IDisposable Register<TEventData>(IEventHandlerFactory handlerFactory) where TEventData : IEventData
{
return Register(typeof(TEventData), handlerFactory);
}
/// <inheritdoc/>
public IDisposable Register(Type eventType, IEventHandlerFactory handlerFactory)
{
GetOrCreateHandlerFactories(eventType)
.Locking(factories => factories.Add(handlerFactory));
return new FactoryUnregistrar(this, eventType, handlerFactory);
}
/// <inheritdoc/>
public void Unregister<TEventData>(Action<TEventData> action) where TEventData : IEventData
{
GetOrCreateHandlerFactories(typeof(TEventData))
.Locking(factories =>
{
factories.RemoveAll(
factory =>
{
if (factory is SingleInstanceHandlerFactory)
{
var singleInstanceFactory = factory as SingleInstanceHandlerFactory;
if (singleInstanceFactory.HandlerInstance is ActionEventHandler<TEventData>)
{
var actionHandler =
singleInstanceFactory.HandlerInstance as ActionEventHandler<TEventData>;
if (actionHandler.Action == action)
{
return true;
}
}
}
return false;
});
});
}
/// <inheritdoc/>
public void Unregister<TEventData>(IEventHandler<TEventData> handler) where TEventData : IEventData
{
Unregister(typeof(TEventData), handler);
}
/// <inheritdoc/>
public void Unregister(Type eventType, IEventHandler handler)
{
GetOrCreateHandlerFactories(eventType)
.Locking(factories =>
{
factories.RemoveAll(
factory =>
factory is SingleInstanceHandlerFactory &&
(factory as SingleInstanceHandlerFactory).HandlerInstance == handler
);
});
}
/// <inheritdoc/>
public void Unregister<TEventData>(IEventHandlerFactory factory) where TEventData : IEventData
{
Unregister(typeof(TEventData), factory);
}
/// <inheritdoc/>
public void Unregister(Type eventType, IEventHandlerFactory factory)
{
GetOrCreateHandlerFactories(eventType).Locking(factories => factories.Remove(factory));
}
/// <inheritdoc/>
public void UnregisterAll<TEventData>() where TEventData : IEventData
{
UnregisterAll(typeof(TEventData));
}
/// <inheritdoc/>
public void UnregisterAll(Type eventType)
{
GetOrCreateHandlerFactories(eventType).Locking(factories => factories.Clear());
}
/// <inheritdoc/>
public void Trigger<TEventData>(TEventData eventData) where TEventData : IEventData
{
Trigger((object)null, eventData);
}
/// <inheritdoc/>
public void Trigger<TEventData>(object eventSource, TEventData eventData) where TEventData : IEventData
{
Trigger(typeof(TEventData), eventSource, eventData);
}
/// <inheritdoc/>
public void Trigger(Type eventType, IEventData eventData)
{
Trigger(eventType, null, eventData);
}
/// <inheritdoc/>
public void Trigger(Type eventType, object eventSource, IEventData eventData)
{
//TODO: This method can be optimized by adding all possibilities to a dictionary.
eventData.EventSource = eventSource;
foreach (var factoryToTrigger in GetHandlerFactories(eventType))
{
var eventHandler = factoryToTrigger.GetHandler();
if (eventHandler == null)
{
throw new Exception("Registered event handler for event type " + eventType.Name + " does not implement IEventHandler<" + eventType.Name + "> interface!");
}
var handlerType = typeof(IEventHandler<>).MakeGenericType(eventType);
try
{
handlerType
.GetMethod("HandleEvent", BindingFlags.Public | BindingFlags.Instance, null, new[] { eventType }, null)
.Invoke(eventHandler, new object[] { eventData });
}
finally
{
factoryToTrigger.ReleaseHandler(eventHandler);
}
}
//Implements generic argument inheritance. See IEventDataWithInheritableGenericArgument
if (eventType.IsGenericType &&
eventType.GetGenericArguments().Length == 1 &&
typeof(IEventDataWithInheritableGenericArgument).IsAssignableFrom(eventType))
{
var genericArg = eventType.GetGenericArguments()[0];
var baseArg = genericArg.BaseType;
if (baseArg != null)
{
var baseEventType = eventType.GetGenericTypeDefinition().MakeGenericType(genericArg.BaseType);
var constructorArgs = ((IEventDataWithInheritableGenericArgument)eventData).GetConstructorArgs();
var baseEventData = (IEventData)Activator.CreateInstance(baseEventType, constructorArgs);
baseEventData.EventTime = eventData.EventTime;
Trigger(baseEventType, eventData.EventSource, baseEventData);
}
}
}
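// Gathers factories registered for the exact event type as well as those registered
// for any of its base types or interfaces (see ShouldTriggerEventForHandler).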
private IEnumerable<IEventHandlerFactory> GetHandlerFactories(Type eventType)
{
var handlerFactoryList = new List<IEventHandlerFactory>();
foreach (var handlerFactory in _handlerFactories.Where(hf => ShouldTriggerEventForHandler(eventType, hf.Key)))
{
handlerFactoryList.AddRange(handlerFactory.Value);
}
return handlerFactoryList.ToArray();
}
private static bool ShouldTriggerEventForHandler(Type eventType, Type handlerType)
{
//Should trigger same type
if (handlerType == eventType)
{
return true;
}
//Should trigger for inherited types
if (handlerType.IsAssignableFrom(eventType))
{
return true;
}
return false;
}
/// <inheritdoc/>
public Task TriggerAsync<TEventData>(TEventData eventData) where TEventData : IEventData
{
return TriggerAsync((object)null, eventData);
}
/// <inheritdoc/>
public Task TriggerAsync<TEventData>(object eventSource, TEventData eventData) where TEventData : IEventData
{
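// Suppress ExecutionContext flow so the queued task does not capture the caller's
// context; flow is restored once the task has been started.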
ExecutionContext.SuppressFlow();
var task = Task.Factory.StartNew(
() =>
{
try
{
Trigger(eventSource, eventData);
}
catch (Exception ex)
{
Logger.Warn(ex.ToString(), ex);
}
});
ExecutionContext.RestoreFlow();
return task;
}
/// <inheritdoc/>
public Task TriggerAsync(Type eventType, IEventData eventData)
{
return TriggerAsync(eventType, null, eventData);
}
/// <inheritdoc/>
public Task TriggerAsync(Type eventType, object eventSource, IEventData eventData)
{
ExecutionContext.SuppressFlow();
var task = Task.Factory.StartNew(
() =>
{
try
{
Trigger(eventType, eventSource, eventData);
}
catch (Exception ex)
{
Logger.Warn(ex.ToString(), ex);
}
});
ExecutionContext.RestoreFlow();
return task;
}
private List<IEventHandlerFactory> GetOrCreateHandlerFactories(Type eventType)
{
return _handlerFactories.GetOrAdd(eventType, (type) => new List<IEventHandlerFactory>());
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCOREAPP
#define HAVE_STORE_ISOPEN
#endif
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using Microsoft.DotNet.RemoteExecutor;
using Xunit;
namespace System.Security.Cryptography.X509Certificates.Tests
{
public class X509StoreTests : FileCleanupTestBase
{
[Fact]
public static void OpenMyStore()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
Assert.Equal("My", store.Name);
}
}
[Fact]
public static void Constructor_DefaultStoreName()
{
using (X509Store store = new X509Store(StoreLocation.CurrentUser))
{
Assert.Equal("MY", store.Name);
}
}
#if HAVE_STORE_ISOPEN
[Fact]
public static void Constructor_IsNotOpen()
{
using (X509Store store = new X509Store(StoreLocation.CurrentUser))
{
Assert.False(store.IsOpen);
}
}
#endif
[Fact]
public static void Constructor_DefaultStoreLocation()
{
using (X509Store store = new X509Store(StoreName.My))
{
Assert.Equal(StoreLocation.CurrentUser, store.Location);
}
using (X509Store store = new X509Store("My"))
{
Assert.Equal(StoreLocation.CurrentUser, store.Location);
}
}
[PlatformSpecific(TestPlatforms.Windows | TestPlatforms.OSX)] // Not supported via OpenSSL
[Fact]
public static void Constructor_StoreHandle()
{
using (X509Store store1 = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store1.Open(OpenFlags.ReadOnly);
bool hadCerts;
using (var coll = new ImportedCollection(store1.Certificates))
{
// Use >1 instead of >0 in case the one is an ephemeral accident.
hadCerts = coll.Collection.Count > 1;
Assert.True(coll.Collection.Count >= 0);
}
using (X509Store store2 = new X509Store(store1.StoreHandle))
{
using (var coll = new ImportedCollection(store2.Certificates))
{
if (hadCerts)
{
// Use InRange here instead of True >= 0 so that the error message
// is different, and we can diagnose a bit of what state we might have been in.
Assert.InRange(coll.Collection.Count, 1, int.MaxValue);
}
else
{
Assert.True(coll.Collection.Count >= 0);
}
}
}
}
}
[PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.OSX)] // API not supported via OpenSSL
[Fact]
public static void Constructor_StoreHandle_Unix()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
Assert.Equal(IntPtr.Zero, store.StoreHandle);
}
Assert.Throws<PlatformNotSupportedException>(() => new X509Chain(IntPtr.Zero));
}
#if HAVE_STORE_ISOPEN
[Fact]
public static void Constructor_OpenFlags()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser, OpenFlags.ReadOnly))
{
Assert.True(store.IsOpen);
}
}
[Fact]
public static void Constructor_OpenFlags_StoreName()
{
using (X509Store store = new X509Store("My", StoreLocation.CurrentUser, OpenFlags.ReadOnly))
{
Assert.True(store.IsOpen);
}
}
[Fact]
public static void Constructor_OpenFlags_OpenAnyway()
{
using (X509Store store = new X509Store("My", StoreLocation.CurrentUser, OpenFlags.ReadOnly))
{
store.Open(OpenFlags.ReadOnly);
Assert.True(store.IsOpen);
}
}
[Fact]
public static void Constructor_OpenFlags_NonExistingStoreName_Throws()
{
Assert.ThrowsAny<CryptographicException>(() =>
new X509Store(new Guid().ToString("D"), StoreLocation.CurrentUser, OpenFlags.ReadOnly | OpenFlags.OpenExistingOnly)
);
}
#endif
[PlatformSpecific(TestPlatforms.Windows | TestPlatforms.OSX)] // StoreHandle not supported via OpenSSL
[Fact]
public static void TestDispose()
{
X509Store store;
using (store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
Assert.NotEqual(IntPtr.Zero, store.StoreHandle);
}
Assert.Throws<CryptographicException>(() => store.StoreHandle);
}
[Fact]
public static void ReadMyCertificates()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
using (var coll = new ImportedCollection(store.Certificates))
{
int certCount = coll.Collection.Count;
// This assert is just so certCount appears to be used, the test really
// is that store.get_Certificates didn't throw.
Assert.True(certCount >= 0);
}
}
}
[Fact]
public static void OpenNotExistent()
{
using (X509Store store = new X509Store(Guid.NewGuid().ToString("N"), StoreLocation.CurrentUser))
{
Assert.ThrowsAny<CryptographicException>(() => store.Open(OpenFlags.OpenExistingOnly));
}
}
#if HAVE_STORE_ISOPEN
[Fact]
public static void Open_IsOpenTrue()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
Assert.True(store.IsOpen);
}
}
[Fact]
public static void Dispose_IsOpenFalse()
{
X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser);
store.Open(OpenFlags.ReadOnly);
store.Dispose();
Assert.False(store.IsOpen);
}
[Fact]
public static void ReOpen_IsOpenTrue()
{
X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser);
store.Open(OpenFlags.ReadOnly);
store.Close();
store.Open(OpenFlags.ReadOnly);
Assert.True(store.IsOpen);
}
#endif
[Fact]
public static void AddReadOnlyThrows()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
store.Open(OpenFlags.ReadOnly);
using (var coll = new ImportedCollection(store.Certificates))
{
// Add only throws when it has to do work. If, for some reason, this certificate
// is already present in the CurrentUser\My store, we can't really test this
// functionality.
if (!coll.Collection.Contains(cert))
{
Assert.ThrowsAny<CryptographicException>(() => store.Add(cert));
}
}
}
}
[Fact]
public static void AddDisposedThrowsCryptographicException()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
store.Open(OpenFlags.ReadWrite);
cert.Dispose();
Assert.Throws<CryptographicException>(() => store.Add(cert));
}
}
[Fact]
public static void AddReadOnlyThrowsWhenCertificateExists()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
store.Open(OpenFlags.ReadOnly);
X509Certificate2 toAdd = null;
// Look through the certificates to find one with no private key to call add on.
// (The private key restriction is so that in the event of an "accidental success"
// that no potential permissions would be modified)
using (var coll = new ImportedCollection(store.Certificates))
{
foreach (X509Certificate2 cert in coll.Collection)
{
if (!cert.HasPrivateKey)
{
toAdd = cert;
break;
}
}
if (toAdd != null)
{
Assert.ThrowsAny<CryptographicException>(() => store.Add(toAdd));
}
}
}
}
[Fact]
public static void RemoveReadOnlyThrowsWhenFound()
{
// This test is unfortunate, in that it will mostly never test.
// In order to do so it would have to open the store ReadWrite, put in a known value,
// and call Remove on a ReadOnly copy.
//
// Just calling Remove on the first item found could also work (when the store isn't empty),
// but if it fails the cost is too high.
//
// So what's the purpose of this test, you ask? To record why we're not unit testing it.
// And someone could test it manually if they wanted.
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
store.Open(OpenFlags.ReadOnly);
using (var coll = new ImportedCollection(store.Certificates))
{
if (coll.Collection.Contains(cert))
{
Assert.ThrowsAny<CryptographicException>(() => store.Remove(cert));
}
}
}
}
[Fact]
public static void RemoveReadOnlyNonExistingDoesNotThrow()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
store.Open(OpenFlags.ReadOnly);
store.Remove(cert);
}
}
[Fact]
public static void RemoveDisposedIsIgnored()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
store.Open(OpenFlags.ReadWrite);
cert.Dispose();
store.Remove(cert);
}
}
/* Placeholder information for these tests until they can be written to run reliably.
* Currently such tests would create physical files (Unix) and\or certificates (Windows)
* which can collide with other running tests that use the same cert, or with a
* test suite running more than once at the same time on the same machine.
* Ideally, we would use a GUID-named store to avoid collisions, with proper cleanup on Unix and Windows,
* and\or have lower-level testing hooks or use the Microsoft Fakes Framework to redirect
* and encapsulate the actual storage logic so it can be tested, along with mock exceptions
* to verify exception handling.
* See issue https://github.com/dotnet/corefx/issues/12833
* and https://github.com/dotnet/corefx/issues/12223
[Fact]
public static void TestAddAndRemove() {}
[Fact]
public static void TestAddRangeAndRemoveRange() {}
*/
[Fact]
public static void EnumerateClosedIsEmpty()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
{
int count = store.Certificates.Count;
Assert.Equal(0, count);
}
}
[Fact]
public static void AddClosedThrows()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
Assert.ThrowsAny<CryptographicException>(() => store.Add(cert));
}
}
[Fact]
public static void RemoveClosedThrows()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.CurrentUser))
using (X509Certificate2 cert = new X509Certificate2(TestData.MsCertificate))
{
Assert.ThrowsAny<CryptographicException>(() => store.Remove(cert));
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows | TestPlatforms.OSX)]
public static void OpenMachineMyStore_Supported()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.LocalMachine))
{
store.Open(OpenFlags.ReadOnly);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.OSX)]
public static void OpenMachineMyStore_NotSupported()
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.LocalMachine))
{
Exception e = Assert.Throws<CryptographicException>(() => store.Open(OpenFlags.ReadOnly));
Assert.NotNull(e.InnerException);
Assert.IsType<PlatformNotSupportedException>(e.InnerException);
}
}
[Theory]
[PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.OSX)]
[InlineData(OpenFlags.ReadOnly, false)]
[InlineData(OpenFlags.MaxAllowed, false)]
[InlineData(OpenFlags.ReadWrite, true)]
public static void OpenMachineRootStore_Permissions(OpenFlags permissions, bool shouldThrow)
{
using (X509Store store = new X509Store(StoreName.Root, StoreLocation.LocalMachine))
{
if (shouldThrow)
{
Exception e = Assert.Throws<CryptographicException>(() => store.Open(permissions));
Assert.NotNull(e.InnerException);
Assert.IsType<PlatformNotSupportedException>(e.InnerException);
}
else
{
// Assert.DoesNotThrow
store.Open(permissions);
}
}
}
[Fact]
public static void MachineRootStore_NonEmpty()
{
// This test will fail on systems where the administrator has gone out of their
// way to prune the trusted CA list down below this threshold.
//
// As of 2016-01-25, Ubuntu 14.04 has 169, and CentOS 7.1 has 175, so that'd be
// quite a lot of pruning.
//
// And as of 2016-01-29 we understand the Homebrew-installed root store, which has 180.
const int MinimumThreshold = 5;
using (X509Store store = new X509Store(StoreName.Root, StoreLocation.LocalMachine))
{
store.Open(OpenFlags.ReadOnly);
using (var storeCerts = new ImportedCollection(store.Certificates))
{
int certCount = storeCerts.Collection.Count;
Assert.InRange(certCount, MinimumThreshold, int.MaxValue);
}
}
}
[Theory]
[PlatformSpecific(TestPlatforms.Windows | TestPlatforms.OSX)]
[InlineData(StoreLocation.CurrentUser, true)]
[InlineData(StoreLocation.LocalMachine, true)]
[InlineData(StoreLocation.CurrentUser, false)]
[InlineData(StoreLocation.LocalMachine, false)]
public static void EnumerateDisallowedStore(StoreLocation location, bool useEnum)
{
X509Store store = useEnum
? new X509Store(StoreName.Disallowed, location)
// Non-normative casing, proving that we aren't case-sensitive (Windows isn't)
: new X509Store("disallowed", location);
using (store)
{
store.Open(OpenFlags.ReadOnly | OpenFlags.OpenExistingOnly);
using (var storeCerts = new ImportedCollection(store.Certificates))
{
// That's all. We enumerated it.
// There might not even be data in it.
}
}
}
[Theory]
[PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.OSX)]
[InlineData(false, OpenFlags.ReadOnly)]
[InlineData(true, OpenFlags.ReadOnly)]
[InlineData(false, OpenFlags.ReadWrite)]
[InlineData(true, OpenFlags.ReadWrite)]
[InlineData(false, OpenFlags.MaxAllowed)]
[InlineData(true, OpenFlags.MaxAllowed)]
public static void UnixCannotOpenMachineDisallowedStore(bool useEnum, OpenFlags openFlags)
{
X509Store store = useEnum
? new X509Store(StoreName.Disallowed, StoreLocation.LocalMachine)
// Non-normative casing, proving that we aren't case-sensitive (Windows isn't)
: new X509Store("disallowed", StoreLocation.LocalMachine);
using (store)
{
Exception e = Assert.Throws<CryptographicException>(() => store.Open(openFlags));
Assert.NotNull(e.InnerException);
Assert.IsType<PlatformNotSupportedException>(e.InnerException);
Assert.Equal(e.Message, e.InnerException.Message);
}
}
[Theory]
[PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.OSX)]
[InlineData(false, OpenFlags.ReadOnly)]
[InlineData(true, OpenFlags.ReadOnly)]
[InlineData(false, OpenFlags.ReadWrite)]
[InlineData(true, OpenFlags.ReadWrite)]
[InlineData(false, OpenFlags.MaxAllowed)]
[InlineData(true, OpenFlags.MaxAllowed)]
public static void UnixCannotModifyDisallowedStore(bool useEnum, OpenFlags openFlags)
{
X509Store store = useEnum
? new X509Store(StoreName.Disallowed, StoreLocation.CurrentUser)
// Non-normative casing, proving that we aren't case-sensitive (Windows isn't)
: new X509Store("disallowed", StoreLocation.CurrentUser);
using (store)
using (X509Certificate2 cert = new X509Certificate2(TestData.Rsa384CertificatePemBytes))
{
store.Open(openFlags);
Exception e = Assert.Throws<CryptographicException>(() => store.Add(cert));
if (openFlags == OpenFlags.ReadOnly)
{
Assert.Null(e.InnerException);
}
else
{
Assert.NotNull(e.InnerException);
Assert.IsType<PlatformNotSupportedException>(e.InnerException);
Assert.Equal(e.Message, e.InnerException.Message);
}
Assert.Equal(0, store.Certificates.Count);
}
}
#if Unix
[ConditionalFact(nameof(NotRunningAsRoot))] // root can read '2.pem'
[PlatformSpecific(TestPlatforms.Linux)] // Windows/OSX doesn't use SSL_CERT_{DIR,FILE}.
private void X509Store_MachineStoreLoadSkipsInvalidFiles()
{
// We create a folder for our machine store and use it by setting SSL_CERT_{DIR,FILE}.
// In the store we'll add some invalid files, but we start and finish with a valid file.
// This is to account for the order in which the store is populated.
string sslCertDir = GetTestFilePath();
Directory.CreateDirectory(sslCertDir);
// Valid file.
File.WriteAllBytes(Path.Combine(sslCertDir, "0.pem"), TestData.SelfSigned1PemBytes);
// File with invalid content.
File.WriteAllText(Path.Combine(sslCertDir, "1.pem"), "This is not a valid cert");
// File which is not readable by the current user.
string unreadableFileName = Path.Combine(sslCertDir, "2.pem");
File.WriteAllBytes(unreadableFileName, TestData.SelfSigned2PemBytes);
Assert.Equal(0, chmod(unreadableFileName, 0));
// Valid file.
File.WriteAllBytes(Path.Combine(sslCertDir, "3.pem"), TestData.SelfSigned3PemBytes);
var psi = new ProcessStartInfo();
psi.Environment.Add("SSL_CERT_DIR", sslCertDir);
psi.Environment.Add("SSL_CERT_FILE", "/nonexisting");
RemoteExecutor.Invoke(() =>
{
using (var store = new X509Store(StoreName.Root, StoreLocation.LocalMachine))
{
store.Open(OpenFlags.OpenExistingOnly);
// Check the number of certificates in the store.
Assert.Equal(2, store.Certificates.Count);
}
}, new RemoteInvokeOptions { StartInfo = psi }).Dispose();
}
[DllImport("libc")]
private static extern int chmod(string path, int mode);
[DllImport("libc")]
private static extern uint geteuid();
public static bool NotRunningAsRoot => geteuid() != 0;
#endif
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using Lucene.Net.Attributes;
using Lucene.Net.Documents;
namespace Lucene.Net.Index
{
using Lucene.Net.Randomized.Generators;
using Lucene.Net.Support;
using NUnit.Framework;
using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
using BytesRef = Lucene.Net.Util.BytesRef;
using Codec = Lucene.Net.Codecs.Codec;
using Directory = Lucene.Net.Store.Directory;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
using Document = Documents.Document;
using FakeIOException = Lucene.Net.Store.MockDirectoryWrapper.FakeIOException;
using Field = Field;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using InfoStream = Lucene.Net.Util.InfoStream;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
/// Copyright 2004 The Apache Software Foundation
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
*/
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper;
using Query = Lucene.Net.Search.Query;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using TermQuery = Lucene.Net.Search.TermQuery;
using TestUtil = Lucene.Net.Util.TestUtil;
using TextField = TextField;
using TopDocs = Lucene.Net.Search.TopDocs;
[TestFixture]
public class TestIndexWriterReader : LuceneTestCase
{
private readonly int NumThreads = TEST_NIGHTLY ? 5 : 3;
public static int Count(Term t, IndexReader r)
{
int count = 0;
DocsEnum td = TestUtil.Docs(Random(), r, t.Field, new BytesRef(t.Text()), MultiFields.GetLiveDocs(r), null, 0);
if (td != null)
{
while (td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
{
td.DocID();
count++;
}
}
return count;
}
[Test]
public virtual void TestAddCloseOpen()
{
// Can't use assertNoDeletes: this test pulls a non-NRT
// reader in the end:
Directory dir1 = NewDirectory();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter writer = new IndexWriter(dir1, iwc);
for (int i = 0; i < 97; i++)
{
DirectoryReader reader = writer.Reader;
if (i == 0)
{
writer.AddDocument(DocHelper.CreateDocument(i, "x", 1 + Random().Next(5)));
}
else
{
int previous = Random().Next(i);
// checking whether the reader is current here could fail since there might be
// merges going on.
switch (Random().Next(5))
{
case 0:
case 1:
case 2:
writer.AddDocument(DocHelper.CreateDocument(i, "x", 1 + Random().Next(5)));
break;
case 3:
writer.UpdateDocument(new Term("id", "" + previous), DocHelper.CreateDocument(previous, "x", 1 + Random().Next(5)));
break;
case 4:
writer.DeleteDocuments(new Term("id", "" + previous));
break;
}
}
Assert.IsFalse(reader.Current);
reader.Dispose();
}
writer.ForceMerge(1); // make sure all merging is done etc.
DirectoryReader dirReader = writer.Reader;
writer.Commit(); // no changes that are not visible to the reader
Assert.IsTrue(dirReader.Current);
writer.Dispose();
Assert.IsTrue(dirReader.Current); // all changes are visible to the reader
iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
writer = new IndexWriter(dir1, iwc);
Assert.IsTrue(dirReader.Current);
writer.AddDocument(DocHelper.CreateDocument(1, "x", 1 + Random().Next(5)));
Assert.IsTrue(dirReader.Current); // segments in RAM, but the IW is different from the reader's one
writer.Dispose();
Assert.IsFalse(dirReader.Current); // segments written
dirReader.Dispose();
dir1.Dispose();
}
[Test]
public virtual void TestUpdateDocument()
{
bool doFullMerge = true;
Directory dir1 = NewDirectory();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
if (iwc.MaxBufferedDocs < 20)
{
iwc.SetMaxBufferedDocs(20);
}
// no merging
if (Random().NextBoolean())
{
iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
}
else
{
iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
}
if (VERBOSE)
{
Console.WriteLine("TEST: make index");
}
IndexWriter writer = new IndexWriter(dir1, iwc);
// create the index
CreateIndexNoClose(!doFullMerge, "index1", writer);
// writer.Flush(false, true, true);
// get a reader
DirectoryReader r1 = writer.Reader;
Assert.IsTrue(r1.Current);
string id10 = r1.Document(10).GetField("id").StringValue;
Document newDoc = r1.Document(10);
newDoc.RemoveField("id");
newDoc.Add(NewStringField("id", Convert.ToString(8000), Field.Store.YES));
writer.UpdateDocument(new Term("id", id10), newDoc);
Assert.IsFalse(r1.Current);
DirectoryReader r2 = writer.Reader;
Assert.IsTrue(r2.Current);
Assert.AreEqual(0, Count(new Term("id", id10), r2));
if (VERBOSE)
{
Console.WriteLine("TEST: verify id");
}
Assert.AreEqual(1, Count(new Term("id", Convert.ToString(8000)), r2));
r1.Dispose();
Assert.IsTrue(r2.Current);
writer.Dispose();
Assert.IsTrue(r2.Current);
DirectoryReader r3 = DirectoryReader.Open(dir1);
Assert.IsTrue(r3.Current);
Assert.IsTrue(r2.Current);
Assert.AreEqual(0, Count(new Term("id", id10), r3));
Assert.AreEqual(1, Count(new Term("id", Convert.ToString(8000)), r3));
writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
Document doc = new Document();
doc.Add(NewTextField("field", "a b c", Field.Store.NO));
writer.AddDocument(doc);
Assert.IsTrue(r2.Current);
Assert.IsTrue(r3.Current);
writer.Dispose();
Assert.IsFalse(r2.Current);
Assert.IsTrue(!r3.Current);
r2.Dispose();
r3.Dispose();
dir1.Dispose();
}
[Test]
public virtual void TestIsCurrent()
{
Directory dir = NewDirectory();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.Add(NewTextField("field", "a b c", Field.Store.NO));
writer.AddDocument(doc);
writer.Dispose();
iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
writer = new IndexWriter(dir, iwc);
doc = new Document();
doc.Add(NewTextField("field", "a b c", Field.Store.NO));
DirectoryReader nrtReader = writer.Reader;
Assert.IsTrue(nrtReader.Current);
writer.AddDocument(doc);
Assert.IsFalse(nrtReader.Current); // should see the changes
writer.ForceMerge(1); // make sure we don't have a merge going on
Assert.IsFalse(nrtReader.Current);
nrtReader.Dispose();
DirectoryReader dirReader = DirectoryReader.Open(dir);
nrtReader = writer.Reader;
Assert.IsTrue(dirReader.Current);
Assert.IsTrue(nrtReader.Current); // nothing was committed yet so we are still current
Assert.AreEqual(2, nrtReader.MaxDoc); // sees the actual document added
Assert.AreEqual(1, dirReader.MaxDoc);
writer.Dispose(); // close is actually a commit both should see the changes
Assert.IsTrue(nrtReader.Current);
Assert.IsFalse(dirReader.Current); // this reader has been opened before the writer was closed / committed
dirReader.Dispose();
nrtReader.Dispose();
dir.Dispose();
}
/// <summary>
/// Test using IW.addIndexes
/// </summary>
[Test]
public virtual void TestAddIndexes()
{
bool doFullMerge = false;
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
if (iwc.MaxBufferedDocs < 20)
{
iwc.SetMaxBufferedDocs(20);
}
// no merging
if (Random().NextBoolean())
{
iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
}
else
{
iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
}
IndexWriter writer = new IndexWriter(dir1, iwc);
// create the index
CreateIndexNoClose(!doFullMerge, "index1", writer);
writer.Flush(false, true);
// create a 2nd index
Directory dir2 = NewDirectory();
IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
CreateIndexNoClose(!doFullMerge, "index2", writer2);
writer2.Dispose();
DirectoryReader r0 = writer.Reader;
Assert.IsTrue(r0.Current);
writer.AddIndexes(dir2);
Assert.IsFalse(r0.Current);
r0.Dispose();
DirectoryReader r1 = writer.Reader;
Assert.IsTrue(r1.Current);
writer.Commit();
Assert.IsTrue(r1.Current); // we have seen all changes - no change after opening the NRT reader
Assert.AreEqual(200, r1.MaxDoc);
int index2df = r1.DocFreq(new Term("indexname", "index2"));
Assert.AreEqual(100, index2df);
// verify the docs are from different indexes
Document doc5 = r1.Document(5);
Assert.AreEqual("index1", doc5.Get("indexname"));
Document doc150 = r1.Document(150);
Assert.AreEqual("index2", doc150.Get("indexname"));
r1.Dispose();
writer.Dispose();
dir1.Dispose();
dir2.Dispose();
}
[Test]
public virtual void ExposeCompTermVR()
{
bool doFullMerge = false;
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
if (iwc.MaxBufferedDocs < 20)
{
iwc.SetMaxBufferedDocs(20);
}
// no merging
if (Random().NextBoolean())
{
iwc.SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
}
else
{
iwc.SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
}
IndexWriter writer = new IndexWriter(dir1, iwc);
CreateIndexNoClose(!doFullMerge, "index1", writer);
writer.Dispose();
dir1.Dispose();
}
[Test]
public virtual void TestAddIndexes2()
{
bool doFullMerge = false;
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
// create a 2nd index
Directory dir2 = NewDirectory();
IndexWriter writer2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
CreateIndexNoClose(!doFullMerge, "index2", writer2);
writer2.Dispose();
writer.AddIndexes(dir2);
writer.AddIndexes(dir2);
writer.AddIndexes(dir2);
writer.AddIndexes(dir2);
writer.AddIndexes(dir2);
IndexReader r1 = writer.Reader;
Assert.AreEqual(500, r1.MaxDoc);
r1.Dispose();
writer.Dispose();
dir1.Dispose();
dir2.Dispose();
}
/// <summary>
/// Deletes using IW.deleteDocuments
/// </summary>
[Test]
public virtual void TestDeleteFromIndexWriter()
{
bool doFullMerge = true;
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter writer = new IndexWriter(dir1, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetReaderTermsIndexDivisor(2));
// create the index
CreateIndexNoClose(!doFullMerge, "index1", writer);
writer.Flush(false, true);
// get a reader
IndexReader r1 = writer.Reader;
string id10 = r1.Document(10).GetField("id").StringValue;
// deleted IW docs should not show up in the next getReader
writer.DeleteDocuments(new Term("id", id10));
IndexReader r2 = writer.Reader;
Assert.AreEqual(1, Count(new Term("id", id10), r1));
Assert.AreEqual(0, Count(new Term("id", id10), r2));
string id50 = r1.Document(50).GetField("id").StringValue;
Assert.AreEqual(1, Count(new Term("id", id50), r1));
writer.DeleteDocuments(new Term("id", id50));
IndexReader r3 = writer.Reader;
Assert.AreEqual(0, Count(new Term("id", id10), r3));
Assert.AreEqual(0, Count(new Term("id", id50), r3));
string id75 = r1.Document(75).GetField("id").StringValue;
writer.DeleteDocuments(new TermQuery(new Term("id", id75)));
IndexReader r4 = writer.Reader;
Assert.AreEqual(1, Count(new Term("id", id75), r3));
Assert.AreEqual(0, Count(new Term("id", id75), r4));
r1.Dispose();
r2.Dispose();
r3.Dispose();
r4.Dispose();
writer.Dispose();
// reopen the writer to verify the delete made it to the directory
writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
IndexReader w2r1 = writer.Reader;
Assert.AreEqual(0, Count(new Term("id", id10), w2r1));
w2r1.Dispose();
writer.Dispose();
dir1.Dispose();
}
[Test]
public virtual void TestAddIndexesAndDoDeletesThreads()
{
const int numIter = 2;
int numDirs = 3;
Directory mainDir = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter mainWriter = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
TestUtil.ReduceOpenFiles(mainWriter);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(this, numIter, mainWriter);
addDirThreads.LaunchThreads(numDirs);
addDirThreads.JoinThreads();
//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.numThreads
// * addDirThreads.NUM_INIT_DOCS, addDirThreads.mainWriter.NumDocs);
Assert.AreEqual(addDirThreads.Count.Get(), addDirThreads.MainWriter.NumDocs());
addDirThreads.Close(true);
Assert.IsTrue(addDirThreads.Failures.Count == 0);
TestUtil.CheckIndex(mainDir);
IndexReader reader = DirectoryReader.Open(mainDir);
Assert.AreEqual(addDirThreads.Count.Get(), reader.NumDocs);
//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.numThreads
// * addDirThreads.NUM_INIT_DOCS, reader.NumDocs);
reader.Dispose();
addDirThreads.CloseDir();
mainDir.Dispose();
}
private class AddDirectoriesThreads
{
internal bool InstanceFieldsInitialized = false;
internal virtual void InitializeInstanceFields()
{
Threads = new ThreadClass[OuterInstance.NumThreads];
}
private readonly TestIndexWriterReader OuterInstance;
internal Directory AddDir;
internal const int NUM_INIT_DOCS = 100;
internal int NumDirs;
internal ThreadClass[] Threads;
internal IndexWriter MainWriter;
internal readonly IList<Exception> Failures = new List<Exception>();
internal IndexReader[] Readers;
internal bool DidClose = false;
internal AtomicInteger Count = new AtomicInteger(0);
internal AtomicInteger NumaddIndexes = new AtomicInteger(0);
public AddDirectoriesThreads(TestIndexWriterReader outerInstance, int numDirs, IndexWriter mainWriter)
{
this.OuterInstance = outerInstance;
if (!InstanceFieldsInitialized)
{
InitializeInstanceFields();
InstanceFieldsInitialized = true;
}
this.NumDirs = numDirs;
this.MainWriter = mainWriter;
AddDir = NewDirectory();
IndexWriter writer = new IndexWriter(AddDir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2));
TestUtil.ReduceOpenFiles(writer);
for (int i = 0; i < NUM_INIT_DOCS; i++)
{
Document doc = DocHelper.CreateDocument(i, "addindex", 4);
writer.AddDocument(doc);
}
writer.Dispose();
Readers = new IndexReader[numDirs];
for (int i = 0; i < numDirs; i++)
{
Readers[i] = DirectoryReader.Open(AddDir);
}
}
internal virtual void JoinThreads()
{
for (int i = 0; i < OuterInstance.NumThreads; i++)
{
try
{
Threads[i].Join();
}
catch (ThreadInterruptedException ie)
{
throw new ThreadInterruptedException("Thread Interrupted Exception", ie);
}
}
}
internal virtual void Close(bool doWait)
{
DidClose = true;
if (doWait)
{
MainWriter.WaitForMerges();
}
MainWriter.Dispose(doWait);
}
internal virtual void CloseDir()
{
for (int i = 0; i < NumDirs; i++)
{
Readers[i].Dispose();
}
AddDir.Dispose();
}
internal virtual void Handle(Exception t)
{
Console.WriteLine(t.StackTrace);
lock (Failures)
{
Failures.Add(t);
}
}
internal virtual void LaunchThreads(int numIter)
{
for (int i = 0; i < OuterInstance.NumThreads; i++)
{
Threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter);
}
for (int i = 0; i < OuterInstance.NumThreads; i++)
{
Threads[i].Start();
}
}
private class ThreadAnonymousInnerClassHelper : ThreadClass
{
private readonly AddDirectoriesThreads OuterInstance;
private int NumIter;
public ThreadAnonymousInnerClassHelper(AddDirectoriesThreads outerInstance, int numIter)
{
this.OuterInstance = outerInstance;
this.NumIter = numIter;
}
public override void Run()
{
try
{
Directory[] dirs = new Directory[OuterInstance.NumDirs];
for (int k = 0; k < OuterInstance.NumDirs; k++)
{
dirs[k] = new MockDirectoryWrapper(Random(), new RAMDirectory(OuterInstance.AddDir, NewIOContext(Random())));
}
//int j = 0;
//while (true) {
// System.out.println(Thread.currentThread().getName() + ": iter
// j=" + j);
for (int x = 0; x < NumIter; x++)
{
// only do addIndexes
OuterInstance.DoBody(x, dirs);
}
//if (numIter > 0 && j == numIter)
// break;
//doBody(j++, dirs);
//doBody(5, dirs);
//}
}
catch (Exception t)
{
OuterInstance.Handle(t);
}
}
}
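// Rotates between the different ways of adding indexes (directories with a forced
// merge, directories alone, pre-opened readers) and an occasional commit.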
internal virtual void DoBody(int j, Directory[] dirs)
{
switch (j % 4)
{
case 0:
MainWriter.AddIndexes(dirs);
MainWriter.ForceMerge(1);
break;
case 1:
MainWriter.AddIndexes(dirs);
NumaddIndexes.IncrementAndGet();
break;
case 2:
MainWriter.AddIndexes(Readers);
break;
case 3:
MainWriter.Commit();
break;
}
Count.AddAndGet(dirs.Length * NUM_INIT_DOCS);
}
}
[Test]
public virtual void TestIndexWriterReopenSegmentFullMerge()
{
DoTestIndexWriterReopenSegment(true);
}
[Test]
public virtual void TestIndexWriterReopenSegment()
{
DoTestIndexWriterReopenSegment(false);
}
/// <summary>
/// Tests creating a segment, then checks to ensure the segment can be seen via
/// IW.getReader.
/// </summary>
public virtual void DoTestIndexWriterReopenSegment(bool doFullMerge)
{
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
IndexReader r1 = writer.Reader;
Assert.AreEqual(0, r1.MaxDoc);
CreateIndexNoClose(false, "index1", writer);
writer.Flush(!doFullMerge, true);
IndexReader iwr1 = writer.Reader;
Assert.AreEqual(100, iwr1.MaxDoc);
IndexReader r2 = writer.Reader;
Assert.AreEqual(r2.MaxDoc, 100);
// add 100 documents
for (int x = 10000; x < 10000 + 100; x++)
{
Document d = DocHelper.CreateDocument(x, "index1", 5);
writer.AddDocument(d);
}
writer.Flush(false, true);
// verify the reader was reopened internally
IndexReader iwr2 = writer.Reader;
Assert.IsTrue(iwr2 != r1);
Assert.AreEqual(200, iwr2.MaxDoc);
// should have flushed out a segment
IndexReader r3 = writer.Reader;
Assert.IsTrue(r2 != r3);
Assert.AreEqual(200, r3.MaxDoc);
// dec ref the readers rather than close them because
// closing flushes changes to the writer
r1.Dispose();
iwr1.Dispose();
r2.Dispose();
r3.Dispose();
iwr2.Dispose();
writer.Dispose();
// test whether the changes made it to the directory
writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
IndexReader w2r1 = writer.Reader;
// ensure the changes were actually flushed to the directory
Assert.AreEqual(200, w2r1.MaxDoc);
w2r1.Dispose();
writer.Dispose();
dir1.Dispose();
}
/*
* Delete a document by term and return the doc id
*
* public static int deleteDocument(Term term, IndexWriter writer) throws
* IOException { IndexReader reader = writer.GetReader(); TermDocs td =
* reader.termDocs(term); int doc = -1; //if (td.Next()) { // doc = td.Doc();
* //} //writer.DeleteDocuments(term); td.Dispose(); return doc; }
*/
public static void CreateIndex(Random random, Directory dir1, string indexName, bool multiSegment)
{
IndexWriter w = new IndexWriter(dir1, LuceneTestCase.NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++)
{
w.AddDocument(DocHelper.CreateDocument(i, indexName, 4));
}
if (!multiSegment)
{
w.ForceMerge(1);
}
w.Dispose();
}
public static void CreateIndexNoClose(bool multiSegment, string indexName, IndexWriter w)
{
for (int i = 0; i < 100; i++)
{
w.AddDocument(DocHelper.CreateDocument(i, indexName, 4));
}
if (!multiSegment)
{
w.ForceMerge(1);
}
}
private class MyWarmer : IndexWriter.IndexReaderWarmer
{
internal int WarmCount;
public override void Warm(AtomicReader reader)
{
WarmCount++;
}
}
[Test]
public virtual void TestMergeWarmer([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
{
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
// Enroll warmer
MyWarmer warmer = new MyWarmer();
var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
.SetMaxBufferedDocs(2)
.SetMergedSegmentWarmer(warmer)
.SetMergeScheduler(scheduler)
.SetMergePolicy(NewLogMergePolicy());
IndexWriter writer = new IndexWriter(dir1, config);
// create the index
CreateIndexNoClose(false, "test", writer);
// get a reader to put writer into near real-time mode
IndexReader r1 = writer.Reader;
((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;
//int num = AtLeast(100);
int num = 101;
for (int i = 0; i < num; i++)
{
writer.AddDocument(DocHelper.CreateDocument(i, "test", 4));
}
((IConcurrentMergeScheduler)writer.Config.MergeScheduler).Sync();
Assert.IsTrue(warmer.WarmCount > 0);
Console.WriteLine("Count {0}", warmer.WarmCount);
int count = warmer.WarmCount;
var newDocument = DocHelper.CreateDocument(17, "test", 4);
writer.AddDocument(newDocument);
writer.ForceMerge(1);
Assert.IsTrue(warmer.WarmCount > count);
writer.Dispose();
r1.Dispose();
dir1.Dispose();
}
[Test]
public virtual void TestAfterCommit([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
{
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(scheduler);
IndexWriter writer = new IndexWriter(dir1, config);
writer.Commit();
// create the index
CreateIndexNoClose(false, "test", writer);
// get a reader to put writer into near real-time mode
DirectoryReader r1 = writer.Reader;
TestUtil.CheckIndex(dir1);
writer.Commit();
TestUtil.CheckIndex(dir1);
Assert.AreEqual(100, r1.NumDocs);
for (int i = 0; i < 10; i++)
{
writer.AddDocument(DocHelper.CreateDocument(i, "test", 4));
}
((IConcurrentMergeScheduler)writer.Config.MergeScheduler).Sync();
DirectoryReader r2 = DirectoryReader.OpenIfChanged(r1);
if (r2 != null)
{
r1.Dispose();
r1 = r2;
}
Assert.AreEqual(110, r1.NumDocs);
writer.Dispose();
r1.Dispose();
dir1.Dispose();
}
// Make sure reader remains usable even if IndexWriter closes
[Test]
public virtual void TestAfterClose()
{
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
// create the index
CreateIndexNoClose(false, "test", writer);
DirectoryReader r = writer.Reader;
writer.Dispose();
TestUtil.CheckIndex(dir1);
// reader should remain usable even after IndexWriter is closed:
Assert.AreEqual(100, r.NumDocs);
Query q = new TermQuery(new Term("indexname", "test"));
IndexSearcher searcher = NewSearcher(r);
Assert.AreEqual(100, searcher.Search(q, 10).TotalHits);
try
{
DirectoryReader.OpenIfChanged(r);
Assert.Fail("failed to hit AlreadyClosedException");
}
catch (AlreadyClosedException ace)
{
// expected
}
r.Dispose();
dir1.Dispose();
}
// Stress test reopen during addIndexes
[Test]
public virtual void TestDuringAddIndexes()
{
Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
// create the index
CreateIndexNoClose(false, "test", writer);
writer.Commit();
Directory[] dirs = new Directory[10];
for (int i = 0; i < 10; i++)
{
dirs[i] = new MockDirectoryWrapper(Random(), new RAMDirectory(dir1, NewIOContext(Random())));
}
DirectoryReader r = writer.Reader;
const float SECONDS = 0.5f;
long endTime = (long)(Environment.TickCount + 1000.0 * SECONDS);
IList<Exception> excs = new SynchronizedCollection<Exception>();
// Only one thread can addIndexes at a time, because
// IndexWriter acquires a write lock in each directory:
var threads = new ThreadClass[1];
for (int i = 0; i < threads.Length; i++)
{
threads[i] = new ThreadAnonymousInnerClassHelper(writer, dirs, endTime, excs);
threads[i].SetDaemon(true);
threads[i].Start();
}
int lastCount = 0;
while (Environment.TickCount < endTime)
{
DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
if (r2 != null)
{
r.Dispose();
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
IndexSearcher searcher = NewSearcher(r);
int count = searcher.Search(q, 10).TotalHits;
Assert.IsTrue(count >= lastCount);
lastCount = count;
}
for (int i = 0; i < threads.Length; i++)
{
threads[i].Join();
}
// final check
DirectoryReader dr2 = DirectoryReader.OpenIfChanged(r);
if (dr2 != null)
{
r.Dispose();
r = dr2;
}
Query q2 = new TermQuery(new Term("indexname", "test"));
IndexSearcher searcher_ = NewSearcher(r);
int count_ = searcher_.Search(q2, 10).TotalHits;
Assert.IsTrue(count_ >= lastCount);
Assert.AreEqual(0, excs.Count);
r.Dispose();
if (dir1 is MockDirectoryWrapper)
{
ICollection<string> openDeletedFiles = ((MockDirectoryWrapper)dir1).OpenDeletedFiles;
Assert.AreEqual(0, openDeletedFiles.Count, "openDeleted=" + openDeletedFiles);
}
writer.Dispose();
dir1.Dispose();
}
private class ThreadAnonymousInnerClassHelper : ThreadClass
{
private IndexWriter Writer;
private Directory[] Dirs;
private long EndTime;
private IList<Exception> Excs;
public ThreadAnonymousInnerClassHelper(IndexWriter writer, Directory[] dirs, long endTime, IList<Exception> excs)
{
this.Writer = writer;
this.Dirs = dirs;
this.EndTime = endTime;
this.Excs = excs;
}
public override void Run()
{
do
{
try
{
Writer.AddIndexes(Dirs);
Writer.MaybeMerge();
}
catch (Exception t)
{
Excs.Add(t);
throw new Exception(t.Message, t);
}
} while (Environment.TickCount < EndTime);
}
}
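// If the directory is a MockDirectoryWrapper, make it assert whenever a still-open file is deleted.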
private Directory GetAssertNoDeletesDirectory(Directory directory)
{
if (directory is MockDirectoryWrapper)
{
((MockDirectoryWrapper)directory).AssertNoDeleteOpenFile = true;
}
return directory;
}
// Stress test reopen during add/delete
[Test]
public virtual void TestDuringAddDelete()
{
Directory dir1 = NewDirectory();
var writer = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy(2)));
// create the index
CreateIndexNoClose(false, "test", writer);
writer.Commit();
DirectoryReader r = writer.Reader;
const float SECONDS = 0.5f;
long endTime = (long)(Environment.TickCount + 1000.0 * SECONDS);
IList<Exception> excs = new SynchronizedCollection<Exception>();
var threads = new ThreadClass[NumThreads];
for (int i = 0; i < NumThreads; i++)
{
threads[i] = new ThreadAnonymousInnerClassHelper2(writer, r, endTime, excs);
threads[i].SetDaemon(true);
threads[i].Start();
}
int sum = 0;
while (Environment.TickCount < endTime)
{
DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
if (r2 != null)
{
r.Dispose();
r = r2;
}
Query q = new TermQuery(new Term("indexname", "test"));
IndexSearcher searcher = NewSearcher(r);
sum += searcher.Search(q, 10).TotalHits;
}
for (int i = 0; i < NumThreads; i++)
{
threads[i].Join();
}
// at least search once
DirectoryReader dr2 = DirectoryReader.OpenIfChanged(r);
if (dr2 != null)
{
r.Dispose();
r = dr2;
}
Query q2 = new TermQuery(new Term("indexname", "test"));
IndexSearcher indSearcher = NewSearcher(r);
sum += indSearcher.Search(q2, 10).TotalHits;
Assert.IsTrue(sum > 0, "no documents found at all");
Assert.AreEqual(0, excs.Count);
writer.Dispose();
r.Dispose();
dir1.Dispose();
}
private class ThreadAnonymousInnerClassHelper2 : ThreadClass
{
private IndexWriter Writer;
private DirectoryReader r;
private long EndTime;
private IList<Exception> Excs;
public ThreadAnonymousInnerClassHelper2(IndexWriter writer, DirectoryReader r, long endTime, IList<Exception> excs)
{
this.Writer = writer;
this.r = r;
this.EndTime = endTime;
this.Excs = excs;
rand = new Random(Random().Next());
}
internal readonly Random rand;
public override void Run()
{
int count = 0;
do
{
try
{
for (int docUpto = 0; docUpto < 10; docUpto++)
{
Writer.AddDocument(DocHelper.CreateDocument(10 * count + docUpto, "test", 4));
}
count++;
int limit = count * 10;
for (int delUpto = 0; delUpto < 5; delUpto++)
{
int x = rand.Next(limit);
Writer.DeleteDocuments(new Term("field3", "b" + x));
}
}
catch (Exception t)
{
Excs.Add(t);
throw new Exception(t.Message, t);
}
} while (Environment.TickCount < EndTime);
}
}
[Test]
public virtual void TestForceMergeDeletes()
{
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
Document doc = new Document();
doc.Add(NewTextField("field", "a b c", Field.Store.NO));
Field id = NewStringField("id", "", Field.Store.NO);
doc.Add(id);
id.StringValue = "0";
w.AddDocument(doc);
id.StringValue = "1";
w.AddDocument(doc);
w.DeleteDocuments(new Term("id", "0"));
IndexReader r = w.Reader;
w.ForceMergeDeletes();
w.Dispose();
r.Dispose();
r = DirectoryReader.Open(dir);
Assert.AreEqual(1, r.NumDocs);
Assert.IsFalse(r.HasDeletions);
r.Dispose();
dir.Dispose();
}
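// NumDocs reported by an NRT reader should reflect deletes as soon as a fresh reader is obtained.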
[Test]
public virtual void TestDeletesNumDocs()
{
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
Document doc = new Document();
doc.Add(NewTextField("field", "a b c", Field.Store.NO));
Field id = NewStringField("id", "", Field.Store.NO);
doc.Add(id);
id.StringValue = "0";
w.AddDocument(doc);
id.StringValue = "1";
w.AddDocument(doc);
IndexReader r = w.Reader;
Assert.AreEqual(2, r.NumDocs);
r.Dispose();
w.DeleteDocuments(new Term("id", "0"));
r = w.Reader;
Assert.AreEqual(1, r.NumDocs);
r.Dispose();
w.DeleteDocuments(new Term("id", "1"));
r = w.Reader;
Assert.AreEqual(0, r.NumDocs);
r.Dispose();
w.Dispose();
dir.Dispose();
}
[Test]
public virtual void TestEmptyIndex()
{
// Ensures that getReader works on an empty index, which hasn't been committed yet.
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
IndexReader r = w.Reader;
Assert.AreEqual(0, r.NumDocs);
r.Dispose();
w.Dispose();
dir.Dispose();
}
[Test]
public virtual void TestSegmentWarmer()
{
Directory dir = NewDirectory();
AtomicBoolean didWarm = new AtomicBoolean();
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
.SetMaxBufferedDocs(2)
.SetReaderPooling(true)
.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this, didWarm))
.SetMergePolicy(NewLogMergePolicy(10)));
Document doc = new Document();
doc.Add(NewStringField("foo", "bar", Field.Store.NO));
for (int i = 0; i < 20; i++)
{
w.AddDocument(doc);
}
w.WaitForMerges();
w.Dispose();
dir.Dispose();
Assert.IsTrue(didWarm.Get());
}
private class IndexReaderWarmerAnonymousInnerClassHelper : IndexWriter.IndexReaderWarmer
{
private readonly TestIndexWriterReader OuterInstance;
private AtomicBoolean DidWarm;
public IndexReaderWarmerAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, AtomicBoolean didWarm)
{
this.OuterInstance = outerInstance;
this.DidWarm = didWarm;
}
public override void Warm(AtomicReader r)
{
IndexSearcher s = NewSearcher(r);
TopDocs hits = s.Search(new TermQuery(new Term("foo", "bar")), 10);
Assert.AreEqual(20, hits.TotalHits);
DidWarm.Set(true);
}
}
[Test]
public virtual void TestSimpleMergedSegmentWarmer()
{
Directory dir = NewDirectory();
AtomicBoolean didWarm = new AtomicBoolean();
InfoStream infoStream = new InfoStreamAnonymousInnerClassHelper(this, didWarm);
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetReaderPooling(true).SetInfoStream(infoStream).SetMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream)).SetMergePolicy(NewLogMergePolicy(10)));
Document doc = new Document();
doc.Add(NewStringField("foo", "bar", Field.Store.NO));
for (int i = 0; i < 20; i++)
{
w.AddDocument(doc);
}
w.WaitForMerges();
w.Dispose();
dir.Dispose();
Assert.IsTrue(didWarm.Get());
}
private class InfoStreamAnonymousInnerClassHelper : InfoStream
{
private readonly TestIndexWriterReader OuterInstance;
private AtomicBoolean DidWarm;
public InfoStreamAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, AtomicBoolean didWarm)
{
this.OuterInstance = outerInstance;
this.DidWarm = didWarm;
}
public override void Dispose()
{
}
public override void Message(string component, string message)
{
if ("SMSW".Equals(component))
{
DidWarm.Set(true);
}
}
public override bool IsEnabled(string component)
{
return true;
}
}
[Test]
public virtual void TestNoTermsIndex()
{
// Some Codecs don't honor the ReaderTermsIndexDivisor, so skip the test if
// they're picked.
AssumeFalse("PreFlex codec does not support ReaderTermsIndexDivisor!", "Lucene3x".Equals(Codec.Default.Name));
IndexWriterConfig conf = (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetReaderTermsIndexDivisor(-1);
// Don't proceed if picked Codec is in the list of illegal ones.
string format = TestUtil.GetPostingsFormat("f");
AssumeFalse("Format: " + format + " does not support ReaderTermsIndexDivisor!", (format.Equals("FSTPulsing41") || format.Equals("FSTOrdPulsing41") || format.Equals("FST41") || format.Equals("FSTOrd41") || format.Equals("SimpleText") || format.Equals("Memory") || format.Equals("MockRandom") || format.Equals("Direct")));
Directory dir = NewDirectory();
IndexWriter w = new IndexWriter(dir, conf);
Document doc = new Document();
doc.Add(new TextField("f", "val", Field.Store.NO));
w.AddDocument(doc);
SegmentReader r = GetOnlySegmentReader(DirectoryReader.Open(w, true));
try
{
TestUtil.Docs(Random(), r, "f", new BytesRef("val"), null, null, DocsEnum.FLAG_NONE);
Assert.Fail("should have failed to seek since terms index was not loaded.");
}
catch (InvalidOperationException e)
{
// expected - we didn't load the term index
}
finally
{
r.Dispose();
w.Dispose();
dir.Dispose();
}
}
[Test]
public virtual void TestReopenAfterNoRealChange()
{
Directory d = GetAssertNoDeletesDirectory(NewDirectory());
IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
DirectoryReader r = w.Reader; // start pooling readers
DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
Assert.IsNull(r2);
w.AddDocument(new Document());
DirectoryReader r3 = DirectoryReader.OpenIfChanged(r);
Assert.IsNotNull(r3);
Assert.IsTrue(r3.Version != r.Version);
Assert.IsTrue(r3.Current);
// Deletes nothing in reality...:
w.DeleteDocuments(new Term("foo", "bar"));
// ... but IW marks this as not current:
Assert.IsFalse(r3.Current);
DirectoryReader r4 = DirectoryReader.OpenIfChanged(r3);
Assert.IsNull(r4);
// Deletes nothing in reality...:
w.DeleteDocuments(new Term("foo", "bar"));
DirectoryReader r5 = DirectoryReader.OpenIfChanged(r3, w, true);
Assert.IsNull(r5);
r3.Dispose();
w.Dispose();
d.Dispose();
}
[Test]
public virtual void TestNRTOpenExceptions()
{
// LUCENE-5262: test that several failed attempts to obtain an NRT reader
// don't leak file handles.
MockDirectoryWrapper dir = (MockDirectoryWrapper)GetAssertNoDeletesDirectory(NewMockDirectory());
AtomicBoolean shouldFail = new AtomicBoolean();
dir.FailOn(new FailureAnonymousInnerClassHelper(shouldFail));
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges from getting in the way
IndexWriter writer = new IndexWriter(dir, conf);
// create a segment and open an NRT reader
writer.AddDocument(new Document());
writer.Reader.Dispose();
// add a new document so a new NRT reader is required
writer.AddDocument(new Document());
// try to obtain an NRT reader twice: the first time it fails and closes all the
// other NRT readers; the second time it fails again, but also fails to close the
// other NRT reader, since it is already marked closed!
for (int i = 0; i < 2; i++)
{
shouldFail.Set(true);
try
{
writer.Reader.Dispose();
}
catch (FakeIOException e)
{
// expected
if (VERBOSE)
{
Console.WriteLine("hit expected fake IOE");
}
}
}
writer.Dispose();
dir.Dispose();
}
private class FailureAnonymousInnerClassHelper : MockDirectoryWrapper.Failure
{
private readonly AtomicBoolean ShouldFail;
public FailureAnonymousInnerClassHelper(AtomicBoolean shouldFail)
{
this.ShouldFail = shouldFail;
}
public override void Eval(MockDirectoryWrapper dir)
{
var trace = new StackTrace();
if (ShouldFail.Get())
{
foreach (var frame in trace.GetFrames())
{
var method = frame.GetMethod();
if ("GetReadOnlyClone".Equals(method.Name))
{
if (VERBOSE)
{
Console.WriteLine("TEST: now fail; exc:");
Console.WriteLine((new Exception()).StackTrace);
}
ShouldFail.Set(false);
throw new FakeIOException();
}
}
}
}
}
/// <summary>
/// Make sure if all we do is open NRT reader against
/// writer, we don't see merge starvation.
/// </summary>
[Test]
public virtual void TestTooManySegments()
{
Directory dir = GetAssertNoDeletesDirectory(NewDirectory());
// Don't use newIndexWriterConfig, because we need a
// "sane" mergePolicy:
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
IndexWriter w = new IndexWriter(dir, iwc);
// Create 500 segments:
for (int i = 0; i < 500; i++)
{
Document doc = new Document();
doc.Add(NewStringField("id", "" + i, Field.Store.NO));
w.AddDocument(doc);
IndexReader r = DirectoryReader.Open(w, true);
// Make sure segment count stays below 100:
Assert.IsTrue(r.Leaves.Count < 100);
r.Dispose();
}
w.Dispose();
dir.Dispose();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using Xunit;
namespace System
{
public static partial class PlatformDetection
{
public static Version OSXKernelVersion => throw new PlatformNotSupportedException();
public static bool IsSuperUser => throw new PlatformNotSupportedException();
public static bool IsOpenSUSE => false;
public static bool IsUbuntu => false;
public static bool IsDebian => false;
public static bool IsDebian8 => false;
public static bool IsUbuntu1404 => false;
public static bool IsCentos7 => false;
public static bool IsTizen => false;
public static bool IsNotFedoraOrRedHatOrCentos => true;
public static bool IsFedora => false;
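// Nano Server ships without regedit.exe, so its absence (outside IoT Core) is used as the detection heuristic.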
public static bool IsWindowsNanoServer => (IsNotWindowsIoTCore && !File.Exists(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Windows), "regedit.exe")));
public static int WindowsVersion => GetWindowsVersion();
public static bool IsMacOsHighSierraOrHigher { get; } = false;
public static bool IsWindows10Version1607OrGreater =>
GetWindowsVersion() == 10 && GetWindowsMinorVersion() == 0 && GetWindowsBuildNumber() >= 14393;
public static bool IsWindows10Version1703OrGreater =>
GetWindowsVersion() == 10 && GetWindowsMinorVersion() == 0 && GetWindowsBuildNumber() >= 15063;
public static bool IsWindows10InsiderPreviewBuild16215OrGreater =>
GetWindowsVersion() == 10 && GetWindowsMinorVersion() == 0 && GetWindowsBuildNumber() >= 16215;
public static bool IsWindows10Version16251OrGreater =>
GetWindowsVersion() == 10 && GetWindowsMinorVersion() == 0 && GetWindowsBuildNumber() >= 16251;
// Windows OneCoreUAP SKU doesn't have httpapi.dll
public static bool IsNotOneCoreUAP =>
File.Exists(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Windows), "System32", "httpapi.dll"));
public static bool IsWindowsIoTCore
{
get
{
int productType;
Assert.True(GetProductInfo(Environment.OSVersion.Version.Major, Environment.OSVersion.Version.Minor, 0, 0, out productType));
if ((productType == PRODUCT_IOTUAPCOMMERCIAL) ||
(productType == PRODUCT_IOTUAP))
{
return true;
}
return false;
}
}
public static bool IsWindows => true;
public static bool IsWindows7 => GetWindowsVersion() == 6 && GetWindowsMinorVersion() == 1;
public static bool IsWindows8x => GetWindowsVersion() == 6 && (GetWindowsMinorVersion() == 2 || GetWindowsMinorVersion() == 3);
public static bool IsNetfx462OrNewer()
{
if (!IsFullFramework)
{
return false;
}
Version net462 = new Version(4, 6, 2);
Version runningVersion = GetFrameworkVersion();
return runningVersion != null && runningVersion >= net462;
}
public static bool IsNetfx470OrNewer()
{
if (!IsFullFramework)
{
return false;
}
Version net470 = new Version(4, 7, 0);
Version runningVersion = GetFrameworkVersion();
return runningVersion != null && runningVersion >= net470;
}
public static bool IsNetfx471OrNewer()
{
if (!IsFullFramework)
{
return false;
}
Version net471 = new Version(4, 7, 1);
Version runningVersion = GetFrameworkVersion();
return runningVersion != null && runningVersion >= net471;
}
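// Extracts the version from RuntimeInformation.FrameworkDescription (third space-separated token) and maps it to a product version via FrameworkRanges.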
private static Version GetFrameworkVersion()
{
string[] descriptionArray = RuntimeInformation.FrameworkDescription.Split(' ');
if (descriptionArray.Length < 3)
return null;
if (!Version.TryParse(descriptionArray[2], out Version actualVersion))
return null;
foreach (Range currentRange in FrameworkRanges)
{
if (currentRange.IsInRange(actualVersion))
return currentRange.FrameworkVersion;
}
return null;
}
public static string GetDistroVersionString() { return ""; }
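// Tri-state cache: -1 = not yet computed, 0 = false, 1 = true.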
private static int s_isWinRT = -1;
public static bool IsWinRT
{
get
{
if (s_isWinRT != -1)
return s_isWinRT == 1;
if (!IsWindows || IsWindows7)
{
s_isWinRT = 0;
return false;
}
byte[] buffer = new byte[0];
uint bufferSize = 0;
try
{
int result = GetCurrentApplicationUserModelId(ref bufferSize, buffer);
switch (result)
{
case 15703: // APPMODEL_ERROR_NO_APPLICATION
s_isWinRT = 0;
break;
case 0: // ERROR_SUCCESS
case 122: // ERROR_INSUFFICIENT_BUFFER
// Success is effectively the same as insufficient buffer here: we only care
// that the result is not NO_APPLICATION, and we are not supplying a real
// buffer anyway. The API always returns NO_APPLICATION when we are not
// running under a WinRT process, no matter what size the buffer is.
s_isWinRT = 1;
break;
default:
throw new InvalidOperationException($"Failed to get AppId, result was {result}.");
}
}
catch (Exception e)
{
// Compare the type name rather than catching EntryPointNotFoundException directly,
// to stay friendly with older portable surface area should we want to use this method elsewhere.
if (e.GetType().FullName.Equals("System.EntryPointNotFoundException", StringComparison.Ordinal))
{
// API doesn't exist, likely pre Win8
s_isWinRT = 0;
}
else
{
throw;
}
}
return s_isWinRT == 1;
}
}
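// Tri-state cache for elevation: -1 = not yet computed, 0 = false, 1 = true.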
private static int s_isWindowsElevated = -1;
public static bool IsWindowsAndElevated
{
get
{
if (s_isWindowsElevated != -1)
return s_isWindowsElevated == 1;
if (!IsWindows || IsWinRT)
{
s_isWindowsElevated = 0;
return false;
}
IntPtr processToken;
Assert.True(OpenProcessToken(GetCurrentProcess(), TOKEN_READ, out processToken));
try
{
uint tokenInfo;
uint returnLength;
Assert.True(GetTokenInformation(
processToken, TokenElevation, out tokenInfo, sizeof(uint), out returnLength));
s_isWindowsElevated = tokenInfo == 0 ? 0 : 1;
}
finally
{
CloseHandle(processToken);
}
return s_isWindowsElevated == 1;
}
}
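// The version helpers below query ntdll's RtlGetVersion directly, so they report the real OS version rather than one adjusted by compatibility shims.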
private static int GetWindowsMinorVersion()
{
if (IsWindows)
{
RTL_OSVERSIONINFOEX osvi = new RTL_OSVERSIONINFOEX();
osvi.dwOSVersionInfoSize = (uint)Marshal.SizeOf(osvi);
Assert.Equal(0, RtlGetVersion(out osvi));
return (int)osvi.dwMinorVersion;
}
return -1;
}
private static int GetWindowsBuildNumber()
{
if (IsWindows)
{
RTL_OSVERSIONINFOEX osvi = new RTL_OSVERSIONINFOEX();
osvi.dwOSVersionInfoSize = (uint)Marshal.SizeOf(osvi);
Assert.Equal(0, RtlGetVersion(out osvi));
return (int)osvi.dwBuildNumber;
}
return -1;
}
private const uint TokenElevation = 20;
private const uint STANDARD_RIGHTS_READ = 0x00020000;
private const uint TOKEN_QUERY = 0x0008;
private const uint TOKEN_READ = STANDARD_RIGHTS_READ | TOKEN_QUERY;
[DllImport("advapi32.dll", SetLastError = true, ExactSpelling = true)]
private static extern bool GetTokenInformation(
IntPtr TokenHandle,
uint TokenInformationClass,
out uint TokenInformation,
uint TokenInformationLength,
out uint ReturnLength);
private const int PRODUCT_IOTUAP = 0x0000007B;
private const int PRODUCT_IOTUAPCOMMERCIAL = 0x00000083;
[DllImport("kernel32.dll", SetLastError = false)]
private static extern bool GetProductInfo(
int dwOSMajorVersion,
int dwOSMinorVersion,
int dwSpMajorVersion,
int dwSpMinorVersion,
out int pdwReturnedProductType
);
[DllImport("ntdll.dll")]
private static extern int RtlGetVersion(out RTL_OSVERSIONINFOEX lpVersionInformation);
[StructLayout(LayoutKind.Sequential)]
private struct RTL_OSVERSIONINFOEX
{
internal uint dwOSVersionInfoSize;
internal uint dwMajorVersion;
internal uint dwMinorVersion;
internal uint dwBuildNumber;
internal uint dwPlatformId;
[MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
internal string szCSDVersion;
}
private static int GetWindowsVersion()
{
RTL_OSVERSIONINFOEX osvi = new RTL_OSVERSIONINFOEX();
osvi.dwOSVersionInfoSize = (uint)Marshal.SizeOf(osvi);
Assert.Equal(0, RtlGetVersion(out osvi));
return (int)osvi.dwMajorVersion;
}
[DllImport("kernel32.dll", ExactSpelling = true)]
private static extern int GetCurrentApplicationUserModelId(ref uint applicationUserModelIdLength, byte[] applicationUserModelId);
[DllImport("kernel32.dll", SetLastError = true, ExactSpelling = true)]
private static extern bool CloseHandle(IntPtr handle);
[DllImport("advapi32.dll", SetLastError = true, ExactSpelling = true)]
private static extern bool OpenProcessToken(IntPtr ProcessHandle, uint DesiredAccess, out IntPtr TokenHandle);
// The process handle does NOT need closing
[DllImport("kernel32.dll", ExactSpelling = true)]
private static extern IntPtr GetCurrentProcess();
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="AboutBox.Designer.cs" company="None">
// Copyright (c) Brandon Wallace and Jesse Calhoun. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
using TQVaultAE.GUI.Properties;
namespace TQVaultAE.GUI
{
/// <summary>
/// Class for About Dialog Box.
/// </summary>
internal partial class AboutBox
{
/// <summary>
/// Table for layout.
/// </summary>
private System.Windows.Forms.TableLayoutPanel tableLayoutPanel;
/// <summary>
/// Logo Picture on dialog.
/// </summary>
private System.Windows.Forms.PictureBox logoPictureBox;
/// <summary>
/// Product Name on dialog.
/// </summary>
private ScalingLabel labelProductName;
/// <summary>
/// Version label on dialog.
/// </summary>
private ScalingLabel labelVersion;
/// <summary>
/// Copyright label on dialog.
/// </summary>
private ScalingLabel labelCopyright;
/// <summary>
/// Description text box.
/// </summary>
private ScalingTextBox textBoxDescription;
/// <summary>
/// Form OK Button.
/// </summary>
private ScalingButton buttonOK;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (this.components != null))
{
this.components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AboutBox));
this.tableLayoutPanel = new System.Windows.Forms.TableLayoutPanel();
this.logoPictureBox = new System.Windows.Forms.PictureBox();
this.labelProductName = new ScalingLabel();
this.labelVersion = new ScalingLabel();
this.labelCopyright = new ScalingLabel();
this.textBoxDescription = new ScalingTextBox();
this.buttonOK = new ScalingButton();
this.tableLayoutPanel.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.logoPictureBox)).BeginInit();
this.SuspendLayout();
//
// tableLayoutPanel
//
this.tableLayoutPanel.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
this.tableLayoutPanel.ColumnCount = 2;
this.tableLayoutPanel.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 33F));
this.tableLayoutPanel.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 67F));
this.tableLayoutPanel.Controls.Add(this.logoPictureBox, 0, 0);
this.tableLayoutPanel.Controls.Add(this.labelProductName, 1, 0);
this.tableLayoutPanel.Controls.Add(this.labelVersion, 1, 1);
this.tableLayoutPanel.Controls.Add(this.labelCopyright, 1, 2);
this.tableLayoutPanel.Controls.Add(this.textBoxDescription, 1, 4);
this.tableLayoutPanel.Controls.Add(this.buttonOK, 1, 5);
this.tableLayoutPanel.Location = new System.Drawing.Point(9, 32);
this.tableLayoutPanel.Name = "tableLayoutPanel";
this.tableLayoutPanel.RowCount = 6;
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 10F));
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 10F));
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 6.730769F));
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 1.923077F));
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 61.05769F));
this.tableLayoutPanel.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 10F));
this.tableLayoutPanel.Size = new System.Drawing.Size(568, 416);
this.tableLayoutPanel.TabIndex = 0;
//
// logoPictureBox
//
this.logoPictureBox.Dock = System.Windows.Forms.DockStyle.Fill;
this.logoPictureBox.Image = Resources.NewAboutGraphic;
this.logoPictureBox.Location = new System.Drawing.Point(3, 3);
this.logoPictureBox.Name = "logoPictureBox";
this.tableLayoutPanel.SetRowSpan(this.logoPictureBox, 6);
this.logoPictureBox.Size = new System.Drawing.Size(181, 410);
this.logoPictureBox.TabIndex = 12;
this.logoPictureBox.TabStop = false;
//
// labelProductName
//
this.labelProductName.Dock = System.Windows.Forms.DockStyle.Fill;
this.labelProductName.Font = new System.Drawing.Font("Albertus MT Light", 11.25F);
this.labelProductName.ForeColor = System.Drawing.Color.White;
this.labelProductName.Location = new System.Drawing.Point(193, 0);
this.labelProductName.Margin = new System.Windows.Forms.Padding(6, 0, 3, 0);
this.labelProductName.MaximumSize = new System.Drawing.Size(0, 17);
this.labelProductName.Name = "labelProductName";
this.labelProductName.Size = new System.Drawing.Size(372, 17);
this.labelProductName.TabIndex = 19;
this.labelProductName.Text = "Product Name";
this.labelProductName.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// labelVersion
//
this.labelVersion.Dock = System.Windows.Forms.DockStyle.Fill;
this.labelVersion.Font = new System.Drawing.Font("Albertus MT Light", 11.25F);
this.labelVersion.ForeColor = System.Drawing.Color.White;
this.labelVersion.Location = new System.Drawing.Point(193, 41);
this.labelVersion.Margin = new System.Windows.Forms.Padding(6, 0, 3, 0);
this.labelVersion.MaximumSize = new System.Drawing.Size(0, 17);
this.labelVersion.Name = "labelVersion";
this.labelVersion.Size = new System.Drawing.Size(372, 17);
this.labelVersion.TabIndex = 0;
this.labelVersion.Text = "Version";
this.labelVersion.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// labelCopyright
//
this.labelCopyright.Dock = System.Windows.Forms.DockStyle.Fill;
this.labelCopyright.Font = new System.Drawing.Font("Albertus MT Light", 11.25F);
this.labelCopyright.ForeColor = System.Drawing.Color.White;
this.labelCopyright.Location = new System.Drawing.Point(193, 82);
this.labelCopyright.Margin = new System.Windows.Forms.Padding(6, 0, 3, 0);
this.labelCopyright.MaximumSize = new System.Drawing.Size(0, 17);
this.labelCopyright.Name = "labelCopyright";
this.labelCopyright.Size = new System.Drawing.Size(372, 17);
this.labelCopyright.TabIndex = 21;
this.labelCopyright.Text = "Copyright";
this.labelCopyright.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// textBoxDescription
//
this.textBoxDescription.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(46)))), ((int)(((byte)(41)))), ((int)(((byte)(31)))));
this.textBoxDescription.Dock = System.Windows.Forms.DockStyle.Fill;
this.textBoxDescription.Font = new System.Drawing.Font("Albertus MT Light", 9.75F);
this.textBoxDescription.ForeColor = System.Drawing.Color.White;
this.textBoxDescription.Location = new System.Drawing.Point(193, 121);
this.textBoxDescription.Margin = new System.Windows.Forms.Padding(6, 3, 3, 3);
this.textBoxDescription.Multiline = true;
this.textBoxDescription.Name = "textBoxDescription";
this.textBoxDescription.ReadOnly = true;
this.textBoxDescription.ScrollBars = System.Windows.Forms.ScrollBars.Both;
this.textBoxDescription.Size = new System.Drawing.Size(372, 248);
this.textBoxDescription.TabIndex = 23;
this.textBoxDescription.TabStop = false;
this.textBoxDescription.Text = "Description";
//
// buttonOK
//
this.buttonOK.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.buttonOK.BackColor = System.Drawing.Color.Transparent;
this.buttonOK.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.buttonOK.DownBitmap = Resources.MainButtonDown;
this.buttonOK.FlatAppearance.BorderSize = 0;
this.buttonOK.FlatAppearance.MouseDownBackColor = System.Drawing.Color.FromArgb(((int)(((byte)(0)))), ((int)(((byte)(51)))), ((int)(((byte)(44)))), ((int)(((byte)(28)))));
this.buttonOK.FlatAppearance.MouseOverBackColor = System.Drawing.Color.FromArgb(((int)(((byte)(0)))), ((int)(((byte)(51)))), ((int)(((byte)(44)))), ((int)(((byte)(28)))));
this.buttonOK.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.buttonOK.Font = new System.Drawing.Font("Albertus MT Light", 12F);
this.buttonOK.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(51)))), ((int)(((byte)(44)))), ((int)(((byte)(28)))));
this.buttonOK.Image = ((System.Drawing.Image)(resources.GetObject("buttonOK.Image")));
this.buttonOK.Location = new System.Drawing.Point(428, 383);
this.buttonOK.Name = "buttonOK";
this.buttonOK.OverBitmap = Resources.MainButtonOver;
this.buttonOK.Size = new System.Drawing.Size(137, 30);
this.buttonOK.SizeToGraphic = false;
this.buttonOK.TabIndex = 24;
this.buttonOK.Text = "&OK";
this.buttonOK.UpBitmap = Resources.MainButtonUp;
this.buttonOK.UseCustomGraphic = true;
this.buttonOK.UseVisualStyleBackColor = false;
//
// AboutBox
//
this.AcceptButton = this.buttonOK;
this.AutoScaleDimensions = new System.Drawing.SizeF(96F, 96F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
this.BackColor = System.Drawing.Color.FromArgb(((int)(((byte)(46)))), ((int)(((byte)(41)))), ((int)(((byte)(31)))));
this.ClientSize = new System.Drawing.Size(589, 460);
this.Controls.Add(this.tableLayoutPanel);
this.DrawCustomBorder = true;
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "AboutBox";
this.Padding = new System.Windows.Forms.Padding(9);
this.ShowIcon = false;
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "AboutBox";
this.TopMost = true;
this.Controls.SetChildIndex(this.tableLayoutPanel, 0);
this.tableLayoutPanel.ResumeLayout(false);
this.tableLayoutPanel.PerformLayout();
((System.ComponentModel.ISupportInitialize)(this.logoPictureBox)).EndInit();
this.ResumeLayout(false);
}
#endregion
}
}
| |
namespace Morph
{
public struct Char
{
public const char MaxValue = (char)0xff; //or 0xffff
public const char MinValue = (char)0;
internal char m_value;
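// Whitespace here is the Latin-1 subset: 0x09-0x0D (tab through carriage return), 0x20 (space), 0x85 (next line) and 0xA0 (no-break space).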
public static bool IsWhiteSpace(System.Char c)
{
byte b = (byte)c;
return (b >= 9 && b <= 13) || b == 32 || b == 133 || b == 160;
}
public static bool IsWhiteSpace(string s, int index)
{
CheckParameter(s, index);
return IsWhiteSpace(s[index]);
}
public int CompareTo(System.Object value)
{
if (value == null)
return 1;
if (!(value is System.Char))
{
throw new System.ArgumentException("Value is not a System.Char");
}
return CompareTo((char)value);
}
public int CompareTo(char value)
{
if (m_value == value)
return 0;
if (m_value > value)
return 1;
else
return -1;
}
public override bool Equals(System.Object obj)
{
if (!(obj is System.Char))
return false;
return Equals((char)obj);
}
public bool Equals(char obj)
{
return m_value == obj;
}
public override int GetHashCode()
{
return m_value;
}
public override string ToString()
{
return new string(m_value, 1);
}
public static string ToString(char c)
{
return new string(c, 1);
}
public static char Parse(string s)
{
if (s == null)
throw new System.ArgumentNullException("s");
if (s.Length != 1)
throw new System.FormatException("s must contain exactly one character.");
return s[0];
}
public static bool TryParse(string s, out char result)
{
if (s == null || s.Length != 1)
{
result = (char)0;
return false;
}
result = s[0];
return true;
}
public static char ToUpper(char c)
{
if (c >= 'a' && c <= 'z')
return (char)(c + ('A' - 'a'));
return c;
}
public static char ToLower(char c)
{
if (c >= 'A' && c <= 'Z')
return (char)(c - ('A' - 'a'));
return c;
}
public static bool IsUpper(char c)
{
return (c >= 'A' && c <= 'Z');
}
public static bool IsUpper(string s, int index)
{
CheckParameter(s, index);
return IsUpper(s[index]);
}
public static bool IsNumber(char c)
{
return IsDigit(c);
}
public static bool IsNumber(string s, int index)
{
CheckParameter(s, index);
return IsNumber(s[index]);
}
public static bool IsLower(char c)
{
return (c >= 'a' && c <= 'z');
}
public static bool IsLower(string s, int index)
{
CheckParameter(s, index);
return IsLower(s[index]);
}
public static bool IsLetter(char c)
{
return (IsUpper(c) || IsLower(c));
}
public static bool IsLetter(string s, int index)
{
CheckParameter(s, index);
return IsLetter(s[index]);
}
public static bool IsDigit(char c)
{
return (c >= '0' && c <= '9');
}
public static bool IsDigit(string s, int index)
{
CheckParameter(s, index);
return IsDigit(s[index]);
}
public static bool IsLetterOrDigit(char c)
{
return (IsLetter(c) || IsDigit(c));
}
public static bool IsLetterOrDigit(string s, int index)
{
CheckParameter(s, index);
return IsLetterOrDigit(s[index]);
}
public static bool IsControl(char c)
{
byte b = (byte)c;
return ((b >= 0 && b <= 31) || (b >= 127 && b <= 159) ); //0->31 and 127->159
}
public static bool IsControl(string s, int index)
{
CheckParameter(s, index);
return IsControl(s[index]);
}
public static bool IsPunctuation(char c)
{
byte b = (byte)c;
return ((b >= 33 && b <= 47) || (b >= 58 && b <= 64) || (b >= 91 && b <= 96) || (b >= 123 && b <= 126));
}
public static bool IsPunctuation(string s, int index)
{
CheckParameter(s, index);
return IsPunctuation(s[index]);
}
public static bool IsSeparator(char c)
{
byte b = (byte)c;
return (b == 32 || b == 160);
}
public static bool IsSeparator(string s, int index)
{
CheckParameter(s, index);
return IsSeparator(s[index]);
}
//public static int GetNumericValue(char c)
//{
// if (IsDigit(c))
// return (c - '0');
// return -1;
//}
//public static int GetNumericValue(string s, int index)
//{
// CheckParameter(s, index);
// return GetNumericValue(s[index]);
//}
private static void CheckParameter(string s, int index)
{
if (s == null)
throw new System.ArgumentNullException("s");
if (index < 0 || index >= s.Length)
throw new System.ArgumentOutOfRangeException("index", "The value of index is less than zero, or greater than or equal to the length of s.");
}
}
}
| |
#region Foreign-License
/*
Copyright (c) 2012 Sky Morey
See the file "license.terms" for information on usage and redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
*/
#endregion
using System;
using System.Collections;
using System.Diagnostics;
using System.Text;
using sqlite_int64 = System.Int64;
using i32 = System.Int32;
using i64 = System.Int64;
using u32 = System.UInt32;
namespace Tcl.Lang
{
#if TCLSH
using Tcl_Channel = Channel;
using Tcl_DString = TclString;
using Tcl_Interp = Interp;
using Tcl_Obj = TclObject;
using Tcl_WideInt = System.Int64;
public partial class TCL
{
// -- Conversion from TCL to tclsharp coding
// Included in SQLite3 port to C# for use in testharness only; 2008 Noah B Hart
public static void Tcl_AppendElement(Interp interp, StringBuilder toAppend)
{
interp.appendElement(toAppend.ToString());
}
public static void Tcl_AppendElement(Interp interp, string toAppend)
{
interp.appendElement(toAppend);
}
public static void Tcl_AppendResult(Interp interp, params object[] tos)
{
if (tos != null)
{
StringBuilder result = new StringBuilder(100);
for (int i = 0; i < tos.Length && tos[i] != null; i++)
result.Append(tos[i].ToString());
interp.appendElement(result.ToString());
}
}
public static void Tcl_AppendResult(Interp interp, params string[] strings)
{
if (strings != null)
{
StringBuilder result = new StringBuilder(100);
for (int i = 0; i < strings.Length && strings[i] != null && strings[i] != ""; i++)
result.Append(strings[i]);
interp.appendElement(result.ToString());
}
}
public static void Tcl_BackgroundError(Interp interp)
{
interp.SetErrorCode(TclInteger.NewInstance(TCL_ERROR));
interp.AddErrorInfo("Background Error");
}
public static void Tcl_CreateCommand(Interp interp, string cmdName, Interp.dxObjCmdProc ObjCmdProc, object ClientData, Interp.dxCmdDeleteProc DbDeleteCmd)
{
interp.createObjCommand(cmdName, ObjCmdProc, ClientData, DbDeleteCmd);
}
public static void Tcl_CreateObjCommand(Interp interp, string cmdName, Interp.dxObjCmdProc ObjCmdProc, object ClientData, Interp.dxCmdDeleteProc DbDeleteCmd)
{
interp.createObjCommand(cmdName, ObjCmdProc, ClientData, DbDeleteCmd);
}
public static bool Tcl_CreateCommandPointer(Interp interp, StringBuilder command, object clientData)
{
try
{
interp.createObjCommand(command.ToString(), null, clientData, null);
return false;
}
catch
{
return true;
}
}
public static bool Tcl_CreateCommandPointer(Interp interp, string command, object clientData)
{
try
{
interp.createObjCommand(command, null, clientData, null);
return false;
}
catch
{
return true;
}
}
public static void Tcl_DecrRefCount(ref TclObject to)
{
to.Release();
if (to.internalRep == null)
to = null;
}
public static int Tcl_DeleteCommand(Interp interp, string cmdName)
{
return interp.deleteCommand(cmdName);
}
public static void Tcl_DStringAppendElement(TclObject str, string append)
{
TclString.append(str, append);
}
public static void Tcl_DStringFree(ref TclObject str)
{
str.Release();
}
public static void Tcl_DStringInit(out TclObject str)
{
str = TclString.NewInstance("");
str.Preserve();
}
public static int Tcl_DStringLength(TclObject str)
{
return str.ToString().Length;
}
public static TclObject Tcl_DuplicateObj(TclObject to)
{
return to.duplicate();
}
public static int Tcl_Eval(Interp interp, string s)
{
try
{
interp.Eval(s);
return 0;
}
catch
{
return 1;
};
}
public static int Tcl_EvalObjEx(Interp interp, TclObject tobj, int flags)
{
try
{
interp.Eval(tobj, flags);
return 0;
}
catch (TclException e)
{
if (e.GetCompletionCode() == TCL.CompletionCode.RETURN)
return TCL_RETURN;
else if (e.GetCompletionCode() == TCL.CompletionCode.BREAK || interp.GetResult().ToString() == "invoked \"break\" outside of a loop")
return TCL_BREAK;
else
return TCL_ERROR;
};
}
public static void Tcl_Free(ref TclObject[] to)
{
if (to != null)
for (int i = 0; i < to.Length; i++)
while (to[i] != null && to[i].refCount > 0)
to[i].Release();
to = null;
}
public static void Tcl_Free(ref TclObject to)
{
while (to.refCount > 0)
to.Release();
}
public static void Tcl_Free<T>(ref T x) where T : class
{
x = null;
}
public static bool Tcl_GetBoolean(Interp interp, TclObject to, out int result)
{
try
{
result = (TclBoolean.get(interp, to) ? 1 : 0);
return false;
}
catch
{
result = 0;
return true;
}
}
public static bool Tcl_GetBoolean(Interp interp, TclObject to, out bool result)
{
try
{
result = TclBoolean.get(interp, to);
return false;
}
catch
{
result = false;
return true;
}
}
public static bool Tcl_GetBooleanFromObj(Interp interp, TclObject to, out bool result)
{
try
{
result = TclBoolean.get(interp, to);
return false;
}
catch
{
result = false;
return true;
}
}
public static bool Tcl_GetCommandInfo(Interp interp, string command, out WrappedCommand value)
{
try
{
value = interp.getObjCommand(command);
return false;
}
catch
{
value = null;
return true;
}
}
public static byte[] Tcl_GetByteArrayFromObj(TclObject to, out int n)
{
n = TclByteArray.getLength(null, to);
return Encoding.UTF8.GetBytes(to.ToString());
}
public static bool Tcl_GetDouble(Interp interp, TclObject to, out double value)
{
try
{
value = TclDouble.Get(interp, to);
return false;
}
catch
{
value = 0;
return true;
}
}
public static bool Tcl_GetDoubleFromObj(Interp interp, TclObject to, out double value)
{
try
{
if (to.ToString() == "NaN")
value = Double.NaN;
else
value = TclDouble.Get(interp, to);
return false;
}
catch
{
value = 0;
return true;
}
}
public static bool Tcl_GetIndexFromObj(Interp interp, TclObject to, string[] table, string msg, int flags, out int index)
{
try
{
index = TclIndex.Get(interp, to, table, msg, flags);
return false;
}
catch
{
index = 0;
return true;
}
}
public static bool Tcl_GetInt(Interp interp, TclObject to, out int value)
{
try
{
value = TclInteger.Get(interp, to);
return false;
}
catch
{
value = 0;
return true;
}
}
public static bool Tcl_GetInt(Interp interp, TclObject to, out u32 value)
{
try
{
value = (u32)TclInteger.Get(interp, to);
return false;
}
catch
{
value = 0;
return true;
}
}
public static int Tcl_GetIntFromObj(Interp interp, TclObject to, out int value)
{
try
{
value = TclInteger.Get(interp, to);
return TCL.TCL_OK;
}
catch
{
value = 0;
return TCL.TCL_ERROR;
}
}
public static bool Tcl_GetLong(Interp interp, TclObject to, out i64 value)
{
try
{
value = (i64)TclLong.get(interp, to);
return false;
}
catch
{
value = 0;
return true;
}
}
public static TclObject Tcl_GetObjResult(Interp interp)
{
TclObject toReturn = interp.GetResult();
return toReturn;
}
public static string Tcl_GetString(TclObject to)
{
return to.ToString();
}
public static string Tcl_GetStringFromObj(TclObject to, int n)
{
Debug.Assert(n == 0, "Try calling by ref");
return to.ToString();
}
public static string Tcl_GetStringFromObj(TclObject to, out int n)
{
byte[] tb = System.Text.Encoding.UTF8.GetBytes(to.ToString());
string ts = System.Text.Encoding.UTF8.GetString(tb, 0, tb.Length);
n = ts.Length;
return ts;
}
public static string Tcl_GetStringResult(Interp interp)
{
return interp.GetResult().ToString();
}
public static TclObject Tcl_GetVar2Ex(Interp interp, string part1, string part2, VarFlag flags)
{
try
{
Var[] result = Var.LookupVar(interp, part1, part2, flags, "read", false, true);
if (result == null)
{
// lookupVar() returns null only if VarFlag.LEAVE_ERR_MSG is
// not part of the flags argument, return null in this case.
return null;
}
Var var = result[0];
Var array = result[1];
TclObject to = null;
if (var.IsVarScalar() && !var.IsVarUndefined())
{
to = (TclObject)var._value;
//if ( to.typePtr != "String" )
//{
// double D = 0;
// if ( !Double.TryParse( to.ToString(), out D ) ) { if ( String.IsNullOrEmpty( to.typePtr ) ) to.typePtr = "string"; }
// else if ( to.typePtr == "ByteArray" )
// to.typePtr = "bytearray";
// else if ( to.ToString().Contains( "." ) )
// to.typePtr = "double";
// else
// to.typePtr = "int";
//}
return to;
}
else if (var.isSQLITE3_Link())
{
to = (TclObject)var.Ext_Get();
}
else
{
to = TclList.NewInstance();
foreach (string key in ((Hashtable)array._value).Keys)
{
Var s = (Var)((Hashtable)array._value)[key];
if (s._value != null) TclList.Append(null, to, TclString.NewInstance(s._value.ToString()));
}
}
return to;
}
catch (Exception e)
{
return null;
};
}
public static TclObject Tcl_GetVar(Interp interp, string part, VarFlag flags)
{
try
{
TclObject to = interp.GetVar(part, flags);
return to;
}
catch (Exception e)
{
return TclObj.newInstance("");
};
}
public static TclObject Tcl_GetVarType(Interp interp, string part1, string part2, VarFlag flags)
{
try
{
TclObject to = interp.GetVar(part1, part2, flags);
return to;
}
catch
{
return null;
};
}
public static bool Tcl_GetWideIntFromObj(Interp interp, TclObject to, out sqlite_int64 value)
{
try
{
if (to.ToString() == "NaN")
unchecked
{
value = (long)Double.NaN;
}
else
value = TclLong.get(interp, to);
return false;
}
catch
{
value = 0;
return true;
};
}
public static void Tcl_IncrRefCount(TclObject to)
{
to.Preserve();
}
public static void Tcl_LinkVar(Interp interp, string name, Object GetSet, VarFlags flags)
{
Debug.Assert(((flags & VarFlags.EXT_LINK_READ_ONLY) != 0) || GetSet.GetType().Name == "SQLITE3_GETSET");
Var[] linkvar = Var.LookupVar(interp, name, null, VarFlag.GLOBAL_ONLY, "define", true, false);
linkvar[0].Flags |= VarFlags.EXT_LINK | flags;
linkvar[0].ext_getset = GetSet;
linkvar[0].RefCount++;
}
public static bool Tcl_ListObjAppendElement(Interp interp, TclObject to, TclObject elemObj)
{
try
{
TclList.Append(interp, to, elemObj);
return false;
}
catch
{
return true;
}
}
public static void Tcl_ListObjIndex(Interp interp, TclObject to, int nItem, out TclObject elmObj)
{
try
{
elmObj = TclList.index(interp, to, nItem);
}
catch
{
elmObj = null;
}
}
public static bool Tcl_ListObjGetElements(Interp interp, TclObject to, out int nItem, out TclObject[] elmObj)
{
try
{
elmObj = TclList.getElements(interp, to);
nItem = elmObj.Length;
return false;
}
catch
{
elmObj = null;
nItem = 0;
return true;
}
}
public static void Tcl_ListObjLength(Interp interp, TclObject to, out int nArg)
{
try
{
nArg = TclList.getLength(interp, to);
}
catch
{
nArg = 0;
}
}
public static TclObject Tcl_NewBooleanObj(int value)
{
return TclBoolean.newInstance(value != 0);
}
public static TclObject Tcl_NewByteArrayObj(byte[] value, int bytes)
{
if (value == null || value.Length == 0 || bytes == 0)
return TclByteArray.NewInstance();
else
return TclByteArray.NewInstance(value, 0, bytes);
}
public static TclObject Tcl_NewByteArrayObj(string value, int bytes)
{
if (value == null || bytes == 0)
return TclByteArray.NewInstance();
else
return TclByteArray.NewInstance(System.Text.Encoding.UTF8.GetBytes(value.Substring(0, bytes)));
}
public static TclObject Tcl_NewDoubleObj(double value)
{
return TclDouble.NewInstance(value);
}
public static TclObject Tcl_NewIntObj(int value)
{
return TclInteger.NewInstance(value);
}
public static TclObject Tcl_NewListObj(int nArg, TclObject[] aArg)
{
TclObject to = TclList.NewInstance();
for (int i = 0; i < nArg; i++)
TclList.Append(null, to, aArg[i]);
return to;
}
public static TclObject Tcl_NewObj()
{
return TclString.NewInstance("");
}
public static TclObject Tcl_NewStringObj(byte[] value, int iLength)
{
if (iLength > 0 && iLength < value.Length)
return TclString.NewInstance(Encoding.UTF8.GetString(value, 0, iLength));
else
return TclString.NewInstance(Encoding.UTF8.GetString(value, 0, value.Length));
}
public static TclObject Tcl_NewStringObj(string value, int iLength)
{
if (value == null)
value = "";
else
value = value.Split('\0')[0];
if (iLength <= 0)
iLength = value.Length;
return TclString.NewInstance(value.Substring(0, iLength));
}
public static TclObject Tcl_NewWideIntObj(long value)
{
return TclLong.newInstance(value);
}
public static bool Tcl_ObjSetVar2(Interp interp, TclObject toName, TclObject part2, TclObject toValue, VarFlag flags)
{
try
{
if (part2 == null)
interp.SetVar(toName, toValue, flags);
else
interp.SetVar(toName.ToString(), part2.ToString(), toValue.ToString(), flags);
return false;
}
catch
{
return true;
}
}
public static void Tcl_PkgProvide(Interp interp, string name, string version)
{
interp.pkgProvide(name, version);
}
public static void Tcl_ResetResult(Interp interp)
{
interp.ResetResult();
}
public static void Tcl_SetBooleanObj(TclObject to, int result)
{
to.stringRep = TclBoolean.newInstance(result != 0).ToString();
to.Preserve();
}
public static bool Tcl_SetCommandInfo(Interp interp, string command, WrappedCommand value)
{
try
{
value = interp.getObjCommand(command);
return false;
}
catch
{
return true;
}
}
public static void Tcl_SetIntObj(TclObject to, int result)
{
while (to.Shared)
to.Release();
TclInteger.set(to, result);
to.Preserve();
}
public static void Tcl_SetLongObj(TclObject to, long result)
{
while (to.Shared)
to.Release();
TclLong.set(to, result);
to.Preserve();
}
public static void Tcl_SetObjResult(Interp interp, TclObject to)
{
interp.ResetResult();
interp.SetResult(to);
}
public static void Tcl_SetResult(Interp interp, StringBuilder result, int dummy)
{
interp.ResetResult();
interp.SetResult(result.ToString());
}
public static void Tcl_SetResult(Interp interp, string result, int dummy)
{
interp.ResetResult();
interp.SetResult(result);
}
public static void Tcl_SetVar(Interp interp, string part, string value, int flags)
{
interp.SetVar(part, value, (VarFlag)flags);
}
public static void Tcl_SetVar2(Interp interp, string part1, string part2, string value, int flags)
{
interp.SetVar(part1, part2, value, (VarFlag)flags);
}
public static void Tcl_SetVar2(Interp interp, string part1, string part2, TclObject value, int flags)
{
interp.SetVar(part1, part2, value, (VarFlag)flags);
}
public static void Tcl_UnregisterChannel(Interp interp, Channel chan)
{
TclIO.unregisterChannel(interp, chan);
}
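// Builds a command list from the script name plus the non-empty arguments and evaluates it; returns 1 if an argument cannot be appended, the evaluation throws, or the script ends in a break, otherwise 0.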
public static int Tcl_VarEval(Interp interp, string Scriptname, params string[] argv)
{
try
{
//Tcl_Obj[] aArg = null;
int rc = 0;
Tcl_Obj pCmd = Tcl_NewStringObj(Scriptname, -1);
Tcl_IncrRefCount(pCmd);
for (int i = 0; i < argv.Length; i++)
{
if (argv[i] != null && argv[i] != " ")
rc = Tcl_ListObjAppendElement(interp, pCmd, Tcl_NewStringObj(argv[i], -1)) ? 1 : 0;
if (rc != 0)
{
Tcl_DecrRefCount(ref pCmd);
return 1;
}
}
rc = Tcl_EvalObjEx(interp, pCmd, TCL_EVAL_DIRECT);
Tcl_DecrRefCount(ref pCmd);
return rc == TCL_BREAK ? 1 : 0;
}
catch
{
return 1;
}
}
public static void Tcl_WrongNumArgs(Interp interp, int argc, TclObject[] argv, string message)
{
throw new TclNumArgsException(interp, argc, argv, message == null ? "option ?arg ...?" : message);
}
public static Interp Tcl_GetSlave(Interp interp, string slaveInterp)
{
try
{
return ((InterpSlaveCmd)interp._slaveTable[slaveInterp]).slaveInterp;
}
catch
{
return null;
}
}
}
#endif
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace System.Globalization {
using System;
using System.Diagnostics.Contracts;
//
// This class implements the Julian calendar. In 46 B.C. Julius Caesar ordered a calendar reform, and the resulting
// calendar is called the Julian calendar. It consisted of a solar year of twelve months and 365 days, with an extra
// day every fourth year.
//*
//* Calendar support range:
//* Calendar Minimum Maximum
//* ========== ========== ==========
//* Gregorian 0001/01/01 9999/12/31
//* Julian 0001/01/03 9999/10/19
[Serializable]
[System.Runtime.InteropServices.ComVisible(true)]
public class JulianCalendar : Calendar {
public static readonly int JulianEra = 1;
private const int DatePartYear = 0;
private const int DatePartDayOfYear = 1;
private const int DatePartMonth = 2;
private const int DatePartDay = 3;
// Number of days in a non-leap year
private const int JulianDaysPerYear = 365;
// Number of days in 4 years
private const int JulianDaysPer4Years = JulianDaysPerYear * 4 + 1;
//internal static Calendar m_defaultInstance;
private static readonly int[] DaysToMonth365 =
{
0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365
};
private static readonly int[] DaysToMonth366 =
{
0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366
};
// Gregorian Calendar 9999/12/31 = Julian Calendar 9999/10/19
// keep it as variable field for serialization compat.
internal int MaxYear = 9999;
[System.Runtime.InteropServices.ComVisible(false)]
public override DateTime MinSupportedDateTime
{
get
{
return (DateTime.MinValue);
}
}
[System.Runtime.InteropServices.ComVisible(false)]
public override DateTime MaxSupportedDateTime
{
get
{
return (DateTime.MaxValue);
}
}
// Return the type of the Julian calendar.
//
[System.Runtime.InteropServices.ComVisible(false)]
public override CalendarAlgorithmType AlgorithmType
{
get
{
return CalendarAlgorithmType.SolarCalendar;
}
}
/*=================================GetDefaultInstance==========================
**Action: Internal method to provide a default instance of JulianCalendar. Used by NLS+ implementation
** and other calendars.
**Returns:
**Arguments:
**Exceptions:
============================================================================*/
/*
internal static Calendar GetDefaultInstance() {
if (m_defaultInstance == null) {
m_defaultInstance = new JulianCalendar();
}
return (m_defaultInstance);
}
*/
// Construct an instance of the Julian calendar.
public JulianCalendar() {
// There is no system setting of TwoDigitYear max, so set the value here.
twoDigitYearMax = 2029;
}
internal override int ID {
get {
return (CAL_JULIAN);
}
}
static internal void CheckEraRange(int era) {
if (era != CurrentEra && era != JulianEra) {
throw new ArgumentOutOfRangeException("era", Environment.GetResourceString("ArgumentOutOfRange_InvalidEraValue"));
}
}
internal void CheckYearEraRange(int year, int era) {
CheckEraRange(era);
if (year <= 0 || year > MaxYear) {
throw new ArgumentOutOfRangeException(
"year",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
1,
MaxYear));
}
}
static internal void CheckMonthRange(int month) {
if (month < 1 || month > 12) {
throw new ArgumentOutOfRangeException("month", Environment.GetResourceString("ArgumentOutOfRange_Month"));
}
}
/*=================================CheckDayRange===============================
**Action: Check whether the day value is valid.
**Returns:
**Arguments:
**Exceptions:
**Notes:
** Before calling this method, call CheckYearEraRange()/CheckMonthRange() to make
** sure year/month values are correct.
============================================================================*/
static internal void CheckDayRange(int year, int month, int day) {
if (year == 1 && month == 1)
{
// The minimum supported Julian date is 0001/01/03.
if (day < 3) {
throw new ArgumentOutOfRangeException(null,
Environment.GetResourceString("ArgumentOutOfRange_BadYearMonthDay"));
}
}
bool isLeapYear = (year % 4) == 0;
int[] days = isLeapYear ? DaysToMonth366 : DaysToMonth365;
int monthDays = days[month] - days[month - 1];
if (day < 1 || day > monthDays) {
throw new ArgumentOutOfRangeException(
"day",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
1,
monthDays));
}
}
// Returns a given date part of this DateTime. This method is used
// to compute the year, day-of-year, month, or day part.
static internal int GetDatePart(long ticks, int part)
{
// Gregorian 1/1/0001 is Julian 1/3/0001. Remember that DateTime(0) refers to Gregorian 1/1/0001.
// The following line converts Gregorian ticks to Julian ticks.
long julianTicks = ticks + TicksPerDay * 2;
// n = number of days since 1/1/0001
int n = (int)(julianTicks / TicksPerDay);
// y4 = number of whole 4-year periods within 100-year period
int y4 = n / JulianDaysPer4Years;
// n = day number within 4-year period
n -= y4 * JulianDaysPer4Years;
// y1 = number of whole years within 4-year period
int y1 = n / JulianDaysPerYear;
// Last year has an extra day, so decrement result if 4
if (y1 == 4) y1 = 3;
// If year was requested, compute and return it
if (part == DatePartYear)
{
return (y4 * 4 + y1 + 1);
}
// n = day number within year
n -= y1 * JulianDaysPerYear;
// If day-of-year was requested, return it
if (part == DatePartDayOfYear)
{
return (n + 1);
}
// Leap year calculation looks different from IsLeapYear since y1 and y4
// are relative to year 1, not year 0
bool leapYear = (y1 == 3);
int[] days = leapYear? DaysToMonth366: DaysToMonth365;
// All months have less than 32 days, so n >> 5 is a good conservative
// estimate for the month
int m = (n >> 5) + 1;
// m = 1-based month number
while (n >= days[m]) m++;
// If month was requested, return it
if (part == DatePartMonth) return (m);
// Return 1-based day-of-month
return (n - days[m - 1] + 1);
}
// Returns the tick count corresponding to the given year, month, and day.
static internal long DateToTicks(int year, int month, int day)
{
int[] days = (year % 4 == 0)? DaysToMonth366: DaysToMonth365;
int y = year - 1;
int n = y * 365 + y / 4 + days[month - 1] + day - 1;
// Gregorian 1/1/0001 is Julian 1/3/0001. n * TicksPerDay is the ticks in JulianCalendar.
// Therefore, we subtract two days in the following to convert the ticks in JulianCalendar
// to ticks in Gregorian calendar.
return ((n - 2) * TicksPerDay);
}
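// Worked example (illustrative note, not part of the original source):
// Gregorian 0001/01/01 has ticks == 0, so julianTicks == 2 * TicksPerDay and n == 2 in GetDatePart,
// which yields year 1, day-of-year 3, month 1, day 3 -- i.e. Julian 0001/01/03.
// Conversely, DateToTicks(1, 1, 3) computes n == 0 * 365 + 0 + 0 + 3 - 1 == 2 and returns
// (2 - 2) * TicksPerDay == 0, recovering Gregorian 0001/01/01 exactly.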
public override DateTime AddMonths(DateTime time, int months)
{
if (months < -120000 || months > 120000) {
throw new ArgumentOutOfRangeException(
"months",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
-120000,
120000));
}
Contract.EndContractBlock();
int y = GetDatePart(time.Ticks, DatePartYear);
int m = GetDatePart(time.Ticks, DatePartMonth);
int d = GetDatePart(time.Ticks, DatePartDay);
int i = m - 1 + months;
if (i >= 0) {
m = i % 12 + 1;
y = y + i / 12;
}
else {
m = 12 + (i + 1) % 12;
y = y + (i - 11) / 12;
}
int[] daysArray = (y % 4 == 0 && (y % 100 != 0 || y % 400 == 0)) ? DaysToMonth366: DaysToMonth365;
int days = (daysArray[m] - daysArray[m - 1]);
if (d > days) {
d = days;
}
long ticks = DateToTicks(y, m, d) + time.Ticks % TicksPerDay;
Calendar.CheckAddResult(ticks, MinSupportedDateTime, MaxSupportedDateTime);
return (new DateTime(ticks));
}
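// Illustrative note (an assumption, not from the original source): because of the clamp above,
// adding one month to Julian 0005/01/31 lands on 0005/02/28, while in a Julian leap year such as
// year 4 the same operation lands on 02/29.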
public override DateTime AddYears(DateTime time, int years) {
return (AddMonths(time, years * 12));
}
public override int GetDayOfMonth(DateTime time) {
return (GetDatePart(time.Ticks, DatePartDay));
}
public override DayOfWeek GetDayOfWeek(DateTime time) {
return ((DayOfWeek)((int)(time.Ticks / TicksPerDay + 1) % 7));
}
public override int GetDayOfYear(DateTime time) {
return (GetDatePart(time.Ticks, DatePartDayOfYear));
}
public override int GetDaysInMonth(int year, int month, int era) {
CheckYearEraRange(year, era);
CheckMonthRange(month);
int[] days = (year % 4 == 0) ? DaysToMonth366: DaysToMonth365;
return (days[month] - days[month - 1]);
}
public override int GetDaysInYear(int year, int era) {
// Year/Era range is done in IsLeapYear().
return (IsLeapYear(year, era) ? 366:365);
}
public override int GetEra(DateTime time)
{
return (JulianEra);
}
public override int GetMonth(DateTime time)
{
return (GetDatePart(time.Ticks, DatePartMonth));
}
public override int[] Eras {
get {
return (new int[] {JulianEra});
}
}
public override int GetMonthsInYear(int year, int era)
{
CheckYearEraRange(year, era);
return (12);
}
public override int GetYear(DateTime time)
{
return (GetDatePart(time.Ticks, DatePartYear));
}
public override bool IsLeapDay(int year, int month, int day, int era)
{
CheckMonthRange(month);
// Year/Era range check is done in IsLeapYear().
if (IsLeapYear(year, era)) {
CheckDayRange(year, month, day);
return (month == 2 && day == 29);
}
CheckDayRange(year, month, day);
return (false);
}
// Returns the leap month in a calendar year of the specified era. This method returns 0
// if this calendar does not have leap month, or this year is not a leap year.
//
[System.Runtime.InteropServices.ComVisible(false)]
public override int GetLeapMonth(int year, int era)
{
CheckYearEraRange(year, era);
return (0);
}
public override bool IsLeapMonth(int year, int month, int era)
{
CheckYearEraRange(year, era);
CheckMonthRange(month);
return (false);
}
// Checks whether a given year in the specified era is a leap year. This method returns true if
// year is a leap year, or false if not.
//
public override bool IsLeapYear(int year, int era)
{
CheckYearEraRange(year, era);
return (year % 4 == 0);
}
public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
{
CheckYearEraRange(year, era);
CheckMonthRange(month);
CheckDayRange(year, month, day);
if (millisecond < 0 || millisecond >= MillisPerSecond) {
throw new ArgumentOutOfRangeException(
"millisecond",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
0,
MillisPerSecond - 1));
}
if (hour >= 0 && hour < 24 && minute >= 0 && minute < 60 && second >=0 && second < 60)
{
return new DateTime(DateToTicks(year, month, day) + (new TimeSpan(0, hour, minute, second, millisecond)).Ticks);
} else
{
throw new ArgumentOutOfRangeException(null, Environment.GetResourceString("ArgumentOutOfRange_BadHourMinuteSecond"));
}
}
public override int TwoDigitYearMax {
get {
return (twoDigitYearMax);
}
set {
VerifyWritable();
if (value < 99 || value > MaxYear)
{
throw new ArgumentOutOfRangeException(
"year",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
99,
MaxYear));
}
twoDigitYearMax = value;
}
}
public override int ToFourDigitYear(int year) {
if (year < 0) {
throw new ArgumentOutOfRangeException("year",
Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
}
Contract.EndContractBlock();
if (year > MaxYear) {
throw new ArgumentOutOfRangeException(
"year",
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Bounds_Lower_Upper"),
1,
MaxYear));
}
return (base.ToFourDigitYear(year));
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using System;
using System.Collections.Generic;
using System.Reflection;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Services.Connectors
{
public class GridServicesConnector : IGridService
{
private static readonly ILog m_log =
LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
private string m_ServerURI = String.Empty;
public GridServicesConnector()
{
}
public GridServicesConnector(string serverURI)
{
m_ServerURI = serverURI.TrimEnd('/');
}
public GridServicesConnector(IConfigSource source)
{
Initialise(source);
}
public virtual void Initialise(IConfigSource source)
{
IConfig gridConfig = source.Configs["GridService"];
if (gridConfig == null)
{
m_log.Error("[GRID CONNECTOR]: GridService missing from OpenSim.ini");
throw new Exception("Grid connector init error");
}
string serviceURI = gridConfig.GetString("GridServerURI",
String.Empty);
if (serviceURI == String.Empty)
{
m_log.Error("[GRID CONNECTOR]: No Server URI named in section GridService");
throw new Exception("Grid connector init error");
}
m_ServerURI = serviceURI;
}
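// Example configuration (illustrative; the section and key names match the lookups above,
// the URI value itself is hypothetical):
//
// [GridService]
// GridServerURI = "http://grid.example.com:8003"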
#region IGridService
public bool DeregisterRegion(UUID regionID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["REGIONID"] = regionID.ToString();
sendData["METHOD"] = "deregister";
string uri = m_ServerURI + "/grid";
try
{
string reply
= SynchronousRestFormsRequester.MakeRequest("POST", uri, ServerUtils.BuildQueryString(sendData));
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if ((replyData["Result"] != null) && (replyData["Result"].ToString().ToLower() == "success"))
return true;
}
else
m_log.DebugFormat("[GRID CONNECTOR]: DeregisterRegion received null reply");
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
}
return false;
}
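// Illustrative wire format (an assumption inferred from the request building above, not a
// definitive protocol description): the POST body is a form-encoded query string such as
// REGIONID=<uuid>&METHOD=deregister, and ServerUtils.ParseXmlResponse is expected to turn a
// success reply into a dictionary containing a "Result" entry equal to "Success".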
public List<GridRegion> GetDefaultHypergridRegions(UUID scopeID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["METHOD"] = "get_default_hypergrid_regions";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
//m_log.DebugFormat("[GRID CONNECTOR]: reply was {0}", reply);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetDefaultHypergridRegions {0} received null response",
scopeID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetDefaultHypergridRegions received null reply");
return rinfos;
}
public List<GridRegion> GetDefaultRegions(UUID scopeID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["METHOD"] = "get_default_regions";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
//m_log.DebugFormat("[GRID CONNECTOR]: reply was {0}", reply);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetDefaultRegions {0} received null response",
scopeID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetDefaultRegions received null reply");
return rinfos;
}
public List<GridRegion> GetFallbackRegions(UUID scopeID, int x, int y)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["X"] = x.ToString();
sendData["Y"] = y.ToString();
sendData["METHOD"] = "get_fallback_regions";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
//m_log.DebugFormat("[GRID CONNECTOR]: reply was {0}", reply);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetFallbackRegions {0}, {1}-{2} received null response",
scopeID, x, y);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetFallbackRegions received null reply");
return rinfos;
}
public List<GridRegion> GetHyperlinks(UUID scopeID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["METHOD"] = "get_hyperlinks";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
//m_log.DebugFormat("[GRID CONNECTOR]: reply was {0}", reply);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetHyperlinks {0} received null response",
scopeID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetHyperlinks received null reply");
return rinfos;
}
public List<GridRegion> GetNeighbours(UUID scopeID, UUID regionID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["REGIONID"] = regionID.ToString();
sendData["METHOD"] = "get_neighbours";
List<GridRegion> rinfos = new List<GridRegion>();
string reqString = ServerUtils.BuildQueryString(sendData);
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST", uri, reqString);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
//m_log.DebugFormat("[GRID CONNECTOR]: get neighbours returned {0} elements", rinfosList.Count);
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetNeighbours {0}, {1} received null response",
scopeID, regionID);
return rinfos;
}
public GridRegion GetRegionByName(UUID scopeID, string regionName)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["NAME"] = regionName;
sendData["METHOD"] = "get_region_by_name";
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return null;
}
GridRegion rinfo = null;
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if ((replyData != null) && (replyData["result"] != null))
{
if (replyData["result"] is Dictionary<string, object>)
rinfo = new GridRegion((Dictionary<string, object>)replyData["result"]);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByPosition {0}, {1} received null response",
scopeID, regionName);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByName received null reply");
return rinfo;
}
public GridRegion GetRegionByPosition(UUID scopeID, int x, int y)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["X"] = x.ToString();
sendData["Y"] = y.ToString();
sendData["METHOD"] = "get_region_by_position";
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return null;
}
GridRegion rinfo = null;
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if ((replyData != null) && (replyData["result"] != null))
{
if (replyData["result"] is Dictionary<string, object>)
rinfo = new GridRegion((Dictionary<string, object>)replyData["result"]);
//else
// m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByPosition {0}, {1}-{2} received no region",
// scopeID, x, y);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByPosition {0}, {1}-{2} received null response",
scopeID, x, y);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByPosition received null reply");
return rinfo;
}
public GridRegion GetRegionByUUID(UUID scopeID, UUID regionID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["REGIONID"] = regionID.ToString();
sendData["METHOD"] = "get_region_by_uuid";
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST", uri, ServerUtils.BuildQueryString(sendData));
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return null;
}
GridRegion rinfo = null;
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if ((replyData != null) && (replyData["result"] != null))
{
if (replyData["result"] is Dictionary<string, object>)
rinfo = new GridRegion((Dictionary<string, object>)replyData["result"]);
//else
// m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByUUID {0}, {1} received null response",
// scopeID, regionID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByUUID {0}, {1} received null response",
scopeID, regionID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionByUUID received null reply");
return rinfo;
}
public int GetRegionFlags(UUID scopeID, UUID regionID)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["REGIONID"] = regionID.ToString();
sendData["METHOD"] = "get_region_flags";
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return -1;
}
int flags = -1;
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if ((replyData != null) && replyData.ContainsKey("result") && (replyData["result"] != null))
{
Int32.TryParse((string)replyData["result"], out flags);
//else
// m_log.DebugFormat("[GRID CONNECTOR]: GetRegionFlags {0}, {1} received wrong type {2}",
// scopeID, regionID, replyData["result"].GetType());
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionFlags {0}, {1} received null response",
scopeID, regionID);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionFlags received null reply");
return flags;
}
public List<GridRegion> GetRegionRange(UUID scopeID, int xmin, int xmax, int ymin, int ymax)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["XMIN"] = xmin.ToString();
sendData["XMAX"] = xmax.ToString();
sendData["YMIN"] = ymin.ToString();
sendData["YMAX"] = ymax.ToString();
sendData["METHOD"] = "get_region_range";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
//m_log.DebugFormat("[GRID CONNECTOR]: reply was {0}", reply);
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionRange {0}, {1}-{2} {3}-{4} received null response",
scopeID, xmin, xmax, ymin, ymax);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionRange received null reply");
return rinfos;
}
public List<GridRegion> GetRegionsByName(UUID scopeID, string name, int maxNumber)
{
Dictionary<string, object> sendData = new Dictionary<string, object>();
sendData["SCOPEID"] = scopeID.ToString();
sendData["NAME"] = name;
sendData["MAX"] = maxNumber.ToString();
sendData["METHOD"] = "get_regions_by_name";
List<GridRegion> rinfos = new List<GridRegion>();
string reply = string.Empty;
string uri = m_ServerURI + "/grid";
try
{
reply = SynchronousRestFormsRequester.MakeRequest("POST",
uri,
ServerUtils.BuildQueryString(sendData));
}
catch (Exception e)
{
m_log.DebugFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
return rinfos;
}
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData != null)
{
Dictionary<string, object>.ValueCollection rinfosList = replyData.Values;
foreach (object r in rinfosList)
{
if (r is Dictionary<string, object>)
{
GridRegion rinfo = new GridRegion((Dictionary<string, object>)r);
rinfos.Add(rinfo);
}
}
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionsByName {0}, {1}, {2} received null response",
scopeID, name, maxNumber);
}
else
m_log.DebugFormat("[GRID CONNECTOR]: GetRegionsByName received null reply");
return rinfos;
}
public string RegisterRegion(UUID scopeID, GridRegion regionInfo)
{
Dictionary<string, object> rinfo = regionInfo.ToKeyValuePairs();
Dictionary<string, object> sendData = new Dictionary<string, object>();
foreach (KeyValuePair<string, object> kvp in rinfo)
sendData[kvp.Key] = (string)kvp.Value;
sendData["SCOPEID"] = scopeID.ToString();
sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
sendData["METHOD"] = "register";
string reqString = ServerUtils.BuildQueryString(sendData);
string uri = m_ServerURI + "/grid";
// m_log.DebugFormat("[GRID CONNECTOR]: queryString = {0}", reqString);
try
{
string reply = SynchronousRestFormsRequester.MakeRequest("POST", uri, reqString);
if (reply != string.Empty)
{
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
if (replyData.ContainsKey("Result") && (replyData["Result"].ToString().ToLower() == "success"))
{
return String.Empty;
}
else if (replyData.ContainsKey("Result") && (replyData["Result"].ToString().ToLower() == "failure"))
{
m_log.ErrorFormat(
"[GRID CONNECTOR]: Registration failed: {0} when contacting {1}", replyData["Message"], uri);
return replyData["Message"].ToString();
}
else if (!replyData.ContainsKey("Result"))
{
m_log.ErrorFormat(
"[GRID CONNECTOR]: reply data does not contain result field when contacting {0}", uri);
}
else
{
m_log.ErrorFormat(
"[GRID CONNECTOR]: unexpected result {0} when contacting {1}", replyData["Result"], uri);
return "Unexpected result " + replyData["Result"].ToString();
}
}
else
{
m_log.ErrorFormat(
"[GRID CONNECTOR]: RegisterRegion received null reply when contacting grid server at {0}", uri);
}
}
catch (Exception e)
{
m_log.ErrorFormat("[GRID CONNECTOR]: Exception when contacting grid server at {0}: {1}", uri, e.Message);
}
return string.Format("Error communicating with the grid service at {0}", uri);
}
#endregion IGridService
}
}
| |
using UnityEngine;
using System.Collections;
public class FourierCPU
{
int m_size;
float m_fsize;
int m_passes;
float[] m_butterflyLookupTable = null;
public FourierCPU(int size)
{
if(!Mathf.IsPowerOfTwo(size))
{
Debug.Log("FourierCPU::FourierCPU - fourier grid size must be pow2 number, changing to nearest pow2 number");
size = Mathf.NextPowerOfTwo(size);
}
m_size = size; //must be pow2 num
m_fsize = (float)m_size;
m_passes = (int)(Mathf.Log(m_fsize)/Mathf.Log(2.0f));
ComputeButterflyLookupTable();
}
int BitReverse(int i)
{
int j = i;
int Sum = 0;
int W = 1;
int M = m_size / 2;
while(M != 0)
{
j = ((i&M) > M-1) ? 1 : 0;
Sum += j * W;
W *= 2;
M /= 2;
}
return Sum;
}
void ComputeButterflyLookupTable()
{
m_butterflyLookupTable = new float[m_size * m_passes * 4];
for(int i = 0; i < m_passes; i++)
{
int nBlocks = (int) Mathf.Pow(2, m_passes - 1 - i);
int nHInputs = (int) Mathf.Pow(2, i);
for (int j = 0; j < nBlocks; j++)
{
for (int k = 0; k < nHInputs; k++)
{
int i1, i2, j1, j2;
if (i == 0)
{
i1 = j * nHInputs * 2 + k;
i2 = j * nHInputs * 2 + nHInputs + k;
j1 = BitReverse(i1);
j2 = BitReverse(i2);
}
else
{
i1 = j * nHInputs * 2 + k;
i2 = j * nHInputs * 2 + nHInputs + k;
j1 = i1;
j2 = i2;
}
float wr = Mathf.Cos(2.0f * Mathf.PI * (float)(k*nBlocks) / m_fsize);
float wi = Mathf.Sin(2.0f * Mathf.PI * (float)(k*nBlocks) / m_fsize);
int offset1 = 4 * (i1 + i * m_size);
m_butterflyLookupTable[offset1 + 0] = j1;
m_butterflyLookupTable[offset1 + 1] = j2;
m_butterflyLookupTable[offset1 + 2] = wr;
m_butterflyLookupTable[offset1 + 3] = wi;
int offset2 = 4 * (i2 + i * m_size);
m_butterflyLookupTable[offset2 + 0] = j1;
m_butterflyLookupTable[offset2 + 1] = j2;
m_butterflyLookupTable[offset2 + 2] = -wr;
m_butterflyLookupTable[offset2 + 3] = -wi;
}
}
}
}
//Performs two FFTs on two complex numbers packed in a vector4
Vector4 FFT(Vector2 w, Vector4 input1, Vector4 input2)
{
input1.x += w.x * input2.x - w.y * input2.y;
input1.y += w.y * input2.x + w.x * input2.y;
input1.z += w.x * input2.z - w.y * input2.w;
input1.w += w.y * input2.z + w.x * input2.w;
return input1;
}
//Performs one FFT on a complex number
Vector2 FFT(Vector2 w, Vector2 input1, Vector2 input2)
{
input1.x += w.x * input2.x - w.y * input2.y;
input1.y += w.y * input2.x + w.x * input2.y;
return input1;
}
public int PeformFFT(int startIdx, Vector2[,] data0, Vector4[,] data1, Vector4[,] data2)
{
int x; int y; int i;
int idx = 0; int idx1; int bftIdx;
int X; int Y;
Vector2 w;
int j = startIdx;
for (i = 0; i < m_passes; i++, j++)
{
idx = j%2;
idx1 = (j+1)%2;
for(x = 0; x < m_size; x++)
{
for(y = 0; y < m_size; y++)
{
bftIdx = 4*(x+i*m_size);
X = (int)m_butterflyLookupTable[bftIdx + 0];
Y = (int)m_butterflyLookupTable[bftIdx + 1];
w.x = m_butterflyLookupTable[bftIdx + 2];
w.y = m_butterflyLookupTable[bftIdx + 3];
data0[idx, x+y*m_size] = FFT(w, data0[idx1, X+y*m_size], data0[idx1, Y+y*m_size]);
data1[idx, x+y*m_size] = FFT(w, data1[idx1, X+y*m_size], data1[idx1, Y+y*m_size]);
data2[idx, x+y*m_size] = FFT(w, data2[idx1, X+y*m_size], data2[idx1, Y+y*m_size]);
}
}
}
for (i = 0; i < m_passes; i++, j++)
{
idx = j%2;
idx1 = (j+1)%2;
for(x = 0; x < m_size; x++)
{
for(y = 0; y < m_size; y++)
{
bftIdx = 4*(y+i*m_size);
X = (int)m_butterflyLookupTable[bftIdx + 0];
Y = (int)m_butterflyLookupTable[bftIdx + 1];
w.x = m_butterflyLookupTable[bftIdx + 2];
w.y = m_butterflyLookupTable[bftIdx + 3];
data0[idx, x+y*m_size] = FFT(w, data0[idx1, x+X*m_size], data0[idx1, x+Y*m_size]);
data1[idx, x+y*m_size] = FFT(w, data1[idx1, x+X*m_size], data1[idx1, x+Y*m_size]);
data2[idx, x+y*m_size] = FFT(w, data2[idx1, x+X*m_size], data2[idx1, x+Y*m_size]);
}
}
}
return idx;
}
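// Example (assumed usage, not from the original source): PeformFFT ping-pongs between the two
// slices of each array's first dimension. On the first pass it reads from index (startIdx + 1) % 2
// and writes to index startIdx % 2, so the input data should sit in the read slice; the return
// value is the slice index that holds the final result.
//
// var fft = new FourierCPU(64);
// var heights = new Vector2[2, 64 * 64];       // put the input spectrum in heights[1, *] for startIdx == 0
// var slopes = new Vector4[2, 64 * 64];
// var displacements = new Vector4[2, 64 * 64];
// int result = fft.PeformFFT(0, heights, slopes, displacements);
// // read the transformed values from heights[result, i], slopes[result, i], displacements[result, i]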
public int PeformFFT_X(int startIdx, Vector2[,] data0, Vector4[,] data1, Vector4[,] data2)
{
int x; int y; int i;
int idx = 0; int idx1; int bftIdx;
int X; int Y;
Vector2 w;
int j = startIdx;
for (i = 0; i < m_passes; i++, j++)
{
idx = j%2;
idx1 = (j+1)%2;
for(x = 0; x < m_size; x++)
{
for(y = 0; y < m_size; y++)
{
bftIdx = 4*(x+i*m_size);
X = (int)m_butterflyLookupTable[bftIdx + 0];
Y = (int)m_butterflyLookupTable[bftIdx + 1];
w.x = m_butterflyLookupTable[bftIdx + 2];
w.y = m_butterflyLookupTable[bftIdx + 3];
data0[idx, x+y*m_size] = FFT(w, data0[idx1, X+y*m_size], data0[idx1, Y+y*m_size]);
data1[idx, x+y*m_size] = FFT(w, data1[idx1, X+y*m_size], data1[idx1, Y+y*m_size]);
data2[idx, x+y*m_size] = FFT(w, data2[idx1, X+y*m_size], data2[idx1, Y+y*m_size]);
}
}
}
return idx;
}
public int PeformFFT_Y(int startIdx, Vector2[,] data0, Vector4[,] data1, Vector4[,] data2)
{
int x; int y; int i;
int idx = 0; int idx1; int bftIdx;
int X; int Y;
Vector2 w;
int j = startIdx+1;
for (i = 0; i < m_passes; i++, j++)
{
idx = j%2;
idx1 = (j+1)%2;
for(x = 0; x < m_size; x++)
{
for(y = 0; y < m_size; y++)
{
bftIdx = 4*(y+i*m_size);
X = (int)m_butterflyLookupTable[bftIdx + 0];
Y = (int)m_butterflyLookupTable[bftIdx + 1];
w.x = m_butterflyLookupTable[bftIdx + 2];
w.y = m_butterflyLookupTable[bftIdx + 3];
data0[idx, x+y*m_size] = FFT(w, data0[idx1, x+X*m_size], data0[idx1, x+Y*m_size]);
data1[idx, x+y*m_size] = FFT(w, data1[idx1, x+X*m_size], data1[idx1, x+Y*m_size]);
data2[idx, x+y*m_size] = FFT(w, data2[idx1, x+X*m_size], data2[idx1, x+Y*m_size]);
}
}
}
return idx;
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
using System;
using NPOI.HWPF.SPRM;
using System.Collections.Generic;
namespace NPOI.HWPF.UserModel
{
public class TableRow
: Paragraph
{
private static char TABLE_CELL_MARK = '\u0007';
private static short SPRM_TJC = 0x5400;
private static short SPRM_DXAGAPHALF = unchecked((short)0x9602);
private static short SPRM_FCANTSPLIT = 0x3403;
private static short SPRM_FTABLEHEADER = 0x3404;
private static short SPRM_DYAROWHEIGHT = unchecked((short)0x9407);
int _levelNum;
private TableProperties _tprops;
private TableCell[] _cells;
public TableRow(int startIdx, int endIdx, Table parent, int levelNum)
: base(startIdx, endIdx, parent)
{
Paragraph last = GetParagraph(NumParagraphs - 1);
_papx = last._papx;
_tprops = TableSprmUncompressor.UncompressTAP(_papx);
_levelNum = levelNum;
initCells();
}
private void initCells()
{
if ( _cellsFound )
return;
short expectedCellsCount = _tprops.GetItcMac();
int lastCellStart = 0;
List<TableCell> cells = new List<TableCell>(
expectedCellsCount + 1 );
for ( int p = 0; p < NumParagraphs; p++ )
{
Paragraph paragraph = GetParagraph( p );
String s = paragraph.Text;
if ( ( ( s.Length > 0 && s[s.Length - 1]== TABLE_CELL_MARK ) || paragraph
.IsEmbeddedCellMark() )
&& paragraph.GetTableLevel() == _levelNum )
{
TableCellDescriptor tableCellDescriptor = _tprops.GetRgtc() != null
&& _tprops.GetRgtc().Length > cells.Count ? _tprops
.GetRgtc()[cells.Count] : new TableCellDescriptor();
short leftEdge = (_tprops.GetRgdxaCenter() != null
&& _tprops.GetRgdxaCenter().Length > cells.Count) ? (short)_tprops
.GetRgdxaCenter()[cells.Count] : (short)0;
short rightEdge = (_tprops.GetRgdxaCenter() != null
&& _tprops.GetRgdxaCenter().Length > cells.Count + 1) ? (short)_tprops
.GetRgdxaCenter()[cells.Count + 1] : (short)0;
TableCell tableCell = new TableCell( GetParagraph(
lastCellStart ).StartOffset, GetParagraph( p )
.EndOffset, this, _levelNum, tableCellDescriptor,
leftEdge, rightEdge - leftEdge );
cells.Add( tableCell );
lastCellStart = p + 1;
}
}
if ( lastCellStart < ( NumParagraphs - 1 ) )
{
TableCellDescriptor tableCellDescriptor = _tprops.GetRgtc() != null
&& _tprops.GetRgtc().Length > cells.Count ? _tprops
.GetRgtc()[cells.Count] : new TableCellDescriptor();
short leftEdge = _tprops.GetRgdxaCenter() != null
&& _tprops.GetRgdxaCenter().Length > cells.Count ? (short)_tprops
.GetRgdxaCenter()[cells.Count] : (short)0;
short rightEdge = _tprops.GetRgdxaCenter() != null
&& _tprops.GetRgdxaCenter().Length > cells.Count + 1 ? (short)_tprops
.GetRgdxaCenter()[cells.Count + 1] : (short)0;
TableCell tableCell = new TableCell( lastCellStart,
( NumParagraphs - 1 ), this, _levelNum,
tableCellDescriptor, leftEdge, rightEdge - leftEdge );
cells.Add( tableCell );
}
if ( cells.Count>0 )
{
TableCell lastCell = cells[cells.Count - 1];
if ( lastCell.NumParagraphs == 1
&& ( lastCell.GetParagraph( 0 ).IsTableRowEnd() ) )
{
// remove "fake" cell
cells.RemoveAt( cells.Count - 1 );
}
}
if ( cells.Count != expectedCellsCount )
{
_tprops.SetItcMac( (short) cells.Count);
}
_cells = cells.ToArray();
_cellsFound = true;
}
private bool _cellsFound = false;
protected void Reset()
{
_cellsFound = false;
}
public int GetRowJustification()
{
return _tprops.GetJc();
}
public void SetRowJustification(int jc)
{
_tprops.SetJc(jc);
_papx.UpdateSprm(SPRM_TJC, (short)jc);
}
public int GetGapHalf()
{
return _tprops.GetDxaGapHalf();
}
public void SetGapHalf(int dxaGapHalf)
{
_tprops.SetDxaGapHalf(dxaGapHalf);
_papx.UpdateSprm(SPRM_DXAGAPHALF, (short)dxaGapHalf);
}
public int GetRowHeight()
{
return _tprops.GetDyaRowHeight();
}
public void SetRowHeight(int dyaRowHeight)
{
_tprops.SetDyaRowHeight(dyaRowHeight);
_papx.UpdateSprm(SPRM_DYAROWHEIGHT, (short)dyaRowHeight);
}
public bool cantSplit()
{
return _tprops.GetFCantSplit();
}
public void SetCantSplit(bool cantSplit)
{
_tprops.SetFCantSplit(cantSplit);
_papx.UpdateSprm(SPRM_FCANTSPLIT, (byte)(cantSplit ? 1 : 0));
}
public bool isTableHeader()
{
return _tprops.GetFTableHeader();
}
public void SetTableHeader(bool tableHeader)
{
_tprops.SetFTableHeader(tableHeader);
_papx.UpdateSprm(SPRM_FTABLEHEADER, (byte)(tableHeader ? 1 : 0));
}
public int NumCells()
{
initCells();
return _cells.Length;
}
public TableCell GetCell(int index)
{
initCells();
return _cells[index];
}
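// Example (assumed usage, not from the original source): iterate the cells of a row obtained
// from a Table; Table.GetRow and TableCell.Text are assumed helpers on the surrounding API.
//
// TableRow row = table.GetRow(0);
// for (int i = 0; i < row.NumCells(); i++)
// {
//     TableCell cell = row.GetCell(i);
//     Console.WriteLine(cell.Text);
// }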
public override BorderCode GetTopBorder()
{
return _tprops.GetBrcTop();
}
public override BorderCode GetBottomBorder()
{
return _tprops.GetBrcBottom();
}
public override BorderCode GetLeftBorder()
{
return _tprops.GetBrcLeft();
}
public override BorderCode GetRightBorder()
{
return _tprops.GetBrcRight();
}
public BorderCode GetHorizontalBorder()
{
return _tprops.GetBrcHorizontal();
}
public BorderCode GetVerticalBorder()
{
return _tprops.GetBrcVertical();
}
public override BorderCode GetBarBorder()
{
throw new NotImplementedException("not applicable for TableRow");
}
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using NodaTime;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Logging;
using QuantConnect.Securities;
using QuantConnect.Securities.Option;
namespace QuantConnect.Lean.Engine.DataFeeds
{
/// <summary>
/// Represents a grouping of data emitted at a certain time.
/// </summary>
public class TimeSlice
{
/// <summary>
/// Gets the count of data points in this <see cref="TimeSlice"/>
/// </summary>
public int DataPointCount { get; private set; }
/// <summary>
/// Gets the time this data was emitted
/// </summary>
public DateTime Time { get; private set; }
/// <summary>
/// Gets the data in the time slice
/// </summary>
public List<DataFeedPacket> Data { get; private set; }
/// <summary>
/// Gets the <see cref="Slice"/> that will be used as input for the algorithm
/// </summary>
public Slice Slice { get; private set; }
/// <summary>
/// Gets the data used to update securities
/// </summary>
public List<UpdateData<Security>> SecuritiesUpdateData { get; private set; }
/// <summary>
/// Gets the data used to update the consolidators
/// </summary>
public List<UpdateData<SubscriptionDataConfig>> ConsolidatorUpdateData { get; private set; }
/// <summary>
/// Gets all the custom data in this <see cref="TimeSlice"/>
/// </summary>
public List<UpdateData<Security>> CustomData { get; private set; }
/// <summary>
/// Gets the changes to the data subscriptions as a result of universe selection
/// </summary>
public SecurityChanges SecurityChanges { get; private set; }
/// <summary>
/// Gets the universe data generated this time step.
/// </summary>
public Dictionary<Universe, BaseDataCollection> UniverseData { get; private set; }
/// <summary>
/// Initializes a new <see cref="TimeSlice"/> containing the specified data
/// </summary>
public TimeSlice(DateTime time,
int dataPointCount,
Slice slice,
List<DataFeedPacket> data,
List<UpdateData<Security>> securitiesUpdateData,
List<UpdateData<SubscriptionDataConfig>> consolidatorUpdateData,
List<UpdateData<Security>> customData,
SecurityChanges securityChanges,
Dictionary<Universe, BaseDataCollection> universeData)
{
Time = time;
Data = data;
Slice = slice;
CustomData = customData;
DataPointCount = dataPointCount;
SecuritiesUpdateData = securitiesUpdateData;
ConsolidatorUpdateData = consolidatorUpdateData;
SecurityChanges = securityChanges;
UniverseData = universeData;
}
/// <summary>
/// Creates a new <see cref="TimeSlice"/> for the specified time using the specified data
/// </summary>
/// <param name="utcDateTime">The UTC frontier date time</param>
/// <param name="algorithmTimeZone">The algorithm's time zone, required for computing algorithm and slice time</param>
/// <param name="cashBook">The algorithm's cash book, required for generating cash update pairs</param>
/// <param name="data">The data in this <see cref="TimeSlice"/></param>
/// <param name="changes">The new changes that are seen in this time slice as a result of universe selection</param>
/// <param name="universeData"></param>
/// <returns>A new <see cref="TimeSlice"/> containing the specified data</returns>
public static TimeSlice Create(DateTime utcDateTime,
DateTimeZone algorithmTimeZone,
CashBook cashBook,
List<DataFeedPacket> data,
SecurityChanges changes,
Dictionary<Universe, BaseDataCollection> universeData)
{
int count = 0;
var security = new List<UpdateData<Security>>();
var custom = new List<UpdateData<Security>>();
var consolidator = new List<UpdateData<SubscriptionDataConfig>>();
var allDataForAlgorithm = new List<BaseData>(data.Count);
var optionUnderlyingUpdates = new Dictionary<Symbol, BaseData>();
Split split;
Dividend dividend;
Delisting delisting;
SymbolChangedEvent symbolChange;
// we need to be able to reference the slice being created in order to define the
// evaluation of option price models, so we define a 'future' that can be referenced
// in the option price model evaluation delegates for each contract
Slice slice = null;
var sliceFuture = new Lazy<Slice>(() => slice);
var algorithmTime = utcDateTime.ConvertFromUtc(algorithmTimeZone);
var tradeBars = new TradeBars(algorithmTime);
var quoteBars = new QuoteBars(algorithmTime);
var ticks = new Ticks(algorithmTime);
var splits = new Splits(algorithmTime);
var dividends = new Dividends(algorithmTime);
var delistings = new Delistings(algorithmTime);
var optionChains = new OptionChains(algorithmTime);
var futuresChains = new FuturesChains(algorithmTime);
var symbolChanges = new SymbolChangedEvents(algorithmTime);
if (universeData.Count > 0)
{
// count universe data
foreach (var kvp in universeData)
{
count += kvp.Value.Data.Count;
}
}
// ensure we read equity data before option data, so we can set the current underlying price
foreach (var packet in data)
{
// filter out packets for removed subscriptions
if (packet.IsSubscriptionRemoved)
{
continue;
}
var list = packet.Data;
var symbol = packet.Security.Symbol;
if (list.Count == 0) continue;
// keep count of all data points
if (list.Count == 1 && list[0] is BaseDataCollection)
{
var baseDataCollectionCount = ((BaseDataCollection)list[0]).Data.Count;
if (baseDataCollectionCount == 0)
{
continue;
}
count += baseDataCollectionCount;
}
else
{
count += list.Count;
}
if (!packet.Configuration.IsInternalFeed && packet.Configuration.IsCustomData)
{
// This is all the custom data
custom.Add(new UpdateData<Security>(packet.Security, packet.Configuration.Type, list));
}
var securityUpdate = new List<BaseData>(list.Count);
var consolidatorUpdate = new List<BaseData>(list.Count);
for (int i = 0; i < list.Count; i++)
{
var baseData = list[i];
if (!packet.Configuration.IsInternalFeed)
{
// this is all the data that goes into the algorithm
allDataForAlgorithm.Add(baseData);
}
// don't add internal feed data to ticks/bars objects
if (baseData.DataType != MarketDataType.Auxiliary)
{
if (!packet.Configuration.IsInternalFeed)
{
PopulateDataDictionaries(baseData, ticks, tradeBars, quoteBars, optionChains, futuresChains);
// special handling of options data to build the option chain
if (packet.Security.Type == SecurityType.Option)
{
if (baseData.DataType == MarketDataType.OptionChain)
{
optionChains[baseData.Symbol] = (OptionChain) baseData;
}
else if (!HandleOptionData(algorithmTime, baseData, optionChains, packet.Security, sliceFuture, optionUnderlyingUpdates))
{
continue;
}
}
// special handling of futures data to build the futures chain
if (packet.Security.Type == SecurityType.Future)
{
if (baseData.DataType == MarketDataType.FuturesChain)
{
futuresChains[baseData.Symbol] = (FuturesChain)baseData;
}
else if (!HandleFuturesData(algorithmTime, baseData, futuresChains, packet.Security))
{
continue;
}
}
// this is data used to update consolidators
consolidatorUpdate.Add(baseData);
}
// this is the data used set market prices
// do not add it if it is a Suspicious tick
var tick = baseData as Tick;
if (tick != null && tick.Suspicious) continue;
securityUpdate.Add(baseData);
// option underlying security update
if (packet.Security.Symbol.SecurityType == SecurityType.Equity)
{
optionUnderlyingUpdates[packet.Security.Symbol] = baseData;
}
}
// include checks for various aux types so we don't have to construct the dictionaries in Slice
else if ((delisting = baseData as Delisting) != null)
{
delistings[symbol] = delisting;
}
else if ((dividend = baseData as Dividend) != null)
{
dividends[symbol] = dividend;
}
else if ((split = baseData as Split) != null)
{
splits[symbol] = split;
}
else if ((symbolChange = baseData as SymbolChangedEvent) != null)
{
// symbol changes is keyed by the requested symbol
symbolChanges[packet.Configuration.Symbol] = symbolChange;
}
}
if (securityUpdate.Count > 0)
{
security.Add(new UpdateData<Security>(packet.Security, packet.Configuration.Type, securityUpdate));
}
if (consolidatorUpdate.Count > 0)
{
consolidator.Add(new UpdateData<SubscriptionDataConfig>(packet.Configuration, packet.Configuration.Type, consolidatorUpdate));
}
}
slice = new Slice(algorithmTime, allDataForAlgorithm, tradeBars, quoteBars, ticks, optionChains, futuresChains, splits, dividends, delistings, symbolChanges, allDataForAlgorithm.Count > 0);
return new TimeSlice(utcDateTime, count, slice, data, security, consolidator, custom, changes, universeData);
}
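// Example (assumed usage, not from the original source; the variable names are hypothetical):
//
// var timeSlice = TimeSlice.Create(
//     frontierUtc,                                      // current UTC frontier time
//     TimeZones.NewYork,                                 // the algorithm's time zone
//     algorithm.Portfolio.CashBook,                      // cash book for currency updates
//     packets,                                           // List<DataFeedPacket> gathered this step
//     SecurityChanges.None,                              // no universe changes this step
//     new Dictionary<Universe, BaseDataCollection>());   // no universe data this step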
/// <summary>
/// Adds the specified <see cref="BaseData"/> instance to the appropriate <see cref="DataDictionary{T}"/>
/// </summary>
private static void PopulateDataDictionaries(BaseData baseData, Ticks ticks, TradeBars tradeBars, QuoteBars quoteBars, OptionChains optionChains, FuturesChains futuresChains)
{
var symbol = baseData.Symbol;
// populate data dictionaries
switch (baseData.DataType)
{
case MarketDataType.Tick:
ticks.Add(symbol, (Tick)baseData);
break;
case MarketDataType.TradeBar:
tradeBars[symbol] = (TradeBar) baseData;
break;
case MarketDataType.QuoteBar:
quoteBars[symbol] = (QuoteBar) baseData;
break;
case MarketDataType.OptionChain:
optionChains[symbol] = (OptionChain) baseData;
break;
case MarketDataType.FuturesChain:
futuresChains[symbol] = (FuturesChain)baseData;
break;
}
}
private static bool HandleOptionData(DateTime algorithmTime, BaseData baseData, OptionChains optionChains, Security security, Lazy<Slice> sliceFuture, IReadOnlyDictionary<Symbol, BaseData> optionUnderlyingUpdates)
{
var symbol = baseData.Symbol;
OptionChain chain;
var canonical = Symbol.CreateOption(symbol.Underlying, symbol.ID.Market, default(OptionStyle), default(OptionRight), 0, SecurityIdentifier.DefaultDate);
if (!optionChains.TryGetValue(canonical, out chain))
{
chain = new OptionChain(canonical, algorithmTime);
optionChains[canonical] = chain;
}
// set the underlying current data point in the option chain
var option = security as Option;
if (option != null)
{
if (option.Underlying == null)
{
Log.Error($"TimeSlice.HandleOptionData(): {algorithmTime}: Option underlying is null");
return false;
}
var underlyingData = option.Underlying.GetLastData();
BaseData underlyingUpdate;
if (optionUnderlyingUpdates.TryGetValue(option.Underlying.Symbol, out underlyingUpdate))
{
underlyingData = underlyingUpdate;
}
if (underlyingData == null)
{
Log.Error($"TimeSlice.HandleOptionData(): {algorithmTime}: Option underlying GetLastData returned null");
return false;
}
chain.Underlying = underlyingData;
}
var universeData = baseData as OptionChainUniverseDataCollection;
if (universeData != null)
{
if (universeData.Underlying != null)
{
foreach(var addedContract in chain.Contracts)
{
addedContract.Value.UnderlyingLastPrice = chain.Underlying.Price;
}
}
foreach (var contractSymbol in universeData.FilteredContracts)
{
chain.FilteredContracts.Add(contractSymbol);
}
return false;
}
OptionContract contract;
if (!chain.Contracts.TryGetValue(baseData.Symbol, out contract))
{
var underlyingSymbol = baseData.Symbol.Underlying;
contract = new OptionContract(baseData.Symbol, underlyingSymbol)
{
Time = baseData.EndTime,
LastPrice = security.Close,
Volume = (long)security.Volume,
BidPrice = security.BidPrice,
BidSize = (long)security.BidSize,
AskPrice = security.AskPrice,
AskSize = (long)security.AskSize,
OpenInterest = security.OpenInterest,
UnderlyingLastPrice = chain.Underlying.Price
};
chain.Contracts[baseData.Symbol] = contract;
if (option != null)
{
contract.SetOptionPriceModel(() => option.PriceModel.Evaluate(option, sliceFuture.Value, contract));
}
}
// populate ticks and tradebars dictionaries with no aux data
switch (baseData.DataType)
{
case MarketDataType.Tick:
var tick = (Tick)baseData;
chain.Ticks.Add(tick.Symbol, tick);
UpdateContract(contract, tick);
break;
case MarketDataType.TradeBar:
var tradeBar = (TradeBar)baseData;
chain.TradeBars[symbol] = tradeBar;
UpdateContract(contract, tradeBar);
break;
case MarketDataType.QuoteBar:
var quote = (QuoteBar)baseData;
chain.QuoteBars[symbol] = quote;
UpdateContract(contract, quote);
break;
case MarketDataType.Base:
chain.AddAuxData(baseData);
break;
}
return true;
}
private static bool HandleFuturesData(DateTime algorithmTime, BaseData baseData, FuturesChains futuresChains, Security security)
{
var symbol = baseData.Symbol;
FuturesChain chain;
var canonical = Symbol.Create(symbol.ID.Symbol, SecurityType.Future, symbol.ID.Market);
if (!futuresChains.TryGetValue(canonical, out chain))
{
chain = new FuturesChain(canonical, algorithmTime);
futuresChains[canonical] = chain;
}
var universeData = baseData as FuturesChainUniverseDataCollection;
if (universeData != null)
{
foreach (var contractSymbol in universeData.FilteredContracts)
{
chain.FilteredContracts.Add(contractSymbol);
}
return false;
}
FuturesContract contract;
if (!chain.Contracts.TryGetValue(baseData.Symbol, out contract))
{
var underlyingSymbol = baseData.Symbol.Underlying;
contract = new FuturesContract(baseData.Symbol, underlyingSymbol)
{
Time = baseData.EndTime,
LastPrice = security.Close,
Volume = (long)security.Volume,
BidPrice = security.BidPrice,
BidSize = (long)security.BidSize,
AskPrice = security.AskPrice,
AskSize = (long)security.AskSize,
OpenInterest = security.OpenInterest
};
chain.Contracts[baseData.Symbol] = contract;
}
// populate ticks and tradebars dictionaries with no aux data
switch (baseData.DataType)
{
case MarketDataType.Tick:
var tick = (Tick)baseData;
chain.Ticks.Add(tick.Symbol, tick);
UpdateContract(contract, tick);
break;
case MarketDataType.TradeBar:
var tradeBar = (TradeBar)baseData;
chain.TradeBars[symbol] = tradeBar;
UpdateContract(contract, tradeBar);
break;
case MarketDataType.QuoteBar:
var quote = (QuoteBar)baseData;
chain.QuoteBars[symbol] = quote;
UpdateContract(contract, quote);
break;
case MarketDataType.Base:
chain.AddAuxData(baseData);
break;
}
return true;
}
private static void UpdateContract(OptionContract contract, QuoteBar quote)
{
if (quote.Ask != null && quote.Ask.Close != 0m)
{
contract.AskPrice = quote.Ask.Close;
contract.AskSize = (long)quote.LastAskSize;
}
if (quote.Bid != null && quote.Bid.Close != 0m)
{
contract.BidPrice = quote.Bid.Close;
contract.BidSize = (long)quote.LastBidSize;
}
}
private static void UpdateContract(OptionContract contract, Tick tick)
{
if (tick.TickType == TickType.Trade)
{
contract.LastPrice = tick.Price;
}
else if (tick.TickType == TickType.Quote)
{
if (tick.AskPrice != 0m)
{
contract.AskPrice = tick.AskPrice;
contract.AskSize = (long)tick.AskSize;
}
if (tick.BidPrice != 0m)
{
contract.BidPrice = tick.BidPrice;
contract.BidSize = (long)tick.BidSize;
}
}
else if (tick.TickType == TickType.OpenInterest)
{
if (tick.Value != 0m)
{
contract.OpenInterest = tick.Value;
}
}
}
private static void UpdateContract(OptionContract contract, TradeBar tradeBar)
{
if (tradeBar.Close == 0m) return;
contract.LastPrice = tradeBar.Close;
contract.Volume = (long)tradeBar.Volume;
}
private static void UpdateContract(FuturesContract contract, QuoteBar quote)
{
if (quote.Ask != null && quote.Ask.Close != 0m)
{
contract.AskPrice = quote.Ask.Close;
contract.AskSize = (long)quote.LastAskSize;
}
if (quote.Bid != null && quote.Bid.Close != 0m)
{
contract.BidPrice = quote.Bid.Close;
contract.BidSize = (long)quote.LastBidSize;
}
}
private static void UpdateContract(FuturesContract contract, Tick tick)
{
if (tick.TickType == TickType.Trade)
{
contract.LastPrice = tick.Price;
}
else if (tick.TickType == TickType.Quote)
{
if (tick.AskPrice != 0m)
{
contract.AskPrice = tick.AskPrice;
contract.AskSize = (long)tick.AskSize;
}
if (tick.BidPrice != 0m)
{
contract.BidPrice = tick.BidPrice;
contract.BidSize = (long)tick.BidSize;
}
}
else if (tick.TickType == TickType.OpenInterest)
{
if (tick.Value != 0m)
{
contract.OpenInterest = tick.Value;
}
}
}
private static void UpdateContract(FuturesContract contract, TradeBar tradeBar)
{
if (tradeBar.Close == 0m) return;
contract.LastPrice = tradeBar.Close;
contract.Volume = (long)tradeBar.Volume;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Text;
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.Encryption.Rijndael.Tests
{
using Rijndael = System.Security.Cryptography.Rijndael;
/// <summary>
/// Since RijndaelImplementation (from Rijndael.Create()) and RijndaelManaged classes wrap Aes,
/// we only test minimally here.
/// </summary>
public class RijndaelTests
{
[Fact]
public static void VerifyDefaults()
{
using (var alg = Rijndael.Create())
{
// We use an internal class for the implementation, not the public RijndaelManaged
Assert.IsNotType<RijndaelManaged>(alg);
VerifyDefaults(alg);
}
using (var alg = new RijndaelManaged())
{
VerifyDefaults(alg);
}
}
private static void VerifyDefaults(Rijndael alg)
{
// The block size differs from the base
Assert.Equal(128, alg.LegalBlockSizes[0].MinSize);
Assert.Equal(128, alg.LegalBlockSizes[0].MaxSize);
Assert.Equal(128, alg.BlockSize);
// Different exception since we have different supported BlockSizes than desktop
Assert.Throws<PlatformNotSupportedException>(() => alg.BlockSize = 192);
Assert.Throws<PlatformNotSupportedException>(() => alg.BlockSize = 256);
// Normal exception for rest
Assert.Throws<CryptographicException>(() => alg.BlockSize = 111);
Assert.Equal(CipherMode.CBC, alg.Mode);
Assert.Equal(PaddingMode.PKCS7, alg.Padding);
}
[Fact]
public static void EncryptDecryptKnownECB192()
{
using (var alg = Rijndael.Create())
{
EncryptDecryptKnownECB192(alg);
}
using (var alg = new RijndaelManaged())
{
EncryptDecryptKnownECB192(alg);
}
}
private static void EncryptDecryptKnownECB192(Rijndael alg)
{
byte[] plainTextBytes =
new ASCIIEncoding().GetBytes("This is a sentence that is longer than a block, it ensures that multi-block functions work.");
byte[] encryptedBytesExpected = new byte[]
{
0xC9, 0x7F, 0xA5, 0x5B, 0xC3, 0x92, 0xDC, 0xA6,
0xE4, 0x9F, 0x2D, 0x1A, 0xEF, 0x7A, 0x27, 0x03,
0x04, 0x9C, 0xFB, 0x56, 0x63, 0x38, 0xAE, 0x4F,
0xDC, 0xF6, 0x36, 0x98, 0x28, 0x05, 0x32, 0xE9,
0xF2, 0x6E, 0xEC, 0x0C, 0x04, 0x9D, 0x12, 0x17,
0x18, 0x35, 0xD4, 0x29, 0xFC, 0x01, 0xB1, 0x20,
0xFA, 0x30, 0xAE, 0x00, 0x53, 0xD4, 0x26, 0x25,
0xA4, 0xFD, 0xD5, 0xE6, 0xED, 0x79, 0x35, 0x2A,
0xE2, 0xBB, 0x95, 0x0D, 0xEF, 0x09, 0xBB, 0x6D,
0xC5, 0xC4, 0xDB, 0x28, 0xC6, 0xF4, 0x31, 0x33,
0x9A, 0x90, 0x12, 0x36, 0x50, 0xA0, 0xB7, 0xD1,
0x35, 0xC4, 0xCE, 0x81, 0xE5, 0x2B, 0x85, 0x6B,
};
byte[] aes192Key = new byte[]
{
0xA6, 0x1E, 0xC7, 0x54, 0x37, 0x4D, 0x8C, 0xA5,
0xA4, 0xBB, 0x99, 0x50, 0x35, 0x4B, 0x30, 0x4D,
0x6C, 0xFE, 0x3B, 0x59, 0x65, 0xCB, 0x93, 0xE3,
};
// The CipherMode and KeySize are different than the default values; this ensures the type
// forwards the state properly to Aes.
alg.Mode = CipherMode.ECB;
alg.Key = aes192Key;
byte[] encryptedBytes = alg.Encrypt(plainTextBytes);
Assert.Equal(encryptedBytesExpected, encryptedBytes);
byte[] decryptedBytes = alg.Decrypt(encryptedBytes);
Assert.Equal(plainTextBytes, decryptedBytes);
}
[Fact]
public static void TestShims()
{
using (var alg = Rijndael.Create())
{
TestShims(alg);
}
using (var alg = new RijndaelManaged())
{
TestShims(alg);
}
}
private static void TestShims(Rijndael alg)
{
alg.BlockSize = 128;
Assert.Equal(128, alg.BlockSize);
var emptyIV = new byte[alg.BlockSize / 8];
alg.IV = emptyIV;
Assert.Equal(emptyIV, alg.IV);
alg.GenerateIV();
Assert.NotEqual(emptyIV, alg.IV);
var emptyKey = new byte[alg.KeySize / 8];
alg.Key = emptyKey;
Assert.Equal(emptyKey, alg.Key);
alg.GenerateKey();
Assert.NotEqual(emptyKey, alg.Key);
alg.KeySize = 128;
Assert.Equal(128, alg.KeySize);
alg.Mode = CipherMode.ECB;
Assert.Equal(CipherMode.ECB, alg.Mode);
alg.Padding = PaddingMode.PKCS7;
Assert.Equal(PaddingMode.PKCS7, alg.Padding);
}
[Fact]
public static void RijndaelKeySize_BaseClass()
{
using (Rijndael alg = new RijndaelMinimal())
{
Assert.Equal(128, alg.LegalKeySizes[0].MinSize);
Assert.Equal(256, alg.LegalKeySizes[0].MaxSize);
Assert.Equal(64, alg.LegalKeySizes[0].SkipSize);
Assert.Equal(256, alg.KeySize);
Assert.Equal(128, alg.LegalBlockSizes[0].MinSize);
Assert.Equal(256, alg.LegalBlockSizes[0].MaxSize);
Assert.Equal(64, alg.LegalBlockSizes[0].SkipSize);
Assert.Equal(128, alg.BlockSize);
}
}
[Fact]
public static void EnsureLegalSizesValuesIsolated()
{
new RijndaelLegalSizesBreaker().Dispose();
using (Rijndael alg = Rijndael.Create())
{
Assert.Equal(128, alg.LegalKeySizes[0].MinSize);
Assert.Equal(128, alg.LegalBlockSizes[0].MinSize);
alg.Key = new byte[16];
}
}
private class RijndaelLegalSizesBreaker : RijndaelMinimal
{
public RijndaelLegalSizesBreaker()
{
LegalKeySizesValue[0] = new KeySizes(1, 1, 0);
LegalBlockSizesValue[0] = new KeySizes(1, 1, 0);
}
}
private class RijndaelMinimal : Rijndael
{
// If the constructor uses a virtual call to any of the property setters
// they will fail.
private readonly bool _ready;
public RijndaelMinimal()
{
// Don't set this as a field initializer, otherwise it runs before the base ctor.
_ready = true;
}
public override int KeySize
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.KeySize = value;
}
}
public override int BlockSize
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.BlockSize = value;
}
}
public override byte[] IV
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.IV = value;
}
}
public override byte[] Key
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.Key = value;
}
}
public override CipherMode Mode
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.Mode = value;
}
}
public override PaddingMode Padding
{
set
{
if (!_ready)
{
throw new InvalidOperationException();
}
base.Padding = value;
}
}
public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
{
throw new NotImplementedException();
}
public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
{
throw new NotImplementedException();
}
public override void GenerateIV()
{
throw new NotImplementedException();
}
public override void GenerateKey()
{
throw new NotImplementedException();
}
}
}
}
| |
//#define Trace
// ParallelDeflateOutputStream.cs
// ------------------------------------------------------------------
//
// A DeflateStream that does compression only, it uses a
// divide-and-conquer approach with multiple threads to exploit multiple
// CPUs for the DEFLATE computation.
//
// last saved: <2011-July-31 14:49:40>
//
// ------------------------------------------------------------------
//
// Copyright (c) 2009-2011 by Dino Chiesa
// All rights reserved!
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Threading;
using Ionic.Zlib;
using System.IO;
namespace Ionic.Zlib
{
internal class WorkItem
{
public byte[] buffer;
public byte[] compressed;
public int crc;
public int index;
public int ordinal;
public int inputBytesAvailable;
public int compressedBytesAvailable;
public ZlibCodec compressor;
public WorkItem(int size,
Ionic.Zlib.CompressionLevel compressLevel,
CompressionStrategy strategy,
int ix)
{
this.buffer= new byte[size];
// alloc 5 bytes overhead for every block (margin of safety= 2)
int n = size + ((size / 32768)+1) * 5 * 2;
this.compressed = new byte[n];
this.compressor = new ZlibCodec();
this.compressor.InitializeDeflate(compressLevel, false);
this.compressor.OutputBuffer = this.compressed;
this.compressor.InputBuffer = this.buffer;
this.index = ix;
}
}
/// <summary>
/// A class for compressing streams using the
/// Deflate algorithm with multiple threads.
/// </summary>
///
/// <remarks>
/// <para>
/// This class performs DEFLATE compression through writing. For
/// more information on the Deflate algorithm, see IETF RFC 1951,
/// "DEFLATE Compressed Data Format Specification version 1.3."
/// </para>
///
/// <para>
/// This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>, except
/// that this class is for compression only, and this implementation uses an
/// approach that employs multiple worker threads to perform the DEFLATE. On
/// a multi-cpu or multi-core computer, the performance of this class can be
/// significantly higher than the single-threaded DeflateStream, particularly
/// for larger streams. How large? Anything over 10mb is a good candidate
/// for parallel compression.
/// </para>
///
/// <para>
/// The tradeoff is that this class uses more memory and more CPU than the
/// vanilla DeflateStream, and also is less efficient as a compressor. For
/// large files the size of the compressed data stream can be less than 1%
/// larger than the size of a compressed data stream from the vanilla
/// DeflateStream. For smaller files the difference can be larger. The
/// difference will also be larger if you set the BufferSize to be lower than
/// the default value. Your mileage may vary. Finally, for small files, the
/// ParallelDeflateOutputStream can be much slower than the vanilla
/// DeflateStream, because of the overhead associated to using the thread
/// pool.
/// </para>
///
/// </remarks>
/// <seealso cref="Ionic.Zlib.DeflateStream" />
public class ParallelDeflateOutputStream : System.IO.Stream
{
private static readonly int IO_BUFFER_SIZE_DEFAULT = 64 * 1024; // 64k
private static readonly int BufferPairsPerCore = 4;
private System.Collections.Generic.List<WorkItem> _pool;
private bool _leaveOpen;
private bool emitting;
private System.IO.Stream _outStream;
private int _maxBufferPairs;
private int _bufferSize = IO_BUFFER_SIZE_DEFAULT;
private AutoResetEvent _newlyCompressedBlob;
//private ManualResetEvent _writingDone;
//private ManualResetEvent _sessionReset;
private object _outputLock = new object();
private bool _isClosed;
private bool _firstWriteDone;
private int _currentlyFilling;
private int _lastFilled;
private int _lastWritten;
private int _latestCompressed;
private int _Crc32;
private Ionic.Crc.CRC32 _runningCrc;
private object _latestLock = new object();
private System.Collections.Generic.Queue<int> _toWrite;
private System.Collections.Generic.Queue<int> _toFill;
private Int64 _totalBytesProcessed;
private Ionic.Zlib.CompressionLevel _compressLevel;
private volatile Exception _pendingException;
private bool _handlingException;
private object _eLock = new Object(); // protects _pendingException
// This bitfield is used only when Trace is defined.
//private TraceBits _DesiredTrace = TraceBits.Write | TraceBits.WriteBegin |
//TraceBits.WriteDone | TraceBits.Lifecycle | TraceBits.Fill | TraceBits.Flush |
//TraceBits.Session;
//private TraceBits _DesiredTrace = TraceBits.WriteBegin | TraceBits.WriteDone | TraceBits.Synch | TraceBits.Lifecycle | TraceBits.Session ;
private TraceBits _DesiredTrace =
TraceBits.Session |
TraceBits.Compress |
TraceBits.WriteTake |
TraceBits.WriteEnter |
TraceBits.EmitEnter |
TraceBits.EmitDone |
TraceBits.EmitLock |
TraceBits.EmitSkip |
TraceBits.EmitBegin;
/// <summary>
/// Create a ParallelDeflateOutputStream.
/// </summary>
/// <remarks>
///
/// <para>
/// This stream compresses data written into it via the DEFLATE
/// algorithm (see RFC 1951), and writes out the compressed byte stream.
/// </para>
///
/// <para>
/// The instance will use the default compression level, the default
/// buffer sizes and the default number of threads and buffers per
/// thread.
/// </para>
///
/// <para>
/// This class is similar to <see cref="Ionic.Zlib.DeflateStream"/>,
/// except that this implementation uses an approach that employs
/// multiple worker threads to perform the DEFLATE. On a multi-cpu or
/// multi-core computer, the performance of this class can be
/// significantly higher than the single-threaded DeflateStream,
/// particularly for larger streams. How large? Anything over 10mb is
/// a good candidate for parallel compression.
/// </para>
///
/// </remarks>
///
/// <example>
///
/// This example shows how to use a ParallelDeflateOutputStream to compress
/// data. It reads a file, compresses it, and writes the compressed data to
/// a second, output file.
///
/// <code>
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n= -1;
/// String outputFile = fileToCompress + ".compressed";
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(outputFile))
/// {
/// using (Stream compressor = new ParallelDeflateOutputStream(raw))
/// {
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
/// <code lang="VB">
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Dim outputFile As String = (fileToCompress & ".compressed")
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(outputFile)
/// Using compressor As Stream = New ParallelDeflateOutputStream(raw)
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
/// <param name="stream">The stream to which compressed data will be written.</param>
public ParallelDeflateOutputStream(System.IO.Stream stream)
: this(stream, CompressionLevel.Default, CompressionStrategy.Default, false)
{
}
/// <summary>
/// Create a ParallelDeflateOutputStream using the specified CompressionLevel.
/// </summary>
/// <remarks>
/// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
/// constructor for example code.
/// </remarks>
/// <param name="stream">The stream to which compressed data will be written.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level)
: this(stream, level, CompressionStrategy.Default, false)
{
}
/// <summary>
/// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open
/// when the ParallelDeflateOutputStream is closed.
/// </summary>
/// <remarks>
/// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
/// constructor for example code.
/// </remarks>
/// <param name="stream">The stream to which compressed data will be written.</param>
/// <param name="leaveOpen">
/// true if the application would like the stream to remain open after inflation/deflation.
/// </param>
public ParallelDeflateOutputStream(System.IO.Stream stream, bool leaveOpen)
: this(stream, CompressionLevel.Default, CompressionStrategy.Default, leaveOpen)
{
}
/// <summary>
/// Create a ParallelDeflateOutputStream and specify whether to leave the captive stream open
/// when the ParallelDeflateOutputStream is closed.
/// </summary>
/// <remarks>
/// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
/// constructor for example code.
/// </remarks>
/// <param name="stream">The stream to which compressed data will be written.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
/// <param name="leaveOpen">
/// true if the application would like the stream to remain open after inflation/deflation.
/// </param>
public ParallelDeflateOutputStream(System.IO.Stream stream, CompressionLevel level, bool leaveOpen)
: this(stream, level, CompressionStrategy.Default, leaveOpen)
{
}
/// <summary>
/// Create a ParallelDeflateOutputStream using the specified
/// CompressionLevel and CompressionStrategy, and specifying whether to
/// leave the captive stream open when the ParallelDeflateOutputStream is
/// closed.
/// </summary>
/// <remarks>
/// See the <see cref="ParallelDeflateOutputStream(System.IO.Stream)"/>
/// constructor for example code.
/// </remarks>
/// <param name="stream">The stream to which compressed data will be written.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
/// <param name="strategy">
/// By tweaking this parameter, you may be able to optimize the compression for
/// data with particular characteristics.
/// </param>
/// <param name="leaveOpen">
/// true if the application would like the stream to remain open after inflation/deflation.
/// </param>
public ParallelDeflateOutputStream(System.IO.Stream stream,
CompressionLevel level,
CompressionStrategy strategy,
bool leaveOpen)
{
TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "-------------------------------------------------------");
TraceOutput(TraceBits.Lifecycle | TraceBits.Session, "Create {0:X8}", this.GetHashCode());
_outStream = stream;
_compressLevel= level;
Strategy = strategy;
_leaveOpen = leaveOpen;
this.MaxBufferPairs = 16; // default
}
/// <summary>
/// The ZLIB strategy to be used during compression.
/// </summary>
///
public CompressionStrategy Strategy
{
get;
private set;
}
/// <summary>
/// The maximum number of buffer pairs to use.
/// </summary>
///
/// <remarks>
/// <para>
/// This property sets an upper limit on the number of memory buffer
/// pairs to create. The implementation of this stream allocates
/// multiple buffers to facilitate parallel compression. As each buffer
/// fills up, this stream uses <see
/// cref="System.Threading.ThreadPool.QueueUserWorkItem(WaitCallback)">
/// ThreadPool.QueueUserWorkItem()</see>
/// to compress those buffers in a background threadpool thread. After a
/// buffer is compressed, it is re-ordered and written to the output
/// stream.
/// </para>
///
/// <para>
/// A higher number of buffer pairs enables a higher degree of
/// parallelism, which tends to increase the speed of compression on
/// multi-cpu computers. On the other hand, a higher number of buffer
/// pairs also implies a larger memory consumption, more active worker
/// threads, and a higher cpu utilization for any compression. This
/// property enables the application to limit its memory consumption and
/// CPU utilization behavior depending on requirements.
/// </para>
///
/// <para>
/// For each compression "task" that occurs in parallel, there are 2
/// buffers allocated: one for input and one for output. This property
/// sets a limit for the number of pairs. The total amount of storage
/// space allocated for buffering will then be (N*S*2), where N is the
/// number of buffer pairs, S is the size of each buffer (<see
/// cref="BufferSize"/>). By default, DotNetZip allocates 4 buffer
/// pairs per CPU core, so if your machine has 4 cores, and you retain
/// the default buffer size of 128k, then the
/// ParallelDeflateOutputStream will use 4 * 4 * 2 * 128kb of buffer
/// memory in total, or 4mb, in blocks of 128kb. If you then set this
/// property to 8, then the number will be 8 * 2 * 128kb of buffer
/// memory, or 2mb.
/// </para>
///
/// <para>
/// CPU utilization will also go up with additional buffers, because a
/// larger number of buffer pairs allows a larger number of background
/// threads to compress in parallel. If you find that parallel
/// compression is consuming too much memory or CPU, you can adjust this
/// value downward.
/// </para>
///
/// <para>
/// The default value is 16. Different values may deliver better or
/// worse results, depending on your priorities and the dynamic
/// performance characteristics of your storage and compute resources.
/// </para>
///
/// <para>
/// This property is not the number of buffer pairs to use; it is an
/// upper limit. An illustration: Suppose you have an application that
/// uses the default value of this property (which is 16), and it runs
/// on a machine with 2 CPU cores. In that case, DotNetZip will allocate
/// 4 buffer pairs per CPU core, for a total of 8 pairs. The upper
/// limit specified by this property has no effect.
/// </para>
///
/// <para>
/// The application can set this value at any time, but it is effective
/// only before the first call to Write(), which is when the buffers are
/// allocated.
/// </para>
/// </remarks>
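/// <example>
/// A minimal sketch, not taken from the original documentation, showing how an
/// application might cap the buffer pairs before the first Write(); the file
/// names here are hypothetical.
/// <code>
/// byte[] data = System.IO.File.ReadAllBytes("input.bin");        // hypothetical input
/// using (var raw = System.IO.File.Create("input.bin.deflated"))  // hypothetical output
/// using (var compressor = new ParallelDeflateOutputStream(raw))
/// {
///     compressor.MaxBufferPairs = 4;  // minimum allowed value; effective only before the first Write()
///     compressor.Write(data, 0, data.Length);
/// }
/// </code>
/// </example>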
public int MaxBufferPairs
{
get
{
return _maxBufferPairs;
}
set
{
if (value < 4)
throw new ArgumentException("MaxBufferPairs",
"Value must be 4 or greater.");
_maxBufferPairs = value;
}
}
/// <summary>
/// The size of the buffers used by the compressor threads.
/// </summary>
/// <remarks>
///
/// <para>
/// The default buffer size is 128k. The application can set this value
/// at any time, but it is effective only before the first Write().
/// </para>
///
/// <para>
/// Larger buffer sizes implies larger memory consumption but allows
/// more efficient compression. Using smaller buffer sizes consumes less
/// memory but may result in less effective compression. For example,
/// using the default buffer size of 128k, the compression delivered is
/// within 1% of the compression delivered by the single-threaded <see
/// cref="Ionic.Zlib.DeflateStream"/>. On the other hand, using a
/// BufferSize of 8k can result in a compressed data stream that is 5%
/// larger than that delivered by the single-threaded
/// <c>DeflateStream</c>. Excessively small buffer sizes can also cause
/// the speed of the ParallelDeflateOutputStream to drop, because of
/// larger thread scheduling overhead dealing with many many small
/// buffers.
/// </para>
///
/// <para>
/// The total amount of storage space allocated for buffering will be
/// (N*S*2), where N is the number of buffer pairs, and S is the size of
/// each buffer (this property). There are 2 buffers used by the
/// compressor, one for input and one for output. By default, DotNetZip
/// allocates 4 buffer pairs per CPU core, so if your machine has 4
/// cores, then the number of buffer pairs used will be 16. If you
/// accept the default value of this property, 128k, then the
/// ParallelDeflateOutputStream will use 16 * 2 * 128kb of buffer memory
/// in total, or 4mb, in blocks of 128kb. If you set this property to
/// 64kb, then the number will be 16 * 2 * 64kb of buffer memory, or
/// 2mb.
/// </para>
///
/// </remarks>
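/// <example>
/// A short illustration, added here rather than taken from the original docs,
/// of trading compression efficiency for lower memory use by shrinking the
/// buffer size; the file names are hypothetical.
/// <code>
/// using (var output = System.IO.File.Create("archive.deflated"))   // hypothetical output file
/// using (var pdos = new ParallelDeflateOutputStream(output))
/// {
///     pdos.BufferSize = 32 * 1024;   // must be at least 1024; effective only before the first Write()
///     byte[] chunk = new byte[64 * 1024];
///     int n;
///     using (var input = System.IO.File.OpenRead("archive"))       // hypothetical input file
///     {
///         while ((n = input.Read(chunk, 0, chunk.Length)) != 0)
///             pdos.Write(chunk, 0, n);
///     }
/// }
/// </code>
/// </example>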
public int BufferSize
{
get { return _bufferSize;}
set
{
if (value < 1024)
throw new ArgumentOutOfRangeException("BufferSize",
"BufferSize must be greater than 1024 bytes");
_bufferSize = value;
}
}
/// <summary>
/// The CRC32 for the data that was written out, prior to compression.
/// </summary>
/// <remarks>
/// This value is meaningful only after a call to Close().
/// </remarks>
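/// <example>
/// A brief sketch (added for illustration; the variables are hypothetical)
/// showing that the CRC is read only after Close():
/// <code>
/// var pdos = new ParallelDeflateOutputStream(outputStream, true); // leave outputStream open
/// pdos.Write(data, 0, data.Length);
/// pdos.Close();
/// int crc = pdos.Crc32;   // meaningful only after Close()
/// </code>
/// </example>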
public int Crc32 { get { return _Crc32; } }
/// <summary>
/// The total number of uncompressed bytes processed by the ParallelDeflateOutputStream.
/// </summary>
/// <remarks>
/// This value is meaningful only after a call to Close().
/// </remarks>
public Int64 BytesProcessed { get { return _totalBytesProcessed; } }
private void _InitializePoolOfWorkItems()
{
_toWrite = new Queue<int>();
_toFill = new Queue<int>();
_pool = new System.Collections.Generic.List<WorkItem>();
int nTasks = BufferPairsPerCore * Environment.ProcessorCount;
nTasks = Math.Min(nTasks, _maxBufferPairs);
for(int i=0; i < nTasks; i++)
{
_pool.Add(new WorkItem(_bufferSize, _compressLevel, Strategy, i));
_toFill.Enqueue(i);
}
_newlyCompressedBlob = new AutoResetEvent(false);
_runningCrc = new Ionic.Crc.CRC32();
_currentlyFilling = -1;
_lastFilled = -1;
_lastWritten = -1;
_latestCompressed = -1;
}
/// <summary>
/// Write data to the stream.
/// </summary>
///
/// <remarks>
///
/// <para>
/// To use the ParallelDeflateOutputStream to compress data, create a
/// ParallelDeflateOutputStream over a writable output stream. Then call
/// Write() on that
/// ParallelDeflateOutputStream, providing uncompressed data as input. The
/// data sent to the output stream will be the compressed form of the data
/// written.
/// </para>
///
/// <para>
/// To decompress data, use the <see cref="Ionic.Zlib.DeflateStream"/> class.
/// </para>
///
/// </remarks>
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
bool mustWait = false;
// This method does this:
// 0. handles any pending exceptions
// 1. write any buffers that are ready to be written,
// 2. fills a work buffer; when full, flip state to 'Filled',
// 3. if more data to be written, goto step 1
if (_isClosed)
throw new InvalidOperationException();
// dispense any exceptions that occurred on the BG threads
if (_pendingException != null)
{
_handlingException = true;
var pe = _pendingException;
_pendingException = null;
throw pe;
}
if (count == 0) return;
if (!_firstWriteDone)
{
// Want to do this on first Write, first session, and not in the
// constructor. We want to allow MaxBufferPairs to
// change after construction, but before first Write.
_InitializePoolOfWorkItems();
_firstWriteDone = true;
}
do
{
// may need to make buffers available
EmitPendingBuffers(false, mustWait);
mustWait = false;
// use current buffer, or get a new buffer to fill
int ix = -1;
if (_currentlyFilling >= 0)
{
ix = _currentlyFilling;
TraceOutput(TraceBits.WriteTake,
"Write notake wi({0}) lf({1})",
ix,
_lastFilled);
}
else
{
TraceOutput(TraceBits.WriteTake, "Write take?");
if (_toFill.Count == 0)
{
// no available buffers, so... need to emit
// compressed buffers.
mustWait = true;
continue;
}
ix = _toFill.Dequeue();
TraceOutput(TraceBits.WriteTake,
"Write take wi({0}) lf({1})",
ix,
_lastFilled);
++_lastFilled; // TODO: consider rollover?
}
WorkItem workitem = _pool[ix];
int limit = ((workitem.buffer.Length - workitem.inputBytesAvailable) > count)
? count
: (workitem.buffer.Length - workitem.inputBytesAvailable);
workitem.ordinal = _lastFilled;
TraceOutput(TraceBits.Write,
"Write lock wi({0}) ord({1}) iba({2})",
workitem.index,
workitem.ordinal,
workitem.inputBytesAvailable
);
// copy from the provided buffer to our workitem, starting at
// the tail end of whatever data we might have in there currently.
Buffer.BlockCopy(buffer,
offset,
workitem.buffer,
workitem.inputBytesAvailable,
limit);
count -= limit;
offset += limit;
workitem.inputBytesAvailable += limit;
if (workitem.inputBytesAvailable == workitem.buffer.Length)
{
// No need for interlocked.increment: the Write()
// method is documented as not multi-thread safe, so
// we can assume Write() calls come in from only one
// thread.
TraceOutput(TraceBits.Write,
"Write QUWI wi({0}) ord({1}) iba({2}) nf({3})",
workitem.index,
workitem.ordinal,
workitem.inputBytesAvailable );
if (!ThreadPool.QueueUserWorkItem( _DeflateOne, workitem ))
throw new Exception("Cannot enqueue workitem");
_currentlyFilling = -1; // will get a new buffer next time
}
else
_currentlyFilling = ix;
if (count > 0)
TraceOutput(TraceBits.WriteEnter, "Write more");
}
while (count > 0); // until no more to write
TraceOutput(TraceBits.WriteEnter, "Write exit");
return;
}
private void _FlushFinish()
{
// After writing a series of compressed buffers, each one closed
// with Flush.Sync, we now write the final one as Flush.Finish,
// and then stop.
byte[] buffer = new byte[128];
var compressor = new ZlibCodec();
int rc = compressor.InitializeDeflate(_compressLevel, false);
compressor.InputBuffer = null;
compressor.NextIn = 0;
compressor.AvailableBytesIn = 0;
compressor.OutputBuffer = buffer;
compressor.NextOut = 0;
compressor.AvailableBytesOut = buffer.Length;
rc = compressor.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
throw new Exception("deflating: " + compressor.Message);
if (buffer.Length - compressor.AvailableBytesOut > 0)
{
TraceOutput(TraceBits.EmitBegin,
"Emit begin flush bytes({0})",
buffer.Length - compressor.AvailableBytesOut);
_outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);
TraceOutput(TraceBits.EmitDone,
"Emit done flush");
}
compressor.EndDeflate();
_Crc32 = _runningCrc.Crc32Result;
}
private void _Flush(bool lastInput)
{
if (_isClosed)
throw new InvalidOperationException();
if (emitting) return;
// compress any partial buffer
if (_currentlyFilling >= 0)
{
WorkItem workitem = _pool[_currentlyFilling];
_DeflateOne(workitem);
_currentlyFilling = -1; // get a new buffer next Write()
}
if (lastInput)
{
EmitPendingBuffers(true, false);
_FlushFinish();
}
else
{
EmitPendingBuffers(false, false);
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
{
if (_pendingException != null)
{
_handlingException = true;
var pe = _pendingException;
_pendingException = null;
throw pe;
}
if (_handlingException)
return;
_Flush(false);
}
/// <summary>
/// Close the stream.
/// </summary>
/// <remarks>
/// You must call Close on the stream to guarantee that all of the data written in has
/// been compressed, and the compressed data has been written out.
/// </remarks>
public override void Close()
{
TraceOutput(TraceBits.Session, "Close {0:X8}", this.GetHashCode());
if (_pendingException != null)
{
_handlingException = true;
var pe = _pendingException;
_pendingException = null;
throw pe;
}
if (_handlingException)
return;
if (_isClosed) return;
_Flush(true);
if (!_leaveOpen)
_outStream.Close();
_isClosed= true;
}
// workitem 10030 - implement a new Dispose method
/// <summary>Dispose the object</summary>
/// <remarks>
/// <para>
/// Because ParallelDeflateOutputStream is IDisposable, the
/// application must call this method when finished using the instance.
/// </para>
/// <para>
/// This method is generally called implicitly upon exit from
/// a <c>using</c> scope in C# (<c>Using</c> in VB).
/// </para>
/// </remarks>
new public void Dispose()
{
TraceOutput(TraceBits.Lifecycle, "Dispose {0:X8}", this.GetHashCode());
Close();
_pool = null;
Dispose(true);
}
/// <summary>The Dispose method</summary>
/// <param name="disposing">
/// indicates whether the Dispose method was invoked by user code.
/// </param>
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
}
/// <summary>
/// Resets the stream for use with another stream.
/// </summary>
/// <remarks>
/// Because the ParallelDeflateOutputStream is expensive to create, it
/// has been designed so that it can be recycled and re-used. You have
/// to call Close() on the stream first, then you can call Reset() on
/// it, to use it again on another stream.
/// </remarks>
///
/// <param name="stream">
/// The new output stream for this era.
/// </param>
///
/// <example>
/// <code>
/// ParallelDeflateOutputStream deflater = null;
/// foreach (var inputFile in listOfFiles)
/// {
/// string outputFile = inputFile + ".compressed";
/// using (System.IO.Stream input = System.IO.File.OpenRead(inputFile))
/// {
/// using (var outStream = System.IO.File.Create(outputFile))
/// {
/// if (deflater == null)
/// deflater = new ParallelDeflateOutputStream(outStream,
/// CompressionLevel.Best,
/// CompressionStrategy.Default,
/// true);
/// deflater.Reset(outStream);
///
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// deflater.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
/// </example>
public void Reset(Stream stream)
{
TraceOutput(TraceBits.Session, "-------------------------------------------------------");
TraceOutput(TraceBits.Session, "Reset {0:X8} firstDone({1})", this.GetHashCode(), _firstWriteDone);
if (!_firstWriteDone) return;
// reset all status
_toWrite.Clear();
_toFill.Clear();
foreach (var workitem in _pool)
{
_toFill.Enqueue(workitem.index);
workitem.ordinal = -1;
}
_firstWriteDone = false;
_totalBytesProcessed = 0L;
_runningCrc = new Ionic.Crc.CRC32();
_isClosed= false;
_currentlyFilling = -1;
_lastFilled = -1;
_lastWritten = -1;
_latestCompressed = -1;
_outStream = stream;
}
private void EmitPendingBuffers(bool doAll, bool mustWait)
{
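// doAll:    keep emitting until every compressed buffer has been written
//           (used when flushing the final data at Close time).
// mustWait: block until at least one newly compressed buffer is available
//           (used by Write() when no empty fill buffers remain).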
// When combining parallel deflation with a ZipSegmentedStream, it's
// possible for the ZSS to throw from within this method. In that
// case, Close/Dispose will be called on this stream, if this stream
// is employed within a using or try/finally pair as required. But
// this stream is unaware of the pending exception, so the Close()
// method invokes this method AGAIN. This can lead to a deadlock.
// Therefore, failfast if re-entering.
if (emitting) return;
emitting = true;
if (doAll || mustWait)
_newlyCompressedBlob.WaitOne();
do
{
int firstSkip = -1;
int millisecondsToWait = doAll ? 200 : (mustWait ? -1 : 0);
int nextToWrite = -1;
do
{
if (Monitor.TryEnter(_toWrite, millisecondsToWait))
{
nextToWrite = -1;
try
{
if (_toWrite.Count > 0)
nextToWrite = _toWrite.Dequeue();
}
finally
{
Monitor.Exit(_toWrite);
}
if (nextToWrite >= 0)
{
WorkItem workitem = _pool[nextToWrite];
if (workitem.ordinal != _lastWritten + 1)
{
// out of order. requeue and try again.
TraceOutput(TraceBits.EmitSkip,
"Emit skip wi({0}) ord({1}) lw({2}) fs({3})",
workitem.index,
workitem.ordinal,
_lastWritten,
firstSkip);
lock(_toWrite)
{
_toWrite.Enqueue(nextToWrite);
}
if (firstSkip == nextToWrite)
{
// We went around the list once.
// None of the items in the list is the one we want.
// Now wait for a compressor to signal again.
_newlyCompressedBlob.WaitOne();
firstSkip = -1;
}
else if (firstSkip == -1)
firstSkip = nextToWrite;
continue;
}
firstSkip = -1;
TraceOutput(TraceBits.EmitBegin,
"Emit begin wi({0}) ord({1}) cba({2})",
workitem.index,
workitem.ordinal,
workitem.compressedBytesAvailable);
_outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable);
_runningCrc.Combine(workitem.crc, workitem.inputBytesAvailable);
_totalBytesProcessed += workitem.inputBytesAvailable;
workitem.inputBytesAvailable = 0;
TraceOutput(TraceBits.EmitDone,
"Emit done wi({0}) ord({1}) cba({2}) mtw({3})",
workitem.index,
workitem.ordinal,
workitem.compressedBytesAvailable,
millisecondsToWait);
_lastWritten = workitem.ordinal;
_toFill.Enqueue(workitem.index);
// don't wait next time through
if (millisecondsToWait == -1) millisecondsToWait = 0;
}
}
else
nextToWrite = -1;
} while (nextToWrite >= 0);
} while (doAll && (_lastWritten != _latestCompressed));
emitting = false;
}
#if OLD
private void _PerpetualWriterMethod(object state)
{
TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod START");
try
{
do
{
// wait for the next session
TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.WaitOne(begin) PWM");
_sessionReset.WaitOne();
TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.WaitOne(done) PWM");
if (_isDisposed) break;
TraceOutput(TraceBits.Synch | TraceBits.WriterThread, "Synch _sessionReset.Reset() PWM");
_sessionReset.Reset();
// repeatedly write buffers as they become ready
WorkItem workitem = null;
Ionic.Zlib.CRC32 c= new Ionic.Zlib.CRC32();
do
{
workitem = _pool[_nextToWrite % _pc];
lock(workitem)
{
if (_noMoreInputForThisSegment)
TraceOutput(TraceBits.Write,
"Write drain wi({0}) stat({1}) canuse({2}) cba({3})",
workitem.index,
workitem.status,
(workitem.status == (int)WorkItem.Status.Compressed),
workitem.compressedBytesAvailable);
do
{
if (workitem.status == (int)WorkItem.Status.Compressed)
{
TraceOutput(TraceBits.WriteBegin,
"Write begin wi({0}) stat({1}) cba({2})",
workitem.index,
workitem.status,
workitem.compressedBytesAvailable);
workitem.status = (int)WorkItem.Status.Writing;
_outStream.Write(workitem.compressed, 0, workitem.compressedBytesAvailable);
c.Combine(workitem.crc, workitem.inputBytesAvailable);
_totalBytesProcessed += workitem.inputBytesAvailable;
_nextToWrite++;
workitem.inputBytesAvailable= 0;
workitem.status = (int)WorkItem.Status.Done;
TraceOutput(TraceBits.WriteDone,
"Write done wi({0}) stat({1}) cba({2})",
workitem.index,
workitem.status,
workitem.compressedBytesAvailable);
Monitor.Pulse(workitem);
break;
}
else
{
int wcycles = 0;
// I've locked a workitem I cannot use.
// Therefore, wake someone else up, and then release the lock.
while (workitem.status != (int)WorkItem.Status.Compressed)
{
TraceOutput(TraceBits.WriteWait,
"Write waiting wi({0}) stat({1}) nw({2}) nf({3}) nomore({4})",
workitem.index,
workitem.status,
_nextToWrite, _nextToFill,
_noMoreInputForThisSegment );
if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill)
break;
wcycles++;
// wake up someone else
Monitor.Pulse(workitem);
// release and wait
Monitor.Wait(workitem);
if (workitem.status == (int)WorkItem.Status.Compressed)
TraceOutput(TraceBits.WriteWait,
"Write A-OK wi({0}) stat({1}) iba({2}) cba({3}) cyc({4})",
workitem.index,
workitem.status,
workitem.inputBytesAvailable,
workitem.compressedBytesAvailable,
wcycles);
}
if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill)
break;
}
}
while (true);
}
if (_noMoreInputForThisSegment)
TraceOutput(TraceBits.Write,
"Write nomore nw({0}) nf({1}) break({2})",
_nextToWrite, _nextToFill, (_nextToWrite == _nextToFill));
if (_noMoreInputForThisSegment && _nextToWrite == _nextToFill)
break;
} while (true);
// Finish:
// After writing a series of buffers, closing each one with
// Flush.Sync, we now write the final one as Flush.Finish, and
// then stop.
byte[] buffer = new byte[128];
ZlibCodec compressor = new ZlibCodec();
int rc = compressor.InitializeDeflate(_compressLevel, false);
compressor.InputBuffer = null;
compressor.NextIn = 0;
compressor.AvailableBytesIn = 0;
compressor.OutputBuffer = buffer;
compressor.NextOut = 0;
compressor.AvailableBytesOut = buffer.Length;
rc = compressor.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
throw new Exception("deflating: " + compressor.Message);
if (buffer.Length - compressor.AvailableBytesOut > 0)
{
TraceOutput(TraceBits.WriteBegin,
"Write begin flush bytes({0})",
buffer.Length - compressor.AvailableBytesOut);
_outStream.Write(buffer, 0, buffer.Length - compressor.AvailableBytesOut);
TraceOutput(TraceBits.WriteBegin,
"Write done flush");
}
compressor.EndDeflate();
_Crc32 = c.Crc32Result;
// signal that writing is complete:
TraceOutput(TraceBits.Synch, "Synch _writingDone.Set() PWM");
_writingDone.Set();
}
while (true);
}
catch (System.Exception exc1)
{
lock(_eLock)
{
// expose the exception to the main thread
if (_pendingException==null)
_pendingException = exc1;
}
}
TraceOutput(TraceBits.WriterThread, "_PerpetualWriterMethod FINIS");
}
#endif
private void _DeflateOne(Object wi)
{
// compress one buffer
WorkItem workitem = (WorkItem) wi;
try
{
Ionic.Crc.CRC32 crc = new Ionic.Crc.CRC32();
// calc CRC on the buffer
crc.SlurpBlock(workitem.buffer, 0, workitem.inputBytesAvailable);
// deflate it
DeflateOneSegment(workitem);
// update status
workitem.crc = crc.Crc32Result;
TraceOutput(TraceBits.Compress,
"Compress wi({0}) ord({1}) len({2})",
workitem.index,
workitem.ordinal,
workitem.compressedBytesAvailable
);
lock(_latestLock)
{
if (workitem.ordinal > _latestCompressed)
_latestCompressed = workitem.ordinal;
}
lock (_toWrite)
{
_toWrite.Enqueue(workitem.index);
}
_newlyCompressedBlob.Set();
}
catch (System.Exception exc1)
{
lock(_eLock)
{
// expose the exception to the main thread
if (_pendingException==null)
_pendingException = exc1;
}
}
}
private bool DeflateOneSegment(WorkItem workitem)
{
ZlibCodec compressor = workitem.compressor;
int rc = 0;
compressor.ResetDeflate();
compressor.NextIn = 0;
compressor.AvailableBytesIn = workitem.inputBytesAvailable;
// step 1: deflate the buffer
compressor.NextOut = 0;
compressor.AvailableBytesOut = workitem.compressed.Length;
do
{
compressor.Deflate(FlushType.None);
}
while (compressor.AvailableBytesIn > 0 || compressor.AvailableBytesOut == 0);
// step 2: flush (sync)
rc = compressor.Deflate(FlushType.Sync);
// any result other than Z_OK or Z_STREAM_END indicates a failure
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
throw new ZlibException("Deflate: unknown return code");
workitem.compressedBytesAvailable= (int) compressor.TotalBytesOut;
return true;
}
[System.Diagnostics.ConditionalAttribute("Trace")]
private void TraceOutput(TraceBits bits, string format, params object[] varParams)
{
if ((bits & _DesiredTrace) != 0)
{
lock(_outputLock)
{
int tid = Thread.CurrentThread.GetHashCode();
#if !SILVERLIGHT
//Console.ForegroundColor = (ConsoleColor) (tid % 8 + 8);
#endif
Console.Write("{0:000} PDOS ", tid);
Console.WriteLine(format, varParams);
#if !SILVERLIGHT
//Console.ResetColor();
#endif
}
}
}
// used only when Trace is defined
[Flags]
enum TraceBits : uint
{
None = 0,
NotUsed1 = 1,
EmitLock = 2,
EmitEnter = 4, // enter _EmitPending
EmitBegin = 8, // begin to write out
EmitDone = 16, // done writing out
EmitSkip = 32, // writer skipping a workitem
EmitAll = 62, // All Emit flags (EmitLock | EmitEnter | EmitBegin | EmitDone | EmitSkip)
Flush = 64,
Lifecycle = 128, // constructor/disposer
Session = 256, // Close/Reset
Synch = 512, // thread synchronization
Instance = 1024, // instance settings
Compress = 2048, // compress task
Write = 4096, // filling buffers, when caller invokes Write()
WriteEnter = 8192, // upon entry to Write()
WriteTake = 16384, // on _toFill.Take()
All = 0xffffffff,
}
/// <summary>
/// Indicates whether the stream supports Seek operations.
/// </summary>
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek
{
get { return false; }
}
/// <summary>
/// Indicates whether the stream supports Read operations.
/// </summary>
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanRead
{
get {return false;}
}
/// <summary>
/// Indicates whether the stream supports Write operations.
/// </summary>
/// <remarks>
/// Returns true if the provided stream is writable.
/// </remarks>
public override bool CanWrite
{
get { return _outStream.CanWrite; }
}
/// <summary>
/// Reading this property always throws a NotSupportedException.
/// </summary>
public override long Length
{
get { throw new NotSupportedException(); }
}
/// <summary>
/// Returns the current position of the output stream.
/// </summary>
/// <remarks>
/// <para>
/// Because the output gets written by a background thread,
/// the value may change asynchronously. Setting this
/// property always throws a NotSupportedException.
/// </para>
/// </remarks>
public override long Position
{
get { return _outStream.Position; }
set { throw new NotSupportedException(); }
}
/// <summary>
/// This method always throws a NotSupportedException.
/// </summary>
/// <param name="buffer">
/// The buffer into which data would be read, IF THIS METHOD
/// ACTUALLY DID ANYTHING.
/// </param>
/// <param name="offset">
/// The offset within that data array at which to insert the
/// data that is read, IF THIS METHOD ACTUALLY DID
/// ANYTHING.
/// </param>
/// <param name="count">
/// The number of bytes to write, IF THIS METHOD ACTUALLY DID
/// ANYTHING.
/// </param>
/// <returns>nothing.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
/// <summary>
/// This method always throws a NotSupportedException.
/// </summary>
/// <param name="offset">
/// The offset to seek to....
/// IF THIS METHOD ACTUALLY DID ANYTHING.
/// </param>
/// <param name="origin">
/// The reference specifying how to apply the offset.... IF
/// THIS METHOD ACTUALLY DID ANYTHING.
/// </param>
/// <returns>nothing. It always throws.</returns>
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
throw new NotSupportedException();
}
/// <summary>
/// This method always throws a NotSupportedException.
/// </summary>
/// <param name="value">
/// The new value for the stream length.... IF
/// THIS METHOD ACTUALLY DID ANYTHING.
/// </param>
public override void SetLength(long value)
{
throw new NotSupportedException();
}
}
}
| |
/*
* UltraCart Rest API V2
*
* UltraCart REST API Version 2
*
* OpenAPI spec version: 2.0.0
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter;
namespace com.ultracart.admin.v2.Model
{
/// <summary>
/// ItemThirdPartyEmailMarketing
/// </summary>
[DataContract]
public partial class ItemThirdPartyEmailMarketing : IEquatable<ItemThirdPartyEmailMarketing>, IValidatableObject
{
/// <summary>
/// Provider name
/// </summary>
/// <value>Provider name</value>
[JsonConverter(typeof(StringEnumConverter))]
public enum ProviderNameEnum
{
/// <summary>
/// Enum ActiveCampaign for value: ActiveCampaign
/// </summary>
[EnumMember(Value = "ActiveCampaign")]
ActiveCampaign = 1,
/// <summary>
/// Enum AWeber for value: AWeber
/// </summary>
[EnumMember(Value = "AWeber")]
AWeber = 2,
/// <summary>
/// Enum CampaignMonitor for value: Campaign Monitor
/// </summary>
[EnumMember(Value = "Campaign Monitor")]
CampaignMonitor = 3,
/// <summary>
/// Enum ConstantContact for value: ConstantContact
/// </summary>
[EnumMember(Value = "ConstantContact")]
ConstantContact = 4,
/// <summary>
/// Enum Emma for value: Emma
/// </summary>
[EnumMember(Value = "Emma")]
Emma = 5,
/// <summary>
/// Enum GetResponse for value: GetResponse
/// </summary>
[EnumMember(Value = "GetResponse")]
GetResponse = 6,
/// <summary>
/// Enum IContact for value: iContact
/// </summary>
[EnumMember(Value = "iContact")]
IContact = 7,
/// <summary>
/// Enum Klaviyo for value: Klaviyo
/// </summary>
[EnumMember(Value = "Klaviyo")]
Klaviyo = 8,
/// <summary>
/// Enum Lyris for value: Lyris
/// </summary>
[EnumMember(Value = "Lyris")]
Lyris = 9,
/// <summary>
/// Enum LyrisHQ for value: LyrisHQ
/// </summary>
[EnumMember(Value = "LyrisHQ")]
LyrisHQ = 10,
/// <summary>
/// Enum MailChimp for value: MailChimp
/// </summary>
[EnumMember(Value = "MailChimp")]
MailChimp = 11,
/// <summary>
/// Enum SilverPop for value: SilverPop
/// </summary>
[EnumMember(Value = "SilverPop")]
SilverPop = 12
}
/// <summary>
/// Provider name
/// </summary>
/// <value>Provider name</value>
[DataMember(Name="provider_name", EmitDefaultValue=false)]
public ProviderNameEnum? ProviderName { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="ItemThirdPartyEmailMarketing" /> class.
/// </summary>
/// <param name="addTags">Add tags.</param>
/// <param name="providerName">Provider name.</param>
/// <param name="removeTags">Remove tags.</param>
/// <param name="subscribeLists">Subscribe to lists.</param>
/// <param name="unsubscribeLists">Unsubscribe from lists.</param>
public ItemThirdPartyEmailMarketing(List<string> addTags = default(List<string>), ProviderNameEnum? providerName = default(ProviderNameEnum?), List<string> removeTags = default(List<string>), List<string> subscribeLists = default(List<string>), List<string> unsubscribeLists = default(List<string>))
{
this.AddTags = addTags;
this.ProviderName = providerName;
this.RemoveTags = removeTags;
this.SubscribeLists = subscribeLists;
this.UnsubscribeLists = unsubscribeLists;
}
/// <summary>
/// Add tags
/// </summary>
/// <value>Add tags</value>
[DataMember(Name="add_tags", EmitDefaultValue=false)]
public List<string> AddTags { get; set; }
/// <summary>
/// Remove tags
/// </summary>
/// <value>Remove tags</value>
[DataMember(Name="remove_tags", EmitDefaultValue=false)]
public List<string> RemoveTags { get; set; }
/// <summary>
/// Subscribe to lists
/// </summary>
/// <value>Subscribe to lists</value>
[DataMember(Name="subscribe_lists", EmitDefaultValue=false)]
public List<string> SubscribeLists { get; set; }
/// <summary>
/// Unsubscribe from lists
/// </summary>
/// <value>Unsubscribe from lists</value>
[DataMember(Name="unsubscribe_lists", EmitDefaultValue=false)]
public List<string> UnsubscribeLists { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class ItemThirdPartyEmailMarketing {\n");
sb.Append(" AddTags: ").Append(AddTags).Append("\n");
sb.Append(" ProviderName: ").Append(ProviderName).Append("\n");
sb.Append(" RemoveTags: ").Append(RemoveTags).Append("\n");
sb.Append(" SubscribeLists: ").Append(SubscribeLists).Append("\n");
sb.Append(" UnsubscribeLists: ").Append(UnsubscribeLists).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public virtual string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="input">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object input)
{
return this.Equals(input as ItemThirdPartyEmailMarketing);
}
/// <summary>
/// Returns true if ItemThirdPartyEmailMarketing instances are equal
/// </summary>
/// <param name="input">Instance of ItemThirdPartyEmailMarketing to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(ItemThirdPartyEmailMarketing input)
{
if (input == null)
return false;
return
(
this.AddTags == input.AddTags ||
this.AddTags != null &&
this.AddTags.SequenceEqual(input.AddTags)
) &&
(
this.ProviderName == input.ProviderName ||
(this.ProviderName != null &&
this.ProviderName.Equals(input.ProviderName))
) &&
(
this.RemoveTags == input.RemoveTags ||
this.RemoveTags != null &&
this.RemoveTags.SequenceEqual(input.RemoveTags)
) &&
(
this.SubscribeLists == input.SubscribeLists ||
this.SubscribeLists != null &&
this.SubscribeLists.SequenceEqual(input.SubscribeLists)
) &&
(
this.UnsubscribeLists == input.UnsubscribeLists ||
this.UnsubscribeLists != null &&
this.UnsubscribeLists.SequenceEqual(input.UnsubscribeLists)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
unchecked // Overflow is fine, just wrap
{
int hashCode = 41;
if (this.AddTags != null)
hashCode = hashCode * 59 + this.AddTags.GetHashCode();
if (this.ProviderName != null)
hashCode = hashCode * 59 + this.ProviderName.GetHashCode();
if (this.RemoveTags != null)
hashCode = hashCode * 59 + this.RemoveTags.GetHashCode();
if (this.SubscribeLists != null)
hashCode = hashCode * 59 + this.SubscribeLists.GetHashCode();
if (this.UnsubscribeLists != null)
hashCode = hashCode * 59 + this.UnsubscribeLists.GetHashCode();
return hashCode;
}
}
/// <summary>
/// To validate all properties of the instance
/// </summary>
/// <param name="validationContext">Validation context</param>
/// <returns>Validation Result</returns>
IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyDateTime
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Models;
/// <summary>
/// Extension methods for Datetime.
/// </summary>
public static partial class DatetimeExtensions
{
/// <summary>
/// Get null datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetNull(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetNullAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get null datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetNullAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetNullWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get invalid datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetInvalid(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetInvalidAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get invalid datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetInvalidAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetInvalidWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get overflow datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetOverflow(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetOverflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get overflow datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetOverflowAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetOverflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get underflow datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetUnderflow(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetUnderflowAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get underflow datetime value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetUnderflowAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetUnderflowWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put max datetime value 9999-12-31T23:59:59.9999999Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutUtcMaxDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put max datetime value 9999-12-31T23:59:59.9999999Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutUtcMaxDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutUtcMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get max datetime value 9999-12-31t23:59:59.9999999z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetUtcLowercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value 9999-12-31t23:59:59.9999999z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetUtcLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get max datetime value 9999-12-31T23:59:59.9999999Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetUtcUppercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value 9999-12-31T23:59:59.9999999Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetUtcUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put max datetime value with positive numoffset
/// 9999-12-31t23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutLocalPositiveOffsetMaxDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put max datetime value with positive numoffset
/// 9999-12-31t23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutLocalPositiveOffsetMaxDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutLocalPositiveOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get max datetime value with positive num offset
/// 9999-12-31t23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalPositiveOffsetLowercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value with positive num offset
/// 9999-12-31t23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalPositiveOffsetLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalPositiveOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get max datetime value with positive num offset
/// 9999-12-31T23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalPositiveOffsetUppercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value with positive num offset
/// 9999-12-31T23:59:59.9999999+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalPositiveOffsetUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalPositiveOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put max datetime value with negative num offset
/// 9999-12-31t23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutLocalNegativeOffsetMaxDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMaxDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put max datetime value with negative num offset
/// 9999-12-31t23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutLocalNegativeOffsetMaxDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutLocalNegativeOffsetMaxDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get max datetime value with negative num offset
/// 9999-12-31T23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalNegativeOffsetUppercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetUppercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value with negative num offset
/// 9999-12-31T23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalNegativeOffsetUppercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalNegativeOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get max datetime value with negative num offset
/// 9999-12-31t23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalNegativeOffsetLowercaseMaxDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetLowercaseMaxDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get max datetime value with negative num offset
/// 9999-12-31t23:59:59.9999999-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalNegativeOffsetLowercaseMaxDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalNegativeOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutUtcMinDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutUtcMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutUtcMinDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutUtcMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetUtcMinDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetUtcMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00Z
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetUtcMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetUtcMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutLocalPositiveOffsetMinDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutLocalPositiveOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutLocalPositiveOffsetMinDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalPositiveOffsetMinDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalPositiveOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00+14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalPositiveOffsetMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
public static void PutLocalNegativeOffsetMinDateTime(this IDatetime operations, DateTime datetimeBody)
{
Task.Factory.StartNew(s => ((IDatetime)s).PutLocalNegativeOffsetMinDateTimeAsync(datetimeBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put min datetime value 0001-01-01T00:00:00-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='datetimeBody'>
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task PutLocalNegativeOffsetMinDateTimeAsync(this IDatetime operations, DateTime datetimeBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(datetimeBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
public static DateTime? GetLocalNegativeOffsetMinDateTime(this IDatetime operations)
{
return Task.Factory.StartNew(s => ((IDatetime)s).GetLocalNegativeOffsetMinDateTimeAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get min datetime value 0001-01-01T00:00:00-14:00
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DateTime?> GetLocalNegativeOffsetMinDateTimeAsync(this IDatetime operations, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
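// Illustrative usage (not part of the generated code): these extensions are normally called on
// the Datetime operations group of an AutoRest-generated service client. The client variable
// below is hypothetical.
//
//   IDatetime ops = client.Datetime;
//   DateTime? utcMax = ops.GetUtcUppercaseMaxDateTime();
//   DateTime? utcMin = await ops.GetUtcMinDateTimeAsync();
//   ops.PutUtcMinDateTime(new DateTime(1, 1, 1, 0, 0, 0, DateTimeKind.Utc));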
}
}
| |
/*
Copyright 2012 Michael Edwards
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-CRE-
using System;
using System.Collections.Generic;
using System.Linq;
using Glass.Mapper.Configuration;
using Glass.Mapper.Pipelines.DataMapperResolver;
using Glass.Mapper.Sc.Configuration;
using Sitecore.Data;
using Sitecore.Data.Items;
using Sitecore.Data.Managers;
using Sitecore.Data.Templates;
using Sitecore.Links;
using Sitecore.Resources.Media;
namespace Glass.Mapper.Sc.DataMappers
{
/// <summary>
/// Class SitecoreInfoMapper
/// </summary>
public class SitecoreInfoMapper : AbstractDataMapper
{
/// <summary>
/// Initializes a new instance of the <see cref="SitecoreInfoMapper"/> class.
/// </summary>
public SitecoreInfoMapper()
{
ReadOnly = true;
}
/// <summary>
/// Maps data from the .Net property value to the CMS value
/// </summary>
/// <param name="mappingContext">The mapping context.</param>
/// <returns>The value to write</returns>
/// <exception cref="System.NotSupportedException">
/// Can't set DisplayName. Value is not of type System.String
/// or
/// Can't set Name. Value is not of type System.String
/// or
/// You can not save SitecoreInfo {0}.Formatted(scConfig.Type)
/// </exception>
/// <exception cref="Glass.Mapper.MapperException">You can not set an empty or null Item name</exception>
public override void MapToCms(AbstractDataMappingContext mappingContext)
{
var context = mappingContext as SitecoreDataMappingContext;
if (context == null)
{
throw new NullReferenceException("Mapping context has not been set.");
}
var item = context.Item;
var value = context.PropertyValue;
var scConfig = Configuration as SitecoreInfoConfiguration;
if (scConfig == null)
{
throw new NullReferenceException("Configuration is not set");
}
switch (scConfig.Type)
{
case SitecoreInfoType.DisplayName:
if (value is string || value == null)
item[Global.Fields.DisplayName] = (value ?? string.Empty).ToString();
else
throw new NotSupportedException("Can't set DisplayName. Value is not of type System.String");
break;
case SitecoreInfoType.Name:
if (value is string || value == null)
{
//a null or empty item name is not allowed
if (value == null || value.ToString().IsNullOrEmpty())
throw new MapperException("You can not set an empty or null Item name");
if (item.Name != value.ToString())
{
item.Name = value.ToString();
}
}
else
throw new NotSupportedException("Can't set Name. Value is not of type System.String");
break;
default:
throw new NotSupportedException("You can not save SitecoreInfo {0}".Formatted(scConfig.Type));
}
}
/// <summary>
/// Maps data from the CMS value to the .Net property value
/// </summary>
/// <param name="mappingContext">The mapping context.</param>
/// <returns>System.Object.</returns>
/// <exception cref="Glass.Mapper.MapperException">SitecoreInfoType {0} not supported.Formatted(scConfig.Type)</exception>
public override object MapToProperty(AbstractDataMappingContext mappingContext)
{
var context = mappingContext as SitecoreDataMappingContext;
if (context == null)
{
throw new NullReferenceException("Mapping Context has not been set.");
}
var item = context.Item;
var scConfig = Configuration as SitecoreInfoConfiguration;
if (scConfig == null)
{
throw new NullReferenceException("Configuration has not been set.");
}
//TODO: move this to the config?
var urlOptions = Utilities.CreateUrlOptions(scConfig.UrlOptions);
switch (scConfig.Type)
{
case SitecoreInfoType.ContentPath:
return item.Paths.ContentPath;
case SitecoreInfoType.DisplayName:
return item[Global.Fields.DisplayName];
case SitecoreInfoType.FullPath:
return item.Paths.FullPath;
case SitecoreInfoType.Name:
return item.Name;
case SitecoreInfoType.Key:
return item.Key;
case SitecoreInfoType.MediaUrl:
var media = new MediaItem(item);
return MediaManager.GetMediaUrl(media);
case SitecoreInfoType.Path:
return item.Paths.Path;
case SitecoreInfoType.TemplateId:
if (scConfig.PropertyInfo != null && scConfig.PropertyInfo.PropertyType == typeof (ID))
return item.TemplateID;
return item.TemplateID.Guid;
case SitecoreInfoType.TemplateName:
return item.TemplateName;
case SitecoreInfoType.Url:
urlOptions.Language = null;
return LinkManager.GetItemUrl(item, urlOptions);
case SitecoreInfoType.Version:
if (scConfig.PropertyInfo != null && scConfig.PropertyInfo.PropertyType == typeof (string))
{
return item.Version.Number.ToString();
}
return item.Version.Number;
case SitecoreInfoType.Language:
if (scConfig.PropertyInfo != null && scConfig.PropertyInfo.PropertyType == typeof (string))
{
return item.Language.Name;
}
return item.Language;
case SitecoreInfoType.BaseTemplateIds:
Template template = TemplateManager.GetTemplate(item.TemplateID, item.Database);
if (scConfig.PropertyInfo != null &&
scConfig.PropertyInfo.PropertyType == typeof (IEnumerable<ID>))
return template.GetBaseTemplates().Select(x => x.ID);
return template.GetBaseTemplates().Select(x => x.ID.Guid);
case SitecoreInfoType.ItemUri:
return new ItemUri(item.ID, item.Language, item.Version, item.Database);
default:
throw new MapperException("SitecoreInfoType {0} not supported".Formatted(scConfig.Type));
}
}
/// <summary>
/// Sets up the data mapper for a particular property
/// </summary>
/// <param name="args">The args.</param>
public override void Setup(DataMapperResolverArgs args)
{
var scConfig = args.PropertyConfiguration as SitecoreInfoConfiguration;
if (scConfig == null)
{
throw new NullReferenceException("Configuration has not been set.");
}
ReadOnly = scConfig.Type != SitecoreInfoType.DisplayName && scConfig.Type != SitecoreInfoType.Name;
base.Setup(args);
}
/// <summary>
/// Indicates that the data mapper will mapper to and from the property
/// </summary>
/// <param name="configuration">The configuration.</param>
/// <param name="context">The context.</param>
/// <returns><c>true</c> if this instance can handle the specified configuration; otherwise, <c>false</c>.</returns>
public override bool CanHandle(AbstractPropertyConfiguration configuration, Context context)
{
return configuration is SitecoreInfoConfiguration;
}
}
}
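// Illustrative model (not part of the original source): SitecoreInfoMapper is typically driven by
// a SitecoreInfo configuration on a model property, for example via the attribute configuration in
// Glass.Mapper.Sc. The model and property names below are hypothetical.
//
//   public class PageModel
//   {
//       [SitecoreInfo(SitecoreInfoType.DisplayName)]
//       public virtual string DisplayName { get; set; }
//
//       [SitecoreInfo(SitecoreInfoType.Url)]
//       public virtual string Url { get; set; }
//   }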
| |
//
// https://github.com/ServiceStack/ServiceStack.Text
// ServiceStack.Text: .NET C# POCO JSON, JSV and CSV Text Serializers.
//
// Authors:
// Demis Bellot ([email protected])
//
// Copyright 2012 Service Stack LLC. All Rights Reserved.
//
// Licensed under the same terms of ServiceStack.
//
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using ServiceStack.Text;
using ServiceStack.Text.Common;
using ServiceStack.Text.Support;
namespace ServiceStack
{
public static class StringExtensions
{
public static T To<T>(this string value)
{
return TypeSerializer.DeserializeFromString<T>(value);
}
public static T To<T>(this string value, T defaultValue)
{
return String.IsNullOrEmpty(value) ? defaultValue : TypeSerializer.DeserializeFromString<T>(value);
}
public static T ToOrDefaultValue<T>(this string value)
{
return String.IsNullOrEmpty(value) ? default(T) : TypeSerializer.DeserializeFromString<T>(value);
}
public static object To(this string value, Type type)
{
return TypeSerializer.DeserializeFromString(value, type);
}
/// <summary>
/// Converts from base: 0 - 62
/// </summary>
/// <param name="source">The source.</param>
/// <param name="from">From.</param>
/// <param name="to">To.</param>
/// <returns></returns>
public static string BaseConvert(this string source, int from, int to)
{
const string chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
var result = "";
var length = source.Length;
var number = new int[length];
for (var i = 0; i < length; i++)
{
number[i] = chars.IndexOf(source[i]);
}
int newlen;
do
{
var divide = 0;
newlen = 0;
for (var i = 0; i < length; i++)
{
divide = divide * @from + number[i];
if (divide >= to)
{
number[newlen++] = divide / to;
divide = divide % to;
}
else if (newlen > 0)
{
number[newlen++] = 0;
}
}
length = newlen;
result = chars[divide] + result;
}
while (newlen != 0);
return result;
}
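// Illustrative usage (not part of the original source); results follow from the algorithm above:
//   "255".BaseConvert(10, 16) -> "ff"
//   "ff".BaseConvert(16, 10)  -> "255"
// Digits come from the 0-9, a-z, A-Z alphabet, so the largest supported base is 62.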
public static string EncodeXml(this string value)
{
return value.Replace("<", "<").Replace(">", ">").Replace("&", "&");
}
public static string EncodeJson(this string value)
{
return String.Concat
("\"",
value.Replace("\\", "\\\\").Replace("\"", "\\\"").Replace("\r", "").Replace("\n", "\\n"),
"\""
);
}
public static string EncodeJsv(this string value)
{
if (JsState.QueryStringMode)
{
return UrlEncode(value);
}
return String.IsNullOrEmpty(value) || !JsWriter.HasAnyEscapeChars(value)
? value
: String.Concat
(
JsWriter.QuoteString,
value.Replace(JsWriter.QuoteString, TypeSerializer.DoubleQuoteString),
JsWriter.QuoteString
);
}
public static string DecodeJsv(this string value)
{
const int startingQuotePos = 1;
const int endingQuotePos = 2;
return String.IsNullOrEmpty(value) || value[0] != JsWriter.QuoteChar
? value
: value.Substring(startingQuotePos, value.Length - endingQuotePos)
.Replace(TypeSerializer.DoubleQuoteString, JsWriter.QuoteString);
}
public static string UrlEncode(this string text, bool upperCase=false)
{
if (String.IsNullOrEmpty(text)) return text;
var sb = new StringBuilder();
var fmt = upperCase ? "X2" : "x2";
foreach (var charCode in Encoding.UTF8.GetBytes(text))
{
if (
charCode >= 65 && charCode <= 90 // A-Z
|| charCode >= 97 && charCode <= 122 // a-z
|| charCode >= 48 && charCode <= 57 // 0-9
|| charCode >= 44 && charCode <= 46 // ,-.
)
{
sb.Append((char)charCode);
}
else if(charCode == 32)
{
sb.Append('+');
}
else
{
sb.Append('%' + charCode.ToString(fmt));
}
}
return sb.ToString();
}
public static string UrlDecode(this string text)
{
if (String.IsNullOrEmpty(text)) return null;
var bytes = new List<byte>();
var textLength = text.Length;
for (var i = 0; i < textLength; i++)
{
var c = text[i];
if (c == '+')
{
bytes.Add(32);
}
else if (c == '%')
{
var hexNo = Convert.ToByte(text.Substring(i + 1, 2), 16);
bytes.Add(hexNo);
i += 2;
}
else
{
bytes.Add((byte)c);
}
}
byte[] byteArray = bytes.ToArray();
return Encoding.UTF8.GetString(byteArray, 0, byteArray.Length);
}
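// Illustrative round trip (not part of the original source): unreserved characters are kept,
// a space becomes '+', and other bytes are percent-encoded from the UTF-8 representation:
//   "Hello World!".UrlEncode()   -> "Hello+World%21"
//   "Hello+World%21".UrlDecode() -> "Hello World!"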
public static string HexUnescape(this string text, params char[] anyCharOf)
{
if (String.IsNullOrEmpty(text)) return null;
if (anyCharOf == null || anyCharOf.Length == 0) return text;
var sb = new StringBuilder();
var textLength = text.Length;
for (var i = 0; i < textLength; i++)
{
var c = text.Substring(i, 1);
if (c == "%")
{
var hexNo = Convert.ToInt32(text.Substring(i + 1, 2), 16);
sb.Append((char)hexNo);
i += 2;
}
else
{
sb.Append(c);
}
}
return sb.ToString();
}
public static string UrlFormat(this string url, params string[] urlComponents)
{
var encodedUrlComponents = new string[urlComponents.Length];
for (var i = 0; i < urlComponents.Length; i++)
{
var x = urlComponents[i];
encodedUrlComponents[i] = x.UrlEncode();
}
return String.Format(url, encodedUrlComponents);
}
public static string ToRot13(this string value)
{
var array = value.ToCharArray();
for (var i = 0; i < array.Length; i++)
{
var number = (int)array[i];
if (number >= 'a' && number <= 'z')
number += (number > 'm') ? -13 : 13;
else if (number >= 'A' && number <= 'Z')
number += (number > 'M') ? -13 : 13;
array[i] = (char)number;
}
return new string(array);
}
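// Illustrative usage (not part of the original source): letters are rotated 13 places within
// their case; everything else is left unchanged:
//   "Hello, World!".ToRot13() -> "Uryyb, Jbeyq!"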
public static string WithTrailingSlash(this string path)
{
if (String.IsNullOrEmpty(path))
throw new ArgumentNullException("path");
if (path[path.Length - 1] != '/')
{
return path + "/";
}
return path;
}
public static string AppendPath(this string uri, params string[] uriComponents)
{
return AppendUrlPaths(uri, uriComponents);
}
public static string AppendUrlPaths(this string uri, params string[] uriComponents)
{
var sb = new StringBuilder(uri.WithTrailingSlash());
var i = 0;
foreach (var uriComponent in uriComponents)
{
if (i++ > 0) sb.Append('/');
sb.Append(uriComponent.UrlEncode());
}
return sb.ToString();
}
public static string AppendUrlPathsRaw(this string uri, params string[] uriComponents)
{
var sb = new StringBuilder(uri.WithTrailingSlash());
var i = 0;
foreach (var uriComponent in uriComponents)
{
if (i++ > 0) sb.Append('/');
sb.Append(uriComponent);
}
return sb.ToString();
}
public static string FromUtf8Bytes(this byte[] bytes)
{
return bytes == null ? null
: Encoding.UTF8.GetString(bytes, 0, bytes.Length);
}
public static byte[] ToUtf8Bytes(this string value)
{
return Encoding.UTF8.GetBytes(value);
}
public static byte[] ToUtf8Bytes(this int intVal)
{
return FastToUtf8Bytes(intVal.ToString());
}
public static byte[] ToUtf8Bytes(this long longVal)
{
return FastToUtf8Bytes(longVal.ToString());
}
public static byte[] ToUtf8Bytes(this ulong ulongVal)
{
return FastToUtf8Bytes(ulongVal.ToString());
}
public static byte[] ToUtf8Bytes(this double doubleVal)
{
var doubleStr = doubleVal.ToString(CultureInfo.InvariantCulture.NumberFormat);
if (doubleStr.IndexOf('E') != -1 || doubleStr.IndexOf('e') != -1)
doubleStr = DoubleConverter.ToExactString(doubleVal);
return FastToUtf8Bytes(doubleStr);
}
/// <summary>
/// Skip the encoding process for 'safe strings'
/// </summary>
/// <param name="strVal"></param>
/// <returns></returns>
private static byte[] FastToUtf8Bytes(string strVal)
{
var bytes = new byte[strVal.Length];
for (var i = 0; i < strVal.Length; i++)
bytes[i] = (byte)strVal[i];
return bytes;
}
public static string[] SplitOnFirst(this string strVal, char needle)
{
if (strVal == null) return new string[0];
var pos = strVal.IndexOf(needle);
return pos == -1
? new[] { strVal }
: new[] { strVal.Substring(0, pos), strVal.Substring(pos + 1) };
}
public static string[] SplitOnFirst(this string strVal, string needle)
{
if (strVal == null) return new string[0];
var pos = strVal.IndexOf(needle);
return pos == -1
? new[] { strVal }
: new[] { strVal.Substring(0, pos), strVal.Substring(pos + 1) };
}
public static string[] SplitOnLast(this string strVal, char needle)
{
if (strVal == null) return new string[0];
var pos = strVal.LastIndexOf(needle);
return pos == -1
? new[] { strVal }
: new[] { strVal.Substring(0, pos), strVal.Substring(pos + 1) };
}
public static string[] SplitOnLast(this string strVal, string needle)
{
if (strVal == null) return new string[0];
var pos = strVal.LastIndexOf(needle);
return pos == -1
? new[] { strVal }
: new[] { strVal.Substring(0, pos), strVal.Substring(pos + 1) };
}
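// Illustrative usage (not part of the original source):
//   "key=value=more".SplitOnFirst('=') -> ["key", "value=more"]
//   "key=value=more".SplitOnLast('=')  -> ["key=value", "more"]
// If the needle is not found, the whole string is returned as a single-element array.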
public static string WithoutExtension(this string filePath)
{
if (string.IsNullOrEmpty(filePath))
return null;
var extPos = filePath.LastIndexOf('.');
if (extPos == -1) return filePath;
var dirPos = filePath.LastIndexOfAny(DirSeps);
return extPos > dirPos ? filePath.Substring(0, extPos) : filePath;
}
public static string GetExtension(this string filePath)
{
if (string.IsNullOrEmpty(filePath))
return null;
var extPos = filePath.LastIndexOf('.');
return extPos == -1 ? string.Empty : filePath.Substring(extPos);
}
static readonly char[] DirSeps = new[] { '\\', '/' };
public static string ParentDirectory(this string filePath)
{
if (String.IsNullOrEmpty(filePath)) return null;
var dirSep = filePath.IndexOf(PclExport.Instance.DirSep) != -1
? PclExport.Instance.DirSep
: filePath.IndexOf(PclExport.Instance.AltDirSep) != -1
? PclExport.Instance.AltDirSep
: (char)0;
return dirSep == 0 ? null : filePath.TrimEnd(dirSep).SplitOnLast(dirSep)[0];
}
public static string ToJsv<T>(this T obj)
{
return TypeSerializer.SerializeToString(obj);
}
public static T FromJsv<T>(this string jsv)
{
return TypeSerializer.DeserializeFromString<T>(jsv);
}
public static string ToJson<T>(this T obj)
{
return JsConfig.PreferInterfaces
? JsonSerializer.SerializeToString(obj, AssemblyUtils.MainInterface<T>())
: JsonSerializer.SerializeToString(obj);
}
public static T FromJson<T>(this string json)
{
return JsonSerializer.DeserializeFromString<T>(json);
}
public static string ToCsv<T>(this T obj)
{
return CsvSerializer.SerializeToString(obj);
}
public static string FormatWith(this string text, params object[] args)
{
return String.Format(text, args);
}
public static string Fmt(this string text, params object[] args)
{
return String.Format(text, args);
}
public static bool StartsWithIgnoreCase(this string text, string startsWith)
{
return text != null
&& text.StartsWith(startsWith, PclExport.Instance.InvariantComparisonIgnoreCase);
}
public static bool EndsWithIgnoreCase(this string text, string endsWith)
{
return text != null
&& text.EndsWith(endsWith, PclExport.Instance.InvariantComparisonIgnoreCase);
}
public static string ReadAllText(this string filePath)
{
return PclExport.Instance.ReadAllText(filePath);
}
public static bool FileExists(this string filePath)
{
return PclExport.Instance.FileExists(filePath);
}
public static bool DirectoryExists(this string dirPath)
{
return PclExport.Instance.DirectoryExists(dirPath);
}
public static void CreateDirectory(this string dirPath)
{
PclExport.Instance.CreateDirectory(dirPath);
}
public static int IndexOfAny(this string text, params string[] needles)
{
return IndexOfAny(text, 0, needles);
}
public static int IndexOfAny(this string text, int startIndex, params string[] needles)
{
var firstPos = -1;
if (text != null)
{
foreach (var needle in needles)
{
var pos = text.IndexOf(needle, startIndex);
if ((pos >= 0) && (firstPos == -1 || pos < firstPos))
firstPos = pos;
}
}
return firstPos;
}
public static string ExtractContents(this string fromText, string startAfter, string endAt)
{
return ExtractContents(fromText, startAfter, startAfter, endAt);
}
public static string ExtractContents(this string fromText, string uniqueMarker, string startAfter, string endAt)
{
if (String.IsNullOrEmpty(uniqueMarker))
throw new ArgumentNullException("uniqueMarker");
if (String.IsNullOrEmpty(startAfter))
throw new ArgumentNullException("startAfter");
if (String.IsNullOrEmpty(endAt))
throw new ArgumentNullException("endAt");
if (String.IsNullOrEmpty(fromText)) return null;
var markerPos = fromText.IndexOf(uniqueMarker);
if (markerPos == -1) return null;
var startPos = fromText.IndexOf(startAfter, markerPos);
if (startPos == -1) return null;
startPos += startAfter.Length;
var endPos = fromText.IndexOf(endAt, startPos);
if (endPos == -1) endPos = fromText.Length;
return fromText.Substring(startPos, endPos - startPos);
}
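// Illustrative usage (not part of the original source):
//   "<b>bold</b> text".ExtractContents("<b>", "</b>") -> "bold"
// The four-argument overload first locates uniqueMarker, then returns the text between the next
// startAfter and endAt occurrences (or up to the end of the string when endAt is not found).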
static readonly Regex StripHtmlRegEx = new Regex(@"<(.|\n)*?>", PclExport.Instance.RegexOptions);
public static string StripHtml(this string html)
{
return String.IsNullOrEmpty(html) ? null : StripHtmlRegEx.Replace(html, "");
}
static readonly Regex StripBracketsRegEx = new Regex(@"\[(.|\n)*?\]", PclExport.Instance.RegexOptions);
static readonly Regex StripBracesRegEx = new Regex(@"\((.|\n)*?\)", PclExport.Instance.RegexOptions);
public static string StripMarkdownMarkup(this string markdown)
{
if (String.IsNullOrEmpty(markdown)) return null;
markdown = StripBracketsRegEx.Replace(markdown, "");
markdown = StripBracesRegEx.Replace(markdown, "");
markdown = markdown
.Replace("*", "")
.Replace("!", "")
.Replace("\r", "")
.Replace("\n", "")
.Replace("#", "");
return markdown;
}
private const int LowerCaseOffset = 'a' - 'A';
public static string ToCamelCase(this string value)
{
if (String.IsNullOrEmpty(value)) return value;
var len = value.Length;
var newValue = new char[len];
var firstPart = true;
for (var i = 0; i < len; ++i)
{
var c0 = value[i];
var c1 = i < len - 1 ? value[i + 1] : 'A';
var c0isUpper = c0 >= 'A' && c0 <= 'Z';
var c1isUpper = c1 >= 'A' && c1 <= 'Z';
if (firstPart && c0isUpper && (c1isUpper || i == 0))
c0 = (char)(c0 + LowerCaseOffset);
else
firstPart = false;
newValue[i] = c0;
}
return new string(newValue);
}
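// Illustrative usage (not part of the original source): leading upper-case characters are
// lowered until the first lower-case boundary, which keeps acronym prefixes readable:
//   "UserId".ToCamelCase() -> "userId"
//   "ABCDef".ToCamelCase() -> "abcDef"
//   "ID".ToCamelCase()     -> "id"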
public static string ToTitleCase(this string value)
{
return PclExport.Instance.ToTitleCase(value);
}
public static string ToLowercaseUnderscore(this string value)
{
if (String.IsNullOrEmpty(value)) return value;
value = value.ToCamelCase();
var sb = new StringBuilder(value.Length);
foreach (char t in value)
{
if (Char.IsDigit(t) || (Char.IsLetter(t) && Char.IsLower(t)) || t == '_')
{
sb.Append(t);
}
else
{
sb.Append("_");
sb.Append(Char.ToLowerInvariant(t));
}
}
return sb.ToString();
}
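// Illustrative usage (not part of the original source): the value is camel-cased first, then each
// remaining upper-case character is expanded to '_' plus its lower-case form:
//   "UserId".ToLowercaseUnderscore()       -> "user_id"
//   "HTTPResponse".ToLowercaseUnderscore() -> "http_response"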
public static string SafeSubstring(this string value, int startIndex)
{
return SafeSubstring(value, startIndex, value.Length);
}
public static string SafeSubstring(this string value, int startIndex, int length)
{
if (String.IsNullOrEmpty(value)) return String.Empty;
if (value.Length >= (startIndex + length))
return value.Substring(startIndex, length);
return value.Length > startIndex ? value.Substring(startIndex) : String.Empty;
}
public static bool IsAnonymousType(this Type type)
{
if (type == null)
throw new ArgumentNullException("type");
return PclExport.Instance.IsAnonymousType(type);
}
public static int CompareIgnoreCase(this string strA, string strB)
{
return String.Compare(strA, strB, PclExport.Instance.InvariantComparisonIgnoreCase);
}
public static bool EndsWithInvariant(this string str, string endsWith)
{
return str.EndsWith(endsWith, PclExport.Instance.InvariantComparison);
}
private static readonly Regex InvalidVarCharsRegex = new Regex(@"[^A-Za-z0-9]", PclExport.Instance.RegexOptions);
private static readonly Regex SplitCamelCaseRegex = new Regex("([A-Z]|[0-9]+)", PclExport.Instance.RegexOptions);
private static readonly Regex HttpRegex = new Regex(@"^http://",
PclExport.Instance.RegexOptions | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
public static T ToEnum<T>(this string value)
{
return (T)Enum.Parse(typeof(T), value, true);
}
public static T ToEnumOrDefault<T>(this string value, T defaultValue)
{
if (String.IsNullOrEmpty(value)) return defaultValue;
return (T)Enum.Parse(typeof(T), value, true);
}
public static string SplitCamelCase(this string value)
{
return SplitCamelCaseRegex.Replace(value, " $1").TrimStart();
}
public static string ToInvariantUpper(this char value)
{
return PclExport.Instance.ToInvariantUpper(value);
}
public static string ToEnglish(this string camelCase)
{
var ucWords = camelCase.SplitCamelCase().ToLower();
return ucWords[0].ToInvariantUpper() + ucWords.Substring(1);
}
public static string ToHttps(this string url)
{
if (url == null)
{
throw new ArgumentNullException("url");
}
return HttpRegex.Replace(url.Trim(), "https://");
}
public static bool IsEmpty(this string value)
{
return String.IsNullOrEmpty(value);
}
public static bool IsNullOrEmpty(this string value)
{
return String.IsNullOrEmpty(value);
}
public static bool EqualsIgnoreCase(this string value, string other)
{
return String.Equals(value, other, StringComparison.CurrentCultureIgnoreCase);
}
public static string ReplaceFirst(this string haystack, string needle, string replacement)
{
var pos = haystack.IndexOf(needle);
if (pos < 0) return haystack;
return haystack.Substring(0, pos) + replacement + haystack.Substring(pos + needle.Length);
}
public static string ReplaceAll(this string haystack, string needle, string replacement)
{
int pos;
// Avoid a possible infinite loop
if (needle == replacement) return haystack;
while ((pos = haystack.IndexOf(needle)) > 0)
{
haystack = haystack.Substring(0, pos)
+ replacement
+ haystack.Substring(pos + needle.Length);
}
return haystack;
}
public static bool ContainsAny(this string text, params string[] testMatches)
{
foreach (var testMatch in testMatches)
{
if (text.Contains(testMatch)) return true;
}
return false;
}
public static string SafeVarName(this string text)
{
if (String.IsNullOrEmpty(text)) return null;
return InvalidVarCharsRegex.Replace(text, "_");
}
public static string Join(this List<string> items)
{
return String.Join(JsWriter.ItemSeperatorString, items.ToArray());
}
public static string Join(this List<string> items, string delimeter)
{
return String.Join(delimeter, items.ToArray());
}
public static string ToParentPath(this string path)
{
var pos = path.LastIndexOf('/');
if (pos == -1) return "/";
var parentPath = path.Substring(0, pos);
return parentPath;
}
public static string RemoveCharFlags(this string text, bool[] charFlags)
{
if (text == null) return null;
var copy = text.ToCharArray();
var nonWsPos = 0;
for (var i = 0; i < text.Length; i++)
{
var @char = text[i];
if (@char < charFlags.Length && charFlags[@char]) continue;
copy[nonWsPos++] = @char;
}
return new String(copy, 0, nonWsPos);
}
public static string ToNullIfEmpty(this string text)
{
return String.IsNullOrEmpty(text) ? null : text;
}
private static char[] SystemTypeChars = new[] { '<', '>', '+' };
public static bool IsUserType(this Type type)
{
return type.IsClass()
&& type.Namespace != null
&& !type.Namespace.StartsWith("System")
&& type.Name.IndexOfAny(SystemTypeChars) == -1;
}
public static bool IsUserEnum(this Type type)
{
return type.IsEnum()
&& type.Namespace != null
&& !type.Namespace.StartsWith("System")
&& type.Name.IndexOfAny(SystemTypeChars) == -1;
}
public static bool IsInt(this string text)
{
if (String.IsNullOrEmpty(text)) return false;
int ret;
return Int32.TryParse(text, out ret);
}
public static int ToInt(this string text)
{
return text == null ? default(int) : Int32.Parse(text);
}
public static int ToInt(this string text, int defaultValue)
{
int ret;
return Int32.TryParse(text, out ret) ? ret : defaultValue;
}
public static long ToInt64(this string text)
{
return Int64.Parse(text);
}
public static long ToInt64(this string text, long defaultValue)
{
long ret;
return Int64.TryParse(text, out ret) ? ret : defaultValue;
}
public static float ToFloat(this string text)
{
return text == null ? default(float) : float.Parse(text);
}
public static float ToFloat(this string text, float defaultValue)
{
float ret;
return float.TryParse(text, out ret) ? ret : defaultValue;
}
public static double ToDouble(this string text)
{
return text == null ? default(double) : double.Parse(text);
}
public static double ToDouble(this string text, double defaultValue)
{
double ret;
return double.TryParse(text, out ret) ? ret : defaultValue;
}
public static decimal ToDecimal(this string text)
{
return text == null ? default(decimal) : decimal.Parse(text);
}
public static decimal ToDecimal(this string text, decimal defaultValue)
{
decimal ret;
return decimal.TryParse(text, out ret) ? ret : defaultValue;
}
public static bool Matches(this string value, string pattern)
{
return value.Glob(pattern);
}
public static bool Glob(this string value, string pattern)
{
int pos;
for (pos = 0; pattern.Length != pos; pos++)
{
switch (pattern[pos])
{
case '?':
break;
case '*':
for (int i = value.Length; i >= pos; i--)
{
if (Glob(value.Substring(i), pattern.Substring(pos + 1)))
return true;
}
return false;
default:
if (value.Length == pos || Char.ToUpper(pattern[pos]) != Char.ToUpper(value[pos]))
{
return false;
}
break;
}
}
return value.Length == pos;
}
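// Illustrative usage (not part of the original source): '*' matches any run of characters,
// '?' matches exactly one character, and the comparison is case-insensitive:
//   "report.PDF".Glob("*.pdf")    -> true
//   "report.pdf".Glob("r?port.*") -> true
//   "report.pdf".Glob("*.txt")    -> false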
public static string TrimPrefixes(this string fromString, params string[] prefixes)
{
if (string.IsNullOrEmpty(fromString))
return fromString;
foreach (var prefix in prefixes)
{
if (fromString.StartsWith(prefix))
return fromString.Substring(prefix.Length);
}
return fromString;
}
public static string FromAsciiBytes(this byte[] bytes)
{
return bytes == null ? null
: PclExport.Instance.GetAsciiString(bytes);
}
public static byte[] ToAsciiBytes(this string value)
{
return PclExport.Instance.GetAsciiBytes(value);
}
public static Dictionary<string,string> ParseKeyValueText(this string text, string delimiter=" ")
{
var to = new Dictionary<string, string>();
if (text == null) return to;
foreach (var parts in text.ReadLines().Select(line => line.SplitOnFirst(delimiter)))
{
var key = parts[0].Trim();
if (key.Length == 0 || key.StartsWith("#")) continue;
to[key] = parts.Length == 2 ? parts[1].Trim() : null;
}
return to;
}
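// Illustrative usage (not part of the original source): each line is split on the first
// delimiter; blank keys and lines starting with '#' are skipped:
//   "host localhost\nport 8080\n# comment".ParseKeyValueText()
//     -> { "host": "localhost", "port": "8080" }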
public static IEnumerable<string> ReadLines(this string text)
{
string line;
var reader = new StringReader(text ?? "");
while ((line = reader.ReadLine()) != null)
{
yield return line;
}
}
public static int CountOccurrencesOf(this string text, char needle)
{
var chars = text.ToCharArray();
var count = 0;
var length = chars.Length;
for (var n = length - 1; n >= 0; n--)
{
if (chars[n] == needle)
count++;
}
return count;
}
#if !XBOX
public static string HexEscape(this string text, params char[] anyCharOf)
{
if (String.IsNullOrEmpty(text)) return text;
if (anyCharOf == null || anyCharOf.Length == 0) return text;
var encodeCharMap = new HashSet<char>(anyCharOf);
var sb = new StringBuilder();
var textLength = text.Length;
for (var i = 0; i < textLength; i++)
{
var c = text[i];
if (encodeCharMap.Contains(c))
{
sb.Append('%' + ((int)c).ToString("x"));
}
else
{
sb.Append(c);
}
}
return sb.ToString();
}
public static string ToXml<T>(this T obj)
{
return XmlSerializer.SerializeToString(obj);
}
public static T FromXml<T>(this string json)
{
return XmlSerializer.DeserializeFromString<T>(json);
}
#endif
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using DineroPortableClientSDK;
using DineroPortableClientSDK.Mailouts;
using DineroPortableClientSDK.ResponseModels;
using DineroPortableClientSDK.TradeOffers;
namespace DineroSampleApp
{
public class TradeOfferDemo
{
public static async Task<TradeOfferFetch> FetchTradeOffer(Dinero dinero)
{
Console.WriteLine("\nWe need a contact...");
var contact = await ContactDemo.AddEanContact(dinero);
var tradeOffer = await FetchTradeOffer(dinero, contact.ContactGuid);
return tradeOffer;
}
public static async Task<TradeOfferFetch> FetchTradeOfferEmpty(Dinero dinero)
{
//initialize TradeOfferFetchCreate with min. required properties
var model = new TradeOfferFetchCreate()
{
ContactGuid = null,
ProductLines = new List<TradeOfferLineCreate>()
};
var invoiceFetchResult = await dinero.TradeOffers.FetchAsync(model);
Console.WriteLine("Offer fetched");
Console.WriteLine(TradeOfferToPrintableString(invoiceFetchResult));
return invoiceFetchResult;
}
private static async Task<TradeOfferFetch> FetchTradeOffer(Dinero dinero, Guid contactGuid)
{
var model = new TradeOfferFetchCreate()
{
ContactGuid = contactGuid,
//Address = "Road 1, 2200 Copenhagen N",// Defaults to the given contacts address
//Currency = "EUR", // Defaults to DKK
//Description = "Trade Offer", // Defaults to either 'Offer' or 'Tilbud' depending on language settings
//Date = "2015-12-24", // Defaults to Today
//Language = "en-GB", // Defaults to 'da-DK'
//ExternalReference = "Your own awesome reference",
ProductLines = CreateLines()
};
var invoiceFetchResult = await dinero.TradeOffers.FetchAsync(model);
Console.WriteLine("Offer fetched");
Console.WriteLine(TradeOfferToPrintableString(invoiceFetchResult));
return invoiceFetchResult;
}
private static List<TradeOfferLineCreate> CreateLines()
{
return new List<TradeOfferLineCreate>()
{
TradeOfferLineCreate.CreateOfferProductLine("TestProduct", 100, discount: 10),
TradeOfferLineCreate.CreatOfferTextLine("Test heading 1"),
TradeOfferLineCreate.CreateOfferProductLine("TestProduct2", 150, quantity: 4)
};
}
public static async Task<TradeOffer> AddAndGetNewTradeOffer(Dinero dinero, bool linesInclVat = false)
{
Console.WriteLine("\nBefore creating a tradeoffer we need a contact...");
var contact = await ContactDemo.AddNewContact(dinero);
var tradeoffer = await AddNewTradeOffer(dinero, contact.ContactGuid, linesInclVat);
if (tradeoffer != null)
return await GetTradeOffer(dinero, tradeoffer.Guid);
return null;
}
public static async Task AddAndGetDeleteTradeOffer(Dinero dinero)
{
var tradeoffer = await AddAndGetNewTradeOffer(dinero);
await dinero.TradeOffers.DeleteAsync(tradeoffer.Guid);
Console.WriteLine("Trade offer deleted.");
}
public static async Task AddGetUpdateTradeOffer(Dinero dinero)
{
var tradeoffer = await AddAndGetNewTradeOffer(dinero);
var updateObj = new TradeOfferUpdate()
{
Timestamp = tradeoffer.TimeStamp,
ContactGuid = tradeoffer.ContactGuid,
ExternalReference = "I've updated this invoice!",
ProductLines = tradeoffer.ProductLines.Select(TradeOfferLineCreate.CreateFrom).ToList()
};
await dinero.TradeOffers.UpdateAsync(tradeoffer.Guid, updateObj);
Console.WriteLine("Trade offer updated.");
}
public static async Task GenerateInvoice(Dinero dinero)
{
var tradeoffer = await AddAndGetNewTradeOffer(dinero);
var createdResult = await dinero.TradeOffers.GenerateInvoiceAsync(tradeoffer.Guid,
tradeoffer.TimeStamp); // timestamp is optional, use it to verify you got the latest version of the trade offer
Console.WriteLine("Invoice generated from trade offer");
var generatedInvoice = await InvoiceDemo.GetInvoice(dinero, createdResult.Guid);
}
public static async Task SendEmail(Dinero dinero, string receiverEmail)
{
//Creating the tradeoffer
var tradeoffer = await AddAndGetNewTradeOffer(dinero);
//Creating mailout model. None of the properties are mandatory.
var mailoutModel = new MailOut()
{
//Timestamp = tradeoffer.TimeStamp, //Defaults to latest version, but you can use this to ensure the offer has not been changed
Sender = "[email protected]", //Defaults to organization email
Receiver = receiverEmail, //Defaults to contact email. Make sure the contact has an email if you leave this empty.
Message = "This is a test email from the demo API. To use the default logic for generating email," //Defaults to standard message
+ " just leave this property empty. Here is a downlaod link to your tradeoffer: [link-to-pdf]"
};
await dinero.TradeOffers.SendEmailAsync(tradeoffer.Guid, mailoutModel);
Console.WriteLine("Email was send to: " + receiverEmail);
}
public static async Task DownloadPdf(Dinero dinero, string path)
{
var tradeOffer = await AddAndGetNewTradeOffer(dinero);
if (!Directory.Exists(path))
Directory.CreateDirectory(path);
var file = Path.Combine(path,
$"Trade offer {tradeOffer.Number} -{DateTime.Now:yyyy-MM-dd HHmmss}.pdf");
if (File.Exists(file))
File.Delete(file);
var pdfStream = await dinero.TradeOffers.DownloadPdfAsync(tradeOffer.Guid);
using (var fileStream = File.Create(file))
{
pdfStream.Seek(0, SeekOrigin.Begin);
pdfStream.CopyTo(fileStream);
fileStream.Flush();
fileStream.Close();
}
Console.WriteLine("PDF saved to: " + file);
System.Diagnostics.Process.Start(file);
}
public static async Task<TradeOffer> GetTradeOffer(Dinero dinero, Guid tradeofferGuid)
{
var tradeOfferResult = await dinero.TradeOffers.GetAsync(tradeofferGuid);
Console.WriteLine("Trade offer retrieved from server: " + TradeOfferToPrintableString(tradeOfferResult));
if (tradeOfferResult.TotalLines != null)
{
foreach (var t in tradeOfferResult.TotalLines.OrderBy(x => x.Position))
{
Console.WriteLine("({0}) {1} {2} ", t.Type, t.Label, t.TotalAmount);
}
}
foreach (var tradeOfferLine in tradeOfferResult.ProductLines)
{
if (tradeOfferLine.LineType == "Product")
{
var amount = tradeOfferResult.ShowLinesInclVat
? tradeOfferLine.TotalAmountInclVat
: tradeOfferLine.TotalAmount;
Console.WriteLine(
$"Product line: {tradeOfferLine.Description} DKK {amount} {(tradeOfferResult.ShowLinesInclVat ? "(incl vat)" : "")}- {tradeOfferLine.AccountName}");
}
else
{
Console.WriteLine($"Text line: {tradeOfferLine.Description}");
}
}
return tradeOfferResult;
}
private static async Task<CreatedResponse> AddNewTradeOffer(Dinero dinero, Guid contactGuid, bool linesInclVat = false)
{
//initialize TradeOfferCreate with min. required properties
var model = new TradeOfferCreate()
{
ShowLinesInclVat = linesInclVat,
ContactGuid = contactGuid,
//Address = "Road 1, 2200 Copenhagen N",// Defaults to the given contacts address
//Currency = "EUR", // Defaults to DKK
//Description = "Offer", // Defaults to either 'Offer' or 'Tilbud' depending on language settings
//Date = "2015-12-24", // Defaults to Today
//Language = "en-GB", // Defaults to 'da-DK'
//ExternalReference = "Your own awesome reference",
ProductLines = CreateLines()
};
var tradeOfferCreatedResult = await dinero.TradeOffers.AddAsync(model);
Console.WriteLine("Trade offer created");
return tradeOfferCreatedResult;
}
public static string TradeOfferToPrintableString(TradeOfferFetch tradeOffer)
{
return string.Format("{0} {4} - {1} - {2} {3} ",
tradeOffer.Description, tradeOffer.Date, tradeOffer.TotalInclVat, tradeOffer.Currency, tradeOffer.Number);
}
public static string TradeOfferToPrintableString(TradeOffer tradeOffer)
{
return string.Format("{0} {4} - {1} - {2} {3} ",
tradeOffer.Description, tradeOffer.Date, tradeOffer.TotalInclVat, tradeOffer.Currency, tradeOffer.Number);
}
public static async Task GetList(Dinero dinero, string filter = null, DateTime? changedSince = null, bool? includeDeleted = null)
{
var collectionResult = await dinero.TradeOffers.GetListAsync("Number,ContactName,ContactGuid,ExternalReference,Description,TotalInclVat,Currency,UpdatedAt,DeletedAt", filter, changedSince, includeDeleted, freeTextSearch: "tilbud");
foreach (var tradeoffer in collectionResult.Collection)
{
Console.WriteLine(TradeOfferToPrintableString(tradeoffer) + " - " + tradeoffer.ContactName);
}
Console.WriteLine("\nFound {0} tradeoffers", collectionResult.Collection.Count);
}
}
}
| |
namespace android.text.util
{
[global::MonoJavaBridge.JavaClass()]
public partial class Linkify : java.lang.Object
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
protected Linkify(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
[global::MonoJavaBridge.JavaInterface(typeof(global::android.text.util.Linkify.MatchFilter_))]
public partial interface MatchFilter : global::MonoJavaBridge.IJavaObject
{
bool acceptMatch(java.lang.CharSequence arg0, int arg1, int arg2);
}
[global::MonoJavaBridge.JavaProxy(typeof(global::android.text.util.Linkify.MatchFilter))]
internal sealed partial class MatchFilter_ : java.lang.Object, MatchFilter
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
internal MatchFilter_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
bool android.text.util.Linkify.MatchFilter.acceptMatch(java.lang.CharSequence arg0, int arg1, int arg2)
{
return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::android.text.util.Linkify.MatchFilter_.staticClass, "acceptMatch", "(Ljava/lang/CharSequence;II)Z", ref global::android.text.util.Linkify.MatchFilter_._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
}
static MatchFilter_()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.text.util.Linkify.MatchFilter_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/text/util/Linkify$MatchFilter"));
}
}
public delegate bool MatchFilterDelegate(java.lang.CharSequence arg0, int arg1, int arg2);
internal partial class MatchFilterDelegateWrapper : java.lang.Object, MatchFilter
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
protected MatchFilterDelegateWrapper(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
public MatchFilterDelegateWrapper() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify.MatchFilterDelegateWrapper._m0.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify.MatchFilterDelegateWrapper._m0 = @__env.GetMethodIDNoThrow(global::android.text.util.Linkify.MatchFilterDelegateWrapper.staticClass, "<init>", "()V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.text.util.Linkify.MatchFilterDelegateWrapper.staticClass, global::android.text.util.Linkify.MatchFilterDelegateWrapper._m0);
Init(@__env, handle);
}
static MatchFilterDelegateWrapper()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.text.util.Linkify.MatchFilterDelegateWrapper.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/text/util/Linkify_MatchFilterDelegateWrapper"));
}
}
internal partial class MatchFilterDelegateWrapper
{
private MatchFilterDelegate myDelegate;
public bool acceptMatch(java.lang.CharSequence arg0, int arg1, int arg2)
{
return myDelegate(arg0, arg1, arg2);
}
public static implicit operator MatchFilterDelegateWrapper(MatchFilterDelegate d)
{
global::android.text.util.Linkify.MatchFilterDelegateWrapper ret = new global::android.text.util.Linkify.MatchFilterDelegateWrapper();
ret.myDelegate = d;
global::MonoJavaBridge.JavaBridge.SetGCHandle(global::MonoJavaBridge.JNIEnv.ThreadEnv, ret);
return ret;
}
}
[global::MonoJavaBridge.JavaInterface(typeof(global::android.text.util.Linkify.TransformFilter_))]
public partial interface TransformFilter : global::MonoJavaBridge.IJavaObject
{
global::java.lang.String transformUrl(java.util.regex.Matcher arg0, java.lang.String arg1);
}
[global::MonoJavaBridge.JavaProxy(typeof(global::android.text.util.Linkify.TransformFilter))]
internal sealed partial class TransformFilter_ : java.lang.Object, TransformFilter
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
internal TransformFilter_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
global::java.lang.String android.text.util.Linkify.TransformFilter.transformUrl(java.util.regex.Matcher arg0, java.lang.String arg1)
{
return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.String>(this, global::android.text.util.Linkify.TransformFilter_.staticClass, "transformUrl", "(Ljava/util/regex/Matcher;Ljava/lang/String;)Ljava/lang/String;", ref global::android.text.util.Linkify.TransformFilter_._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)) as java.lang.String;
}
static TransformFilter_()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.text.util.Linkify.TransformFilter_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/text/util/Linkify$TransformFilter"));
}
}
public delegate java.lang.String TransformFilterDelegate(java.util.regex.Matcher arg0, java.lang.String arg1);
internal partial class TransformFilterDelegateWrapper : java.lang.Object, TransformFilter
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
protected TransformFilterDelegateWrapper(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
public TransformFilterDelegateWrapper() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify.TransformFilterDelegateWrapper._m0.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify.TransformFilterDelegateWrapper._m0 = @__env.GetMethodIDNoThrow(global::android.text.util.Linkify.TransformFilterDelegateWrapper.staticClass, "<init>", "()V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.text.util.Linkify.TransformFilterDelegateWrapper.staticClass, global::android.text.util.Linkify.TransformFilterDelegateWrapper._m0);
Init(@__env, handle);
}
static TransformFilterDelegateWrapper()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.text.util.Linkify.TransformFilterDelegateWrapper.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/text/util/Linkify_TransformFilterDelegateWrapper"));
}
}
internal partial class TransformFilterDelegateWrapper
{
private TransformFilterDelegate myDelegate;
public java.lang.String transformUrl(java.util.regex.Matcher arg0, java.lang.String arg1)
{
return myDelegate(arg0, arg1);
}
public static implicit operator TransformFilterDelegateWrapper(TransformFilterDelegate d)
{
global::android.text.util.Linkify.TransformFilterDelegateWrapper ret = new global::android.text.util.Linkify.TransformFilterDelegateWrapper();
ret.myDelegate = d;
global::MonoJavaBridge.JavaBridge.SetGCHandle(global::MonoJavaBridge.JNIEnv.ThreadEnv, ret);
return ret;
}
}
private static global::MonoJavaBridge.MethodId _m0;
public static bool addLinks(android.widget.TextView arg0, int arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m0.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m0 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/widget/TextView;I)Z");
return @__env.CallStaticBooleanMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
}
private static global::MonoJavaBridge.MethodId _m1;
public static void addLinks(android.widget.TextView arg0, java.util.regex.Pattern arg1, java.lang.String arg2)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m1.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m1 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/widget/TextView;Ljava/util/regex/Pattern;Ljava/lang/String;)V");
@__env.CallStaticVoidMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m1, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
}
private static global::MonoJavaBridge.MethodId _m2;
public static void addLinks(android.widget.TextView arg0, java.util.regex.Pattern arg1, java.lang.String arg2, android.text.util.Linkify.MatchFilter arg3, android.text.util.Linkify.TransformFilter arg4)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m2.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m2 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/widget/TextView;Ljava/util/regex/Pattern;Ljava/lang/String;Landroid/text/util/Linkify$MatchFilter;Landroid/text/util/Linkify$TransformFilter;)V");
@__env.CallStaticVoidMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m2, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4));
}
public static void addLinks(android.widget.TextView arg0, java.util.regex.Pattern arg1, java.lang.String arg2, global::android.text.util.Linkify.MatchFilterDelegate arg3, global::android.text.util.Linkify.TransformFilterDelegate arg4)
{
addLinks(arg0, arg1, arg2, (global::android.text.util.Linkify.MatchFilterDelegateWrapper)arg3, (global::android.text.util.Linkify.TransformFilterDelegateWrapper)arg4);
}
private static global::MonoJavaBridge.MethodId _m3;
public static bool addLinks(android.text.Spannable arg0, java.util.regex.Pattern arg1, java.lang.String arg2)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m3.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m3 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/text/Spannable;Ljava/util/regex/Pattern;Ljava/lang/String;)Z");
return @__env.CallStaticBooleanMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m3, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
}
private static global::MonoJavaBridge.MethodId _m4;
public static bool addLinks(android.text.Spannable arg0, java.util.regex.Pattern arg1, java.lang.String arg2, android.text.util.Linkify.MatchFilter arg3, android.text.util.Linkify.TransformFilter arg4)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m4.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m4 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/text/Spannable;Ljava/util/regex/Pattern;Ljava/lang/String;Landroid/text/util/Linkify$MatchFilter;Landroid/text/util/Linkify$TransformFilter;)Z");
return @__env.CallStaticBooleanMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m4, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4));
}
public static bool addLinks(android.text.Spannable arg0, java.util.regex.Pattern arg1, java.lang.String arg2, global::android.text.util.Linkify.MatchFilterDelegate arg3, global::android.text.util.Linkify.TransformFilterDelegate arg4)
{
return addLinks(arg0, arg1, arg2, (global::android.text.util.Linkify.MatchFilterDelegateWrapper)arg3, (global::android.text.util.Linkify.TransformFilterDelegateWrapper)arg4);
}
private static global::MonoJavaBridge.MethodId _m5;
public static bool addLinks(android.text.Spannable arg0, int arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m5.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m5 = @__env.GetStaticMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "addLinks", "(Landroid/text/Spannable;I)Z");
return @__env.CallStaticBooleanMethod(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m5, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
}
private static global::MonoJavaBridge.MethodId _m6;
public Linkify() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.text.util.Linkify._m6.native == global::System.IntPtr.Zero)
global::android.text.util.Linkify._m6 = @__env.GetMethodIDNoThrow(global::android.text.util.Linkify.staticClass, "<init>", "()V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.text.util.Linkify.staticClass, global::android.text.util.Linkify._m6);
Init(@__env, handle);
}
public static int WEB_URLS
{
get
{
return 1;
}
}
public static int EMAIL_ADDRESSES
{
get
{
return 2;
}
}
public static int PHONE_NUMBERS
{
get
{
return 4;
}
}
public static int MAP_ADDRESSES
{
get
{
return 8;
}
}
public static int ALL
{
get
{
return 15;
}
}
internal static global::MonoJavaBridge.FieldId _sUrlMatchFilter5367;
public static global::android.text.util.Linkify.MatchFilter sUrlMatchFilter
{
get
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::android.text.util.Linkify.MatchFilter>(@__env.GetStaticObjectField(global::android.text.util.Linkify.staticClass, _sUrlMatchFilter5367)) as android.text.util.Linkify.MatchFilter;
}
}
internal static global::MonoJavaBridge.FieldId _sPhoneNumberMatchFilter5368;
public static global::android.text.util.Linkify.MatchFilter sPhoneNumberMatchFilter
{
get
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::android.text.util.Linkify.MatchFilter>(@__env.GetStaticObjectField(global::android.text.util.Linkify.staticClass, _sPhoneNumberMatchFilter5368)) as android.text.util.Linkify.MatchFilter;
}
}
internal static global::MonoJavaBridge.FieldId _sPhoneNumberTransformFilter5369;
public static global::android.text.util.Linkify.TransformFilter sPhoneNumberTransformFilter
{
get
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::android.text.util.Linkify.TransformFilter>(@__env.GetStaticObjectField(global::android.text.util.Linkify.staticClass, _sPhoneNumberTransformFilter5369)) as android.text.util.Linkify.TransformFilter;
}
}
static Linkify()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.text.util.Linkify.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/text/util/Linkify"));
global::android.text.util.Linkify._sUrlMatchFilter5367 = @__env.GetStaticFieldIDNoThrow(global::android.text.util.Linkify.staticClass, "sUrlMatchFilter", "Landroid/text/util/Linkify$MatchFilter;");
global::android.text.util.Linkify._sPhoneNumberMatchFilter5368 = @__env.GetStaticFieldIDNoThrow(global::android.text.util.Linkify.staticClass, "sPhoneNumberMatchFilter", "Landroid/text/util/Linkify$MatchFilter;");
global::android.text.util.Linkify._sPhoneNumberTransformFilter5369 = @__env.GetStaticFieldIDNoThrow(global::android.text.util.Linkify.staticClass, "sPhoneNumberTransformFilter", "Landroid/text/util/Linkify$TransformFilter;");
}
}
}
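// Illustrative usage sketch (not produced by the binding generator): how managed code
// might consume the generated Linkify binding above. The implicit conversion declared on
// TransformFilterDelegateWrapper lets a plain C# lambda stand in for the Java
// android.text.util.Linkify.TransformFilter interface; the TextView instance is assumed
// to be supplied by the caller.
namespace LinkifyUsageSketch
{
    internal static class Example
    {
        internal static void AddAllLinks(global::android.widget.TextView textView)
        {
            // Linkify web URLs, e-mail addresses, phone numbers and map addresses (ALL == 15).
            bool added = global::android.text.util.Linkify.addLinks(textView, global::android.text.util.Linkify.ALL);

            // A transform filter expressed as a C# lambda; passing it to an addLinks overload
            // that takes a TransformFilterDelegate routes it through the wrapper class above.
            global::android.text.util.Linkify.TransformFilterDelegate passThrough =
                (matcher, url) => url; // assumption: identity transform, shown only to illustrate the delegate shape
        }
    }
}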
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security;
namespace System.IO.Compression
{
/// <summary>
/// Provides a wrapper around the ZLib decompression API
/// </summary>
internal sealed class Inflater : IDisposable
{
private bool _finished; // Whether the end of the stream has been reached
private bool _isDisposed; // Prevents multiple disposals
private ZLibNative.ZLibStreamHandle _zlibStream; // The handle to the primary underlying zlib stream
private GCHandle _inputBufferHandle; // The handle to the buffer that provides input to _zlibStream
private readonly object _syncLock = new object(); // Used to make writing to unmanaged structures atomic
private const int minWindowBits = -15; // WindowBits must be between -8..-15 for a raw (headerless) deflate stream, 8..15 for
private const int maxWindowBits = 47; // a zlib header, 24..31 for a GZip header, or 40..47 to auto-detect either zlib or GZip
#region Exposed Members
/// <summary>
/// Initializes the Inflater with the given windowBits size
/// </summary>
internal Inflater(int windowBits)
{
Debug.Assert(windowBits >= minWindowBits && windowBits <= maxWindowBits);
_finished = false;
_isDisposed = false;
InflateInit(windowBits);
}
public int AvailableOutput
{
get
{
return (int)_zlibStream.AvailOut;
}
}
/// <summary>
/// Returns true if the end of the stream has been reached.
/// </summary>
public bool Finished()
{
return _finished && _zlibStream.AvailIn == 0 && _zlibStream.AvailOut == 0;
}
public unsafe bool Inflate(out byte b)
{
// If Inflate is called on an invalid or unready inflater, report that no byte has been read.
if (NeedsInput() || !_inputBufferHandle.IsAllocated) // GCHandle is a value type, so IsAllocated (not a null check) signals missing input
{
b = 0;
return false;
}
fixed (byte* bufPtr = &b)
{
int bytesRead = InflateVerified(bufPtr, 1);
Debug.Assert(bytesRead == 0 || bytesRead == 1);
return bytesRead != 0;
}
}
public unsafe int Inflate(byte[] bytes, int offset, int length)
{
// If Inflate is called on an invalid or unready inflater, return 0 to indicate no bytes have been read.
if (NeedsInput() || !_inputBufferHandle.IsAllocated || length == 0)
return 0;
Debug.Assert(null != bytes, "Can't pass in a null output buffer!");
fixed (byte* bufPtr = bytes)
{
return InflateVerified(bufPtr + offset, length);
}
}
public unsafe int InflateVerified(byte* bufPtr, int length)
{
// State is valid; attempt inflation
Debug.Assert(!NeedsInput() && _inputBufferHandle.IsAllocated && length != 0);
try
{
int bytesRead;
if (ReadInflateOutput(bufPtr, length, ZLibNative.FlushCode.NoFlush, out bytesRead) == ZLibNative.ErrorCode.StreamEnd)
{
_finished = true;
}
return bytesRead;
}
finally
{
// Before returning, make sure to release input buffer if necessary:
if (0 == _zlibStream.AvailIn && _inputBufferHandle.IsAllocated)
{
DeallocateInputBufferHandle();
}
}
}
public bool NeedsInput()
{
return _zlibStream.AvailIn == 0;
}
public void SetInput(byte[] inputBuffer, int startIndex, int count)
{
Debug.Assert(NeedsInput(), "We have something left in previous input!");
Debug.Assert(inputBuffer != null);
Debug.Assert(startIndex >= 0 && count >= 0 && count + startIndex <= inputBuffer.Length);
Debug.Assert(!_inputBufferHandle.IsAllocated);
if (0 == count)
return;
lock (_syncLock)
{
_inputBufferHandle = GCHandle.Alloc(inputBuffer, GCHandleType.Pinned);
_zlibStream.NextIn = _inputBufferHandle.AddrOfPinnedObject() + startIndex;
_zlibStream.AvailIn = (uint)count;
_finished = false;
}
}
[SecuritySafeCritical]
private void Dispose(bool disposing)
{
if (!_isDisposed)
{
if (disposing)
_zlibStream.Dispose();
if (_inputBufferHandle.IsAllocated)
DeallocateInputBufferHandle();
_isDisposed = true;
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
~Inflater()
{
Dispose(false);
}
#endregion
#region Helper Methods
/// <summary>
/// Creates the ZStream that will handle inflation
/// </summary>
[SecuritySafeCritical]
private void InflateInit(int windowBits)
{
ZLibNative.ErrorCode error;
try
{
error = ZLibNative.CreateZLibStreamForInflate(out _zlibStream, windowBits);
}
catch (Exception exception) // could not load the ZLib dll
{
throw new ZLibException(SR.ZLibErrorDLLLoadError, exception);
}
switch (error)
{
case ZLibNative.ErrorCode.Ok: // Successful initialization
return;
case ZLibNative.ErrorCode.MemError: // Not enough memory
throw new ZLibException(SR.ZLibErrorNotEnoughMemory, "inflateInit2_", (int)error, _zlibStream.GetErrorMessage());
case ZLibNative.ErrorCode.VersionError: //zlib library is incompatible with the version assumed
throw new ZLibException(SR.ZLibErrorVersionMismatch, "inflateInit2_", (int)error, _zlibStream.GetErrorMessage());
case ZLibNative.ErrorCode.StreamError: // Parameters are invalid
throw new ZLibException(SR.ZLibErrorIncorrectInitParameters, "inflateInit2_", (int)error, _zlibStream.GetErrorMessage());
default:
throw new ZLibException(SR.ZLibErrorUnexpected, "inflateInit2_", (int)error, _zlibStream.GetErrorMessage());
}
}
/// <summary>
/// Wrapper around the ZLib inflate function, configuring the stream appropriately.
/// </summary>
private unsafe ZLibNative.ErrorCode ReadInflateOutput(byte* bufPtr, int length, ZLibNative.FlushCode flushCode, out int bytesRead)
{
lock (_syncLock)
{
_zlibStream.NextOut = (IntPtr)bufPtr;
_zlibStream.AvailOut = (uint)length;
ZLibNative.ErrorCode errC = Inflate(flushCode);
bytesRead = length - (int)_zlibStream.AvailOut;
return errC;
}
}
/// <summary>
/// Wrapper around the ZLib inflate function
/// </summary>
[SecuritySafeCritical]
private ZLibNative.ErrorCode Inflate(ZLibNative.FlushCode flushCode)
{
ZLibNative.ErrorCode errC;
try
{
errC = _zlibStream.Inflate(flushCode);
}
catch (Exception cause) // could not load the Zlib DLL correctly
{
throw new ZLibException(SR.ZLibErrorDLLLoadError, cause);
}
switch (errC)
{
case ZLibNative.ErrorCode.Ok: // progress has been made inflating
case ZLibNative.ErrorCode.StreamEnd: // The end of the input stream has been reached
return errC;
case ZLibNative.ErrorCode.BufError: // No room in the output buffer - inflate() can be called again with more space to continue
return errC;
case ZLibNative.ErrorCode.MemError: // Not enough memory to complete the operation
throw new ZLibException(SR.ZLibErrorNotEnoughMemory, "inflate_", (int)errC, _zlibStream.GetErrorMessage());
case ZLibNative.ErrorCode.DataError: // The input data was corrupted (input stream not conforming to the zlib format or incorrect check value)
throw new InvalidDataException(SR.UnsupportedCompression);
case ZLibNative.ErrorCode.StreamError: //the stream structure was inconsistent (for example if next_in or next_out was NULL),
throw new ZLibException(SR.ZLibErrorInconsistentStream, "inflate_", (int)errC, _zlibStream.GetErrorMessage());
default:
throw new ZLibException(SR.ZLibErrorUnexpected, "inflate_", (int)errC, _zlibStream.GetErrorMessage());
}
}
/// <summary>
/// Frees the GCHandle being used to store the input buffer
/// </summary>
private void DeallocateInputBufferHandle()
{
Debug.Assert(_inputBufferHandle.IsAllocated);
lock (_syncLock)
{
_zlibStream.AvailIn = 0;
_zlibStream.NextIn = ZLibNative.ZNullPtr;
_inputBufferHandle.Free();
}
}
#endregion
}
}
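// Illustrative usage sketch (assumes code compiled into System.IO.Compression, since
// Inflater is internal): the drive loop a stream wrapper typically uses — pin a chunk of
// compressed input with SetInput, then drain Inflate until the inflater either needs more
// input or reports the end of the stream.
namespace System.IO.Compression
{
    internal static class InflaterUsageSketch
    {
        internal static int Decompress(Inflater inflater, byte[] compressed, byte[] output)
        {
            int totalWritten = 0;
            inflater.SetInput(compressed, 0, compressed.Length);
            while (!inflater.Finished() && totalWritten < output.Length)
            {
                int written = inflater.Inflate(output, totalWritten, output.Length - totalWritten);
                if (written == 0 && inflater.NeedsInput())
                    break; // the caller would feed the next chunk of compressed data here
                totalWritten += written;
            }
            return totalWritten;
        }
    }
}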
| |
using HarmonyLib;
using HarmonyLibTests.Assets;
using NUnit.Framework;
using System;
namespace HarmonyLibTests.Patching
{
[TestFixture]
public class Arguments : TestLogger
{
[Test]
public void Test_Method6()
{
var originalClass = typeof(Class6);
Assert.NotNull(originalClass);
var originalMethod = originalClass.GetMethod("Method6");
Assert.NotNull(originalMethod);
var patchClass = typeof(Class6Patch);
var prefix = patchClass.GetMethod("Prefix");
Assert.NotNull(prefix);
var instance = new Harmony("test");
Assert.NotNull(instance);
var patcher = instance.CreateProcessor(originalMethod);
Assert.NotNull(patcher);
_ = patcher.AddPrefix(prefix);
Assert.NotNull(patcher);
_ = patcher.Patch();
var instance6 = new Class6
{
someFloat = 999,
someString = "original",
someStruct = new Class6Struct() { d1 = 1, d2 = 2, d3 = 3 }
};
var res = instance6.Method6();
Assert.AreEqual(res[0], 123);
Assert.AreEqual(res[1], "patched");
Assert.AreEqual(((Class6Struct)res[2]).d1, 10.0);
}
[Test]
public void Test_Method7()
{
var originalClass = typeof(Class7);
Assert.NotNull(originalClass);
var originalMethod = originalClass.GetMethod("Method7");
Assert.NotNull(originalMethod);
var patchClass = typeof(Class7Patch);
var postfix = patchClass.GetMethod("Postfix");
Assert.NotNull(postfix);
var instance = new Harmony("test");
Assert.NotNull(instance);
var patcher = instance.CreateProcessor(originalMethod);
Assert.NotNull(patcher);
_ = patcher.AddPostfix(postfix);
_ = patcher.Patch();
var instance7 = new Class7();
var result = instance7.Method7("parameter");
Assert.AreEqual("parameter", instance7.state1);
Assert.AreEqual(10, result.a);
Assert.AreEqual(20, result.b);
}
[Test]
public void Test_Method8()
{
var originalClass = typeof(Class8);
Assert.NotNull(originalClass);
var originalMethod = originalClass.GetMethod("Method8");
Assert.NotNull(originalMethod);
var patchClass = typeof(Class8Patch);
var postfix = patchClass.GetMethod("Postfix");
Assert.NotNull(postfix);
var instance = new Harmony("test");
Assert.NotNull(instance);
var patcher = instance.CreateProcessor(originalMethod);
Assert.NotNull(patcher);
_ = patcher.AddPostfix(postfix);
Assert.NotNull(patcher);
_ = patcher.Patch();
var result = Class8.Method8("patched");
Assert.True(Class8.mainRun);
Assert.AreEqual(10, result.a);
Assert.AreEqual(20, result.b);
}
[Test]
public void Test_InjectingBaseClassField()
{
var testInstance = new InjectFieldSubClass();
testInstance.Method("foo");
Assert.AreEqual("foo", testInstance.TestValue);
var originalClass = testInstance.GetType();
Assert.NotNull(originalClass);
var originalMethod = originalClass.GetMethod("Method");
Assert.NotNull(originalMethod);
var patchClass = typeof(InjectFieldSubClass_Patch);
var postfix = patchClass.GetMethod("Postfix");
Assert.NotNull(postfix);
var instance = new Harmony("test");
Assert.NotNull(instance);
var patcher = instance.CreateProcessor(originalMethod);
Assert.NotNull(patcher);
_ = patcher.AddPostfix(postfix);
Assert.NotNull(patcher);
_ = patcher.Patch();
testInstance.Method("bar");
Assert.AreEqual("patched", testInstance.TestValue);
}
[Test]
public void Test_InjectBaseDelegateForClass()
{
var instance = new InjectDelegateClass() { pre = "{", post = "}" };
instance.Method(123);
Assert.AreEqual("[{test:123}]", instance.result);
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(InjectDelegateClassPatch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
instance.Method(123);
Assert.AreEqual("{patch:456} | [{patch:456}]", InjectDelegateClassPatch.result);
}
[Test]
public void Test_InjectDelegateForStaticClass()
{
Assert.AreEqual("[1999]", InjectDelegateStaticClass.Method(999));
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(InjectDelegateStaticClassPatch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
Assert.AreEqual("[123]/[456]", InjectDelegateStaticClass.Method(4444));
}
[Test]
public void Test_InjectDelegateForValueType()
{
var instance = new InjectDelegateStruct() { pre = "{", post = "}" };
Assert.AreEqual("{1999}", instance.Method(999));
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(InjectDelegateStructPatch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
Assert.AreEqual("{123}/{456}", instance.Method(4444));
}
[Test]
public void Test_RefResults()
{
var intRef1 = Class19.Method19();
Assert.AreEqual(123, intRef1);
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(Class19Patch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
var intRef2 = Class19.Method19();
Assert.AreEqual(456, intRef2);
}
[Test]
public void Test_BoxingValueResults()
{
var struct1 = Class20.Method20();
Assert.AreEqual(123, struct1.value);
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(Class20Patch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
_ = Class20.Method20();
var result = (Class20.Struct20)Class20Patch.theResult;
Assert.AreEqual(123, result.value);
}
[Test]
public void Test_BoxingRefValueResults()
{
var struct1 = Class21.Method21();
Assert.AreEqual(123, struct1.value);
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(Class21Patch));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
var result = Class21.Method21();
Assert.AreEqual(456, result.value);
}
[Test]
public void Test_ArgumentCases()
{
var harmony = new Harmony("test");
typeof(ArgumentOriginalMethods).GetMethods().Do(original =>
{
var name = original.Name;
var i = name.IndexOf("_2_");
if (i > 0)
{
var typeName = name.Substring(i + 3);
var replacementName = $"To_{typeName}";
var replacement = typeof(ArgumentPatchMethods).GetMethod(replacementName);
Assert.NotNull(replacement, $"replacement '{replacementName}'");
try
{
var result = harmony.Patch(original, new HarmonyMethod(replacement));
Assert.NotNull(result, "result");
}
catch (Exception ex)
{
Assert.Fail($"Patching {original.Name} failed:\n{ex}");
}
}
});
var instance = new ArgumentOriginalMethods();
ArgumentPatchMethods.Reset();
var obj = new ArgumentTypes.Object();
instance.Object_2_Object(obj);
instance.Object_2_ObjectRef(obj);
instance.ObjectRef_2_Object(ref obj);
instance.ObjectRef_2_ObjectRef(ref obj);
var val = new ArgumentTypes.Value() { n = 100 };
instance.Value_2_Value(val);
instance.Value_2_Boxing(val);
instance.Value_2_ValueRef(val);
Assert.AreEqual(100, val.n);
instance.Value_2_BoxingRef(val);
instance.ValueRef_2_Value(ref val);
instance.ValueRef_2_Boxing(ref val);
instance.ValueRef_2_ValueRef(ref val);
Assert.AreEqual(101, val.n);
instance.ValueRef_2_BoxingRef(ref val);
Assert.AreEqual(102, val.n);
Assert.AreEqual("OOOOVVVVVVVV", ArgumentPatchMethods.result);
}
[Test]
public void Test_ArrayArguments()
{
var harmony = new Harmony("test");
var processor = new PatchClassProcessor(harmony, typeof(ArgumentArrayPatches));
var patches = processor.Patch();
Assert.NotNull(patches, "patches");
Assert.AreEqual(1, patches.Count);
ArgumentArrayPatches.prefixInput = null;
ArgumentArrayPatches.postfixInput = null;
var instance = new ArgumentArrayMethods();
var n1 = 8;
var n2 = 9;
var s1 = "A";
var s2 = "B";
var st1 = new ArgumentArrayMethods.SomeStruct() { n = 8 };
var st2 = new ArgumentArrayMethods.SomeStruct() { n = 9 };
var f1 = new float[] { 8f };
var f2 = new float[] { 9f };
instance.Method(
n1, ref n2, out var n3,
s1, ref s2, out var s3,
st1, ref st2, out var st3,
f1, ref f2, out var f3
);
// prefix input
var r = ArgumentArrayPatches.prefixInput;
var i = 0;
Assert.AreEqual(8, r[i], $"prefix[{i++}]");
Assert.AreEqual(9, r[i], $"prefix[{i++}]");
Assert.AreEqual(0, r[i], $"prefix[{i++}]");
Assert.AreEqual("A", r[i], $"prefix[{i++}]");
Assert.AreEqual("B", r[i], $"prefix[{i++}]");
Assert.AreEqual(null, r[i], $"prefix[{i++}]");
Assert.AreEqual(8, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"prefix[{i++}]");
Assert.AreEqual(9, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"prefix[{i++}]");
Assert.AreEqual(0, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"prefix[{i++}]");
Assert.AreEqual(8f, ((float[])r[i])[0], $"prefix[{i++}]");
Assert.AreEqual(9f, ((float[])r[i])[0], $"prefix[{i++}]");
Assert.AreEqual(null, (float[])r[i], $"prefix[{i++}]");
// postfix input
r = ArgumentArrayPatches.postfixInput;
i = 0;
Assert.AreEqual(8, r[i], $"postfix[{i++}]");
Assert.AreEqual(123, r[i], $"postfix[{i++}]");
Assert.AreEqual(456, r[i], $"postfix[{i++}]");
Assert.AreEqual("A", r[i], $"postfix[{i++}]");
Assert.AreEqual("abc", r[i], $"postfix[{i++}]");
Assert.AreEqual("def", r[i], $"postfix[{i++}]");
Assert.AreEqual(8, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"postfix[{i++}]");
Assert.AreEqual(123, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"postfix[{i++}]");
Assert.AreEqual(456, ((ArgumentArrayMethods.SomeStruct)r[i]).n, $"postfix[{i++}]");
Assert.AreEqual(8f, ((float[])r[i])[0], $"postfix[{i++}]");
Assert.AreEqual(5.6f, ((float[])r[i])[2], $"postfix[{i++}]");
Assert.AreEqual(6.5f, ((float[])r[i])[2], $"postfix[{i++}]");
// method output values
Assert.AreEqual(123, n2, "n2");
Assert.AreEqual(456, n3, "n3");
Assert.AreEqual("abc", s2, "s2");
Assert.AreEqual("def", s3, "s3");
Assert.AreEqual(123, st2.n, "st2");
Assert.AreEqual(456, st3.n, "st3");
Assert.AreEqual(5.6f, f2[2], "f2");
Assert.AreEqual(6.5f, f3[2], "f3");
}
}
}
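// Illustrative sketch (not part of the test suite above): the attribute-driven way to
// express the same kind of prefix/postfix patches that the tests build manually through
// CreateProcessor/AddPrefix/AddPostfix. Target and ExamplePatch are hypothetical types
// introduced only for this example.
namespace HarmonyLibTests.Sketches
{
    using System.Reflection;
    using System.Runtime.CompilerServices;
    using HarmonyLib;

    internal static class Target
    {
        [MethodImpl(MethodImplOptions.NoInlining)] // keep the method patchable
        internal static string Hello() => "original";
    }

    [HarmonyPatch(typeof(Target), nameof(Target.Hello))]
    internal static class ExamplePatch
    {
        // Runs after the original and may rewrite its return value via the injected __result.
        static void Postfix(ref string __result) => __result = "patched";
    }

    internal static class ExampleRunner
    {
        internal static void Apply()
        {
            var harmony = new Harmony("sketch");
            harmony.PatchAll(Assembly.GetExecutingAssembly()); // discovers [HarmonyPatch] classes
            // Target.Hello() now returns "patched".
        }
    }
}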
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.IO;
using System.Net.Mime;
using System.Runtime.ExceptionServices;
using System.Security.Cryptography.X509Certificates;
namespace System.Net.Mail
{
internal class SmtpTransport
{
internal const int DefaultPort = 25;
private ISmtpAuthenticationModule[] _authenticationModules;
private SmtpConnection _connection;
private SmtpClient _client;
private ICredentialsByHost _credentials;
private int _timeout = 100000; // milliseconds (100 seconds)
private List<SmtpFailedRecipientException> _failedRecipientExceptions = new List<SmtpFailedRecipientException>();
private bool _identityRequired;
private bool _enableSsl = false;
private X509CertificateCollection _clientCertificates = null;
internal SmtpTransport(SmtpClient client) : this(client, SmtpAuthenticationManager.GetModules())
{
}
internal SmtpTransport(SmtpClient client, ISmtpAuthenticationModule[] authenticationModules)
{
_client = client;
if (authenticationModules == null)
{
throw new ArgumentNullException(nameof(authenticationModules));
}
_authenticationModules = authenticationModules;
}
internal ICredentialsByHost Credentials
{
get
{
return _credentials;
}
set
{
_credentials = value;
}
}
internal bool IdentityRequired
{
get
{
return _identityRequired;
}
set
{
_identityRequired = value;
}
}
internal bool IsConnected
{
get
{
return _connection != null && _connection.IsConnected;
}
}
internal int Timeout
{
get
{
return _timeout;
}
set
{
if (value < 0)
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_timeout = value;
}
}
internal bool EnableSsl
{
get
{
return _enableSsl;
}
set
{
_enableSsl = value;
}
}
internal X509CertificateCollection ClientCertificates
{
get
{
if (_clientCertificates == null)
{
_clientCertificates = new X509CertificateCollection();
}
return _clientCertificates;
}
}
internal bool ServerSupportsEai
{
get { return _connection != null && _connection.ServerSupportsEai; }
}
internal void GetConnection(string host, int port)
{
try
{
_connection = new SmtpConnection(this, _client, _credentials, _authenticationModules);
_connection.Timeout = _timeout;
if (NetEventSource.IsEnabled) NetEventSource.Associate(this, _connection);
if (EnableSsl)
{
_connection.EnableSsl = true;
_connection.ClientCertificates = ClientCertificates;
}
_connection.GetConnection(host, port);
}
finally { }
}
internal IAsyncResult BeginGetConnection(ContextAwareResult outerResult, AsyncCallback callback, object state, string host, int port)
{
if (NetEventSource.IsEnabled) NetEventSource.Enter(this);
IAsyncResult result = null;
try
{
_connection = new SmtpConnection(this, _client, _credentials, _authenticationModules);
_connection.Timeout = _timeout;
if (NetEventSource.IsEnabled) NetEventSource.Associate(this, _connection);
if (EnableSsl)
{
_connection.EnableSsl = true;
_connection.ClientCertificates = ClientCertificates;
}
result = _connection.BeginGetConnection(outerResult, callback, state, host, port);
}
catch (Exception innerException)
{
throw new SmtpException(SR.MailHostNotFound, innerException);
}
if (NetEventSource.IsEnabled)
{
NetEventSource.Info(this, "Sync completion");
NetEventSource.Exit(this);
}
return result;
}
internal void EndGetConnection(IAsyncResult result)
{
if (NetEventSource.IsEnabled) NetEventSource.Enter(this);
try
{
_connection.EndGetConnection(result);
}
finally
{
if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
}
}
internal IAsyncResult BeginSendMail(MailAddress sender, MailAddressCollection recipients,
string deliveryNotify, bool allowUnicode, AsyncCallback callback, object state)
{
if (sender == null)
{
throw new ArgumentNullException(nameof(sender));
}
if (recipients == null)
{
throw new ArgumentNullException(nameof(recipients));
}
SendMailAsyncResult result = new SendMailAsyncResult(_connection, sender, recipients,
allowUnicode, _connection.DSNEnabled ? deliveryNotify : null,
callback, state);
result.Send();
return result;
}
internal void ReleaseConnection()
{
if (_connection != null)
{
_connection.ReleaseConnection();
}
}
internal void Abort()
{
if (_connection != null)
{
_connection.Abort();
}
}
internal MailWriter EndSendMail(IAsyncResult result)
{
try
{
return SendMailAsyncResult.End(result);
}
finally
{
}
}
internal MailWriter SendMail(MailAddress sender, MailAddressCollection recipients, string deliveryNotify,
bool allowUnicode, out SmtpFailedRecipientException exception)
{
if (sender == null)
{
throw new ArgumentNullException(nameof(sender));
}
if (recipients == null)
{
throw new ArgumentNullException(nameof(recipients));
}
MailCommand.Send(_connection, SmtpCommands.Mail, sender, allowUnicode);
_failedRecipientExceptions.Clear();
exception = null;
string response;
foreach (MailAddress address in recipients)
{
string smtpAddress = address.GetSmtpAddress(allowUnicode);
string to = smtpAddress + (_connection.DSNEnabled ? deliveryNotify : string.Empty);
if (!RecipientCommand.Send(_connection, to, out response))
{
_failedRecipientExceptions.Add(
new SmtpFailedRecipientException(_connection.Reader.StatusCode, smtpAddress, response));
}
}
if (_failedRecipientExceptions.Count > 0)
{
if (_failedRecipientExceptions.Count == 1)
{
exception = _failedRecipientExceptions[0];
}
else
{
exception = new SmtpFailedRecipientsException(_failedRecipientExceptions, _failedRecipientExceptions.Count == recipients.Count);
}
if (_failedRecipientExceptions.Count == recipients.Count)
{
exception.fatal = true;
throw exception;
}
}
DataCommand.Send(_connection);
return new MailWriter(_connection.GetClosableStream());
}
}
internal class SendMailAsyncResult : LazyAsyncResult
{
private SmtpConnection _connection;
private MailAddress _from;
private string _deliveryNotify;
private static AsyncCallback s_sendMailFromCompleted = new AsyncCallback(SendMailFromCompleted);
private static AsyncCallback s_sendToCollectionCompleted = new AsyncCallback(SendToCollectionCompleted);
private static AsyncCallback s_sendDataCompleted = new AsyncCallback(SendDataCompleted);
private List<SmtpFailedRecipientException> _failedRecipientExceptions = new List<SmtpFailedRecipientException>();
private Stream _stream;
private MailAddressCollection _toCollection;
private int _toIndex;
private bool _allowUnicode;
internal SendMailAsyncResult(SmtpConnection connection, MailAddress from, MailAddressCollection toCollection,
bool allowUnicode, string deliveryNotify, AsyncCallback callback, object state)
: base(null, state, callback)
{
_toCollection = toCollection;
_connection = connection;
_from = from;
_deliveryNotify = deliveryNotify;
_allowUnicode = allowUnicode;
}
internal void Send()
{
SendMailFrom();
}
internal static MailWriter End(IAsyncResult result)
{
SendMailAsyncResult thisPtr = (SendMailAsyncResult)result;
object sendMailResult = thisPtr.InternalWaitForCompletion();
// Note the difference between the singular and plural FailedRecipient exceptions.
// Only fail immediately if we couldn't send to any recipients.
if ((sendMailResult is Exception e)
&& (!(sendMailResult is SmtpFailedRecipientException)
|| ((SmtpFailedRecipientException)sendMailResult).fatal))
{
ExceptionDispatchInfo.Capture(e).Throw();
}
return new MailWriter(thisPtr._stream);
}
private void SendMailFrom()
{
IAsyncResult result = MailCommand.BeginSend(_connection, SmtpCommands.Mail, _from, _allowUnicode,
s_sendMailFromCompleted, this);
if (!result.CompletedSynchronously)
{
return;
}
MailCommand.EndSend(result);
SendToCollection();
}
private static void SendMailFromCompleted(IAsyncResult result)
{
if (!result.CompletedSynchronously)
{
SendMailAsyncResult thisPtr = (SendMailAsyncResult)result.AsyncState;
try
{
MailCommand.EndSend(result);
thisPtr.SendToCollection();
}
catch (Exception e)
{
thisPtr.InvokeCallback(e);
}
}
}
private void SendToCollection()
{
while (_toIndex < _toCollection.Count)
{
MultiAsyncResult result = (MultiAsyncResult)RecipientCommand.BeginSend(_connection,
_toCollection[_toIndex++].GetSmtpAddress(_allowUnicode) + _deliveryNotify,
s_sendToCollectionCompleted, this);
if (!result.CompletedSynchronously)
{
return;
}
string response;
if (!RecipientCommand.EndSend(result, out response))
{
_failedRecipientExceptions.Add(new SmtpFailedRecipientException(_connection.Reader.StatusCode,
_toCollection[_toIndex - 1].GetSmtpAddress(_allowUnicode), response));
}
}
SendData();
}
private static void SendToCollectionCompleted(IAsyncResult result)
{
if (!result.CompletedSynchronously)
{
SendMailAsyncResult thisPtr = (SendMailAsyncResult)result.AsyncState;
try
{
string response;
if (!RecipientCommand.EndSend(result, out response))
{
thisPtr._failedRecipientExceptions.Add(
new SmtpFailedRecipientException(thisPtr._connection.Reader.StatusCode,
thisPtr._toCollection[thisPtr._toIndex - 1].GetSmtpAddress(thisPtr._allowUnicode),
response));
if (thisPtr._failedRecipientExceptions.Count == thisPtr._toCollection.Count)
{
SmtpFailedRecipientException exception = null;
if (thisPtr._toCollection.Count == 1)
{
exception = (SmtpFailedRecipientException)thisPtr._failedRecipientExceptions[0];
}
else
{
exception = new SmtpFailedRecipientsException(thisPtr._failedRecipientExceptions, true);
}
exception.fatal = true;
thisPtr.InvokeCallback(exception);
return;
}
}
thisPtr.SendToCollection();
}
catch (Exception e)
{
thisPtr.InvokeCallback(e);
}
}
}
private void SendData()
{
IAsyncResult result = DataCommand.BeginSend(_connection, s_sendDataCompleted, this);
if (!result.CompletedSynchronously)
{
return;
}
DataCommand.EndSend(result);
_stream = _connection.GetClosableStream();
if (_failedRecipientExceptions.Count > 1)
{
InvokeCallback(new SmtpFailedRecipientsException(_failedRecipientExceptions, _failedRecipientExceptions.Count == _toCollection.Count));
}
else if (_failedRecipientExceptions.Count == 1)
{
InvokeCallback(_failedRecipientExceptions[0]);
}
else
{
InvokeCallback();
}
}
private static void SendDataCompleted(IAsyncResult result)
{
if (!result.CompletedSynchronously)
{
SendMailAsyncResult thisPtr = (SendMailAsyncResult)result.AsyncState;
try
{
DataCommand.EndSend(result);
thisPtr._stream = thisPtr._connection.GetClosableStream();
if (thisPtr._failedRecipientExceptions.Count > 1)
{
thisPtr.InvokeCallback(new SmtpFailedRecipientsException(thisPtr._failedRecipientExceptions, thisPtr._failedRecipientExceptions.Count == thisPtr._toCollection.Count));
}
else if (thisPtr._failedRecipientExceptions.Count == 1)
{
thisPtr.InvokeCallback(thisPtr._failedRecipientExceptions[0]);
}
else
{
thisPtr.InvokeCallback();
}
}
catch (Exception e)
{
thisPtr.InvokeCallback(e);
}
}
}
// Returns the non-terminal recipient failures (some recipients failed but not others), aggregated into a single exception when there is more than one.
internal SmtpFailedRecipientException GetFailedRecipientException()
{
if (_failedRecipientExceptions.Count == 1)
{
return (SmtpFailedRecipientException)_failedRecipientExceptions[0];
}
else if (_failedRecipientExceptions.Count > 1)
{
// Aggregate exception, multiple failures
return new SmtpFailedRecipientsException(_failedRecipientExceptions, false);
}
return null;
}
}
}
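// Illustrative sketch (assumes code compiled into System.Net.Mail, since SmtpTransport and
// MailWriter are internal): the synchronous path a caller such as SmtpClient follows — open
// the connection, let SendMail issue MAIL FROM / RCPT TO / DATA, stream the message through
// the returned MailWriter, then release the connection.
namespace System.Net.Mail
{
    internal static class SmtpTransportUsageSketch
    {
        internal static void Send(SmtpTransport transport, string host, int port,
            MailAddress from, MailAddressCollection to)
        {
            transport.GetConnection(host, port);
            try
            {
                SmtpFailedRecipientException partialFailure;
                MailWriter writer = transport.SendMail(from, to, string.Empty,
                    allowUnicode: false, out partialFailure);
                // The caller would now write the RFC 2822 message to 'writer' and close it;
                // a non-null partialFailure lists recipients that were rejected without
                // aborting the whole send.
            }
            finally
            {
                transport.ReleaseConnection();
            }
        }
    }
}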
| |
///////////////////////////////////////////////////////////////////////////
// Description: Data Access class for the table 'RS_JenisLayanan'
// Generated by LLBLGen v1.21.2003.712 Final on: Thursday, October 11, 2007, 2:03:11 AM
// Because the base class already implements IDisposable, this class doesn't.
///////////////////////////////////////////////////////////////////////////
using System;
using System.Data;
using System.Data.SqlTypes;
using System.Data.SqlClient;
namespace SIMRS.DataAccess
{
/// <summary>
/// Purpose: Data Access class for the table 'RS_JenisLayanan'.
/// </summary>
public class RS_JenisLayanan : DBInteractionBase
{
#region Class Member Declarations
private SqlBoolean _published;
private SqlDateTime _createdDate, _modifiedDate;
private SqlInt32 _createdBy, _modifiedBy, _ordering, _id;
private SqlString _kode, _nama, _keterangan;
#endregion
/// <summary>
/// Purpose: Class constructor.
/// </summary>
public RS_JenisLayanan()
{
// Nothing for now.
}
/// <summary>
/// Purpose: IsExist method. This method checks whether matching data already exists in the database.
/// </summary>
/// <returns></returns>
public bool IsExist()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_IsExist]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@Id", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _id));
cmdToExecute.Parameters.Add(new SqlParameter("@Kode", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _kode));
cmdToExecute.Parameters.Add(new SqlParameter("@Nama", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _nama));
cmdToExecute.Parameters.Add(new SqlParameter("@IsExist", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, false, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
_rowsAffected = cmdToExecute.ExecuteNonQuery();
int IsExist = int.Parse(cmdToExecute.Parameters["@IsExist"].Value.ToString());
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_IsExist' reported the ErrorCode: " + _errorCode);
}
return IsExist == 1;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::IsExist::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
}
}
/// <summary>
/// Purpose: Insert method. This method will insert one new row into the database.
/// </summary>
/// <returns>True if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties needed for this method:
/// <UL>
/// <LI>Id</LI>
/// <LI>Kode</LI>
/// <LI>Nama</LI>
/// <LI>Keterangan. May be SqlString.Null</LI>
/// <LI>Published</LI>
/// <LI>Ordering</LI>
/// <LI>CreatedBy</LI>
/// <LI>CreatedDate</LI>
/// <LI>ModifiedBy. May be SqlInt32.Null</LI>
/// <LI>ModifiedDate. May be SqlDateTime.Null</LI>
/// </UL>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// </UL>
/// </remarks>
public override bool Insert()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_Insert]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@Id", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _id));
cmdToExecute.Parameters.Add(new SqlParameter("@Kode", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _kode));
cmdToExecute.Parameters.Add(new SqlParameter("@Nama", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _nama));
cmdToExecute.Parameters.Add(new SqlParameter("@Keterangan", SqlDbType.VarChar, 255, ParameterDirection.Input, true, 0, 0, "", DataRowVersion.Proposed, _keterangan));
cmdToExecute.Parameters.Add(new SqlParameter("@Published", SqlDbType.Bit, 1, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _published));
cmdToExecute.Parameters.Add(new SqlParameter("@Ordering", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _ordering));
cmdToExecute.Parameters.Add(new SqlParameter("@CreatedBy", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _createdBy));
cmdToExecute.Parameters.Add(new SqlParameter("@CreatedDate", SqlDbType.DateTime, 8, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _createdDate));
cmdToExecute.Parameters.Add(new SqlParameter("@ModifiedBy", SqlDbType.Int, 4, ParameterDirection.Input, true, 10, 0, "", DataRowVersion.Proposed, _modifiedBy));
cmdToExecute.Parameters.Add(new SqlParameter("@ModifiedDate", SqlDbType.DateTime, 8, ParameterDirection.Input, true, 0, 0, "", DataRowVersion.Proposed, _modifiedDate));
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
_rowsAffected = cmdToExecute.ExecuteNonQuery();
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_Insert' reported the ErrorCode: " + _errorCode);
}
return true;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::Insert::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
}
}
/// <summary>
/// Purpose: Update method. This method will Update one existing row in the database.
/// </summary>
/// <returns>True if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties needed for this method:
/// <UL>
/// <LI>Id</LI>
/// <LI>Kode</LI>
/// <LI>Nama</LI>
/// <LI>Keterangan. May be SqlString.Null</LI>
/// <LI>Published</LI>
/// <LI>Ordering</LI>
/// <LI>CreatedBy</LI>
/// <LI>CreatedDate</LI>
/// <LI>ModifiedBy. May be SqlInt32.Null</LI>
/// <LI>ModifiedDate. May be SqlDateTime.Null</LI>
/// </UL>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// </UL>
/// </remarks>
public override bool Update()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_Update]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@Id", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _id));
cmdToExecute.Parameters.Add(new SqlParameter("@Kode", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _kode));
cmdToExecute.Parameters.Add(new SqlParameter("@Nama", SqlDbType.VarChar, 50, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _nama));
cmdToExecute.Parameters.Add(new SqlParameter("@Keterangan", SqlDbType.VarChar, 255, ParameterDirection.Input, true, 0, 0, "", DataRowVersion.Proposed, _keterangan));
cmdToExecute.Parameters.Add(new SqlParameter("@Published", SqlDbType.Bit, 1, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _published));
cmdToExecute.Parameters.Add(new SqlParameter("@Ordering", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _ordering));
cmdToExecute.Parameters.Add(new SqlParameter("@CreatedBy", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _createdBy));
cmdToExecute.Parameters.Add(new SqlParameter("@CreatedDate", SqlDbType.DateTime, 8, ParameterDirection.Input, false, 0, 0, "", DataRowVersion.Proposed, _createdDate));
cmdToExecute.Parameters.Add(new SqlParameter("@ModifiedBy", SqlDbType.Int, 4, ParameterDirection.Input, true, 10, 0, "", DataRowVersion.Proposed, _modifiedBy));
cmdToExecute.Parameters.Add(new SqlParameter("@ModifiedDate", SqlDbType.DateTime, 8, ParameterDirection.Input, true, 0, 0, "", DataRowVersion.Proposed, _modifiedDate));
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
_rowsAffected = cmdToExecute.ExecuteNonQuery();
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_Update' reported the ErrorCode: " + _errorCode);
}
return true;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::Update::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
}
}
/// <summary>
/// Purpose: Delete method. This method will Delete one existing row in the database, based on the Primary Key.
/// </summary>
/// <returns>True if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties needed for this method:
/// <UL>
/// <LI>Id</LI>
/// </UL>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// </UL>
/// </remarks>
public override bool Delete()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_Delete]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@Id", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _id));
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
_rowsAffected = cmdToExecute.ExecuteNonQuery();
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_Delete' reported the ErrorCode: " + _errorCode);
}
return true;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::Delete::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
}
}
/// <summary>
/// Purpose: Select method. This method will Select one existing row from the database, based on the Primary Key.
/// </summary>
/// <returns>DataTable object if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties needed for this method:
/// <UL>
/// <LI>Id</LI>
/// </UL>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// <LI>Id</LI>
/// <LI>Kode</LI>
/// <LI>Nama</LI>
/// <LI>Keterangan</LI>
/// <LI>Published</LI>
/// <LI>Ordering</LI>
/// <LI>CreatedBy</LI>
/// <LI>CreatedDate</LI>
/// <LI>ModifiedBy</LI>
/// <LI>ModifiedDate</LI>
/// </UL>
/// Will fill all properties corresponding with a field in the table with the value of the row selected.
/// </remarks>
public override DataTable SelectOne()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_SelectOne]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
DataTable toReturn = new DataTable("RS_JenisLayanan");
SqlDataAdapter adapter = new SqlDataAdapter(cmdToExecute);
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@Id", SqlDbType.Int, 4, ParameterDirection.Input, false, 10, 0, "", DataRowVersion.Proposed, _id));
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
adapter.Fill(toReturn);
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_SelectOne' reported the ErrorCode: " + _errorCode);
}
if (toReturn.Rows.Count > 0)
{
_id = (Int32)toReturn.Rows[0]["Id"];
_kode = (string)toReturn.Rows[0]["Kode"];
_nama = (string)toReturn.Rows[0]["Nama"];
_keterangan = toReturn.Rows[0]["Keterangan"] == System.DBNull.Value ? SqlString.Null : (string)toReturn.Rows[0]["Keterangan"];
_published = (bool)toReturn.Rows[0]["Published"];
_ordering = (Int32)toReturn.Rows[0]["Ordering"];
_createdBy = (Int32)toReturn.Rows[0]["CreatedBy"];
_createdDate = (DateTime)toReturn.Rows[0]["CreatedDate"];
_modifiedBy = toReturn.Rows[0]["ModifiedBy"] == System.DBNull.Value ? SqlInt32.Null : (Int32)toReturn.Rows[0]["ModifiedBy"];
_modifiedDate = toReturn.Rows[0]["ModifiedDate"] == System.DBNull.Value ? SqlDateTime.Null : (DateTime)toReturn.Rows[0]["ModifiedDate"];
}
return toReturn;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::SelectOne::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
adapter.Dispose();
}
}
/// <summary>
/// Purpose: SelectAll method. This method will Select all rows from the table.
/// </summary>
/// <returns>DataTable object if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// </UL>
/// </remarks>
public override DataTable SelectAll()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_SelectAll]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
DataTable toReturn = new DataTable("RS_JenisLayanan");
SqlDataAdapter adapter = new SqlDataAdapter(cmdToExecute);
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
adapter.Fill(toReturn);
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_SelectAll' reported the ErrorCode: " + _errorCode);
}
return toReturn;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::SelectAll::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
adapter.Dispose();
}
}
/// <summary>
/// Purpose: GetList method. This method will Select all rows from the table where is active.
/// </summary>
/// <returns>DataTable object if succeeded, otherwise an Exception is thrown. </returns>
/// <remarks>
/// Properties set after a successful call of this method:
/// <UL>
/// <LI>ErrorCode</LI>
/// </UL>
/// </remarks>
public DataTable GetList()
{
SqlCommand cmdToExecute = new SqlCommand();
cmdToExecute.CommandText = "dbo.[RS_JenisLayanan_GetList]";
cmdToExecute.CommandType = CommandType.StoredProcedure;
DataTable toReturn = new DataTable("RS_JenisLayanan");
SqlDataAdapter adapter = new SqlDataAdapter(cmdToExecute);
// Use base class' connection object
cmdToExecute.Connection = _mainConnection;
try
{
cmdToExecute.Parameters.Add(new SqlParameter("@ErrorCode", SqlDbType.Int, 4, ParameterDirection.Output, true, 10, 0, "", DataRowVersion.Proposed, _errorCode));
// Open connection.
_mainConnection.Open();
// Execute query.
adapter.Fill(toReturn);
_errorCode = (SqlInt32)cmdToExecute.Parameters["@ErrorCode"].Value;
if (_errorCode != (int)LLBLError.AllOk)
{
// Throw error.
throw new Exception("Stored Procedure 'RS_JenisLayanan_GetList' reported the ErrorCode: " + _errorCode);
}
return toReturn;
}
catch (Exception ex)
{
// some error occurred. Bubble it to the caller and encapsulate the Exception object
throw new Exception("RS_JenisLayanan::GetList::Error occured.", ex);
}
finally
{
// Close connection.
_mainConnection.Close();
cmdToExecute.Dispose();
adapter.Dispose();
}
}
#region Class Property Declarations
public SqlInt32 Id
{
get
{
return _id;
}
set
{
SqlInt32 idTmp = (SqlInt32)value;
if (idTmp.IsNull)
{
throw new ArgumentOutOfRangeException("Id", "Id can't be NULL");
}
_id = value;
}
}
public SqlString Kode
{
get
{
return _kode;
}
set
{
SqlString kodeTmp = (SqlString)value;
if (kodeTmp.IsNull)
{
throw new ArgumentOutOfRangeException("Kode", "Kode can't be NULL");
}
_kode = value;
}
}
public SqlString Nama
{
get
{
return _nama;
}
set
{
SqlString namaTmp = (SqlString)value;
if (namaTmp.IsNull)
{
throw new ArgumentOutOfRangeException("Nama", "Nama can't be NULL");
}
_nama = value;
}
}
public SqlString Keterangan
{
get
{
return _keterangan;
}
set
{
_keterangan = value;
}
}
public SqlBoolean Published
{
get
{
return _published;
}
set
{
_published = value;
}
}
public SqlInt32 Ordering
{
get
{
return _ordering;
}
set
{
SqlInt32 orderingTmp = (SqlInt32)value;
if (orderingTmp.IsNull)
{
throw new ArgumentOutOfRangeException("Ordering", "Ordering can't be NULL");
}
_ordering = value;
}
}
public SqlInt32 CreatedBy
{
get
{
return _createdBy;
}
set
{
SqlInt32 createdByTmp = (SqlInt32)value;
if (createdByTmp.IsNull)
{
throw new ArgumentOutOfRangeException("CreatedBy", "CreatedBy can't be NULL");
}
_createdBy = value;
}
}
public SqlDateTime CreatedDate
{
get
{
return _createdDate;
}
set
{
SqlDateTime createdDateTmp = (SqlDateTime)value;
if (createdDateTmp.IsNull)
{
throw new ArgumentOutOfRangeException("CreatedDate", "CreatedDate can't be NULL");
}
_createdDate = value;
}
}
public SqlInt32 ModifiedBy
{
get
{
return _modifiedBy;
}
set
{
_modifiedBy = value;
}
}
public SqlDateTime ModifiedDate
{
get
{
return _modifiedDate;
}
set
{
_modifiedDate = value;
}
}
#endregion
}
}
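// Illustrative usage sketch (hand-written, not generated by LLBLGen): the typical calling
// pattern for the data-access class above — populate the SqlType properties, then invoke the
// stored-procedure wrappers. The sample values are hypothetical; connection handling lives in
// DBInteractionBase.
namespace SIMRS.DataAccess.Sketches
{
    using System.Data;
    using System.Data.SqlTypes;

    internal static class RS_JenisLayananUsage
    {
        internal static DataTable InsertAndReload()
        {
            var layanan = new RS_JenisLayanan
            {
                Id = new SqlInt32(0), // assumption: the Insert stored procedure assigns the real identity
                Kode = new SqlString("LAB"),
                Nama = new SqlString("Laboratorium"),
                Keterangan = SqlString.Null, // nullable column
                Published = SqlBoolean.True,
                Ordering = new SqlInt32(1),
                CreatedBy = new SqlInt32(1),
                CreatedDate = new SqlDateTime(System.DateTime.Now)
            };
            if (!layanan.IsExist())
            {
                layanan.Insert(); // throws if the stored procedure reports an ErrorCode
            }
            return layanan.SelectOne(); // reloads the row keyed by Id and refills the properties
        }
    }
}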
| |
using System;
using System.Collections.Generic;
namespace SilentOrbit.ProtocolBuffers
{
class ProtoPrepare
{
readonly Options options;
/// <summary>
/// Convert message/class and field/property names to CamelCase
/// </summary>
readonly bool ConvertToCamelCase = true;
public ProtoPrepare(Options options)
{
if (options.PreserveNames)
{
ConvertToCamelCase = false;
}
this.options = options;
}
public void Prepare(ProtoCollection file)
{
foreach (ProtoMessage m in file.Messages.Values)
{
if (m.OptionNamespace == null)
{
m.OptionNamespace = GetCamelCase(m.Package);
}
PrepareMessage(m);
}
foreach (ProtoEnum e in file.Enums.Values)
{
if (e.OptionNamespace == null)
{
e.OptionNamespace = GetCamelCase(e.Package);
}
e.CsType = GetCamelCase(e.ProtoName);
}
}
void PrepareMessage(ProtoMessage m)
{
//Name of message and enums
m.CsType = GetCamelCase(m.ProtoName);
foreach (ProtoEnum e in m.Enums.Values)
{
e.CsType = GetCamelCase(e.ProtoName);
}
foreach (ProtoMessage sub in m.Messages.Values)
{
PrepareMessage(sub);
}
//Prepare fields
foreach (Field f in m.Fields.Values)
{
PrepareProtoType(m, f);
DetectNameClash(m, f);
if (f.OptionDefault != null)
{
if (f.ProtoType is ProtoBuiltin && ((ProtoBuiltin)f.ProtoType).ProtoName == "bytes")
{
throw new NotImplementedException();
}
if (f.ProtoType is ProtoMessage)
{
throw new ProtoFormatException("Message can't have a default", f.Source);
}
}
}
}
/// <summary>
/// Detect fields whose name clashes with the containing message, a sibling message/enum type, or another field.
/// </summary>
/// <param name="m">Parent message</param>
/// <param name="f">Field to check</param>
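/// <example>
/// Hypothetical clash: a message "person" with a field also named "person" both map
/// to the C# name "Person"; with --fix-nameclash the field becomes "PersonField"
/// (or "person_field" when name conversion is disabled).
/// </example>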
void DetectNameClash(ProtoMessage m, Field f)
{
bool nameclash = m.CsType == f.CsName;
foreach (var tm in m.Messages.Values)
{
if (tm.CsType == f.CsName)
{
nameclash = true;
}
}
foreach (var te in m.Enums.Values)
{
if (te.CsType == f.CsName)
{
nameclash = true;
}
}
foreach (var tf in m.Fields.Values)
{
if (tf == f)
{
continue;
}
if (tf.CsName == f.CsName)
{
nameclash = true;
}
}
if (nameclash == false)
{
return;
}
//Name clash
if (options.FixNameclash)
{
if (ConvertToCamelCase)
{
f.CsName += "Field";
}
else
{
f.CsName += "_field";
}
Console.Error.WriteLine("Warning: renamed field: " + m.FullCsType + "." + f.CsName);
//Make sure our change did not result in another name collision
DetectNameClash(m, f);
}
else
{
throw new ProtoFormatException("The field: " + m.FullCsType + "." + f.CsName +
" has the same name as a sibling class/enum type which is not allowed in C#. " +
"Use --fix-nameclash to automatically rename the field.", f.Source);
}
}
/// <summary>
/// Prepare: ProtoType, WireType and CSType
/// </summary>
void PrepareProtoType(ProtoMessage m, Field f)
{
//Change property name to C# style, CamelCase.
f.CsName = GetCSPropertyName(m, f.ProtoName);
f.ProtoType = GetBuiltinProtoType(f.ProtoTypeName) ?? Search.GetProtoType(m, f.ProtoTypeName);
if (f.ProtoType == null)
{
#if DEBUG
//this will still return null but we keep it here for debugging purposes
f.ProtoType = Search.GetProtoType(m, f.ProtoTypeName);
#endif
throw new ProtoFormatException("Field type \"" + f.ProtoTypeName + "\" not found for field " + f.ProtoName + " in message " + m.FullProtoName, f.Source);
}
if (f.OptionPacked)
{
if (f.ProtoType.WireType == Wire.LengthDelimited)
{
throw new ProtoFormatException("Length delimited types cannot be packed", f.Source);
}
}
}
/// <summary>
/// Return the builtin type given the type name from a .proto file.
/// Returns null if the name refers to a message or an enum.
/// </summary>
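/// <example>
/// For example, GetBuiltinProtoType("int32") returns a ProtoBuiltin with wire type
/// Wire.Varint and C# type "int", while a message or enum type name returns null.
/// </example>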
static ProtoBuiltin GetBuiltinProtoType(string type)
{
switch (type)
{
case "double":
return new ProtoBuiltin(type, Wire.Fixed64, "double");
case "float":
return new ProtoBuiltin(type, Wire.Fixed32, "float");
case "int32":
return new ProtoBuiltin(type, Wire.Varint, "int");
case "int64":
return new ProtoBuiltin(type, Wire.Varint, "long");
case "uint32":
return new ProtoBuiltin(type, Wire.Varint, "uint");
case "uint64":
return new ProtoBuiltin(type, Wire.Varint, "ulong");
case "sint32":
return new ProtoBuiltin(type, Wire.Varint, "int");
case "sint64":
return new ProtoBuiltin(type, Wire.Varint, "long");
case "fixed32":
return new ProtoBuiltin(type, Wire.Fixed32, "uint");
case "fixed64":
return new ProtoBuiltin(type, Wire.Fixed64, "ulong");
case "sfixed32":
return new ProtoBuiltin(type, Wire.Fixed32, "int");
case "sfixed64":
return new ProtoBuiltin(type, Wire.Fixed64, "long");
case "bool":
return new ProtoBuiltin(type, Wire.Varint, "bool");
case "string":
return new ProtoBuiltin(type, Wire.LengthDelimited, "string");
case "bytes":
return new ProtoBuiltin(type, Wire.LengthDelimited, "byte[]");
default:
return null;
}
}
/// <summary>
/// Gets the C# CamelCase version of a given name.
/// Name collisions with enums are avoided.
/// </summary>
string GetCSPropertyName(ProtoMessage m, string name)
{
string csname = GetCamelCase(name);
foreach (ProtoEnum me in m.Enums.Values)
{
if (me.CsType == csname)
{
return name;
}
}
return csname;
}
/// <summary>
/// Gets the CamelCase version of a given name.
/// </summary>
string GetCamelCase(string name)
{
if (ConvertToCamelCase == false)
{
return name;
}
return SilentOrbit.Code.Name.ToCamelCase(name);
}
}
}
| |
using System;
using System.Collections;
using System.Dynamic;
using System.Linq.Expressions;
namespace Python.Runtime
{
/// <summary>
/// Represents a generic Python object. The methods of this class are
/// generally equivalent to the Python "abstract object API". See
/// PY2: https://docs.python.org/2/c-api/object.html
/// PY3: https://docs.python.org/3/c-api/object.html
/// for details.
/// </summary>
public class PyObject : DynamicObject, IDisposable
{
protected internal IntPtr obj = IntPtr.Zero;
private bool disposed = false;
/// <summary>
/// PyObject Constructor
/// </summary>
/// <remarks>
/// Creates a new PyObject from an IntPtr object reference. Note that
/// the PyObject instance assumes ownership of the object reference
/// and the reference will be DECREFed when the PyObject is garbage
/// collected or explicitly disposed.
/// </remarks>
public PyObject(IntPtr ptr)
{
obj = ptr;
}
// Protected default constructor to allow subclasses to manage
// initialization in different ways as appropriate.
protected PyObject()
{
}
// Ensure that encapsulated Python object is decref'ed appropriately
// when the managed wrapper is garbage-collected.
~PyObject()
{
Dispose();
}
/// <summary>
/// Handle Property
/// </summary>
/// <remarks>
/// Gets the native handle of the underlying Python object. This
/// value is generally for internal use by the PythonNet runtime.
/// </remarks>
public IntPtr Handle
{
get { return obj; }
}
/// <summary>
/// FromManagedObject Method
/// </summary>
/// <remarks>
/// Given an arbitrary managed object, return a Python instance that
/// reflects the managed object.
/// </remarks>
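/// <example>
/// A minimal sketch (assumes an initialized Python engine and that the GIL is held):
/// <code>
/// using (PyObject py = PyObject.FromManagedObject(42))
/// {
///     Console.WriteLine(py.ToString());
/// }
/// </code>
/// </example>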
public static PyObject FromManagedObject(object ob)
{
// Special case: if ob is null, we return None.
if (ob == null)
{
Runtime.XIncref(Runtime.PyNone);
return new PyObject(Runtime.PyNone);
}
IntPtr op = CLRObject.GetInstHandle(ob);
return new PyObject(op);
}
/// <summary>
/// AsManagedObject Method
/// </summary>
/// <remarks>
/// Return a managed object of the given type, based on the
/// value of the Python object.
/// </remarks>
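/// <example>
/// A minimal sketch, assuming py wraps a Python integer:
/// <code>
/// int value = (int)py.AsManagedObject(typeof(int));
/// </code>
/// </example>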
public object AsManagedObject(Type t)
{
object result;
if (!Converter.ToManaged(obj, t, out result, false))
{
throw new InvalidCastException("cannot convert object to target type");
}
return result;
}
/// <summary>
/// Dispose Method
/// </summary>
/// <remarks>
/// The Dispose method provides a way to explicitly release the
/// Python object represented by a PyObject instance. It is a good
/// idea to call Dispose on PyObjects that wrap resources that are
/// limited or need strict lifetime control. Otherwise, references
/// to Python objects will not be released until a managed garbage
/// collection occurs.
/// </remarks>
protected virtual void Dispose(bool disposing)
{
if (!disposed)
{
if (Runtime.Py_IsInitialized() > 0 && !Runtime.IsFinalizing)
{
IntPtr gs = PythonEngine.AcquireLock();
Runtime.XDecref(obj);
obj = IntPtr.Zero;
PythonEngine.ReleaseLock(gs);
}
disposed = true;
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// GetPythonType Method
/// </summary>
/// <remarks>
/// Returns the Python type of the object. This method is equivalent
/// to the Python expression: type(object).
/// </remarks>
public PyObject GetPythonType()
{
IntPtr tp = Runtime.PyObject_Type(obj);
return new PyObject(tp);
}
/// <summary>
/// TypeCheck Method
/// </summary>
/// <remarks>
/// Returns true if the object o is of type typeOrClass or a subtype
/// of typeOrClass.
/// </remarks>
public bool TypeCheck(PyObject typeOrClass)
{
return Runtime.PyObject_TypeCheck(obj, typeOrClass.obj);
}
/// <summary>
/// HasAttr Method
/// </summary>
/// <remarks>
/// Returns true if the object has an attribute with the given name.
/// </remarks>
public bool HasAttr(string name)
{
return Runtime.PyObject_HasAttrString(obj, name) != 0;
}
/// <summary>
/// HasAttr Method
/// </summary>
/// <remarks>
/// Returns true if the object has an attribute with the given name,
/// where name is a PyObject wrapping a string or unicode object.
/// </remarks>
public bool HasAttr(PyObject name)
{
return Runtime.PyObject_HasAttr(obj, name.obj) != 0;
}
/// <summary>
/// GetAttr Method
/// </summary>
/// <remarks>
/// Returns the named attribute of the Python object, or raises a
/// PythonException if the attribute access fails.
/// </remarks>
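/// <example>
/// A minimal sketch, assuming module wraps an imported Python module:
/// <code>
/// using (PyObject version = module.GetAttr("__version__"))
/// {
///     Console.WriteLine(version.ToString());
/// }
/// </code>
/// </example>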
public PyObject GetAttr(string name)
{
IntPtr op = Runtime.PyObject_GetAttrString(obj, name);
if (op == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(op);
}
/// <summary>
/// GetAttr Method
/// </summary>
/// <remarks>
/// Returns the named attribute of the Python object, or the given
/// default object if the attribute access fails.
/// </remarks>
public PyObject GetAttr(string name, PyObject _default)
{
IntPtr op = Runtime.PyObject_GetAttrString(obj, name);
if (op == IntPtr.Zero)
{
Runtime.PyErr_Clear();
return _default;
}
return new PyObject(op);
}
/// <summary>
/// GetAttr Method
/// </summary>
/// <remarks>
/// Returns the named attribute of the Python object or raises a
/// PythonException if the attribute access fails. The name argument
/// is a PyObject wrapping a Python string or unicode object.
/// </remarks>
public PyObject GetAttr(PyObject name)
{
IntPtr op = Runtime.PyObject_GetAttr(obj, name.obj);
if (op == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(op);
}
/// <summary>
/// GetAttr Method
/// </summary>
/// <remarks>
/// Returns the named attribute of the Python object, or the given
/// default object if the attribute access fails. The name argument
/// is a PyObject wrapping a Python string or unicode object.
/// </remarks>
public PyObject GetAttr(PyObject name, PyObject _default)
{
IntPtr op = Runtime.PyObject_GetAttr(obj, name.obj);
if (op == IntPtr.Zero)
{
Runtime.PyErr_Clear();
return _default;
}
return new PyObject(op);
}
/// <summary>
/// SetAttr Method
/// </summary>
/// <remarks>
/// Set an attribute of the object with the given name and value. This
/// method throws a PythonException if the attribute set fails.
/// </remarks>
public void SetAttr(string name, PyObject value)
{
int r = Runtime.PyObject_SetAttrString(obj, name, value.obj);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// SetAttr Method
/// </summary>
/// <remarks>
/// Set an attribute of the object with the given name and value,
/// where the name is a Python string or unicode object. This method
/// throws a PythonException if the attribute set fails.
/// </remarks>
public void SetAttr(PyObject name, PyObject value)
{
int r = Runtime.PyObject_SetAttr(obj, name.obj, value.obj);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// DelAttr Method
/// </summary>
/// <remarks>
/// Delete the named attribute of the Python object. This method
/// throws a PythonException if the attribute set fails.
/// </remarks>
public void DelAttr(string name)
{
int r = Runtime.PyObject_SetAttrString(obj, name, IntPtr.Zero);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// DelAttr Method
/// </summary>
/// <remarks>
/// Delete the named attribute of the Python object, where name is a
/// PyObject wrapping a Python string or unicode object. This method
/// throws a PythonException if the attribute set fails.
/// </remarks>
public void DelAttr(PyObject name)
{
int r = Runtime.PyObject_SetAttr(obj, name.obj, IntPtr.Zero);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// GetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// return the item at the given object index. This method raises a
/// PythonException if the indexing operation fails.
/// </remarks>
public virtual PyObject GetItem(PyObject key)
{
IntPtr op = Runtime.PyObject_GetItem(obj, key.obj);
if (op == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(op);
}
/// <summary>
/// GetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// return the item at the given string index. This method raises a
/// PythonException if the indexing operation fails.
/// </remarks>
public virtual PyObject GetItem(string key)
{
using (var pyKey = new PyString(key))
{
return GetItem(pyKey);
}
}
/// <summary>
/// GetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// return the item at the given numeric index. This method raises a
/// PythonException if the indexing operation fails.
/// </remarks>
public virtual PyObject GetItem(int index)
{
using (var key = new PyInt(index))
{
return GetItem(key);
}
}
/// <summary>
/// SetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// set the item at the given object index to the given value. This
/// method raises a PythonException if the set operation fails.
/// </remarks>
public virtual void SetItem(PyObject key, PyObject value)
{
int r = Runtime.PyObject_SetItem(obj, key.obj, value.obj);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// SetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// set the item at the given string index to the given value. This
/// method raises a PythonException if the set operation fails.
/// </remarks>
public virtual void SetItem(string key, PyObject value)
{
using (var pyKey = new PyString(key))
{
SetItem(pyKey, value);
}
}
/// <summary>
/// SetItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// set the item at the given numeric index to the given value. This
/// method raises a PythonException if the set operation fails.
/// </remarks>
public virtual void SetItem(int index, PyObject value)
{
using (var pyindex = new PyInt(index))
{
SetItem(pyindex, value);
}
}
/// <summary>
/// DelItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// delete the item at the given object index. This method raises a
/// PythonException if the delete operation fails.
/// </remarks>
public virtual void DelItem(PyObject key)
{
int r = Runtime.PyObject_DelItem(obj, key.obj);
if (r < 0)
{
throw new PythonException();
}
}
/// <summary>
/// DelItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// delete the item at the given string index. This method raises a
/// PythonException if the delete operation fails.
/// </remarks>
public virtual void DelItem(string key)
{
using (var pyKey = new PyString(key))
{
DelItem(pyKey);
}
}
/// <summary>
/// DelItem Method
/// </summary>
/// <remarks>
/// For objects that support the Python sequence or mapping protocols,
/// delete the item at the given numeric index. This method raises a
/// PythonException if the delete operation fails.
/// </remarks>
public virtual void DelItem(int index)
{
using (var pyindex = new PyInt(index))
{
DelItem(pyindex);
}
}
/// <summary>
/// Length Method
/// </summary>
/// <remarks>
/// Returns the length for objects that support the Python sequence
/// protocol, or 0 if the object does not support the protocol.
/// </remarks>
public virtual int Length()
{
int s = Runtime.PyObject_Size(obj);
if (s < 0)
{
Runtime.PyErr_Clear();
return 0;
}
return s;
}
/// <summary>
/// String Indexer
/// </summary>
/// <remarks>
/// Provides a shorthand for the string versions of the GetItem and
/// SetItem methods.
/// </remarks>
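/// <example>
/// A minimal sketch, assuming dict wraps a Python dictionary:
/// <code>
/// dict["answer"] = new PyInt(42);
/// using (PyObject value = dict["answer"])
/// {
///     Console.WriteLine(value.ToString());
/// }
/// </code>
/// </example>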
public virtual PyObject this[string key]
{
get { return GetItem(key); }
set { SetItem(key, value); }
}
/// <summary>
/// PyObject Indexer
/// </summary>
/// <remarks>
/// Provides a shorthand for the object versions of the GetItem and
/// SetItem methods.
/// </remarks>
public virtual PyObject this[PyObject key]
{
get { return GetItem(key); }
set { SetItem(key, value); }
}
/// <summary>
/// Numeric Indexer
/// </summary>
/// <remarks>
/// Provides a shorthand for the numeric versions of the GetItem and
/// SetItem methods.
/// </remarks>
public virtual PyObject this[int index]
{
get { return GetItem(index); }
set { SetItem(index, value); }
}
/// <summary>
/// GetIterator Method
/// </summary>
/// <remarks>
/// Return a new (Python) iterator for the object. This is equivalent
/// to the Python expression "iter(object)". A PythonException will be
/// raised if the object cannot be iterated.
/// </remarks>
public PyObject GetIterator()
{
IntPtr r = Runtime.PyObject_GetIter(obj);
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(r);
}
/// <summary>
/// GetEnumerator Method
/// </summary>
/// <remarks>
/// Return a new PyIter object for the object. This allows any iterable
/// python object to be iterated over in C#. A PythonException will be
/// raised if the object is not iterable.
/// </remarks>
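/// <example>
/// A minimal sketch, assuming list wraps a Python list:
/// <code>
/// foreach (PyObject item in list)
/// {
///     Console.WriteLine(item.ToString());
/// }
/// </code>
/// </example>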
public IEnumerator GetEnumerator()
{
return new PyIter(this);
}
/// <summary>
/// Invoke Method
/// </summary>
/// <remarks>
/// Invoke the callable object with the given arguments, passed as a
/// PyObject[]. A PythonException is raised if the invocation fails.
/// </remarks>
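/// <example>
/// A minimal sketch, assuming fn wraps a callable Python object:
/// <code>
/// using (PyObject result = fn.Invoke(new PyInt(1), new PyInt(2)))
/// {
///     Console.WriteLine(result.ToString());
/// }
/// </code>
/// </example>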
public PyObject Invoke(params PyObject[] args)
{
var t = new PyTuple(args);
IntPtr r = Runtime.PyObject_Call(obj, t.obj, IntPtr.Zero);
t.Dispose();
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(r);
}
/// <summary>
/// Invoke Method
/// </summary>
/// <remarks>
/// Invoke the callable object with the given arguments, passed as a
/// Python tuple. A PythonException is raised if the invocation fails.
/// </remarks>
public PyObject Invoke(PyTuple args)
{
IntPtr r = Runtime.PyObject_Call(obj, args.obj, IntPtr.Zero);
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(r);
}
/// <summary>
/// Invoke Method
/// </summary>
/// <remarks>
/// Invoke the callable object with the given positional and keyword
/// arguments. A PythonException is raised if the invocation fails.
/// </remarks>
public PyObject Invoke(PyObject[] args, PyDict kw)
{
var t = new PyTuple(args);
IntPtr r = Runtime.PyObject_Call(obj, t.obj, kw != null ? kw.obj : IntPtr.Zero);
t.Dispose();
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(r);
}
/// <summary>
/// Invoke Method
/// </summary>
/// <remarks>
/// Invoke the callable object with the given positional and keyword
/// arguments. A PythonException is raised if the invocation fails.
/// </remarks>
public PyObject Invoke(PyTuple args, PyDict kw)
{
IntPtr r = Runtime.PyObject_Call(obj, args.obj, kw != null ? kw.obj : IntPtr.Zero);
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyObject(r);
}
/// <summary>
/// InvokeMethod Method
/// </summary>
/// <remarks>
/// Invoke the named method of the object with the given arguments.
/// A PythonException is raised if the invocation is unsuccessful.
/// </remarks>
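/// <example>
/// A minimal sketch, assuming obj wraps a Python list object:
/// <code>
/// obj.InvokeMethod("append", new PyInt(1)).Dispose();
/// </code>
/// </example>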
public PyObject InvokeMethod(string name, params PyObject[] args)
{
PyObject method = GetAttr(name);
PyObject result = method.Invoke(args);
method.Dispose();
return result;
}
/// <summary>
/// InvokeMethod Method
/// </summary>
/// <remarks>
/// Invoke the named method of the object with the given arguments.
/// A PythonException is raised if the invocation is unsuccessful.
/// </remarks>
public PyObject InvokeMethod(string name, PyTuple args)
{
PyObject method = GetAttr(name);
PyObject result = method.Invoke(args);
method.Dispose();
return result;
}
/// <summary>
/// InvokeMethod Method
/// </summary>
/// <remarks>
/// Invoke the named method of the object with the given arguments
/// and keyword arguments. Keyword args are passed as a PyDict object.
/// A PythonException is raised if the invocation is unsuccessful.
/// </remarks>
public PyObject InvokeMethod(string name, PyObject[] args, PyDict kw)
{
PyObject method = GetAttr(name);
PyObject result = method.Invoke(args, kw);
method.Dispose();
return result;
}
/// <summary>
/// InvokeMethod Method
/// </summary>
/// <remarks>
/// Invoke the named method of the object with the given arguments
/// and keyword arguments. Keyword args are passed as a PyDict object.
/// A PythonException is raised if the invocation is unsuccessful.
/// </remarks>
public PyObject InvokeMethod(string name, PyTuple args, PyDict kw)
{
PyObject method = GetAttr(name);
PyObject result = method.Invoke(args, kw);
method.Dispose();
return result;
}
/// <summary>
/// IsInstance Method
/// </summary>
/// <remarks>
/// Return true if the object is an instance of the given Python type
/// or class. This method always succeeds.
/// </remarks>
public bool IsInstance(PyObject typeOrClass)
{
int r = Runtime.PyObject_IsInstance(obj, typeOrClass.obj);
if (r < 0)
{
Runtime.PyErr_Clear();
return false;
}
return r != 0;
}
/// <summary>
/// IsSubclass Method
/// </summary>
/// <remarks>
/// Return true if the object is identical to or derived from the
/// given Python type or class. This method always succeeds.
/// </remarks>
public bool IsSubclass(PyObject typeOrClass)
{
int r = Runtime.PyObject_IsSubclass(obj, typeOrClass.obj);
if (r < 0)
{
Runtime.PyErr_Clear();
return false;
}
return r != 0;
}
/// <summary>
/// IsCallable Method
/// </summary>
/// <remarks>
/// Returns true if the object is a callable object. This method
/// always succeeds.
/// </remarks>
public bool IsCallable()
{
return Runtime.PyCallable_Check(obj) != 0;
}
/// <summary>
/// IsIterable Method
/// </summary>
/// <remarks>
/// Returns true if the object is an iterable object. This method
/// always succeeds.
/// </remarks>
public bool IsIterable()
{
return Runtime.PyIter_Check(obj);
}
/// <summary>
/// IsTrue Method
/// </summary>
/// <remarks>
/// Return true if the object is true according to Python semantics.
/// This method always succeeds.
/// </remarks>
public bool IsTrue()
{
return Runtime.PyObject_IsTrue(obj) != 0;
}
/// <summary>
/// Dir Method
/// </summary>
/// <remarks>
/// Return a list of the names of the attributes of the object. This
/// is equivalent to the Python expression "dir(object)".
/// </remarks>
public PyList Dir()
{
IntPtr r = Runtime.PyObject_Dir(obj);
if (r == IntPtr.Zero)
{
throw new PythonException();
}
return new PyList(r);
}
/// <summary>
/// Repr Method
/// </summary>
/// <remarks>
/// Return a string representation of the object. This method is
/// the managed equivalent of the Python expression "repr(object)".
/// </remarks>
public string Repr()
{
IntPtr strval = Runtime.PyObject_Repr(obj);
string result = Runtime.GetManagedString(strval);
Runtime.XDecref(strval);
return result;
}
/// <summary>
/// ToString Method
/// </summary>
/// <remarks>
/// Return the string representation of the object. This method is
/// the managed equivalent of the Python expression "str(object)".
/// </remarks>
public override string ToString()
{
IntPtr strval = Runtime.PyObject_Unicode(obj);
string result = Runtime.GetManagedString(strval);
Runtime.XDecref(strval);
return result;
}
/// <summary>
/// Equals Method
/// </summary>
/// <remarks>
/// Return true if this object is equal to the given object. This
/// method is based on Python equality semantics.
/// </remarks>
public override bool Equals(object o)
{
if (!(o is PyObject))
{
return false;
}
if (obj == ((PyObject)o).obj)
{
return true;
}
int r = Runtime.PyObject_Compare(obj, ((PyObject)o).obj);
if (Exceptions.ErrorOccurred())
{
throw new PythonException();
}
return r == 0;
}
/// <summary>
/// GetHashCode Method
/// </summary>
/// <remarks>
/// Return a hashcode based on the Python object. This returns the
/// hash as computed by Python, equivalent to the Python expression
/// "hash(obj)".
/// </remarks>
public override int GetHashCode()
{
return Runtime.PyObject_Hash(obj).ToInt32();
}
public long Refcount
{
get
{
return Runtime.Refcount(obj);
}
}
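/// <summary>
/// DynamicObject support. Member access on a PyObject used through the C# "dynamic"
/// keyword is forwarded to Python attribute access via GetAttr.
/// </summary>
/// <example>
/// A minimal sketch, assuming math is a PyObject wrapping an imported Python module:
/// <code>
/// dynamic m = math;
/// dynamic result = m.sqrt(4);
/// </code>
/// </example>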
public override bool TryGetMember(GetMemberBinder binder, out object result)
{
result = CheckNone(this.GetAttr(binder.Name));
return true;
}
public override bool TrySetMember(SetMemberBinder binder, object value)
{
IntPtr ptr = Converter.ToPython(value, value?.GetType());
int r = Runtime.PyObject_SetAttrString(obj, binder.Name, ptr);
if (r < 0)
{
throw new PythonException();
}
Runtime.XDecref(ptr);
return true;
}
private void GetArgs(object[] inargs, out PyTuple args, out PyDict kwargs)
{
int arg_count;
for (arg_count = 0; arg_count < inargs.Length && !(inargs[arg_count] is Py.KeywordArguments); ++arg_count)
{
;
}
IntPtr argtuple = Runtime.PyTuple_New(arg_count);
for (var i = 0; i < arg_count; i++)
{
IntPtr ptr;
if (inargs[i] is PyObject)
{
ptr = ((PyObject)inargs[i]).Handle;
Runtime.XIncref(ptr);
}
else
{
ptr = Converter.ToPython(inargs[i], inargs[i]?.GetType());
}
if (Runtime.PyTuple_SetItem(argtuple, i, ptr) < 0)
{
throw new PythonException();
}
}
args = new PyTuple(argtuple);
kwargs = null;
for (int i = arg_count; i < inargs.Length; i++)
{
if (!(inargs[i] is Py.KeywordArguments))
{
throw new ArgumentException("Keyword arguments must come after normal arguments.");
}
if (kwargs == null)
{
kwargs = (Py.KeywordArguments)inargs[i];
}
else
{
kwargs.Update((Py.KeywordArguments)inargs[i]);
}
}
}
public override bool TryInvokeMember(InvokeMemberBinder binder, object[] args, out object result)
{
if (this.HasAttr(binder.Name) && this.GetAttr(binder.Name).IsCallable())
{
PyTuple pyargs = null;
PyDict kwargs = null;
try
{
GetArgs(args, out pyargs, out kwargs);
result = CheckNone(InvokeMethod(binder.Name, pyargs, kwargs));
}
finally
{
if (null != pyargs)
{
pyargs.Dispose();
}
if (null != kwargs)
{
kwargs.Dispose();
}
}
return true;
}
else
{
return base.TryInvokeMember(binder, args, out result);
}
}
public override bool TryInvoke(InvokeBinder binder, object[] args, out object result)
{
if (this.IsCallable())
{
PyTuple pyargs = null;
PyDict kwargs = null;
try
{
GetArgs(args, out pyargs, out kwargs);
result = CheckNone(Invoke(pyargs, kwargs));
}
finally
{
if (null != pyargs)
{
pyargs.Dispose();
}
if (null != kwargs)
{
kwargs.Dispose();
}
}
return true;
}
else
{
return base.TryInvoke(binder, args, out result);
}
}
public override bool TryConvert(ConvertBinder binder, out object result)
{
return Converter.ToManaged(this.obj, binder.Type, out result, false);
}
public override bool TryBinaryOperation(BinaryOperationBinder binder, object arg, out object result)
{
IntPtr res;
if (!(arg is PyObject))
{
arg = arg.ToPython();
}
switch (binder.Operation)
{
case ExpressionType.Add:
res = Runtime.PyNumber_Add(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.AddAssign:
res = Runtime.PyNumber_InPlaceAdd(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.Subtract:
res = Runtime.PyNumber_Subtract(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.SubtractAssign:
res = Runtime.PyNumber_InPlaceSubtract(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.Multiply:
res = Runtime.PyNumber_Multiply(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.MultiplyAssign:
res = Runtime.PyNumber_InPlaceMultiply(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.Divide:
res = Runtime.PyNumber_Divide(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.DivideAssign:
res = Runtime.PyNumber_InPlaceDivide(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.And:
res = Runtime.PyNumber_And(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.AndAssign:
res = Runtime.PyNumber_InPlaceAnd(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.ExclusiveOr:
res = Runtime.PyNumber_Xor(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.ExclusiveOrAssign:
res = Runtime.PyNumber_InPlaceXor(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.GreaterThan:
result = Runtime.PyObject_Compare(this.obj, ((PyObject)arg).obj) > 0;
return true;
case ExpressionType.GreaterThanOrEqual:
result = Runtime.PyObject_Compare(this.obj, ((PyObject)arg).obj) >= 0;
return true;
case ExpressionType.LeftShift:
res = Runtime.PyNumber_Lshift(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.LeftShiftAssign:
res = Runtime.PyNumber_InPlaceLshift(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.LessThan:
result = Runtime.PyObject_Compare(this.obj, ((PyObject)arg).obj) < 0;
return true;
case ExpressionType.LessThanOrEqual:
result = Runtime.PyObject_Compare(this.obj, ((PyObject)arg).obj) <= 0;
return true;
case ExpressionType.Modulo:
res = Runtime.PyNumber_Remainder(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.ModuloAssign:
res = Runtime.PyNumber_InPlaceRemainder(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.NotEqual:
result = Runtime.PyObject_Compare(this.obj, ((PyObject)arg).obj) != 0;
return true;
case ExpressionType.Or:
res = Runtime.PyNumber_Or(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.OrAssign:
res = Runtime.PyNumber_InPlaceOr(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.Power:
res = Runtime.PyNumber_Power(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.RightShift:
res = Runtime.PyNumber_Rshift(this.obj, ((PyObject)arg).obj);
break;
case ExpressionType.RightShiftAssign:
res = Runtime.PyNumber_InPlaceRshift(this.obj, ((PyObject)arg).obj);
break;
default:
result = null;
return false;
}
result = CheckNone(new PyObject(res));
return true;
}
// Workaround for https://bugzilla.xamarin.com/show_bug.cgi?id=41509
// See https://github.com/pythonnet/pythonnet/pull/219
private static object CheckNone(PyObject pyObj)
{
if (pyObj != null)
{
if (pyObj.obj == Runtime.PyNone)
{
return null;
}
}
return pyObj;
}
public override bool TryUnaryOperation(UnaryOperationBinder binder, out object result)
{
int r;
IntPtr res;
switch (binder.Operation)
{
case ExpressionType.Negate:
res = Runtime.PyNumber_Negative(this.obj);
break;
case ExpressionType.UnaryPlus:
res = Runtime.PyNumber_Positive(this.obj);
break;
case ExpressionType.OnesComplement:
res = Runtime.PyNumber_Invert(this.obj);
break;
case ExpressionType.Not:
r = Runtime.PyObject_Not(this.obj);
result = r == 1;
return r != -1;
case ExpressionType.IsFalse:
r = Runtime.PyObject_IsTrue(this.obj);
result = r == 0;
return r != -1;
case ExpressionType.IsTrue:
r = Runtime.PyObject_IsTrue(this.obj);
result = r == 1;
return r != -1;
case ExpressionType.Decrement:
case ExpressionType.Increment:
default:
result = null;
return false;
}
result = CheckNone(new PyObject(res));
return true;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Security;
namespace System.Globalization
{
public partial class CompareInfo
{
[NonSerialized]
private Interop.GlobalizationInterop.SafeSortHandle _sortHandle;
[NonSerialized]
private bool _isAsciiEqualityOrdinal;
private void InitSort(CultureInfo culture)
{
_sortName = culture.SortName;
if (_invariantMode)
{
_isAsciiEqualityOrdinal = true;
}
else
{
Interop.GlobalizationInterop.ResultCode resultCode = Interop.GlobalizationInterop.GetSortHandle(GetNullTerminatedUtf8String(_sortName), out _sortHandle);
if (resultCode != Interop.GlobalizationInterop.ResultCode.Success)
{
_sortHandle.Dispose();
if (resultCode == Interop.GlobalizationInterop.ResultCode.OutOfMemory)
throw new OutOfMemoryException();
throw new ExternalException(SR.Arg_ExternalException);
}
_isAsciiEqualityOrdinal = (_sortName == "en-US" || _sortName == "");
}
}
internal static unsafe int IndexOfOrdinalCore(string source, string value, int startIndex, int count, bool ignoreCase)
{
Debug.Assert(!GlobalizationMode.Invariant);
Debug.Assert(source != null);
Debug.Assert(value != null);
if (value.Length == 0)
{
return startIndex;
}
if (count < value.Length)
{
return -1;
}
if (ignoreCase)
{
fixed (char* pSource = source)
{
int index = Interop.GlobalizationInterop.IndexOfOrdinalIgnoreCase(value, value.Length, pSource + startIndex, count, findLast: false);
return index != -1 ?
startIndex + index :
-1;
}
}
int endIndex = startIndex + (count - value.Length);
for (int i = startIndex; i <= endIndex; i++)
{
int valueIndex, sourceIndex;
for (valueIndex = 0, sourceIndex = i;
valueIndex < value.Length && source[sourceIndex] == value[valueIndex];
valueIndex++, sourceIndex++) ;
if (valueIndex == value.Length)
{
return i;
}
}
return -1;
}
internal static unsafe int LastIndexOfOrdinalCore(string source, string value, int startIndex, int count, bool ignoreCase)
{
Debug.Assert(!GlobalizationMode.Invariant);
Debug.Assert(source != null);
Debug.Assert(value != null);
if (value.Length == 0)
{
return startIndex;
}
if (count < value.Length)
{
return -1;
}
// startIndex is the index into source where we start search backwards from.
// leftStartIndex is the index into source of the start of the string that is
// count characters away from startIndex.
int leftStartIndex = startIndex - count + 1;
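// For example (illustrative values): with startIndex = 9 and count = 4 the search
// window covers source[6..9], so leftStartIndex = 6.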
if (ignoreCase)
{
fixed (char* pSource = source)
{
int lastIndex = Interop.GlobalizationInterop.IndexOfOrdinalIgnoreCase(value, value.Length, pSource + leftStartIndex, count, findLast: true);
return lastIndex != -1 ?
leftStartIndex + lastIndex :
-1;
}
}
for (int i = startIndex - value.Length + 1; i >= leftStartIndex; i--)
{
int valueIndex, sourceIndex;
for (valueIndex = 0, sourceIndex = i;
valueIndex < value.Length && source[sourceIndex] == value[valueIndex];
valueIndex++, sourceIndex++) ;
if (valueIndex == value.Length) {
return i;
}
}
return -1;
}
private static unsafe int CompareStringOrdinalIgnoreCase(char* string1, int count1, char* string2, int count2)
{
Debug.Assert(!GlobalizationMode.Invariant);
return Interop.GlobalizationInterop.CompareStringOrdinalIgnoreCase(string1, count1, string2, count2);
}
private unsafe int CompareString(string string1, int offset1, int length1, string string2, int offset2, int length2, CompareOptions options)
{
Debug.Assert(!_invariantMode);
Debug.Assert(string1 != null);
Debug.Assert(string2 != null);
Debug.Assert((options & (CompareOptions.Ordinal | CompareOptions.OrdinalIgnoreCase)) == 0);
fixed (char* pString1 = string1)
{
fixed (char* pString2 = string2)
{
return Interop.GlobalizationInterop.CompareString(_sortHandle, pString1 + offset1, length1, pString2 + offset2, length2, options);
}
}
}
internal unsafe int IndexOfCore(string source, string target, int startIndex, int count, CompareOptions options, int* matchLengthPtr)
{
Debug.Assert(!_invariantMode);
Debug.Assert(!string.IsNullOrEmpty(source));
Debug.Assert(target != null);
Debug.Assert((options & CompareOptions.OrdinalIgnoreCase) == 0);
int index;
if (target.Length == 0)
{
if(matchLengthPtr != null)
*matchLengthPtr = 0;
return startIndex;
}
if (options == CompareOptions.Ordinal)
{
index = IndexOfOrdinal(source, target, startIndex, count, ignoreCase: false);
if(index != -1)
{
if(matchLengthPtr != null)
*matchLengthPtr = target.Length;
}
return index;
}
if (_isAsciiEqualityOrdinal && CanUseAsciiOrdinalForOptions(options) && source.IsFastSort() && target.IsFastSort())
{
index = IndexOf(source, target, startIndex, count, GetOrdinalCompareOptions(options));
if(index != -1)
{
if(matchLengthPtr != null)
*matchLengthPtr = target.Length;
}
return index;
}
fixed (char* pSource = source)
{
index = Interop.GlobalizationInterop.IndexOf(_sortHandle, target, target.Length, pSource + startIndex, count, options, matchLengthPtr);
return index != -1 ? index + startIndex : -1;
}
}
private unsafe int LastIndexOfCore(string source, string target, int startIndex, int count, CompareOptions options)
{
Debug.Assert(!_invariantMode);
Debug.Assert(!string.IsNullOrEmpty(source));
Debug.Assert(target != null);
Debug.Assert((options & CompareOptions.OrdinalIgnoreCase) == 0);
if (target.Length == 0)
{
return startIndex;
}
if (options == CompareOptions.Ordinal)
{
return LastIndexOfOrdinalCore(source, target, startIndex, count, ignoreCase: false);
}
if (_isAsciiEqualityOrdinal && CanUseAsciiOrdinalForOptions(options) && source.IsFastSort() && target.IsFastSort())
{
return LastIndexOf(source, target, startIndex, count, GetOrdinalCompareOptions(options));
}
// startIndex is the index into source where we start search backwards from. leftStartIndex is the index into source
// of the start of the string that is count characters away from startIndex.
int leftStartIndex = (startIndex - count + 1);
fixed (char* pSource = source)
{
int lastIndex = Interop.GlobalizationInterop.LastIndexOf(_sortHandle, target, target.Length, pSource + (startIndex - count + 1), count, options);
return lastIndex != -1 ? lastIndex + leftStartIndex : -1;
}
}
private bool StartsWith(string source, string prefix, CompareOptions options)
{
Debug.Assert(!_invariantMode);
Debug.Assert(!string.IsNullOrEmpty(source));
Debug.Assert(!string.IsNullOrEmpty(prefix));
Debug.Assert((options & (CompareOptions.Ordinal | CompareOptions.OrdinalIgnoreCase)) == 0);
if (_isAsciiEqualityOrdinal && CanUseAsciiOrdinalForOptions(options) && source.IsFastSort() && prefix.IsFastSort())
{
return IsPrefix(source, prefix, GetOrdinalCompareOptions(options));
}
return Interop.GlobalizationInterop.StartsWith(_sortHandle, prefix, prefix.Length, source, source.Length, options);
}
private bool EndsWith(string source, string suffix, CompareOptions options)
{
Debug.Assert(!_invariantMode);
Debug.Assert(!string.IsNullOrEmpty(source));
Debug.Assert(!string.IsNullOrEmpty(suffix));
Debug.Assert((options & (CompareOptions.Ordinal | CompareOptions.OrdinalIgnoreCase)) == 0);
if (_isAsciiEqualityOrdinal && CanUseAsciiOrdinalForOptions(options) && source.IsFastSort() && suffix.IsFastSort())
{
return IsSuffix(source, suffix, GetOrdinalCompareOptions(options));
}
return Interop.GlobalizationInterop.EndsWith(_sortHandle, suffix, suffix.Length, source, source.Length, options);
}
private unsafe SortKey CreateSortKey(String source, CompareOptions options)
{
Debug.Assert(!_invariantMode);
if (source==null) { throw new ArgumentNullException(nameof(source)); }
Contract.EndContractBlock();
if ((options & ValidSortkeyCtorMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
byte [] keyData;
if (source.Length == 0)
{
keyData = Array.Empty<Byte>();
}
else
{
int sortKeyLength = Interop.GlobalizationInterop.GetSortKey(_sortHandle, source, source.Length, null, 0, options);
keyData = new byte[sortKeyLength];
fixed (byte* pSortKey = keyData)
{
Interop.GlobalizationInterop.GetSortKey(_sortHandle, source, source.Length, pSortKey, sortKeyLength, options);
}
}
return new SortKey(Name, source, options, keyData);
}
private unsafe static bool IsSortable(char *text, int length)
{
Debug.Assert(!GlobalizationMode.Invariant);
int index = 0;
UnicodeCategory uc;
while (index < length)
{
if (Char.IsHighSurrogate(text[index]))
{
if (index == length - 1 || !Char.IsLowSurrogate(text[index+1]))
return false; // unpaired surrogate
uc = CharUnicodeInfo.InternalGetUnicodeCategory(Char.ConvertToUtf32(text[index], text[index+1]));
if (uc == UnicodeCategory.PrivateUse || uc == UnicodeCategory.OtherNotAssigned)
return false;
index += 2;
continue;
}
if (Char.IsLowSurrogate(text[index]))
{
return false; // unpaired surrogate
}
uc = CharUnicodeInfo.GetUnicodeCategory(text[index]);
if (uc == UnicodeCategory.PrivateUse || uc == UnicodeCategory.OtherNotAssigned)
{
return false;
}
index++;
}
return true;
}
// -----------------------------
// ---- PAL layer ends here ----
// -----------------------------
internal unsafe int GetHashCodeOfStringCore(string source, CompareOptions options)
{
Debug.Assert(!_invariantMode);
Debug.Assert(source != null);
Debug.Assert((options & (CompareOptions.Ordinal | CompareOptions.OrdinalIgnoreCase)) == 0);
if (source.Length == 0)
{
return 0;
}
int sortKeyLength = Interop.GlobalizationInterop.GetSortKey(_sortHandle, source, source.Length, null, 0, options);
// As an optimization, for small sort keys we allocate the buffer on the stack.
if (sortKeyLength <= 256)
{
byte* pSortKey = stackalloc byte[sortKeyLength];
Interop.GlobalizationInterop.GetSortKey(_sortHandle, source, source.Length, pSortKey, sortKeyLength, options);
return InternalHashSortKey(pSortKey, sortKeyLength);
}
byte[] sortKey = new byte[sortKeyLength];
fixed (byte* pSortKey = sortKey)
{
Interop.GlobalizationInterop.GetSortKey(_sortHandle, source, source.Length, pSortKey, sortKeyLength, options);
return InternalHashSortKey(pSortKey, sortKeyLength);
}
}
[DllImport(JitHelpers.QCall)]
[SuppressUnmanagedCodeSecurity]
private static unsafe extern int InternalHashSortKey(byte* sortKey, int sortKeyLength);
private static CompareOptions GetOrdinalCompareOptions(CompareOptions options)
{
if ((options & CompareOptions.IgnoreCase) == CompareOptions.IgnoreCase)
{
return CompareOptions.OrdinalIgnoreCase;
}
else
{
return CompareOptions.Ordinal;
}
}
private static bool CanUseAsciiOrdinalForOptions(CompareOptions options)
{
// Unlike the other Ignore options, IgnoreSymbols impacts ASCII characters (e.g. ').
return (options & CompareOptions.IgnoreSymbols) == 0;
}
private static byte[] GetNullTerminatedUtf8String(string s)
{
int byteLen = System.Text.Encoding.UTF8.GetByteCount(s);
// Allocate an extra byte (which defaults to 0) as the null terminator.
byte[] buffer = new byte[byteLen + 1];
int bytesWritten = System.Text.Encoding.UTF8.GetBytes(s, 0, s.Length, buffer, 0);
Debug.Assert(bytesWritten == byteLen);
return buffer;
}
private SortVersion GetSortVersion()
{
Debug.Assert(!_invariantMode);
int sortVersion = Interop.GlobalizationInterop.GetSortVersion(_sortHandle);
return new SortVersion(sortVersion, LCID, new Guid(sortVersion, 0, 0, 0, 0, 0, 0,
(byte) (LCID >> 24),
(byte) ((LCID & 0x00FF0000) >> 16),
(byte) ((LCID & 0x0000FF00) >> 8),
(byte) (LCID & 0xFF)));
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos.Table;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.AzureUtils;
using Orleans.Clustering.AzureStorage;
using Orleans.Clustering.AzureStorage.Utilities;
using Orleans.Configuration;
using LogLevel = Microsoft.Extensions.Logging.LogLevel;
namespace Orleans.Runtime.MembershipService
{
internal class AzureBasedMembershipTable : IMembershipTable
{
private readonly ILogger logger;
private readonly ILoggerFactory loggerFactory;
private OrleansSiloInstanceManager tableManager;
private readonly AzureStorageClusteringOptions options;
private readonly string clusterId;
public AzureBasedMembershipTable(
ILoggerFactory loggerFactory,
IOptions<AzureStorageClusteringOptions> clusteringOptions,
IOptions<ClusterOptions> clusterOptions)
{
this.loggerFactory = loggerFactory;
this.logger = loggerFactory.CreateLogger<AzureBasedMembershipTable>();
this.options = clusteringOptions.Value;
this.clusterId = clusterOptions.Value.ClusterId;
}
public async Task InitializeMembershipTable(bool tryInitTableVersion)
{
LogFormatter.SetExceptionDecoder(typeof(StorageException), AzureTableUtils.PrintStorageException);
this.tableManager = await OrleansSiloInstanceManager.GetManager(
this.clusterId,
this.loggerFactory,
this.options);
// Even if this silo did not create the table, try to insert an initial table version
// if it is not already there, so there is always a first table version row before
// this silo starts working.
if (tryInitTableVersion)
{
// Ignore the return value; we don't care which silo inserted it, as long as it is there.
bool created = await tableManager.TryCreateTableVersionEntryAsync();
if(created) logger.Info("Created new table version row.");
}
}
public Task DeleteMembershipTableEntries(string clusterId)
{
return tableManager.DeleteTableEntries(clusterId);
}
public Task CleanupDefunctSiloEntries(DateTimeOffset beforeDate)
{
return tableManager.CleanupDefunctSiloEntries(beforeDate);
}
public async Task<MembershipTableData> ReadRow(SiloAddress key)
{
try
{
var entries = await tableManager.FindSiloEntryAndTableVersionRow(key);
MembershipTableData data = Convert(entries);
if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("Read my entry {0} Table=" + Environment.NewLine + "{1}", key.ToLongString(), data.ToString());
return data;
}
catch (Exception exc)
{
logger.Warn((int)TableStorageErrorCode.AzureTable_20,
$"Intermediate error reading silo entry for key {key.ToLongString()} from the table {tableManager.TableName}.", exc);
throw;
}
}
public async Task<MembershipTableData> ReadAll()
{
try
{
var entries = await tableManager.FindAllSiloEntries();
MembershipTableData data = Convert(entries);
if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("ReadAll Table=" + Environment.NewLine + "{0}", data.ToString());
return data;
}
catch (Exception exc)
{
logger.Warn((int)TableStorageErrorCode.AzureTable_21,
$"Intermediate error reading all silo entries {tableManager.TableName}.", exc);
throw;
}
}
public async Task<bool> InsertRow(MembershipEntry entry, TableVersion tableVersion)
{
try
{
if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("InsertRow entry = {0}, table version = {1}", entry.ToFullString(), tableVersion);
var tableEntry = Convert(entry, tableManager.DeploymentId);
var versionEntry = tableManager.CreateTableVersionEntry(tableVersion.Version);
bool result = await tableManager.InsertSiloEntryConditionally(
tableEntry, versionEntry, tableVersion.VersionEtag);
if (result == false)
logger.Warn((int)TableStorageErrorCode.AzureTable_22,
$"Insert failed due to contention on the table. Will retry. Entry {entry.ToFullString()}, table version = {tableVersion}");
return result;
}
catch (Exception exc)
{
logger.Warn((int)TableStorageErrorCode.AzureTable_23,
$"Intermediate error inserting entry {entry.ToFullString()} tableVersion {(tableVersion == null ? "null" : tableVersion.ToString())} to the table {tableManager.TableName}.", exc);
throw;
}
}
public async Task<bool> UpdateRow(MembershipEntry entry, string etag, TableVersion tableVersion)
{
try
{
if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("UpdateRow entry = {0}, etag = {1}, table version = {2}", entry.ToFullString(), etag, tableVersion);
var siloEntry = Convert(entry, tableManager.DeploymentId);
var versionEntry = tableManager.CreateTableVersionEntry(tableVersion.Version);
bool result = await tableManager.UpdateSiloEntryConditionally(siloEntry, etag, versionEntry, tableVersion.VersionEtag);
if (result == false)
logger.Warn((int)TableStorageErrorCode.AzureTable_24,
$"Update failed due to contention on the table. Will retry. Entry {entry.ToFullString()}, eTag {etag}, table version = {tableVersion} ");
return result;
}
catch (Exception exc)
{
logger.Warn((int)TableStorageErrorCode.AzureTable_25,
$"Intermediate error updating entry {entry.ToFullString()} tableVersion {(tableVersion == null ? "null" : tableVersion.ToString())} to the table {tableManager.TableName}.", exc);
throw;
}
}
public async Task UpdateIAmAlive(MembershipEntry entry)
{
try
{
if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("Merge entry = {0}", entry.ToFullString());
var siloEntry = ConvertPartial(entry, tableManager.DeploymentId);
await tableManager.MergeTableEntryAsync(siloEntry);
}
catch (Exception exc)
{
logger.Warn((int)TableStorageErrorCode.AzureTable_26,
$"Intermediate error updating IAmAlive field for entry {entry.ToFullString()} to the table {tableManager.TableName}.", exc);
throw;
}
}
private MembershipTableData Convert(List<Tuple<SiloInstanceTableEntry, string>> entries)
{
try
{
var memEntries = new List<Tuple<MembershipEntry, string>>();
TableVersion tableVersion = null;
foreach (var tuple in entries)
{
var tableEntry = tuple.Item1;
if (tableEntry.RowKey.Equals(SiloInstanceTableEntry.TABLE_VERSION_ROW))
{
tableVersion = new TableVersion(Int32.Parse(tableEntry.MembershipVersion), tuple.Item2);
}
else
{
try
{
MembershipEntry membershipEntry = Parse(tableEntry);
memEntries.Add(new Tuple<MembershipEntry, string>(membershipEntry, tuple.Item2));
}
catch (Exception exc)
{
logger.Error((int)TableStorageErrorCode.AzureTable_61,
$"Intermediate error parsing SiloInstanceTableEntry to MembershipTableData: {tableEntry}. Ignoring this entry.", exc);
}
}
}
var data = new MembershipTableData(memEntries, tableVersion);
return data;
}
catch (Exception exc)
{
logger.Error((int)TableStorageErrorCode.AzureTable_60,
$"Intermediate error parsing SiloInstanceTableEntry to MembershipTableData: {Utils.EnumerableToString(entries, tuple => tuple.Item1.ToString())}.", exc);
throw;
}
}
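// The SuspectingSilos and SuspectingTimes columns are parallel, pipe-delimited lists
// (hypothetical example: "10.0.0.1:11111@1|10.0.0.2:11111@1" with two matching
// timestamps); Parse splits each on '|' and pairs the entries up as suspectors.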
private static MembershipEntry Parse(SiloInstanceTableEntry tableEntry)
{
var parse = new MembershipEntry
{
HostName = tableEntry.HostName,
Status = (SiloStatus) Enum.Parse(typeof (SiloStatus), tableEntry.Status)
};
if (!string.IsNullOrEmpty(tableEntry.ProxyPort))
parse.ProxyPort = int.Parse(tableEntry.ProxyPort);
int port = 0;
if (!string.IsNullOrEmpty(tableEntry.Port))
int.TryParse(tableEntry.Port, out port);
int gen = 0;
if (!string.IsNullOrEmpty(tableEntry.Generation))
int.TryParse(tableEntry.Generation, out gen);
parse.SiloAddress = SiloAddress.New(new IPEndPoint(IPAddress.Parse(tableEntry.Address), port), gen);
parse.RoleName = tableEntry.RoleName;
if (!string.IsNullOrEmpty(tableEntry.SiloName))
{
parse.SiloName = tableEntry.SiloName;
}else if (!string.IsNullOrEmpty(tableEntry.InstanceName))
{
// this is for backward compatibility: in a mixed cluster of old and new versions,
// some entries will have the old InstanceName column.
parse.SiloName = tableEntry.InstanceName;
}
if (!string.IsNullOrEmpty(tableEntry.UpdateZone))
parse.UpdateZone = int.Parse(tableEntry.UpdateZone);
if (!string.IsNullOrEmpty(tableEntry.FaultZone))
parse.FaultZone = int.Parse(tableEntry.FaultZone);
parse.StartTime = !string.IsNullOrEmpty(tableEntry.StartTime) ?
LogFormatter.ParseDate(tableEntry.StartTime) : default(DateTime);
parse.IAmAliveTime = !string.IsNullOrEmpty(tableEntry.IAmAliveTime) ?
LogFormatter.ParseDate(tableEntry.IAmAliveTime) : default(DateTime);
var suspectingSilos = new List<SiloAddress>();
var suspectingTimes = new List<DateTime>();
if (!string.IsNullOrEmpty(tableEntry.SuspectingSilos))
{
string[] silos = tableEntry.SuspectingSilos.Split('|');
foreach (string silo in silos)
{
suspectingSilos.Add(SiloAddress.FromParsableString(silo));
}
}
if (!string.IsNullOrEmpty(tableEntry.SuspectingTimes))
{
string[] times = tableEntry.SuspectingTimes.Split('|');
foreach (string time in times)
suspectingTimes.Add(LogFormatter.ParseDate(time));
}
if (suspectingSilos.Count != suspectingTimes.Count)
throw new OrleansException(String.Format("SuspectingSilos.Length of {0} as read from Azure table is not equal to SuspectingTimes.Length of {1}", suspectingSilos.Count, suspectingTimes.Count));
for (int i = 0; i < suspectingSilos.Count; i++)
parse.AddSuspector(suspectingSilos[i], suspectingTimes[i]);
return parse;
}
private static SiloInstanceTableEntry Convert(MembershipEntry memEntry, string deploymentId)
{
var tableEntry = new SiloInstanceTableEntry
{
DeploymentId = deploymentId,
Address = memEntry.SiloAddress.Endpoint.Address.ToString(),
Port = memEntry.SiloAddress.Endpoint.Port.ToString(CultureInfo.InvariantCulture),
Generation = memEntry.SiloAddress.Generation.ToString(CultureInfo.InvariantCulture),
HostName = memEntry.HostName,
Status = memEntry.Status.ToString(),
ProxyPort = memEntry.ProxyPort.ToString(CultureInfo.InvariantCulture),
RoleName = memEntry.RoleName,
SiloName = memEntry.SiloName,
// this is for backward compatibility: in a mixed cluster of old and new versions,
// we need to populate both columns.
InstanceName = memEntry.SiloName,
UpdateZone = memEntry.UpdateZone.ToString(CultureInfo.InvariantCulture),
FaultZone = memEntry.FaultZone.ToString(CultureInfo.InvariantCulture),
StartTime = LogFormatter.PrintDate(memEntry.StartTime),
IAmAliveTime = LogFormatter.PrintDate(memEntry.IAmAliveTime)
};
if (memEntry.SuspectTimes != null)
{
var siloList = new StringBuilder();
var timeList = new StringBuilder();
bool first = true;
foreach (var tuple in memEntry.SuspectTimes)
{
if (!first)
{
siloList.Append('|');
timeList.Append('|');
}
siloList.Append(tuple.Item1.ToParsableString());
timeList.Append(LogFormatter.PrintDate(tuple.Item2));
first = false;
}
tableEntry.SuspectingSilos = siloList.ToString();
tableEntry.SuspectingTimes = timeList.ToString();
}
else
{
tableEntry.SuspectingSilos = String.Empty;
tableEntry.SuspectingTimes = String.Empty;
}
tableEntry.PartitionKey = deploymentId;
tableEntry.RowKey = SiloInstanceTableEntry.ConstructRowKey(memEntry.SiloAddress);
return tableEntry;
}
private static SiloInstanceTableEntry ConvertPartial(MembershipEntry memEntry, string deploymentId)
{
return new SiloInstanceTableEntry
{
DeploymentId = deploymentId,
IAmAliveTime = LogFormatter.PrintDate(memEntry.IAmAliveTime),
PartitionKey = deploymentId,
RowKey = SiloInstanceTableEntry.ConstructRowKey(memEntry.SiloAddress)
};
}
}
}
| |
using System;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto.Modes;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Utilities;
namespace Org.BouncyCastle.Crypto.Engines
{
/**
* Wrap keys according to
* <a href="http://www.ietf.org/internet-drafts/draft-ietf-smime-key-wrap-01.txt">
* draft-ietf-smime-key-wrap-01.txt</a>.
* <p>
* Note:
* <ul>
* <li>this is based on a draft, and as such is subject to change - don't use this class for anything requiring long term storage.</li>
* <li>if you are using this to wrap triple-des keys you need to set the
* parity bits on the key and, if it's a two-key triple-des key, pad it
* yourself.</li>
* </ul>
* </p>
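* <p>
* A minimal usage sketch (the KEK and the key material to wrap are illustrative):
* <code>
* var wrapper = new DesEdeWrapEngine();
* wrapper.Init(true, new KeyParameter(kekBytes));
* byte[] wrapped = wrapper.Wrap(keyBytes, 0, keyBytes.Length);
* </code>
* </p>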
*/
public class DesEdeWrapEngine
: IWrapper
{
/** Field engine */
private CbcBlockCipher engine;
/** Field param */
private KeyParameter param;
/** Field paramPlusIV */
private ParametersWithIV paramPlusIV;
/** Field iv */
private byte[] iv;
/** Field forWrapping */
private bool forWrapping;
/** Field IV2 */
private static readonly byte[] IV2 = { (byte) 0x4a, (byte) 0xdd, (byte) 0xa2,
(byte) 0x2c, (byte) 0x79, (byte) 0xe8,
(byte) 0x21, (byte) 0x05 };
//
// checksum digest
//
private readonly IDigest sha1 = new Sha1Digest();
private readonly byte[] digest = new byte[20];
/**
* Method init
*
* @param forWrapping
* @param param
*/
public void Init(
bool forWrapping,
ICipherParameters parameters)
{
this.forWrapping = forWrapping;
this.engine = new CbcBlockCipher(new DesEdeEngine());
ISecureRandom sr;
if (parameters is ParametersWithRandom)
{
ParametersWithRandom pr = (ParametersWithRandom) parameters;
parameters = pr.Parameters;
sr = pr.Random;
}
else
{
sr = new SecureRandom();
}
if (parameters is KeyParameter)
{
this.param = (KeyParameter) parameters;
if (this.forWrapping)
{
// No IV was supplied but we are wrapping,
// so we have to create our own IV.
this.iv = new byte[8];
sr.NextBytes(iv);
this.paramPlusIV = new ParametersWithIV(this.param, this.iv);
}
}
else if (parameters is ParametersWithIV)
{
if (!forWrapping)
throw new ArgumentException("You should not supply an IV for unwrapping");
this.paramPlusIV = (ParametersWithIV) parameters;
this.iv = this.paramPlusIV.GetIV();
this.param = (KeyParameter) this.paramPlusIV.Parameters;
if (this.iv.Length != 8)
throw new ArgumentException(@"IV is not 8 octets", "parameters");
}
}
/**
* Method GetAlgorithmName
*
* @return
*/
public string AlgorithmName
{
get { return "DESede"; }
}
/**
* Method wrap
*
* @param in
* @param inOff
* @param inLen
* @return
*/
public byte[] Wrap(
byte[] input,
int inOff,
int length)
{
if (!forWrapping)
{
throw new InvalidOperationException("Not initialized for wrapping");
}
byte[] keyToBeWrapped = new byte[length];
Array.Copy(input, inOff, keyToBeWrapped, 0, length);
// Compute the CMS Key Checksum, (section 5.6.1), call this CKS.
byte[] CKS = CalculateCmsKeyChecksum(keyToBeWrapped);
// Let WKCKS = WK || CKS where || is concatenation.
byte[] WKCKS = new byte[keyToBeWrapped.Length + CKS.Length];
Array.Copy(keyToBeWrapped, 0, WKCKS, 0, keyToBeWrapped.Length);
Array.Copy(CKS, 0, WKCKS, keyToBeWrapped.Length, CKS.Length);
// Encrypt WKCKS in CBC mode using KEK as the key and IV as the
// initialization vector. Call the results TEMP1.
int blockSize = engine.GetBlockSize();
if (WKCKS.Length % blockSize != 0)
throw new InvalidOperationException("Not multiple of block length");
engine.Init(true, paramPlusIV);
byte [] TEMP1 = new byte[WKCKS.Length];
for (int currentBytePos = 0; currentBytePos != WKCKS.Length; currentBytePos += blockSize)
{
engine.ProcessBlock(WKCKS, currentBytePos, TEMP1, currentBytePos);
}
// Let TEMP2 = IV || TEMP1.
byte[] TEMP2 = new byte[this.iv.Length + TEMP1.Length];
Array.Copy(this.iv, 0, TEMP2, 0, this.iv.Length);
Array.Copy(TEMP1, 0, TEMP2, this.iv.Length, TEMP1.Length);
// Reverse the order of the octets in TEMP2 and call the result TEMP3.
byte[] TEMP3 = reverse(TEMP2);
// Encrypt TEMP3 in CBC mode using the KEK and an initialization vector
// of 0x 4a dd a2 2c 79 e8 21 05. The resulting cipher text is the desired
// result. It is 40 octets long if a 168 bit key is being wrapped.
ParametersWithIV param2 = new ParametersWithIV(this.param, IV2);
this.engine.Init(true, param2);
for (int currentBytePos = 0; currentBytePos != TEMP3.Length; currentBytePos += blockSize)
{
engine.ProcessBlock(TEMP3, currentBytePos, TEMP3, currentBytePos);
}
return TEMP3;
}
/**
* Method unwrap
*
* @param in
* @param inOff
* @param inLen
* @return
* @throws InvalidCipherTextException
*/
public byte[] Unwrap(
byte[] input,
int inOff,
int length)
{
if (forWrapping)
{
throw new InvalidOperationException("Not set for unwrapping");
}
if (input == null)
{
throw new InvalidCipherTextException("Null pointer as ciphertext");
}
int blockSize = engine.GetBlockSize();
if (length % blockSize != 0)
{
throw new InvalidCipherTextException("Ciphertext not multiple of " + blockSize);
}
/*
// Check if the length of the cipher text is reasonable given the key
// type. It must be 40 bytes for a 168 bit key and either 32, 40, or
// 48 bytes for a 128, 192, or 256 bit key. If the length is not supported
// or inconsistent with the algorithm for which the key is intended,
// return error.
//
// we do not accept 168 bit keys. it has to be 192 bit.
int lengthA = (estimatedKeyLengthInBit / 8) + 16;
int lengthB = estimatedKeyLengthInBit % 8;
if ((lengthA != keyToBeUnwrapped.Length) || (lengthB != 0)) {
throw new XMLSecurityException("empty");
}
*/
// Decrypt the cipher text with triple-DES in CBC mode using the KEK
// and an initialization vector (IV) of 0x4adda22c79e82105. Call the output TEMP3.
ParametersWithIV param2 = new ParametersWithIV(this.param, IV2);
this.engine.Init(false, param2);
byte [] TEMP3 = new byte[length];
for (int currentBytePos = 0; currentBytePos != TEMP3.Length; currentBytePos += blockSize)
{
engine.ProcessBlock(input, inOff + currentBytePos, TEMP3, currentBytePos);
}
// Reverse the order of the octets in TEMP3 and call the result TEMP2.
byte[] TEMP2 = reverse(TEMP3);
// Decompose TEMP2 into IV, the first 8 octets, and TEMP1, the remaining octets.
this.iv = new byte[8];
byte[] TEMP1 = new byte[TEMP2.Length - 8];
Array.Copy(TEMP2, 0, this.iv, 0, 8);
Array.Copy(TEMP2, 8, TEMP1, 0, TEMP2.Length - 8);
// Decrypt TEMP1 using triple-DES in CBC mode using the KEK and the IV
// found in the previous step. Call the result WKCKS.
this.paramPlusIV = new ParametersWithIV(this.param, this.iv);
this.engine.Init(false, this.paramPlusIV);
byte[] WKCKS = new byte[TEMP1.Length];
for (int currentBytePos = 0; currentBytePos != WKCKS.Length; currentBytePos += blockSize)
{
engine.ProcessBlock(TEMP1, currentBytePos, WKCKS, currentBytePos);
}
// Decompose WKCKS. CKS is the last 8 octets and WK, the wrapped key, are
// those octets before the CKS.
byte[] result = new byte[WKCKS.Length - 8];
byte[] CKStoBeVerified = new byte[8];
Array.Copy(WKCKS, 0, result, 0, WKCKS.Length - 8);
Array.Copy(WKCKS, WKCKS.Length - 8, CKStoBeVerified, 0, 8);
// Calculate a CMS Key Checksum, (section 5.6.1), over the WK and compare
// with the CKS extracted in the above step. If they are not equal, return error.
if (!CheckCmsKeyChecksum(result, CKStoBeVerified)) {
throw new InvalidCipherTextException(
"Checksum inside ciphertext is corrupted");
}
// WK is the wrapped key, now extracted for use in data decryption.
return result;
}
/**
* Some key wrap algorithms make use of the Key Checksum defined
* in CMS [CMS-Algorithms]. This is used to provide an integrity
* check value for the key being wrapped. The algorithm is
*
* - Compute the 20 octet SHA-1 hash on the key being wrapped.
* - Use the first 8 octets of this hash as the checksum value.
*
* @param key
* @return
* @throws Exception
* @see http://www.w3.org/TR/xmlenc-core/#sec-CMSKeyChecksum
*/
private byte[] CalculateCmsKeyChecksum(
byte[] key)
{
sha1.BlockUpdate(key, 0, key.Length);
sha1.DoFinal(digest, 0);
byte[] result = new byte[8];
Array.Copy(digest, 0, result, 0, 8);
return result;
}
/**
* @param key
* @param checksum
* @return
* @see http://www.w3.org/TR/xmlenc-core/#sec-CMSKeyChecksum
*/
private bool CheckCmsKeyChecksum(
byte[] key,
byte[] checksum)
{
return Arrays.ConstantTimeAreEqual(CalculateCmsKeyChecksum(key), checksum);
}
private static byte[] reverse(byte[] bs)
{
byte[] result = new byte[bs.Length];
for (int i = 0; i < bs.Length; i++)
{
result[i] = bs[bs.Length - (i + 1)];
}
return result;
}
}
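// Illustrative usage sketch (not part of the original source). It assumes this is a
// BouncyCastle-style DES-EDE (triple-DES) key wrap engine, here called DesEdeWrapEngine,
// and that KeyParameter, ParametersWithRandom and SecureRandom are the parameter types
// referenced above; kekBytes/keyBytes are placeholder byte arrays.
//
//   KeyParameter kek = new KeyParameter(kekBytes);                 // 24-byte KEK (assumed)
//   DesEdeWrapEngine wrapper = new DesEdeWrapEngine();
//   wrapper.Init(true, new ParametersWithRandom(kek, new SecureRandom()));
//   byte[] wrapped = wrapper.Wrap(keyBytes, 0, keyBytes.Length);
//
//   wrapper.Init(false, kek);                                      // unwrap with the same KEK
//   byte[] unwrapped = wrapper.Unwrap(wrapped, 0, wrapped.Length);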
}
| |
using System;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.Runtime.Serialization;
using System.Security.Permissions;
namespace Dalssoft.DiagramNet
{
/// <summary>
/// This class controls the element collection and its visualization.
/// </summary>
[Serializable]
public class Document: IDeserializationCallback
{
//Draw properties
private SmoothingMode smoothingMode = SmoothingMode.HighQuality;
private PixelOffsetMode pixelOffsetMode = PixelOffsetMode.Default;
private CompositingQuality compositingQuality = CompositingQuality.AssumeLinear;
//Action
private DesignerAction action = DesignerAction.Select;
private ElementType elementType = ElementType.RectangleNode;
private LinkType linkType = LinkType.RightAngle;
// Element Collection
internal ElementCollection elements = new ElementCollection();
// Selections Collections
private ElementCollection selectedElements = new ElementCollection();
private ElementCollection selectedNodes = new ElementCollection();
//Document Size
private Point location = new Point(100, 100);
private Size size = new Size(0, 0);
private Size windowSize = new Size(0, 0);
//Zoom
private float zoom = 1.0f;
//Grid
private Size gridSize = new Size(50, 50);
//Events
private bool canFireEvents = true;
public Document(){}
#region Add Methods
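/// <summary>
/// Adds a single element to the document, hooks its AppearanceChanged event
/// and raises the appearance-changed notification.
/// </summary>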
public void AddElement(BaseElement el)
{
elements.Add(el);
el.AppearanceChanged +=new EventHandler(element_AppearanceChanged);
OnAppearancePropertyChanged(new EventArgs());
}
public void AddElements(ElementCollection els)
{
AddElements(els.GetArray());
}
public void AddElements(BaseElement[] els)
{
elements.EnabledCalc = false;
foreach (BaseElement el in els)
{
this.AddElement(el);
}
elements.EnabledCalc = true;
}
internal bool CanAddLink(ConnectorElement connStart, ConnectorElement connEnd)
{
return ((connStart != connEnd) && (connStart.ParentElement != connEnd.ParentElement));
}
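/// <summary>
/// Creates a link between two connectors (straight or right-angle, depending on the
/// current LinkType), adds it to the document and returns it. Returns null when the
/// connectors cannot be linked (same connector or same parent element).
/// </summary>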
public BaseLinkElement AddLink(ConnectorElement connStart, ConnectorElement connEnd)
{
if (CanAddLink(connStart, connEnd))
{
BaseLinkElement lnk;
if (linkType == LinkType.Straight)
lnk = new StraightLinkElement(connStart, connEnd);
else // (linkType == LinkType.RightAngle)
lnk = new RightAngleLinkElement(connStart, connEnd);
elements.Add(lnk);
lnk.AppearanceChanged +=new EventHandler(element_AppearanceChanged);
OnAppearancePropertyChanged(new EventArgs());
return lnk;
}
else
{
return null;
}
}
#endregion
#region Delete Methods
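/// <summary>
/// Removes an element from the document. Links are detached from both connectors;
/// for nodes, every link attached to their connectors is removed first. Connector
/// elements cannot be deleted directly.
/// </summary>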
public void DeleteElement(BaseElement el)
{
if ((el != null) && !(el is ConnectorElement))
{
//Delete link
if (el is BaseLinkElement)
{
BaseLinkElement lnk = (BaseLinkElement) el;
DeleteLink(lnk);
return;
}
//Delete node
if (el is NodeElement)
{
NodeElement conn = ((NodeElement) el);
foreach (ConnectorElement elconn in conn.Connectors)
{
BaseLinkElement lnk;
for (int i = elconn.Links.Count - 1; i>=0; i--)
{
lnk = (BaseLinkElement) elconn.Links[i];
DeleteLink(lnk);
}
}
if (selectedNodes.Contains(el))
selectedNodes.Remove(el);
}
if (SelectedElements.Contains(el))
selectedElements.Remove(el);
elements.Remove(el);
OnAppearancePropertyChanged(new EventArgs());
}
el = null;
}
public void DeleteElement(Point p)
{
BaseElement selectedElement = FindElement(p);
DeleteElement(selectedElement);
}
public void DeleteSelectedElements()
{
selectedElements.EnabledCalc = false;
selectedNodes.EnabledCalc = false;
for(int i = selectedElements.Count - 1; i >= 0; i-- )
{
DeleteElement(selectedElements[i]);
}
selectedElements.EnabledCalc = true;
selectedNodes.EnabledCalc = true;
}
public void DeleteLink(BaseLinkElement lnk)
{
if (lnk != null)
{
lnk.Connector1.RemoveLink(lnk);
lnk.Connector2.RemoveLink(lnk);
if (elements.Contains(lnk))
elements.Remove(lnk);
if (selectedElements.Contains(lnk))
selectedElements.Remove(lnk);
OnAppearancePropertyChanged(new EventArgs());
}
}
#endregion
#region Select Methods
public void ClearSelection()
{
selectedElements.Clear();
selectedNodes.Clear();
OnElementSelection(this, new ElementSelectionEventArgs(selectedElements));
}
public void SelectElement(BaseElement el)
{
selectedElements.Add(el);
if (el is NodeElement)
{
selectedNodes.Add(el);
}
if (canFireEvents)
OnElementSelection(this, new ElementSelectionEventArgs(selectedElements));
}
public void SelectElements(BaseElement[] els)
{
selectedElements.EnabledCalc = false;
selectedNodes.EnabledCalc = false;
canFireEvents = false;
try
{
foreach(BaseElement el in els)
{
SelectElement(el);
}
}
finally
{
canFireEvents = true;
}
selectedElements.EnabledCalc = true;
selectedNodes.EnabledCalc = true;
OnElementSelection(this, new ElementSelectionEventArgs(selectedElements));
}
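/// <summary>
/// Selects every element whose controller hit-tests against the given rectangle.
/// When more than one element is selected, links whose two connected elements are
/// not both part of the selection are dropped from it.
/// </summary>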
public void SelectElements(Rectangle selectionRectangle)
{
selectedElements.EnabledCalc = false;
selectedNodes.EnabledCalc = false;
// Add all hit-testable elements
foreach(BaseElement element in elements)
{
if (element is IControllable)
{
IController ctrl = ((IControllable)element).GetController();
if (ctrl.HitTest(selectionRectangle))
{
if (!(element is ConnectorElement))
selectedElements.Add(element);
if (element is NodeElement)
selectedNodes.Add(element);
}
}
}
// If the selection isn't a specific link, remove links
// whose two connected elements are not both in the selection
if (selectedElements.Count > 1)
{
foreach(BaseElement el in elements)
{
BaseLinkElement lnk = el as BaseLinkElement;
if (lnk == null) continue;
if ((!selectedElements.Contains(lnk.Connector1.ParentElement)) ||
(!selectedElements.Contains(lnk.Connector2.ParentElement)))
{
selectedElements.Remove(lnk);
}
}
}
selectedElements.EnabledCalc = true;
selectedNodes.EnabledCalc = true;
OnElementSelection(this, new ElementSelectionEventArgs(selectedElements));
}
public void SelectAllElements()
{
selectedElements.EnabledCalc = false;
selectedNodes.EnabledCalc = false;
foreach(BaseElement element in elements)
{
if (!(element is ConnectorElement))
selectedElements.Add(element);
if (element is NodeElement)
selectedNodes.Add(element);
}
selectedElements.EnabledCalc = true;
selectedNodes.EnabledCalc = true;
}
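/// <summary>
/// Returns the topmost element at the given point, checking node connectors,
/// container children and plain elements first, then links.
/// Returns null when nothing is hit.
/// </summary>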
public BaseElement FindElement(Point point)
{
BaseElement el;
if ((elements != null) && (elements.Count > 0))
{
// First, find elements
for(int i = elements.Count - 1; i >=0 ; i--)
{
el = elements[i];
if (el is BaseLinkElement)
continue;
//Find element in a Connector array
if (el is NodeElement)
{
NodeElement nel = (NodeElement) el;
foreach(ConnectorElement cel in nel.Connectors)
{
IController ctrl = ((IControllable) cel).GetController();
if (ctrl.HitTest(point))
return cel;
}
}
//Find element in a Container Element
if (el is IContainer)
{
BaseElement inner = FindInnerElement((IContainer) el, point);
if (inner != null)
return inner;
}
//Find element by hit test
if (el is IControllable)
{
IController ctrl = ((IControllable) el).GetController();
if (ctrl.HitTest(point))
return el;
}
}
// Then, find links
for(int i = elements.Count - 1; i >=0 ; i--)
{
el = elements[i];
if (!(el is BaseLinkElement))
continue;
if (el is IControllable)
{
IController ctrl = ((IControllable) el).GetController();
if (ctrl.HitTest(point))
return el;
}
}
}
return null;
}
public BaseElement FindElement(string name)
{
BaseElement el;
if ((elements != null) && (elements.Count > 0))
{
// First, find elements
for (int i = elements.Count - 1; i >= 0; i--)
{
el = elements[i];
if (el is BaseLinkElement)
continue;
//Find element in a Connector array
if (el is NodeElement)
{
NodeElement nel = (NodeElement)el;
if (el.Name ==name)
{
return el;
}
foreach (ConnectorElement cel in nel.Connectors)
{
if (cel.Name ==name )
{
return cel;
}
}
}
//Find element in a Container Element
if (el is IContainer)
{
BaseElement inner = FindInnerElement((IContainer)el, name );
if (inner != null)
return inner;
}
}
// Then, find links
for (int i = elements.Count - 1; i >= 0; i--)
{
el = elements[i];
if (!(el is BaseLinkElement))
continue;
if (el.Name==name )
{
return el;
}
}
}
return null;
}
private BaseElement FindInnerElement(IContainer parent, string name)
{
foreach (BaseElement el in parent.Elements)
{
if (el is IContainer)
{
BaseElement retEl = FindInnerElement((IContainer)el, name);
if (retEl != null)
return retEl;
}
if (el is IControllable)
{
IController ctrl = ((IControllable)el).GetController();
if (el.Name ==name )
{
return el;
}
}
}
return null;
}
private BaseElement FindInnerElement(IContainer parent, Point hitPos)
{
foreach (BaseElement el in parent.Elements)
{
if (el is IContainer)
{
BaseElement retEl = FindInnerElement((IContainer)el, hitPos);
if (retEl != null)
return retEl;
}
if (el is IControllable)
{
IController ctrl = ((IControllable) el).GetController();
if (ctrl.HitTest(hitPos))
return el;
}
}
return null;
}
#endregion
#region Position Methods
public void MoveUpElement(BaseElement el)
{
int i = elements.IndexOf(el);
if (i != elements.Count - 1)
{
elements.ChangeIndex(i, i + 1);
OnAppearancePropertyChanged(new EventArgs());
}
}
public void MoveDownElement(BaseElement el)
{
int i = elements.IndexOf(el);
if (i != 0)
{
elements.ChangeIndex(i, i - 1);
OnAppearancePropertyChanged(new EventArgs());
}
}
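/// <summary>
/// Moves the element to the end of the collection (drawn last, i.e. on top) by
/// swapping it upward one position at a time.
/// </summary>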
public void BringToFrontElement(BaseElement el)
{
int i = elements.IndexOf(el);
for (int x = i + 1; x <= elements.Count - 1; x++)
{
elements.ChangeIndex(i, x);
i = x;
}
OnAppearancePropertyChanged(new EventArgs());
}
public void SendToBackElement(BaseElement el)
{
int i = elements.IndexOf(el);
for (int x = i - 1; x >= 0; x--)
{
elements.ChangeIndex(i, x);
i = x;
}
OnAppearancePropertyChanged(new EventArgs());
}
#endregion
internal void CalcWindow(bool forceCalc)
{
elements.CalcWindow(forceCalc);
selectedElements.CalcWindow(forceCalc);
selectedNodes.CalcWindow(forceCalc);
}
#region Properties
public ElementCollection Elements
{
get
{
return elements;
}
}
public ElementCollection SelectedElements
{
get
{
return selectedElements;
}
}
public ElementCollection SelectedNodes
{
get
{
return selectedNodes;
}
}
public Point Location
{
get
{
return elements.WindowLocation;
}
}
public Size Size
{
get
{
return elements.WindowSize;
}
}
internal Size WindowSize
{
set
{
windowSize = value;
}
}
public SmoothingMode SmoothingMode
{
get
{
return smoothingMode;
}
set
{
smoothingMode = value;
OnAppearancePropertyChanged(new EventArgs());
}
}
public PixelOffsetMode PixelOffsetMode
{
get
{
return pixelOffsetMode;
}
set
{
pixelOffsetMode = value;
OnAppearancePropertyChanged(new EventArgs());
}
}
public CompositingQuality CompositingQuality
{
get
{
return compositingQuality;
}
set
{
compositingQuality = value;
OnAppearancePropertyChanged(new EventArgs());
}
}
public DesignerAction Action
{
get
{
return action;
}
set
{
action = value;
OnPropertyChanged(new EventArgs());
}
}
public float Zoom
{
get
{
return zoom;
}
set
{
zoom = value;
OnPropertyChanged(new EventArgs());
}
}
public ElementType ElementType
{
get
{
return elementType;
}
set
{
elementType = value;
OnPropertyChanged(new EventArgs());
}
}
public LinkType LinkType
{
get
{
return linkType;
}
set
{
linkType = value;
OnPropertyChanged(new EventArgs());
}
}
public Size GridSize
{
get
{
return gridSize;
}
set
{
gridSize = value;
OnAppearancePropertyChanged(new EventArgs());
}
}
#endregion
#region Draw Methods
internal void DrawElements(Graphics g, Rectangle clippingRegion)
{
//Draw Links first
for (int i = 0; i <= elements.Count - 1; i++)
{
BaseElement el = elements[i];
if ((el is BaseLinkElement) && (NeedDrawElement(el, clippingRegion)))
el.Draw(g);
if (el is ILabelElement)
((ILabelElement) el).Label.Draw(g);
}
//Draw the other elements
for (int i = 0; i <= elements.Count - 1; i++)
{
BaseElement el = elements[i];
if (!(el is BaseLinkElement) && (NeedDrawElement(el, clippingRegion)))
{
if (el is NodeElement)
{
NodeElement n = (NodeElement) el;
n.Draw(g, (action == DesignerAction.Connect));
}
else
{
el.Draw(g);
}
if (el is ILabelElement)
((ILabelElement) el).Label.Draw(g);
}
}
}
private bool NeedDrawElement(BaseElement el, Rectangle clippingRegion)
{
if (!el.Visible) return false;
Rectangle elRectangle = el.GetUnsignedRectangle();
elRectangle.Inflate(5, 5);
return clippingRegion.IntersectsWith(elRectangle);
}
internal void DrawSelections(Graphics g, Rectangle clippingRegion)
{
for(int i = selectedElements.Count - 1; i >=0 ; i--)
{
if (selectedElements[i] is IControllable)
{
IController ctrl = ((IControllable) selectedElements[i]).GetController();
ctrl.DrawSelection(g);
if (selectedElements[i] is BaseLinkElement)
{
BaseLinkElement link = (BaseLinkElement) selectedElements[i];
ctrl = ((IControllable) link.Connector1).GetController();
ctrl.DrawSelection(g);
ctrl = ((IControllable) link.Connector2).GetController();
ctrl.DrawSelection(g);
}
}
}
}
internal void DrawGrid(Graphics g, Rectangle clippingRegion)
{
// ControlPaint.DrawGrid(g, clippingRegion, gridSize, Color.LightGray);
Pen p = new Pen(new HatchBrush(HatchStyle.LargeGrid | HatchStyle.Percent90, Color.LightGray, Color.Transparent), 1);
//Pen p = new Pen(Color.LightGray, 1);
int maxX = location.X + this.Size.Width;
int maxY = location.Y + this.Size.Height;
if (windowSize.Width / zoom > maxX)
maxX = (int)(windowSize.Width / zoom);
if (windowSize.Height / zoom > maxY)
maxY = (int)(windowSize.Height / zoom);
for(int i = 0; i < maxX; i += gridSize.Width)
{
g.DrawLine(p, i, 0, i, maxY);
}
for(int i = 0; i < maxY; i += gridSize.Height)
{
g.DrawLine(p, 0, i, maxX, i);
}
p.Dispose();
}
#endregion
#region Events Raising
// Property Changed
[field: NonSerialized]
public event EventHandler PropertyChanged;
protected virtual void OnPropertyChanged(EventArgs e)
{
if (PropertyChanged != null)
PropertyChanged(this, e);
}
// Appearance Property Changed
[field: NonSerialized]
public event EventHandler AppearancePropertyChanged;
protected virtual void OnAppearancePropertyChanged(EventArgs e)
{
OnPropertyChanged(e);
if (AppearancePropertyChanged != null)
AppearancePropertyChanged(this, e);
}
// Element Property Changed
[field: NonSerialized]
public event EventHandler ElementPropertyChanged;
protected virtual void OnElementPropertyChanged(object sender, EventArgs e)
{
if (ElementPropertyChanged != null)
ElementPropertyChanged(sender, e);
}
// Element Selection
public delegate void ElementSelectionEventHandler(object sender, ElementSelectionEventArgs e);
[field: NonSerialized]
public event ElementSelectionEventHandler ElementSelection;
protected virtual void OnElementSelection(object sender, ElementSelectionEventArgs e)
{
if (ElementSelection != null)
ElementSelection(sender, e);
}
#endregion
#region Events Handling
private void RecreateEventsHandlers()
{
foreach(BaseElement el in elements)
el.AppearanceChanged +=new EventHandler(element_AppearanceChanged);
}
[SecurityPermissionAttribute(SecurityAction.Demand,SerializationFormatter=true)]
private void element_AppearanceChanged(object sender, EventArgs e)
{
OnElementPropertyChanged(sender, e);
}
#endregion
#region IDeserializationCallback Members
void IDeserializationCallback.OnDeserialization(object sender)
{
RecreateEventsHandlers();
}
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Its.Domain.Testing;
using NUnit.Framework;
using Sample.Domain.Ordering;
namespace Microsoft.Its.Domain.Sql.Tests
{
[Category("Catchups")]
[TestFixture]
public class RemainingCatchupTimeTests : EventStoreDbTest
{
protected override void AfterClassIsInitialized()
{
// these tests behave a little oddly sometimes if the database has just been rebuilt and no events have yet been caught up, so run a quick catchup to start
Events.Write(1);
RunCatchup(new TestProjector()).Wait();
}
[SetUp]
public void Init()
{
VirtualClock.Start();
}
[TearDown]
public new void TearDown()
{
Clock.Reset();
}
[Test]
public async Task If_events_have_been_processed_during_initial_replay_then_the_remaining_time_is_estimated_correctly()
{
//arrange
IEnumerable<EventHandlerProgress> progress = null;
Events.Write(10);
var eventsProcessed = 0;
var projector = new TestProjector
{
DoSomething = e =>
{
if (eventsProcessed == 5)
{
progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
}
VirtualClock.Current.AdvanceBy(TimeSpan.FromSeconds(1));
eventsProcessed++;
}
};
//act
await RunCatchup(projector);
progress.First(p => p.Name == EventHandler.FullName(projector))
.TimeRemainingForCatchup
.Value
.Should()
.Be(TimeSpan.FromSeconds(5));
}
[Test]
public async Task If_events_have_been_processed_after_initial_replay_then_the_remaining_time_is_estimated_correctly()
{
//arrange
//Initial replay
Events.Write(10);
var projector = new TestProjector();
await RunCatchup(projector);
//new set of events come in
IEnumerable<EventHandlerProgress> progress = null;
Events.Write(10);
var eventsProcessed = 0;
projector.DoSomething = e =>
{
if (eventsProcessed == 5)
{
progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
}
VirtualClock.Current.AdvanceBy(TimeSpan.FromSeconds(1));
eventsProcessed++;
};
//act
await RunCatchup(projector);
progress.First(p => p.Name == EventHandler.FullName(projector))
.TimeRemainingForCatchup.Value
.Should()
.Be(TimeSpan.FromSeconds(5));
}
[Test]
public async Task If_events_have_been_processed_after_initial_replay_then_the_time_taken_for_initial_replay_is_saved()
{
//arrange
ResetReadModelInfo();
var projector = new TestProjector
{
DoSomething = e => VirtualClock.Current.AdvanceBy(TimeSpan.FromSeconds(1))
};
//Initial replay
Events.Write(10);
await RunCatchup(projector);
//new set of events come in
Events.Write(5);
//act
await RunCatchup(projector);
var progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
//assert
progress.First(p => p.Name == EventHandler.FullName(projector))
.TimeTakenForInitialCatchup.Value
.Should()
.Be(TimeSpan.FromSeconds(9));
}
[Test]
public async Task If_events_have_been_processed_after_initial_replay_then_the_number_of_events_for_initial_replay_is_saved()
{
//arrange
ResetReadModelInfo();
var projector = new TestProjector
{
DoSomething = e => VirtualClock.Current.AdvanceBy(TimeSpan.FromSeconds(1))
};
//Initial replay
Events.Write(10);
await RunCatchup(projector);
//new set of events come in
Events.Write(5);
await RunCatchup(projector);
//act
var progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
//assert
progress.First(p => p.Name == EventHandler.FullName(projector))
.InitialCatchupEvents.Value
.Should()
.Be(10);
}
[Test]
public async Task If_events_have_been_processed_then_the_correct_number_of_remaining_events_is_returned()
{
//arrange
IEnumerable<EventHandlerProgress> progress = null;
Events.Write(5);
var eventsProcessed = 0;
var projector = new TestProjector
{
DoSomething = e =>
{
if (eventsProcessed == 4)
{
progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
}
eventsProcessed++;
}
};
//act
await RunCatchup(projector);
//assert
progress.First(p => p.Name == EventHandler.FullName(projector))
.EventsRemaining
.Should()
.Be(1);
}
[Test]
public async Task If_no_events_have_been_processed_then_the_remaining_time_is_null()
{
//arrange
ResetReadModelInfo();
Events.Write(5);
//act
var progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
//assert
progress.First(p => p.Name == EventHandler.FullName(new TestProjector()))
.TimeRemainingForCatchup
.HasValue
.Should()
.BeFalse();
}
[Test]
public async Task If_all_events_have_been_processed_then_the_remaining_time_is_zero()
{
//arrange
var projector = new TestProjector();
Events.Write(5);
await RunCatchup(projector);
//act
var progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
//assert
progress.First(p => p.Name == EventHandler.FullName(projector))
.TimeRemainingForCatchup
.Value
.Should()
.Be(TimeSpan.FromMinutes(0));
}
[Test]
public async Task If_all_events_have_been_processed_then_the_percentage_completed_is_100()
{
//arrange
Events.Write(5);
var projector = new TestProjector();
await RunCatchup(projector);
//act
var progress = EventHandlerProgressCalculator.Calculate(() => new ReadModelDbContext());
//assert
progress.First(p => p.Name == EventHandler.FullName(projector))
.PercentageCompleted
.Should()
.Be(100);
}
private void ResetReadModelInfo()
{
using (var db = new ReadModelDbContext())
{
foreach (var info in db.Set<ReadModelInfo>())
{
info.InitialCatchupStartTime = null;
info.InitialCatchupEndTime = null;
info.BatchRemainingEvents = 0;
info.BatchTotalEvents = 0;
info.BatchStartTime = null;
}
db.SaveChanges();
}
}
private async Task RunCatchup(TestProjector projector)
{
using (var catchup = CreateReadModelCatchup(projector))
{
await catchup.Run();
}
}
}
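/// <summary>
/// Minimal projector used by these tests: it handles Order.ItemAdded events and
/// forwards each event to the pluggable DoSomething delegate.
/// </summary>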
public class TestProjector : IUpdateProjectionWhen<Order.ItemAdded>
{
public Action<Order.ItemAdded> DoSomething = e => { };
public void UpdateProjection(Order.ItemAdded @event)
{
DoSomething(@event);
}
}
}
| |
using NeuroLinker.Extensions;
using NeuroLinker.Helpers;
using NeuroLinker.Interfaces.Helpers;
using NeuroLinker.Interfaces.Workers;
using NeuroLinker.Models;
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using NeuroLinker.ResponseWrappers;
using VaraniumSharp.Attributes;
namespace NeuroLinker.Workers
{
/// <summary>
/// Wrapper class for request processing
/// </summary>
[AutomaticContainerRegistration(typeof(IRequestProcessor))]
public class RequestProcessor : IRequestProcessor
{
#region Constructor
/// <summary>
/// DI Constructor
/// </summary>
/// <param name="pageRetriever">Page retriever instance</param>
public RequestProcessor(IPageRetriever pageRetriever)
{
_pageRetriever = pageRetriever;
}
#endregion
#region Public Methods
/// <summary>
/// Retrieve a Character from MAL
/// </summary>
/// <param name="characterId">Character Id</param>
/// <returns>Populated Character</returns>
public async Task<RetrievalWrapper<Character>> DoCharacterRetrieval(int characterId)
{
var character = new Character
{
Id = characterId,
Url = MalRouteBuilder.AnimeCharacterUrl(characterId)
};
try
{
var characterResponse = await _pageRetriever.RetrieveHtmlPageAsync(character.Url);
if (characterResponse.ResponseStatusCode == null)
{
throw characterResponse.Exception;
}
var characterDoc = characterResponse.Document;
character
.RetrieveCharacterName(characterDoc)
.RetrieveCharacterImage(characterDoc)
.RetrieveFavoriteCount(characterDoc)
.RetrieveBiography(characterDoc)
.RetrieveAnimeography(characterDoc)
.RetrieveMangaograhy(characterDoc)
.RetrieveSeiyuu(characterDoc);
return new RetrievalWrapper<Character>(characterResponse.ResponseStatusCode.Value,
characterResponse.Success,
character);
}
catch (Exception exception)
{
character.ErrorOccured = true;
character.ErrorMessage = exception.Message;
return new RetrievalWrapper<Character>(exception, character);
}
}
/// <summary>
/// Retrieve a Seiyuu from MAL
/// </summary>
/// <param name="seiyuuId"></param>
/// <returns></returns>
public async Task<RetrievalWrapper<Seiyuu>> DoSeiyuuRetrieval(int seiyuuId)
{
var seiyuu = new Seiyuu
{
Id = seiyuuId,
Url = MalRouteBuilder.SeiyuuUrl(seiyuuId)
};
try
{
var seiyuuResponse = await _pageRetriever.RetrieveHtmlPageAsync(MalRouteBuilder.SeiyuuUrl(seiyuuId));
if (seiyuuResponse.ResponseStatusCode == null)
{
throw seiyuuResponse.Exception;
}
var seiyuuDoc = seiyuuResponse.Document;
seiyuu
.RetrieveName(seiyuuDoc)
.RetrieveGivenName(seiyuuDoc)
.RetrieveFamilyName(seiyuuDoc)
.RetrieveBirthday(seiyuuDoc)
.RetrieveAdditionalInformation(seiyuuDoc)
.RetrieveWebsite(seiyuuDoc)
.RetrieveRoles(seiyuuDoc)
.RetrieveSeiyuuImage(seiyuuDoc);
return new RetrievalWrapper<Seiyuu>(seiyuuResponse.ResponseStatusCode.Value, seiyuuResponse.Success,
seiyuu);
}
catch (Exception exception)
{
seiyuu.ErrorOccured = true;
seiyuu.ErrorMessage = exception.Message;
return new RetrievalWrapper<Seiyuu>(exception, seiyuu);
}
}
/// <summary>
/// Retrieve an anime from MAL
/// </summary>
/// <param name="id">MAL Id</param>
/// <returns>Anime instance</returns>
public async Task<RetrievalWrapper<Anime>> GetAnime(int id)
{
return await DoAnimeRetrieval(id, null);
}
/// <summary>
/// Retrieve an anime from MAL
/// </summary>
/// <param name="id">MAL Id</param>
/// <param name="username">Username</param>
/// <param name="password">Password</param>
/// <returns>Anime instance</returns>
public async Task<RetrievalWrapper<Anime>> GetAnime(int id, string username, string password)
{
return await DoAnimeRetrieval(id, new Tuple<string, string>(username, password));
}
/// <summary>
/// Verify user credentials
/// </summary>
/// <param name="username">Username</param>
/// <param name="password">Password</param>
/// <returns>True - Credentials are valid, otherwise false</returns>
public async Task<DataPushResponseWrapper> VerifyCredentials(string username, string password)
{
var page = await _pageRetriever.RetrieveDocumentAsStringAsync(MalRouteBuilder.VerifyCredentialsUrl(),
username, password);
if (page.ResponseStatusCode == null)
{
return new DataPushResponseWrapper(page.Exception);
}
return new DataPushResponseWrapper(page.ResponseStatusCode.Value, page.Success);
}
#endregion
#region Private Methods
/// <summary>
/// Retrieve an anime from MAL
/// </summary>
/// <param name="id">MAL Id</param>
/// <param name="loginDetails">Username and password for retrieving user information. Pass null to retrieve the public page</param>
/// <returns>Anime instance</returns>
private async Task<RetrievalWrapper<Anime>> DoAnimeRetrieval(int id, Tuple<string, string> loginDetails)
{
var anime = new Anime();
try
{
var animePageTask = loginDetails == null
? _pageRetriever.RetrieveHtmlPageAsync(MalRouteBuilder.AnimeUrl(id))
: _pageRetriever.RetrieveHtmlPageAsync(MalRouteBuilder.AnimeUrl(id), loginDetails.Item1,
loginDetails.Item2);
var characterTask = _pageRetriever.RetrieveHtmlPageAsync(MalRouteBuilder.AnimeCastUrl(id));
var animeResponse = await animePageTask;
if (animeResponse.ResponseStatusCode == null)
{
throw animeResponse.Exception;
}
if (!new HttpResponseMessage(animeResponse.ResponseStatusCode.Value)
.IsSuccessStatusCode)
{
anime.ErrorOccured = true;
anime.ErrorMessage =
$"Status code {animeResponse.ResponseStatusCode.Value} does not indicate success";
return new RetrievalWrapper<Anime>(animeResponse.ResponseStatusCode.Value, false, anime);
}
var characterResponse = await characterTask;
var animeDoc = animeResponse.Document;
var characterDoc = characterResponse.Document;
anime
.RetrieveAnimeId(animeDoc)
.RetrieveAnimeTitle(animeDoc)
.RetrieveAlternativeTitles(animeDoc)
.RetrieveSynopsis(animeDoc)
.RetrieveImage(animeDoc)
.RetrieveType(animeDoc)
.RetrieveEpisodes(animeDoc)
.RetrieveStatus(animeDoc)
.RetrieveAirDates(animeDoc)
.RetrieveRating(animeDoc)
.RetrieveRank(animeDoc)
.RetrievePopularity(animeDoc)
.RetrieveScore(animeDoc)
.RetrieveMemberCount(animeDoc)
.RetrieveFavotireCount(animeDoc)
.RetrieveGenres(animeDoc)
.RetrieveInfoUrls(animeDoc)
.RetrieveRelatedAnime(animeDoc)
.PopulateCharacterAndSeiyuuInformation(characterDoc);
if (loginDetails != null)
{
anime
.RetrieveUserScore(animeDoc)
.RetrieveUserEpisode(animeDoc)
.RetrieveUserStatus(animeDoc);
}
// TODO - Add sanity check
return new RetrievalWrapper<Anime>(animeResponse.ResponseStatusCode.Value, animeResponse.Success,
anime);
}
catch (Exception exception)
{
anime.ErrorOccured = true;
anime.ErrorMessage = exception.Message;
return new RetrievalWrapper<Anime>(exception, anime);
}
}
#endregion
#region Variables
private readonly IPageRetriever _pageRetriever;
#endregion
}
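// Illustrative usage sketch (not part of the original source): the class is registered for
// IRequestProcessor via the AutomaticContainerRegistration attribute, so it can be resolved
// from the container and used directly. The "container" variable below is an assumption;
// only the GetAnime/DoCharacterRetrieval calls shown are taken from the code above.
//
//   IRequestProcessor processor = container.Resolve<IRequestProcessor>();
//   RetrievalWrapper<Anime> anime = await processor.GetAnime(1);
//   RetrievalWrapper<Character> character = await processor.DoCharacterRetrieval(1);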
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the MIT license. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using Analyzer.Utilities;
using Analyzer.Utilities.Extensions;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
namespace Microsoft.CodeQuality.Analyzers.ApiDesignGuidelines
{
using static MicrosoftCodeQualityAnalyzersResources;
/// <summary>
/// CA1724: Type names should not match namespaces
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp, LanguageNames.VisualBasic)]
public sealed class TypeNamesShouldNotMatchNamespacesAnalyzer : DiagnosticAnalyzer
{
internal const string RuleId = "CA1724";
private static readonly LocalizableString s_localizableTitle = CreateLocalizableResourceString(nameof(TypeNamesShouldNotMatchNamespacesTitle));
private static readonly LocalizableString s_localizableDescription = CreateLocalizableResourceString(nameof(TypeNamesShouldNotMatchNamespacesDescription));
internal static readonly DiagnosticDescriptor DefaultRule = DiagnosticDescriptorHelper.Create(
RuleId,
s_localizableTitle,
CreateLocalizableResourceString(nameof(TypeNamesShouldNotMatchNamespacesMessageDefault)),
DiagnosticCategory.Naming,
RuleLevel.Disabled,
description: s_localizableDescription,
isPortedFxCopRule: true,
isDataflowRule: false,
isReportedAtCompilationEnd: true);
internal static readonly DiagnosticDescriptor SystemRule = DiagnosticDescriptorHelper.Create(
RuleId,
s_localizableTitle,
CreateLocalizableResourceString(nameof(TypeNamesShouldNotMatchNamespacesMessageSystem)),
DiagnosticCategory.Naming,
RuleLevel.Disabled,
description: s_localizableDescription,
isPortedFxCopRule: true,
isDataflowRule: false,
isReportedAtCompilationEnd: true);
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get; } = ImmutableArray.Create(DefaultRule, SystemRule);
private static readonly object s_lock = new();
private static ImmutableDictionary<string, string>? s_wellKnownSystemNamespaceTable;
private static ImmutableDictionary<string, string> WellKnownSystemNamespaceTable
{
get
{
InitializeWellKnownSystemNamespaceTable();
RoslynDebug.Assert(s_wellKnownSystemNamespaceTable != null);
return s_wellKnownSystemNamespaceTable;
}
}
public override void Initialize(AnalysisContext context)
{
context.EnableConcurrentExecution();
context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.Analyze);
context.RegisterCompilationStartAction(
compilationStartAnalysisContext =>
{
var externallyVisibleNamedTypes = new ConcurrentBag<INamedTypeSymbol>();
compilationStartAnalysisContext.RegisterSymbolAction(
symbolAnalysisContext =>
{
var namedType = (INamedTypeSymbol)symbolAnalysisContext.Symbol;
if (namedType.IsExternallyVisible())
{
externallyVisibleNamedTypes.Add(namedType);
}
}, SymbolKind.NamedType);
compilationStartAnalysisContext.RegisterCompilationEndAction(
compilationAnalysisContext =>
{
var namespaceNamesInCompilation = new ConcurrentBag<string>();
Compilation compilation = compilationAnalysisContext.Compilation;
AddNamespacesFromCompilation(namespaceNamesInCompilation, compilation.GlobalNamespace);
/* We construct a dictionary whose keys are all the components of all the namespace names in the compilation,
* and whose values are the namespace names of which the components are a part. For example, if the compilation
* includes namespaces A.B and C.D, the dictionary will map "A" to "A", "B" to "A.B", "C" to "C", and "D" to "C.D".
* When the analyzer encounters a type name that appears in a dictionary, it will emit a diagnostic, for instance,
* "Type name "D" conflicts with namespace name "C.D"".
* A component can occur in more than one namespace (for example, you might have namespaces "A" and "A.B".).
* In that case, we have to choose one namespace to report the diagnostic on. We want to make sure that this is
* deterministic (we don't want to complain about "A" in one compilation, and about "A.B" in the next).
* By calling ToImmutableSortedSet on the list of namespace names in the compilation, we ensure that
* we'll always construct the dictionary with the same set of keys.
*/
var namespaceComponentToNamespaceNameDictionary = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
UpdateNamespaceTable(namespaceComponentToNamespaceNameDictionary, namespaceNamesInCompilation.ToImmutableSortedSet());
foreach (INamedTypeSymbol symbol in externallyVisibleNamedTypes)
{
string symbolName = symbol.Name;
if (WellKnownSystemNamespaceTable.ContainsKey(symbolName))
{
compilationAnalysisContext.ReportDiagnostic(symbol.CreateDiagnostic(SystemRule, symbolName, WellKnownSystemNamespaceTable[symbolName]));
}
else if (namespaceComponentToNamespaceNameDictionary.ContainsKey(symbolName))
{
compilationAnalysisContext.ReportDiagnostic(symbol.CreateDiagnostic(DefaultRule, symbolName, namespaceComponentToNamespaceNameDictionary[symbolName]));
}
}
});
});
}
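/// <summary>
/// Recursively walks the namespace tree and records the display name of every namespace
/// that (directly or through a child namespace) contains an externally visible type.
/// Returns true when such a type was found under the given namespace.
/// </summary>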
private static bool AddNamespacesFromCompilation(ConcurrentBag<string> namespaceNamesInCompilation, INamespaceSymbol @namespace)
{
bool hasExternallyVisibleType = false;
foreach (INamespaceSymbol namespaceMember in @namespace.GetNamespaceMembers())
{
if (AddNamespacesFromCompilation(namespaceNamesInCompilation, namespaceMember))
{
hasExternallyVisibleType = true;
}
}
if (!hasExternallyVisibleType)
{
foreach (var type in @namespace.GetTypeMembers())
{
if (type.IsExternallyVisible())
{
hasExternallyVisibleType = true;
break;
}
}
}
if (hasExternallyVisibleType)
{
namespaceNamesInCompilation.Add(@namespace.ToDisplayString());
return true;
}
return false;
}
private static void InitializeWellKnownSystemNamespaceTable()
{
if (s_wellKnownSystemNamespaceTable == null)
{
lock (s_lock)
{
#pragma warning disable CA1508 // Avoid dead conditional code - https://github.com/dotnet/roslyn-analyzers/issues/3861
if (s_wellKnownSystemNamespaceTable == null)
#pragma warning restore CA1508 // Avoid dead conditional code
{
#region List of Well known System Namespaces
var wellKnownSystemNamespaces = new List<string>
{
"Microsoft.CSharp",
"Microsoft.SqlServer.Server",
"Microsoft.VisualBasic",
"Microsoft.Win32",
"Microsoft.Win32.SafeHandles",
"System",
"System.CodeDom",
"System.CodeDom.Compiler",
"System.Collections",
"System.Collections.Generic",
"System.Collections.ObjectModel",
"System.Collections.Specialized",
"System.ComponentModel",
"System.ComponentModel.Design",
"System.ComponentModel.Design.Serialization",
"System.Configuration",
"System.Configuration.Assemblies",
"System.Data",
"System.Data.Common",
"System.Data.Odbc",
"System.Data.OleDb",
"System.Data.Sql",
"System.Data.SqlClient",
"System.Data.SqlTypes",
"System.Deployment.Internal",
"System.Diagnostics",
"System.Diagnostics.CodeAnalysis",
"System.Diagnostics.SymbolStore",
"System.Drawing",
"System.Drawing.Design",
"System.Drawing.Drawing2D",
"System.Drawing.Imaging",
"System.Drawing.Printing",
"System.Drawing.Text",
"System.Globalization",
"System.IO",
"System.IO.Compression",
"System.IO.IsolatedStorage",
"System.IO.Ports",
"System.Media",
"System.Net",
"System.Net.Cache",
"System.Net.Configuration",
"System.Net.Mail",
"System.Net.Mime",
"System.Net.NetworkInformation",
"System.Net.Security",
"System.Net.Sockets",
"System.Reflection",
"System.Reflection.Emit",
"System.Resources",
"System.Runtime",
"System.Runtime.CompilerServices",
"System.Runtime.ConstrainedExecution",
"System.Runtime.Hosting",
"System.Runtime.InteropServices",
"System.Runtime.InteropServices.ComTypes",
"System.Runtime.InteropServices.Expando",
"System.Runtime.Remoting",
"System.Runtime.Remoting.Activation",
"System.Runtime.Remoting.Channels",
"System.Runtime.Remoting.Contexts",
"System.Runtime.Remoting.Lifetime",
"System.Runtime.Remoting.Messaging",
"System.Runtime.Remoting.Metadata",
"System.Runtime.Remoting.Metadata.W3cXsd2001",
"System.Runtime.Remoting.Proxies",
"System.Runtime.Remoting.Services",
"System.Runtime.Serialization",
"System.Runtime.Serialization.Formatters",
"System.Runtime.Serialization.Formatters.Binary",
"System.Runtime.Versioning",
"System.Security",
"System.Security.AccessControl",
"System.Security.Authentication",
"System.Security.Cryptography",
"System.Security.Cryptography.X509Certificates",
"System.Security.Permissions",
"System.Security.Policy",
"System.Security.Principal",
"System.Text",
"System.Text.RegularExpressions",
"System.Threading",
"System.Timers",
"System.Web",
"System.Web.Caching",
"System.Web.Compilation",
"System.Web.Configuration",
"System.Web.Configuration.Internal",
"System.Web.Handlers",
"System.Web.Hosting",
"System.Web.Mail",
"System.Web.Management",
"System.Web.Profile",
"System.Web.Security",
"System.Web.SessionState",
"System.Web.UI",
"System.Web.UI.Adapters",
"System.Web.UI.HtmlControls",
"System.Web.UI.WebControls",
"System.Web.UI.WebControls.Adapters",
"System.Web.UI.WebControls.WebParts",
"System.Web.Util",
"System.Windows.Forms",
"System.Windows.Forms.ComponentModel.Com2Interop",
"System.Windows.Forms.Design",
"System.Windows.Forms.Layout",
"System.Windows.Forms.PropertyGridInternal",
"System.Windows.Forms.VisualStyles",
"System.Xml",
"System.Xml.Schema",
"System.Xml.Serialization",
"System.Xml.Serialization.Advanced",
"System.Xml.Serialization.Configuration",
"System.Xml.XPath",
"System.Xml.Xsl"
};
#endregion
var wellKnownSystemNamespaceTable = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
UpdateNamespaceTable(wellKnownSystemNamespaceTable, wellKnownSystemNamespaces);
s_wellKnownSystemNamespaceTable = wellKnownSystemNamespaceTable.ToImmutableDictionary();
}
}
}
}
private static void UpdateNamespaceTable(Dictionary<string, string> namespaceTable, IList<string> namespaces)
{
if (namespaces == null)
{
return;
}
foreach (string namespaceName in namespaces)
{
UpdateNamespaceTable(namespaceTable, namespaceName);
}
}
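/// <summary>
/// Splits the namespace name on '.' and maps each component that is not already a key
/// to the full namespace name, so the first (sorted) namespace wins for a shared component.
/// </summary>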
private static void UpdateNamespaceTable(Dictionary<string, string> namespaceTable, string namespaceName)
{
foreach (string word in namespaceName.Split('.'))
{
if (!namespaceTable.ContainsKey(word))
namespaceTable.Add(word, namespaceName);
}
}
}
}
| |
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
using OpenADK.Library.Impl;
using OpenADK.Library.Infra;
using OpenADK.Library.Tools.XPath;
namespace OpenADK.Library
{
/// <summary> Encapsulates a SIF Query.
///
/// An instance of this class is passed to the <c>Zone.query</c> and
/// <c>Topic.query</c> methods when issuing a SIF Request. A Query object
/// defines the following parameters to the request:
///
/// <list type="bullet">
/// <item><term>The type of SIF Data Object to query for</term></item>
/// <item><term>Conditions: One or more conditions may be placed on the query to
/// select a subset of objects from the responder (when no conditions are
/// present the responder returns all objects)</term></item>
/// <item><term>Field Restrictions: An optional list of elements to include in
/// responses to the query (when no field restrictions are present the
/// responder returns the full set of elements for each object)</term></item>
/// </list>
///
///
/// To construct a simple Query to query for all objects with no conditions or
/// field restrictions, call the constructor that accepts an ElementDef constant
/// from the SifDtd class:
///
///
/// <blockquote>
/// <c>
/// Query myQuery = new Query( SifDtd.STUDENTPERSONAL );<br/>
/// </c>
/// </blockquote>
///
/// More complex queries can be constructed by specifying conditions and field
/// restrictions.
///
/// <b>Conditions</b>
/// A Query may optionally specify one or more conditions to restrict the number
/// of objects returned by the responder. (Refer to the SIF Specification for a
/// detailed description of how query conditions may be constructed.) When no
/// conditions are specified, the responder interprets the query to mean "all
/// objects". Note SIF 1.0r2 and earlier limit queries such that only root-level
/// attributes may be included in query conditions, and only the equals ("EQ")
/// comparison operator may be used. SIF 1.1 and later allow agents to query for
/// elements within an object, but responders may return an error if they do not
/// support that functionality.
///
///
/// Query conditions are encapsulated by the Adk's ConditionGroup class, which is
/// used to build SIF_ConditionGroup, SIF_Conditions, and SIF_Condition elements
/// when the class framework sends a SIF_Request message to a zone. Every Query
/// with conditions has a root ConditionGroup with one or more child ConditionGroups.
/// Unless you construct these groups manually, the Query class will automatically
/// establish a root ConditionGroup and a single child when the <c>addCondition</c>
/// method is called. Use the <c>addCondition</c> method to add conditions
/// to a Query. Note the form of Query constructor you call determines how the
/// <c>addCondition</c> method works. If you call the default constructor,
/// the Adk automatically establishes a root SIF_ConditionGroup with a Type
/// attribute of "None", and a single SIF_Conditions child with a Type attribute
/// of "And". ("None" will be used if the query has only one condition.)
/// SIF_Condition elements are then added to this element whenever the
/// <c>addCondition</c> method is called.
///
/// For example,
///
/// <blockquote>
/// <c>
/// // Query for a single student by RefId<br/>
/// Query query = new Query( SifDtd.STUDENTPERSONAL );<br/>
/// query.addCondition(<br/>
/// SifDtd.STUDENTPERSONAL_REFID, Condition.EQ,<br/>
/// "4A37969803F0D00322AF0EB969038483" );<br/>
/// </c>
/// </blockquote>
///
/// If you want to specify the "Or" comparison operator instead of the default
/// of "And", call the constructor that accepts a constant from the Condition
/// class.
///
/// For example,
///
/// <blockquote>
/// <c>
/// // Query for student where the RefId is A, B, or C<br/>
/// Query query = new Query( SifDtd.STUDENTPERSONAL, Condition.OR );<br/>
/// <br/>
/// query.addCondition(<br/>
/// SifDtd.STUDENTPERSONAL_REFID, Condition.EQ,<br/>
/// "4A37969803F0D00322AF0EB969038483" );<br/>
/// query.addCondition(<br/>
/// SifDtd.STUDENTPERSONAL_REFID, Condition.EQ,<br/>
/// "5A37969803F0D00322AF0EB969038484" );<br/>
/// query.addCondition(<br/>
/// SifDtd.STUDENTPERSONAL_REFID, Condition.EQ,<br/>
/// "6A37969803F0D00322AF0EB969038485" );<br/>
/// </c>
/// </blockquote>
///
/// The above examples show how to add simple conditions to a Query. To construct
/// complex queries with nested groups of conditions, create your own root
/// SIF_ConditionGroup object by calling the form of constructor that
/// accepts a ConditionGroup instance. You can specify nested ConditionGroup
/// children of this root object.
///
///
/// For example,
///
/// <blockquote>
/// <c>
/// // Query for student where the Last Name is Jones and the First Name is<br/>
/// // Bob, and the graduation year is 2004, 2005, or 2006<br/>
/// ConditionGroup root = new ConditionGroup( Condition.AND );<br/>
/// ConditionGroup grp1 = new ConditionGroup( Condition.AND );<br/>
/// ConditionGroup grp2 = new ConditionGroup( Condition.OR );<br/>
/// <br/>
/// // For nested elements, you cannot reference a SifDtd constant. Instead, use<br/>
/// // the lookupElementDefBySQP function to look up an ElementDef constant<br/>
/// // given a SIF Query Pattern (SQP)<br/>
/// ElementDef lname = Adk.Dtd().lookupElementDefBySQP(<br/>
/// SifDtd.STUDENTPERSONAL, "Name/LastName" );<br/>
/// ElementDef fname = Adk.Dtd().lookupElementDefBySQP(<br/>
/// SifDtd.STUDENTPERSONAL, "Name/FirstName" );<br/>
/// grp1.addCondition( lname, Condition.EQ, "Jones" );<br/>
/// grp1.addCondition( fname, Condition.EQ, "Bob" );<br/>
/// <br/>
/// grp2.addCondition( SifDtd.STUDENTPERSONAL_GRADYEAR, Condition.EQ, "2004" );<br/>
/// grp2.addCondition( SifDtd.STUDENTPERSONAL_GRADYEAR, Condition.EQ, "2005" );<br/>
/// grp2.addCondition( SifDtd.STUDENTPERSONAL_GRADYEAR, Condition.EQ, "2006" );<br/>
/// <br/>
/// // Add condition groups to the root group<br/>
/// root.addGroup( grp1 );<br/>
/// root.addGroup( grp2 );<br/>
/// <br/>
/// // Query for student with the conditions prepared above by passing the<br/>
/// // root ConditionGroup to the constructor<br/>
/// Query query = new Query( SifDtd.STUDENTPERSONAL, root );<br/>
/// </c>
/// </blockquote>
///
/// <b>Field Restrictions</b>
/// If only a subset of elements and attributes are requested, use the
/// <c>setFieldRestrictions</c> method to indicate which elements and
/// attributes should be returned to your agent by the responder. For example,
/// to request the <c>StudentPersonal</c> object with RefId "4A37969803F0D00322AF0EB969038483"
/// but to only include the <c>RefId</c> attribute and <c>Name</c>
/// and <c>PhoneNumber</c> elements in the response,
///
/// <blockquote>
/// <c>
///
/// // Query for a single student by RefId<br/>
/// Query query = new Query( SifDtd.STUDENTPERSONAL );<br/>
/// <br/>
/// query.addCondition(<br/>
/// SifDtd.STUDENTPERSONAL_REFID, Condition.EQ,<br/>
/// "4A37969803F0D00322AF0EB969038483" );<br/>
///
/// query.setFieldRestrictions(<br/>
/// new ElementDef[] {<br/>
/// SifDtd.STUDENTPERSONAL_REFID,<br/>
/// SifDtd.STUDENTPERSONAL_NAME,<br/>
/// SifDtd.STUDENTPERSONAL_PHONENUMBER<br/>
/// }
/// );
/// </c>
/// </blockquote>
///
/// </summary>
/// <author> Eric Petersen
/// </author>
/// <version> 1.0
/// </version>
public sealed class Query
{
/// <summary>The object to query </summary>
private IElementDef fObjType;
/// <summary>The version of SIF associated with the query </summary>
private SifVersion[] fVersions = new SifVersion[0];
/// <summary>
/// The SIF Context that this Query applies to
/// </summary>
private SifContext fContext = SifContext.DEFAULT;
/// <summary>Root condition groups </summary>
private ConditionGroup fRoot;
/// <summary>Fields to include in the result of the query (null = all fields) </summary>
private List<ElementRef> fFieldRestrictions;
/// <summary>
/// User state
/// </summary>
private object fUserData;
/// <summary> Constructs a Query object with no initial conditions or field
/// restrictions. If conditions are subsequently added to the Query, they
/// will be evaluated as a group with the logical AND operator. To specify
/// that the logical OR operator be used, call the form of constructor that
/// accepts an alternate operator.
///
///
/// </summary>
/// <param name="objectType">An ElementDef describing the object type to query (e.g.
/// <c>Adk.Dtd.STUDENTPERSONAL</c>)
/// </param>
public Query(IElementDef objectType)
{
if (!objectType.Object)
{
throw new ArgumentException
("\"" + objectType.Name + "\" is not a root-level SIF Data Object");
}
fObjType = objectType;
fRoot = null;
}
/// <summary> Constructs a Query object with one ConditionGroup where all conditions
/// in the group are evaluated using the supplied grouping operator. All Conditions
/// subsequently added to this Query will be placed into the ConditionGroup
/// created by the constructor.
///
/// This constructor is provided as a convenience so that callers do
/// not have to explicitly create a ConditionGroup for simple queries.
///
/// </summary>
/// <param name="objectType">An ElementDef describing the object type to query (e.g.
/// <c>StudentDTD.STUDENTPERSONAL</c>)
/// </param>
/// <param name="logicalOp">The logical operator that defines how to compare this group
/// with other condition groups that comprise the query (e.g. Condition.OR)
/// </param>
public Query(IElementDef objectType,
GroupOperator logicalOp)
{
if (!objectType.Object)
{
throw new ArgumentException
("\"" + objectType.Name + "\" is not a root-level SIF Data Object");
}
fObjType = objectType;
fRoot = new ConditionGroup(logicalOp);
}
/// <summary> Constructs a Query object with a ConditionGroup.
///
///
/// </summary>
/// <param name="objectType">An ElementDef describing the object type to query (e.g.
/// <c>StudentDtd.STUDENTPERSONAL</c>)
/// </param>
/// <param name="conditions">A ConditionGroup comprised of one or more query Conditions
/// </param>
public Query(IElementDef objectType,
ConditionGroup conditions)
{
if (!objectType.Object)
{
throw new ArgumentException
("\"" + objectType.Name + "\" is not a root-level SIF Data Object");
}
fObjType = objectType;
fRoot = conditions;
}
/// <summary> Constructs a Query object from a SIF_QueryObject.
///
/// This constructor is not typically called by agents but is used internally
/// by the class framework. The other constructors can be used to safely
/// create Query instances to request a specific SIF Data Object. Use the
/// <c>addCondition</c> and <c>setFieldRestrictions</c> methods
/// to further define the conditions and SIF elements specified by the query.
///
/// </summary>
/// <param name="query">A SIF_Query object received in a SIF_Request message
/// </param>
/// <exception cref="AdkUnknownOperatorException">If one of the operators in the SIF_Query is
/// unrecognized by the ADK</exception>
/// <exception cref="AdkSchemaException">If the object or elements defined in the query or
/// not recognized by the ADK </exception>
public Query(SIF_Query query)
{
SIF_QueryObject qo = query.SIF_QueryObject;
if (qo == null)
{
throw new ArgumentException("SIF_Query must have a SIF_QueryObject element");
}
fObjType = Adk.Dtd.LookupElementDef(qo.ObjectName);
if (fObjType == null)
{
throw new AdkSchemaException
(qo.ObjectName +
" is not a recognized SIF Data Object, or the agent is not configured to support this object type");
}
fRoot = null;
SIF_ConditionGroup cg = query.SIF_ConditionGroup;
if (cg != null && cg.GetSIF_Conditionses() != null)
{
GroupOperator grpOp;
try
{
grpOp = Condition.ParseGroupOperator(cg.Type);
}
catch (AdkUnknownOperatorException)
{
grpOp = GroupOperator.None;
}
fRoot = new ConditionGroup(grpOp);
SIF_Conditions[] sifConds = cg.GetSIF_Conditionses();
if (sifConds.Length == 1)
{
// There is one SIF_ConditionGroup with one SIF_Conditions,
// so just add all of the conditions (no nested groups)
string typ = sifConds[0].Type;
if (typ == null)
{
throw new AdkSchemaException
("SIF_Conditions/@Type is a required attribute");
}
fRoot.fOp = Condition.ParseGroupOperator(typ);
SIF_Condition[] clist = sifConds[0].GetSIF_Conditions();
PopulateConditions(query, clist, fRoot);
}
else
{
// There are multiple SIF_Conditions, so add each as a nested
// ConditionGroup of the fRoot
for (int i = 0; i < sifConds.Length; i++)
{
ConditionGroup nested =
new ConditionGroup(Condition.ParseGroupOperator(sifConds[i].Type));
PopulateConditions(query, sifConds[i].GetSIF_Conditions(), nested);
fRoot.AddGroup(nested);
}
}
}
SifVersion[] reqVersions = null;
// First, try to get the version from the SIF_Request
Element parent = query.Parent;
if (parent != null)
{
if (parent is SIF_Request)
{
SIF_Request request = (SIF_Request)parent;
SifVersion[] versions = request.parseRequestVersions(Adk.Log);
if (versions.Length > 0)
{
reqVersions = versions;
}
}
}
if (reqVersions == null)
{
SifVersion version = query.EffectiveSIFVersion;
if (version != null)
{
reqVersions = new SifVersion[] { version };
}
}
if (reqVersions == null || reqVersions.Length == 0)
{
throw new ArgumentException(
"SIF_Query is not contained in a SIF_Request that has a SIF_Version element; cannot determine version of SIF to associated with this Query object");
}
else
{
fVersions = reqVersions;
}
SIF_Element[] fields = query.SIF_QueryObject.GetSIF_Elements();
if (fields != null && fields.Length > 0)
{
for (int i = 0; i < fields.Length; i++)
{
string xPath = fields[i].TextValue;
if (xPath == null || xPath.Length == 0)
{
continue;
}
AddFieldRestriction(xPath);
}
}
}
/// <summary> Gets the object type being queried</summary>
/// <returns> The name of the object passed to the constructor
/// </returns>
public IElementDef ObjectType
{
get { return fObjType; }
}
/// <summary> Gets the tag name of the object type being queried</summary>
/// <returns> The tag name of the object passed to the constructor
/// </returns>
public string ObjectTag
{
get { return fObjType.Tag(Adk.GetLatestSupportedVersion(fVersions)); }
}
/// <summary>
/// Gets and Sets custom state associated with this request. The state object used must be
/// serializable and for performance reasons should be reasonably small.
/// </summary>
public object UserData
{
get { return fUserData; }
set { fUserData = value; }
}
/// <summary>Gets or sets the fields to include in the result of the query.</summary>
/// <value> An array of fields that should be included in the results of
/// this query, or null if all fields are to be included
/// </value>
public IElementDef[] FieldRestrictions
{
get
{
if (fFieldRestrictions == null)
{
return null;
}
IElementDef[] returnValue = new IElementDef[fFieldRestrictions.Count];
for (int i = 0; i < returnValue.Length; i++)
{
returnValue[i] = fFieldRestrictions[i].Field;
}
return returnValue;
}
set
{
if (fFieldRestrictions != null)
{
fFieldRestrictions.Clear();
}
foreach (IElementDef def in value)
{
AddFieldRestriction(def);
}
}
}
/// <summary>
/// Gets the fields that will be included in the result of the query
/// </summary>
/// <value>An array of field references that should be included
/// in the results of the query or null if all fields are to be included</value>
public IList<ElementRef> FieldRestrictionRefs
{
get { return fFieldRestrictions; }
}
/// <summary> Gets the conditions placed on this query.</summary>
/// <returns> An array of ConditionGroup objects in evaluation order. The
/// children of the root ConditionGroup are returned. If no conditions
/// have been specified, an empty array is returned.
/// </returns>
public ConditionGroup[] Conditions
{
get
{
if (fRoot == null)
{
return new ConditionGroup[0];
}
ConditionGroup[] groups = fRoot.Groups;
if (groups != null && groups.Length > 0)
{
return groups;
}
// There is a fRoot group -- which means the user must have called
// the default constructor and then called AddCondition() to add one
// or more conditions -- but the root group does not itself have any
// nested groups. So, just return the root group...
return new ConditionGroup[] { fRoot };
}
}
/// <summary> Gets the root ConditionGroup.</summary>
/// <returns> The root ConditionGroup that was established by the constructor.
/// If this query has no conditions, null is returned.
/// </returns>
public ConditionGroup RootConditionGroup
{
get { return fRoot; }
}
/// <summary>Gets or Sets the value of the SIF_Request/SIF_Version element. By default,
/// this value is set to the version of SIF declared for the agent when the
/// Adk was initialized.
///
/// </summary>
/// <value> The version of SIF the responding agent should use when
/// returning SIF_Response messages for this query
/// </value>
public SifVersion[] SifVersions
{
get { return fVersions; }
set { fVersions = value; }
}
/// <summary>
/// From the list of SifVersions associated with this Query, returns the latest SifVersion
/// supported by the current ADK instance.
/// </summary>
/// <seealso cref="Adk.GetLatestSupportedVersion"/>
public SifVersion EffectiveVersion
{
get { return Adk.GetLatestSupportedVersion(fVersions); }
}
/// <summary> Sets the root ConditionGroup.
///
/// By default a Query is constructed with a ConditionGroup to which
/// individual conditions will be added by the <c>AddCondition</c>
/// methods. You can call this method to prepare a ConditionGroup ahead of
/// time and replace the default with your own.
///
/// Note that calling this method after <c>AddCondition</c> will replace
/// any conditions previously added to the Query with the conditions in the
/// supplied ConditionGroup.
/// </summary>
public ConditionGroup ConditionGroup
{
set { fRoot = value; }
}
private void PopulateConditions(SIF_Query query,
SIF_Condition[] clist,
ConditionGroup target)
{
for (int i = 0; i < clist.Length; i++)
{
String o = clist[i].SIF_Operator;
ComparisonOperators ops = Condition.ParseComparisionOperators(o);
String val = clist[i].SIF_Value;
String path = clist[i].SIF_Element;
target.AddCondition(fObjType, path, ops, val);
}
}
/// <summary>
/// Add a condition to this query.
/// </summary>
/// <remarks>
/// This method of adding conditions is convenient for adding conditions involving
/// root attributes or elements to a query. If you need to add conditions on deeply
/// nested elements, use <see cref="AddCondition(string,ComparisonOperators,string)"/>
/// </remarks>
/// <param name="field">A constant from the package DTD class that identifies an element
/// or attribute of the data object (e.g. <c>StudentDTD.STUDENTPERSONAL_REFID</c>)</param>
/// <param name="ops">The comparison operator. Comparison operator constants are
/// defined by the ComparisonOperators enum</param>
/// <param name="value">The data that is used to compare to the element or attribute</param>
/// <exception cref="ArgumentException">if the ElementDef does not represent an immediate
/// child of the object being queried.</exception>
public void AddCondition(IElementDef field, ComparisonOperators ops, String value)
{
// Do some validation to try to prevent invalid query paths from being created
String relativePath = field.GetSQPPath(Adk.SifVersion);
IElementDef lookedUp = Adk.Dtd.LookupElementDefBySQP(fObjType, relativePath);
if (lookedUp == null)
{
throw new ArgumentException("Invalid path: " + fObjType.Name + "/" + relativePath +
" is unable to be resolved");
}
AddCondition(new Condition(fObjType, relativePath, ops, value));
}
/// <summary>
/// Add a condition to this query.
/// </summary>
/// <param name="condition">The condition to add. This condition is added to the root
/// condition group.</param>
/// <seealso cref="Query.RootConditionGroup"/>
/// <seealso cref="Conditions"/>
public void AddCondition(Condition condition)
{
if (fRoot == null)
{
fRoot = new ConditionGroup(GroupOperator.And);
}
fRoot.AddCondition(condition);
}
/// <summary>
/// Add a condition to this query using a deeply nested path. Using this
/// method of adding query condition allows for specifying deeply nested query
/// conditions. However, the xpath specified here is specific to the version
/// of SIF
/// </summary>
/// <remarks>To ensure your code works with all versions of SIF, you should use
/// <see cref="Query.AddCondition(IElementDef, ComparisonOperators, String)"/>
/// whenever possible.</remarks>
/// <param name="xPath">he Simple XPath to use for this query condition. E.g.
/// <c>SIF_ExendedElements/SIF_ExtendedElement[@Name='eyecolor']</c></param>
/// <param name="ops">Comparison operator value from the
/// ComparisonOperators enum</param>
/// <param name="value">The data that is used to compare to the element or attribute</param>
public void AddCondition(String xPath, ComparisonOperators ops, String value)
{
AddCondition(new Condition(fObjType, xPath, ops, value));
}
/// <summary>
/// Add a condition to this query. This form of the <c>AddCondition</c>
/// method is intended to be called internally by the ADK when parsing an
/// incoming SIF_Query element. To ensure your code works with all versions
/// of SIF, you should use the other form of this method that accepts an
/// ElementDef constant for the <i>field</i> parameter whenever possible.
/// </summary>
/// <param name="field">
/// Identifies an element or attribute of the data object in
/// SIF Query Pattern form as described by the SIF Specification
/// (e.g. "@RefId"). With SIF 1.5r1 and earlier, only root-level
/// attributes may be specified in a query. Note this string is specific
/// to the version of SIF associated with the Query as element and
/// attribute names may vary from one version of SIF to the next. The
/// version defaults to the version of SIF in effect for the agent or the
/// version of SIF associated with the <c>SIF_Query</c> object
/// passed to the constructor.
/// </param>
/// <param name="ops">A value from the ComparisonOperators enum</param>
/// <param name="value">The data that is used to compare to the element or attribute</param>
public void AddCondition(String field, String ops, String value)
{
try
{
AddCondition(
field, Condition.ParseComparisionOperators(ops), value);
}
catch (AdkUnknownOperatorException uoe)
{
Adk.Log.WarnFormat("Unable to parse operator: {0} {1}", ops, uoe, uoe);
AddCondition(field, ComparisonOperators.EQ, value);
}
}
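// Usage sketch (illustration only, not part of the ADK source): the three AddCondition overloads
// side by side. 'query' stands for any Query instance; StudentDTD.STUDENTPERSONAL_REFID is the
// hypothetical ElementDef constant referenced in the remarks above, and 'refId' is a made-up value.
//
// query.AddCondition(StudentDTD.STUDENTPERSONAL_REFID, ComparisonOperators.EQ, refId); // version-independent ElementDef form
// query.AddCondition("SIF_ExtendedElements/SIF_ExtendedElement[@Name='eyecolor']", ComparisonOperators.EQ, "Blue"); // version-specific XPath form
// query.AddCondition("@RefId", "EQ", refId); // string-operator form, parsed with ParseComparisionOperators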
/// <summary> Restricts the query to a specific field (i.e. element or attribute) of
/// the data object being requested. If invoked, the results of the query
/// will only contain the elements or attributes specified by the fields for
/// which this method is called (call this method repeatedly for each field).
/// Otherwise, the results will contain a complete object.
///
/// </summary>
/// <param name="field">A <c>ElementDef</c> object defined by the static
/// constants of the <c>SifDtd</c> class. For example, to restrict
/// a query for the StudentPersonal topic to include only the StatePr
/// element of the student address, pass <c>SifDtd.ADDRESS_STATEPR</c>.
/// This would cause the query results to include only
/// <c>StudentPersonal/Address/StatePr</c> elements.
/// </param>
[MethodImpl(MethodImplOptions.Synchronized)]
public void AddFieldRestriction(IElementDef field)
{
if (field == null)
{
throw new ArgumentException("Field cannot be null");
}
if (fFieldRestrictions == null)
{
fFieldRestrictions = new List<ElementRef>();
}
fFieldRestrictions.Add(new ElementRef(fObjType, field, EffectiveVersion));
}
/// <summary>
/// Restricts the query to a specific field (i.e. element or attribute) of
/// the data object being requested. If invoked, the results of the query
/// will only contain the elements or attributes specified by the fields for
/// which this method is called (call this method repeatedly for each field).
/// Otherwise, the results will contain a complete object.
/// </summary>
/// <param name="xPath">An XPath representing the field being referenced</param>
public void AddFieldRestriction(String xPath)
{
if (xPath == null || xPath.Length == 0)
{
throw new ArgumentException("Field cannot be null or zero-length : " + xPath);
}
if (fFieldRestrictions == null)
{
fFieldRestrictions = new List<ElementRef>();
}
fFieldRestrictions.Add(new ElementRef(fObjType, xPath, EffectiveVersion));
}
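// Usage sketch (illustration only): limiting SIF_Response payloads to a subset of fields. The
// STUDENTPERSONAL_NAME constant and the address XPath are assumptions used purely for illustration.
//
// query.AddFieldRestriction(StudentDTD.STUDENTPERSONAL_NAME); // ElementDef form, version-independent
// query.AddFieldRestriction("AddressList/Address/StatePr"); // XPath form, specific to the effective SIF version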
/// <summary> Determines if this Query has any conditions</summary>
/// <value> true if the query has one or more conditions
/// </value>
/// <seealso cref="Conditions">
/// </seealso>
/// <seealso cref="AddCondition(IElementDef,ComparisonOperators,string)">
/// </seealso>
public bool HasConditions
{
get { return fRoot != null && fRoot.HasConditions(); }
}
/// <summary> Determines if this Query has any field restrictions</summary>
/// <value> true if the query specifies a subset of fields to be returned;
/// false if the query returns all elements and attributes of each object
/// matching the query conditions
/// </value>
/// <seealso cref="FieldRestrictions">
/// </seealso>
/// <seealso cref="AddFieldRestriction(string)"></seealso>
/// <seealso cref="AddFieldRestriction(IElementDef)"/>
public bool HasFieldRestrictions
{
get { return fFieldRestrictions != null && fFieldRestrictions.Count > 0; }
}
/// <summary> Tests if this Query has a specific element or attribute condition</summary>
/// <param name="elementOrAttr">The ElementDef constant from the SifDtd class that
/// identifies the specific attribute or element to search for
/// </param>
/// <returns>The Condition object representing the condition. If no
/// Condition exists for the element or attribute, null is returned</returns>
public Condition HasCondition(IElementDef elementOrAttr)
{
ConditionGroup[] grps = Conditions;
for (int i = 0; i < grps.Length; i++)
{
Condition c = grps[i].HasCondition(elementOrAttr);
if (c != null)
{
return c;
}
}
return null;
}
/// <summary>
/// Tests if this Query has a condition referencing a specific xPath
/// </summary>
/// <param name="xPath">The Xpath which identifies the specific attribute or element to search for</param>
/// <returns>The Condition object representing the condition. If no
/// Condition exists for the element or attribute, null is returned</returns>
public Condition HasCondition(String xPath)
{
ConditionGroup[] grps = Conditions;
for (int i = 0; i < grps.Length; i++)
{
Condition c = grps[i].HasCondition(xPath);
if (c != null)
return c;
}
return null;
}
/// <summary>
/// Returns the XML representation of this Query in the format required by SIF
/// </summary>
/// <returns>a string containing the XML representation as a SIF_Query element. If an error
/// occurs during the conversion, an empty string ("") is returned.</returns>
public String ToXml()
{
return ToXml(EffectiveVersion);
}
/// <summary>
/// Returns the XML representation of this Query in the format required by SIF
/// for the specified version
/// </summary>
/// <param name="version">The SIF Version to render the Query in. The ADK will attempt to render
/// the query path using the proper element or attribute names for the version of SIF
/// </param>
/// <returns>a string containing the XML representation as a SIF_Query element. If an error
/// occurs during the conversion, an empty string ("") is returned.
/// </returns>
public String ToXml(SifVersion version)
{
// Create a SIF_Query object
SIF_Query sifQ = SIFPrimitives.CreateSIF_Query(this, version, true);
try
{
using (StringWriter outStream = new StringWriter())
{
SifWriter w = new SifWriter(outStream);
w.Write(sifQ);
w.Flush();
return outStream.ToString();
}
}
catch (Exception e)
{
Adk.Log.Warn("Error creating XML equivalent of Query: " + e, e);
return "";
}
}
/// <summary>
/// Returns the SIF_Query representation of this Query in the format required by SIF
/// </summary>
/// <returns>A SIF_Query element</returns>
public SIF_Query ToSIF_Query()
{
return ToSIF_Query(Adk.SifVersion);
}
/// <summary>Returns the SIF_Query representation of this Query in the format required by SIF
/// for the specified version
/// </summary>
/// <param name="version">The SIF Version to render the Query in. The ADK will attempt to render
/// the query path using the proper element or attribute names for the version of SIF</param>
/// <returns>A SIF_Query element</returns>
public SIF_Query ToSIF_Query(SifVersion version)
{
return SIFPrimitives.CreateSIF_Query(this, version, true);
}
/// <summary>
/// Evaluates the given SifDataObject against the conditions provided in the
/// Query. All conditions are evaluated as standard string comparisons using
/// the invariant culture.
/// </summary>
/// <param name="obj"></param>
/// <returns></returns>
/// <exception cref="OpenADK.Library.AdkSchemaException">If the condition contains references to invalid elements</exception>
public bool Evaluate(SifDataObject obj)
{
return Evaluate(obj, CultureInfo.InvariantCulture);
}
/// <summary>
/// Evaluates the given SifDataObject against the conditions provided in the
/// Query. All conditions are evaluated as string comparisons using the provided culture.
/// </summary>
/// <param name="obj"> The SIFDataObject to evalaute against this query</param>
/// <param name="culture">The culture info used to do string comparisons</param>
/// <returns>True if the object satisfies the query conditions</returns>
/// <exception cref="OpenADK.Library.AdkSchemaException">If the condition contains references to invalid elements</exception>
public bool Evaluate(SifDataObject obj,
CultureInfo culture)
{
if (obj.ElementDef != fObjType)
{
return false;
}
if (fRoot != null)
{
SifXPathContext context = SifXPathContext.NewSIFContext(obj, EffectiveVersion);
return EvaluateConditionGroup(context, fRoot, culture);
}
return true;
}
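// Usage sketch (illustration only): evaluating a received data object against this query.
// 'studentPersonal' stands for any SifDataObject of the queried type.
//
// bool matches = query.Evaluate(studentPersonal); // invariant-culture string comparisons
// bool matchesDe = query.Evaluate(studentPersonal, new CultureInfo("de-DE")); // explicit culture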
/// <summary>
/// Evaluates a condition group against a SifDataObject to determine
/// whether it matches
/// </summary>
/// <param name="grp"></param>
/// <param name="context"></param>
/// <param name="culture"></param>
/// <returns>True if the result of evaluating the condition groups is true</returns>
/// <exception cref="OpenADK.Library.AdkSchemaException">If the condition contains references to invalid elements</exception>
private bool EvaluateConditionGroup(SifXPathContext context,
ConditionGroup grp,
CultureInfo culture)
{
Condition[] conds = grp.Conditions;
if (conds.Length > 0)
{
bool returnOnFirstMatch = grp.Operator == GroupOperator.Or;
foreach (Condition c in conds)
{
if ((EvaluateCondition(context, c, culture)) == returnOnFirstMatch)
{
// If this is an OR group, return true on the first match
// If this is an AND Group, return false on the first failure
return returnOnFirstMatch;
}
}
// None of the conditions matched the returnOnFirstMatch value. Therefore,
// return the opposite value
return !returnOnFirstMatch;
}
else
{
return EvaluateConditionGroups(context, grp.Operator, grp.Groups, culture);
}
}
/// <summary>
/// Evaluates the condition groups and returns True if the Operator is OR and at least
/// one of the groups evaluates to TRUE. If the Operator is AND, all of the condition
/// groups have to evaluate to TRUE
/// </summary>
/// <param name="op"></param>
/// <param name="grps"></param>
/// <param name="context"></param>
/// <param name="culture"></param>
/// <returns></returns>
/// <exception cref="OpenADK.Library.AdkSchemaException">If the condition contains references to invalid elements</exception>
private bool EvaluateConditionGroups(SifXPathContext context,
GroupOperator op,
ConditionGroup[] grps,
CultureInfo culture)
{
bool isMatch = true;
for (int c = 0; c < grps.Length; c++)
{
bool singleMatch = EvaluateConditionGroup(context, grps[c], culture);
if (op == GroupOperator.Or)
{
if (singleMatch)
{
// In OR mode, return as soon as we evaluate to True
return true;
}
isMatch |= singleMatch;
}
else
{
isMatch &= singleMatch;
}
// As soon as the evaluation fails, return
if (!isMatch)
{
return false;
}
}
return isMatch;
}
/// <summary>
/// Evaluates a single SIF_Condition against an object and returns whether it matches or not
/// </summary>
/// <param name="cond"></param>
/// <param name="context"></param>
/// <param name="culture"></param>
/// <returns></returns>
/// <exception cref="OpenADK.Library.AdkSchemaException">If the condition contains references to invalid elements</exception>
private bool EvaluateCondition(SifXPathContext context,
Condition cond,
CultureInfo culture)
{
// TODO: Add support for comparison using the SIF Data Types
Element def = context.GetElementOrAttribute(cond.GetXPath());
String conditionValue = cond.Value;
String elementValue = null;
if (def != null)
{
SifSimpleType value = def.SifValue;
if (value != null)
{
// Format the value to string, based on the query version
elementValue = value.ToString(EffectiveVersion);
}
else
{
// TODO: Not sure if this would ever return a value if the above does not
elementValue = def.TextValue;
}
}
if (elementValue == null || conditionValue == null)
{
// Don't use a standard comparison because it would fail: if
// one or the other value is null, the values cannot be compared, except
// when the operator is EQ, GE, LE, or NE
bool bothAreNull = (elementValue == null && conditionValue == null);
switch (cond.Operators)
{
case ComparisonOperators.EQ:
case ComparisonOperators.GE:
case ComparisonOperators.LE:
return bothAreNull;
case ComparisonOperators.NE:
return !bothAreNull;
default:
// For any other operator, the results are indeterminate with
// null values. Return false in this case.
return false;
}
}
int compareLevel = String.Compare(elementValue, conditionValue, false, culture);
switch (cond.Operators)
{
case ComparisonOperators.EQ:
return compareLevel == 0;
case ComparisonOperators.NE:
return compareLevel != 0;
case ComparisonOperators.GT:
return compareLevel > 0;
case ComparisonOperators.LT:
return compareLevel < 0;
case ComparisonOperators.GE:
return compareLevel >= 0;
case ComparisonOperators.LE:
return compareLevel <= 0;
}
return false;
}
/// <summary>
/// Gets or sets the SIF context that this query applies to
/// </summary>
/// <value>The SIF Context that this query applies to</value>
public SifContext SifContext
{
get { return fContext; }
set { fContext = value; }
}
/// <summary>
/// If SIFElement restrictions are placed on this query, this method
/// will take the SIFDataObject and call setChanged(false). It will then
/// go through each of the SIFElement restrictions, resolve them, and
/// call setChanged(true) on those elements only. This will cause the
/// object to be rendered properly using SIFWriter.
/// </summary>
/// <param name="sdo"></param>
public void SetRenderingRestrictionsTo(SifDataObject sdo)
{
if (sdo == null || fFieldRestrictions == null)
{
return;
}
sdo.SetChanged(false);
// Go through and only set the filtered items to true
SifXPathContext context = SifXPathContext.NewSIFContext(sdo);
foreach (ElementRef elementRef in fFieldRestrictions)
{
String xPath = elementRef.XPath;
Element e = context.GetElementOrAttribute(xPath);
if (e != null)
{
e.SetChanged();
}
}
sdo.EnsureRootElementRendered();
}
}
}
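// A minimal end-to-end sketch (illustration only, not part of the ADK source). It assumes the
// Query(IElementDef) constructor defined earlier in this class, the ComparisonOperators enum used
// above, and the StudentDTD constants referenced in the documentation; the exact namespaces and
// constant names (OpenADK.Library.us.Student, STUDENTPERSONAL_NAME) may differ by ADK build.
using OpenADK.Library;
using OpenADK.Library.us.Student;
public static class QueryUsageSketch
{
public static string BuildStudentQueryXml()
{
// Query for one StudentPersonal by RefId, returning only the Name element
Query query = new Query(StudentDTD.STUDENTPERSONAL);
query.AddCondition(StudentDTD.STUDENTPERSONAL_REFID, ComparisonOperators.EQ,
"D3E34B359D75101A8C3D00AA001A1652");
query.AddFieldRestriction(StudentDTD.STUDENTPERSONAL_NAME);
// Render as a SIF_Query element using the latest SIF version this agent supports
return query.ToXml(query.EffectiveVersion);
}
}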
| |
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Mvc.Localization;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.Extensions.DependencyInjection;
using OrchardCore.DisplayManagement.Shapes;
using OrchardCore.DisplayManagement.Title;
using OrchardCore.Settings;
namespace OrchardCore.DisplayManagement.Razor
{
public interface IRazorPage
{
string ViewLayout { get; set; }
}
public abstract class RazorPage<TModel> : Microsoft.AspNetCore.Mvc.Razor.RazorPage<TModel>, IRazorPage
{
private IDisplayHelper _displayHelper;
private IShapeFactory _shapeFactory;
private IOrchardDisplayHelper _orchardHelper;
private ISite _site;
public override ViewContext ViewContext
{
get => base.ViewContext;
set
{
// We make the ViewContext available to other sub-systems that need it.
var viewContextAccessor = value.HttpContext.RequestServices.GetService<ViewContextAccessor>();
base.ViewContext = viewContextAccessor.ViewContext = value;
}
}
private void EnsureDisplayHelper()
{
if (_displayHelper == null)
{
_displayHelper = Context.RequestServices.GetService<IDisplayHelper>();
}
}
private void EnsureShapeFactory()
{
if (_shapeFactory == null)
{
_shapeFactory = Context.RequestServices.GetService<IShapeFactory>();
}
}
/// <summary>
/// Gets a dynamic shape factory to create new shapes.
/// </summary>
/// <example>
/// Usage:
/// <code>
/// await New.MyShape()
/// await New.MyShape(A: 1, B: "Some text")
/// (await New.MyShape()).A(1).B("Some text")
/// </code>
/// </example>
public dynamic New => Factory;
/// <summary>
/// Gets an <see cref="IShapeFactory"/> to create new shapes.
/// </summary>
public IShapeFactory Factory
{
get
{
EnsureShapeFactory();
return _shapeFactory;
}
}
/// <summary>
/// Renders a shape.
/// </summary>
/// <param name="shape">The shape.</param>
public Task<IHtmlContent> DisplayAsync(dynamic shape)
{
EnsureDisplayHelper();
return _displayHelper.ShapeExecuteAsync(shape);
}
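// Usage sketch (illustration only), as it might appear in a Razor view; "MyShape" is a made-up
// shape name created through the New factory documented above.
//
// var shape = await New.MyShape(Title: "Hello");
// @await DisplayAsync(shape)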
public IOrchardDisplayHelper Orchard
{
get
{
if (_orchardHelper == null)
{
EnsureDisplayHelper();
_orchardHelper = new OrchardDisplayHelper(Context, _displayHelper);
}
return _orchardHelper;
}
}
private dynamic _themeLayout;
public dynamic ThemeLayout
{
get
{
if (_themeLayout == null)
{
_themeLayout = Context.Features.Get<RazorViewFeature>()?.ThemeLayout;
}
return _themeLayout;
}
set
{
_themeLayout = value;
}
}
public string ViewLayout
{
get
{
if (ThemeLayout is IShape layout)
{
if (layout.Metadata.Alternates.Count > 0)
{
return layout.Metadata.Alternates.Last;
}
return layout.Metadata.Type;
}
return String.Empty;
}
set
{
if (ThemeLayout is IShape layout)
{
if (layout.Metadata.Alternates.Contains(value))
{
if (layout.Metadata.Alternates.Last == value)
{
return;
}
layout.Metadata.Alternates.Remove(value);
}
layout.Metadata.Alternates.Add(value);
}
}
}
private IPageTitleBuilder _pageTitleBuilder;
public IPageTitleBuilder Title
{
get
{
if (_pageTitleBuilder == null)
{
_pageTitleBuilder = Context.RequestServices.GetRequiredService<IPageTitleBuilder>();
}
return _pageTitleBuilder;
}
}
private IViewLocalizer _t;
/// <summary>
/// The <see cref="IViewLocalizer"/> instance for the current view.
/// </summary>
public IViewLocalizer T
{
get
{
if (_t == null)
{
_t = Context.RequestServices.GetRequiredService<IViewLocalizer>();
((IViewContextAware)_t).Contextualize(ViewContext);
}
return _t;
}
}
/// <summary>
/// Adds a segment to the title and returns all segments.
/// </summary>
/// <param name="segment">The segment to add to the title.</param>
/// <param name="position">Optional. The position of the segment in the title.</param>
/// <param name="separator">The html string that should separate all segments.</param>
/// <returns>An <see cref="IHtmlContent"/> instance representing the full title.</returns>
public IHtmlContent RenderTitleSegments(IHtmlContent segment, string position = "0", IHtmlContent separator = null)
{
Title.AddSegment(segment, position);
return Title.GenerateTitle(separator);
}
/// <summary>
/// Adds a segment to the title and returns all segments.
/// </summary>
/// <param name="segment">The segment to add to the title.</param>
/// <param name="position">Optional. The position of the segment in the title.</param>
/// <param name="separator">The html string that should separate all segments.</param>
/// <returns>An <see cref="IHtmlContent"/> instance representing the full title.</returns>
public IHtmlContent RenderTitleSegments(string segment, string position = "0", IHtmlContent separator = null)
{
if (!String.IsNullOrEmpty(segment))
{
Title.AddSegment(new StringHtmlContent(segment), position);
}
return Title.GenerateTitle(separator);
}
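// Usage sketch (illustration only), as it might appear in a layout view; the segment text and
// separator are made up.
//
// <title>@RenderTitleSegments("Dashboard", separator: new HtmlString(" - "))</title>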
/// <summary>
/// Renders the content zone of the layout.
/// </summary>
public IHtmlContent RenderLayoutBody()
{
var result = base.RenderBody();
return result;
}
/// <summary>
/// Creates a <see cref="TagBuilder"/> to render a shape.
/// </summary>
/// <param name="shape">The shape.</param>
/// <returns>A new <see cref="TagBuilder"/>.</returns>
public TagBuilder Tag(dynamic shape)
{
return Shape.GetTagBuilder(shape);
}
public TagBuilder Tag(dynamic shape, string tag)
{
return Shape.GetTagBuilder(shape, tag);
}
/// <summary>
/// In a Razor layout page, renders the portion of a content page that is not within a named zone.
/// </summary>
/// <returns>The HTML content to render.</returns>
public Task<IHtmlContent> RenderBodyAsync()
{
return DisplayAsync(ThemeLayout.Content);
}
/// <summary>
/// Checks whether a zone is defined in the layout or has items.
/// </summary>
/// <param name="name"></param>
/// <returns></returns>
public new bool IsSectionDefined(string name)
{
// We can replace the base implementation as it can't be called on a view that is not an actual MVC Layout.
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
var zone = ThemeLayout[name];
return zone != null;
}
/// <summary>
/// Renders a zone from the layout.
/// </summary>
/// <param name="name">The name of the zone to render.</param>
public new IHtmlContent RenderSection(string name)
{
// We can replace the base implementation as it can't be called on a view that is not an actual MVC Layout.
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
return RenderSection(name, required: true);
}
/// <summary>
/// Renders a zone from the layout.
/// </summary>
/// <param name="name">The name of the zone to render.</param>
/// <param name="required">Whether the zone is required or not.</param>
public new IHtmlContent RenderSection(string name, bool required)
{
// We can replace the base implementation as it can't be called on a view that is not an actual MVC Layout.
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
return RenderSectionAsync(name, required).GetAwaiter().GetResult();
}
/// <summary>
/// Renders a zone from the layout.
/// </summary>
/// <param name="name">The name of the zone to render.</param>
public new Task<IHtmlContent> RenderSectionAsync(string name)
{
// We can replace the base implementation as it can't be called on a view that is not an actual MVC Layout.
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
return RenderSectionAsync(name, required: true);
}
/// <summary>
/// Renders a zone from the layout.
/// </summary>
/// <param name="name">The name of the zone to render.</param>
/// <param name="required">Whether the zone is required or not.</param>
public new Task<IHtmlContent> RenderSectionAsync(string name, bool required)
{
// We can replace the base implementation as it can't be called on a view that is not an actual MVC Layout.
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
var zone = ThemeLayout[name];
if (required && zone != null && zone is Shape && zone.Items.Count == 0)
{
throw new InvalidOperationException("Zone not found: " + name);
}
return DisplayAsync(zone);
}
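// Usage sketch (illustration only), from a hypothetical layout view: render the "Footer" zone
// only when the theme layout defines it, without throwing if it is empty.
//
// @if (IsSectionDefined("Footer"))
// {
//     @await RenderSectionAsync("Footer", required: false)
// }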
public object OrDefault(object text, object other)
{
if (text == null || Convert.ToString(text) == "")
{
return other;
}
return text;
}
/// <summary>
/// Returns the full escaped path of the current request.
/// </summary>
public string FullRequestPath => Context.Request.PathBase + Context.Request.Path + Context.Request.QueryString;
/// <summary>
/// Gets the <see cref="ISite"/> instance.
/// </summary>
public ISite Site
{
get
{
if (_site == null)
{
_site = Context.Features.Get<RazorViewFeature>()?.Site;
}
return _site;
}
}
}
public abstract class RazorPage : RazorPage<dynamic>
{
}
}
| |
using System;
using System.Xml;
using TimeAndDate.Services.Common;
using System.Globalization;
using System.Collections.Generic;
using System.Linq;
namespace TimeAndDate.Services.DataTypes.Time
{
public class TADDateTime
{
/// <value>
/// Year.
/// </value>
public Int32 Year { get; set; }
/// <value>
/// Month.
/// </value>
public Int32 Month { get; set; }
/// <value>
/// Day.
/// </value>
public Int32 Day { get; set; }
/// <value>
/// Hour.
/// </value>
public Int32 Hour { get; set; }
/// <value>
/// Minute.
/// </value>
public Int32 Minute { get; set; }
/// <value>
/// Second.
/// </value>
public Int32 Second { get; set; }
private string Iso { get; set; }
public TADDateTime() { }
public TADDateTime(Int32 year, int month, int day, int hour, int minute, int second)
{
Year = year;
Month = month;
Day = day;
Hour = hour;
Minute = minute;
Second = second;
}
public TADDateTime(Int32 year, int month, int day)
{
Year = year;
Month = month;
Day = day;
}
public TADDateTime(DateTimeOffset date)
{
Year = date.Year;
Month = date.Month;
Day = date.Day;
Hour = date.Hour;
Minute = date.Minute;
Second = date.Second;
}
public TADDateTime(DateTime date)
{
Year = date.Year;
Month = date.Month;
Day = date.Day;
Hour = date.Hour;
Minute = date.Minute;
Second = date.Second;
}
public static explicit operator DateTimeOffset (TADDateTime date)
{
return new DateTimeOffset(date.Year, date.Month, date.Day, date.Hour, date.Minute, date.Second, TimeSpan.FromMinutes(0));
}
public TADDateTime(string s)
{
var strlist = new List<string>();
if (s.Contains("T"))
{
strlist = new List<string>(s.Split("T"));
foreach (char ch in "+-Z")
{
strlist[1] = strlist[1].Split(ch)[0];
}
}
else
{
strlist.Add(s);
}
try {
List<int> date_list = strlist[0].Split("-").Select(Int32.Parse).ToList<int>();
if (s.Contains("T"))
{
List<int> time_list = strlist[1].Split(":").Select(Int32.Parse).ToList<int>();
Hour = time_list[0];
Minute = time_list[1];
Second = time_list[2];
}
Year = date_list[0];
Month = date_list[1];
Day = date_list[2];
Iso = s;
} catch {
throw new InvalidIsoStringException ("Failed to parse date from the given string.");
}
}
public override bool Equals(Object obj)
{
if ((obj == null) || !this.GetType().Equals(obj.GetType()))
{
return false;
}
TADDateTime t1 = obj as TADDateTime;
return Year == t1.Year &&
Month == t1.Month &&
Day == t1.Day &&
Hour == t1.Hour &&
Minute == t1.Minute &&
Second == t1.Second;
}
public static explicit operator string (TADDateTime date)
{
if (date.Iso != null)
return date.Iso;
return $"{date.Year.ToString().PadLeft(4, '0')}"
+ $"-{date.Month.ToString().PadLeft(2, '0')}"
+ $"-{date.Day.ToString().PadLeft(2, '0')}"
+ $"T{date.Hour.ToString().PadLeft(2, '0')}"
+ $":{date.Minute.ToString().PadLeft(2, '0')}"
+ $":{date.Second.ToString().PadLeft(2, '0')}";
}
public static explicit operator TADDateTime(XmlNode node)
{
var model = new TADDateTime();
var year = node.SelectSingleNode("year");
var month = node.SelectSingleNode("month");
var day = node.SelectSingleNode("day");
var hour = node.SelectSingleNode("hour");
var minute = node.SelectSingleNode("minute");
var second = node.SelectSingleNode("second");
if (year != null)
{
model.Year = Int32.Parse(year.InnerText);
}
if (month != null)
{
model.Month = Int32.Parse(month.InnerText);
}
if (day != null)
{
model.Day = Int32.Parse(day.InnerText);
}
if (hour != null)
{
model.Hour = Int32.Parse(hour.InnerText);
}
if (minute != null)
{
model.Minute = Int32.Parse(minute.InnerText);
}
if (second != null)
{
model.Second = Int32.Parse(second.InnerText);
}
return model;
}
}
}
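// A short usage sketch (illustration only, not part of the library): parsing ISO-8601 strings with
// the string constructor above and converting back through the explicit operators. The sample
// values are made up.
using System;
using TimeAndDate.Services.DataTypes.Time;
public static class TADDateTimeUsageSketch
{
public static void Run()
{
// Date-and-time form; the "T" separator triggers time parsing, and any "+", "-" or "Z"
// offset suffix after the time portion is stripped before the components are parsed.
var dt = new TADDateTime("2015-06-07T08:09:10");
Console.WriteLine((string)dt); // prints the original ISO string
Console.WriteLine((DateTimeOffset)dt); // converted with a zero UTC offset
// Date-only form leaves Hour, Minute and Second at their defaults
var d = new TADDateTime("2015-06-07");
Console.WriteLine($"{d.Year}-{d.Month}-{d.Day}");
}
}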
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using OrchardCore.Autoroute.Services;
using OrchardCore.ContentManagement.Routing;
using OrchardCore.Environment.Shell;
using OrchardCore.Environment.Shell.Builders;
using OrchardCore.Environment.Shell.Models;
using OrchardCore.Locking;
using OrchardCore.Locking.Distributed;
using Xunit;
namespace OrchardCore.Tests.Routing
{
public class AutorouteEntriesTests
{
[Fact]
public async Task ShouldGetContainedEntryByPath()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
var initialEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path", "contained")
};
entries.AddEntries(initialEntries);
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByPathAsync("/contained-path");
Assert.True(result);
Assert.Equal("contained", containedEntry.ContainedContentItemId);
});
}
[Fact]
public async Task ShouldGetEntryByContainedContentItemId()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
var initialEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path", "contained")
};
entries.AddEntries(initialEntries);
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByContentItemIdAsync("contained");
Assert.True(result);
Assert.Equal("/contained-path", containedEntry.Path);
});
}
[Fact]
public async Task RemovesContainedEntriesWhenContainerRemoved()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
var initialEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path", "contained")
};
entries.AddEntries(initialEntries);
entries.RemoveEntries(new[] { new AutorouteEntry("container", "container-path", null, null) });
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByPathAsync("/contained-path");
Assert.False(result);
});
}
[Fact]
public async Task RemovesContainedEntriesWhenDeleted()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
var initialEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path1", "contained1"),
new AutorouteEntry("container", "contained-path2", "contained2")
};
entries.AddEntries(initialEntries);
var updatedEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path1", "contained1")
};
entries.AddEntries(updatedEntries);
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByPathAsync("/contained-path2");
Assert.False(result);
});
}
[Fact]
public async Task RemovesOldContainedPaths()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
var initialEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path-old", "contained")
};
entries.AddEntries(initialEntries);
var updatedEntries = new List<AutorouteEntry>()
{
new AutorouteEntry("container", "container-path"),
new AutorouteEntry("container", "contained-path-new", "contained")
};
entries.AddEntries(updatedEntries);
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByPathAsync("/contained-path-old");
Assert.False(result);
});
}
[Fact]
public async Task RemovesOldPaths()
{
// Setup
var shellContext = CreateShellContext();
await shellContext.CreateScope().UsingAsync(scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Act
entries.AddEntries(new[] { new AutorouteEntry("container", "container-path", null, null) });
entries.RemoveEntries(new[] { new AutorouteEntry("container", "container-path", null, null) });
return Task.CompletedTask;
});
await shellContext.CreateScope().UsingAsync(async scope =>
{
var entries = scope.ServiceProvider.GetRequiredService<IStubAutorouteEntries>();
// Test
(var result, var containedEntry) = await entries.TryGetEntryByPathAsync("/container-path");
Assert.False(result);
});
}
private static ShellContext CreateShellContext()
{
return new ShellContext()
{
Settings = new ShellSettings() { Name = ShellHelper.DefaultShellName, State = TenantState.Running },
ServiceProvider = CreateServiceProvider()
};
}
private static IServiceProvider CreateServiceProvider()
{
var services = new ServiceCollection();
services.AddSingleton<IStubAutorouteEntries, StubAutorouteEntries>();
services.AddSingleton<IDistributedLock, LocalLock>();
return services.AddLogging().BuildServiceProvider();
}
public interface IStubAutorouteEntries : IAutorouteEntries
{
void AddEntries(IEnumerable<AutorouteEntry> entries);
void RemoveEntries(IEnumerable<AutorouteEntry> entries);
}
private class StubAutorouteEntries : AutorouteEntries, IStubAutorouteEntries
{
public StubAutorouteEntries() : base(null) { }
public new void AddEntries(IEnumerable<AutorouteEntry> entries) => base.AddEntries(entries);
public new void RemoveEntries(IEnumerable<AutorouteEntry> entries) => base.RemoveEntries(entries);
protected override Task InitializeEntriesAsync() => Task.CompletedTask;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if ES_BUILD_STANDALONE
using System;
using System.Diagnostics;
#endif
using System.Text;
#if ES_BUILD_STANDALONE
using Environment = Microsoft.Diagnostics.Tracing.Internal.Environment;
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
/// <summary>
/// TraceLogging: Contains the information needed to generate tracelogging
/// metadata for an event field.
/// </summary>
internal class FieldMetadata
{
/// <summary>
/// Name of the field
/// </summary>
private readonly string name;
/// <summary>
/// The number of bytes in the UTF8 Encoding of 'name' INCLUDING a null terminator.
/// </summary>
private readonly int nameSize;
private readonly EventFieldTags tags;
private readonly byte[]? custom;
/// <summary>
/// ETW supports fixed sized arrays. If inType has the InTypeFixedCountFlag then this is the
/// statically known count for the array. It is also used to encode the number of bytes of
/// custom meta-data if InTypeCustomCountFlag set.
/// </summary>
private readonly ushort fixedCount;
private byte inType;
private byte outType;
/// <summary>
/// Scalar or variable-length array.
/// </summary>
public FieldMetadata(
string name,
TraceLoggingDataType type,
EventFieldTags tags,
bool variableCount)
: this(
name,
type,
tags,
variableCount ? Statics.InTypeVariableCountFlag : (byte)0,
0,
null)
{
}
/// <summary>
/// Fixed-length array.
/// </summary>
public FieldMetadata(
string name,
TraceLoggingDataType type,
EventFieldTags tags,
ushort fixedCount)
: this(
name,
type,
tags,
Statics.InTypeFixedCountFlag,
fixedCount,
null)
{
}
/// <summary>
/// Custom serializer
/// </summary>
public FieldMetadata(
string name,
TraceLoggingDataType type,
EventFieldTags tags,
byte[]? custom)
: this(
name,
type,
tags,
Statics.InTypeCustomCountFlag,
checked((ushort)(custom == null ? 0 : custom.Length)),
custom)
{
}
private FieldMetadata(
string name,
TraceLoggingDataType dataType,
EventFieldTags tags,
byte countFlags,
ushort fixedCount = 0,
byte[]? custom = null)
{
if (name == null)
{
throw new ArgumentNullException(
nameof(name),
"This usually means that the object passed to Write is of a type that"
+ " does not support being used as the top-level object in an event,"
+ " e.g. a primitive or built-in type.");
}
Statics.CheckName(name);
int coreType = (int)dataType & Statics.InTypeMask;
this.name = name;
this.nameSize = Encoding.UTF8.GetByteCount(this.name) + 1;
this.inType = (byte)(coreType | countFlags);
this.outType = (byte)(((int)dataType >> 8) & Statics.OutTypeMask);
this.tags = tags;
this.fixedCount = fixedCount;
this.custom = custom;
if (countFlags != 0)
{
if (coreType == (int)TraceLoggingDataType.Nil)
{
throw new NotSupportedException(SR.EventSource_NotSupportedArrayOfNil);
}
if (coreType == (int)TraceLoggingDataType.Binary)
{
throw new NotSupportedException(SR.EventSource_NotSupportedArrayOfBinary);
}
if (coreType == (int)TraceLoggingDataType.Utf16String ||
coreType == (int)TraceLoggingDataType.MbcsString)
{
throw new NotSupportedException(SR.EventSource_NotSupportedArrayOfNullTerminatedString);
}
}
if (((int)this.tags & 0xfffffff) != 0)
{
this.outType |= Statics.OutTypeChainFlag;
}
if (this.outType != 0)
{
this.inType |= Statics.InTypeChainFlag;
}
}
public void IncrementStructFieldCount()
{
this.inType |= Statics.InTypeChainFlag;
this.outType++;
if ((this.outType & Statics.OutTypeMask) == 0)
{
throw new NotSupportedException(SR.EventSource_TooManyFields);
}
}
/// <summary>
/// This is the main routine for FieldMetadata. Basically it will serialize the data in
/// this structure as TraceLogging style meta-data into the array 'metadata' starting at
/// 'pos' (pos is updated to reflect the bytes written).
///
/// Note that 'metadata' can be null, in which case it only updates 'pos'. This is useful
/// for a 'two pass' approach where you figure out how big to make the array, and then you
/// fill it in.
/// </summary>
public void Encode(ref int pos, byte[]? metadata)
{
// Write out the null terminated UTF8 encoded name
if (metadata != null)
{
Encoding.UTF8.GetBytes(this.name, 0, this.name.Length, metadata, pos);
}
pos += this.nameSize;
// Write 1 byte for inType
if (metadata != null)
{
metadata[pos] = this.inType;
}
pos++;
// If InTypeChainFlag set, then write out the outType
if (0 != (this.inType & Statics.InTypeChainFlag))
{
if (metadata != null)
{
metadata[pos] = this.outType;
}
pos++;
// If OutTypeChainFlag set, then write out tags
if (0 != (this.outType & Statics.OutTypeChainFlag))
{
Statics.EncodeTags((int)this.tags, ref pos, metadata);
}
}
// If InTypeFixedCountFlag set, write out the fixedCount (2 bytes little endian)
if (0 != (this.inType & Statics.InTypeFixedCountFlag))
{
if (metadata != null)
{
metadata[pos + 0] = unchecked((byte)this.fixedCount);
metadata[pos + 1] = (byte)(this.fixedCount >> 8);
}
pos += 2;
// If InTypeCustomCountFlag set, write out the blob of custom meta-data.
if (Statics.InTypeCustomCountFlag == (this.inType & Statics.InTypeCountMask) &&
this.fixedCount != 0)
{
if (metadata != null)
{
Debug.Assert(custom != null);
Buffer.BlockCopy(custom, 0, metadata, pos, this.fixedCount);
}
pos += this.fixedCount;
}
}
}
}
}
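// The Encode method above follows a two-pass pattern: call it once with a null buffer to measure
// the required size, allocate, then call it again to fill the buffer. A minimal standalone sketch
// of that pattern (not tied to the EventSource internals; names are illustrative only):
using System.Text;
internal static class TwoPassEncodingSketch
{
// Writes a null-terminated UTF-8 name followed by one type byte, mirroring the first two steps
// of FieldMetadata.Encode. When 'buffer' is null only 'pos' advances.
private static void EncodeField(string name, byte inType, ref int pos, byte[] buffer)
{
if (buffer != null)
{
Encoding.UTF8.GetBytes(name, 0, name.Length, buffer, pos);
}
pos += Encoding.UTF8.GetByteCount(name) + 1; // +1 for the null terminator
if (buffer != null)
{
buffer[pos] = inType;
}
pos++;
}
public static byte[] Encode(string name, byte inType)
{
int pos = 0;
EncodeField(name, inType, ref pos, null); // pass 1: measure
byte[] buffer = new byte[pos];
pos = 0;
EncodeField(name, inType, ref pos, buffer); // pass 2: fill
return buffer;
}
}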
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: money/cards/transactions/payment_card_authorization_reversal.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace HOLMS.Types.Money.Cards.Transactions {
/// <summary>Holder for reflection information generated from money/cards/transactions/payment_card_authorization_reversal.proto</summary>
public static partial class PaymentCardAuthorizationReversalReflection {
#region Descriptor
/// <summary>File descriptor for money/cards/transactions/payment_card_authorization_reversal.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static PaymentCardAuthorizationReversalReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkJtb25leS9jYXJkcy90cmFuc2FjdGlvbnMvcGF5bWVudF9jYXJkX2F1dGhv",
"cml6YXRpb25fcmV2ZXJzYWwucHJvdG8SJGhvbG1zLnR5cGVzLm1vbmV5LmNh",
"cmRzLnRyYW5zYWN0aW9ucxo7bW9uZXkvY2FyZHMvdHJhbnNhY3Rpb25zL3By",
"b2Nlc3Nvcl90cmFuc2FjdGlvbl9yZXN1bHQucHJvdG8aH3ByaW1pdGl2ZS9t",
"b25ldGFyeV9hbW91bnQucHJvdG8aH2dvb2dsZS9wcm90b2J1Zi90aW1lc3Rh",
"bXAucHJvdG8iggIKIFBheW1lbnRDYXJkQXV0aG9yaXphdGlvblJldmVyc2Fs",
"Ej4KD3JldmVyc2FsX2Ftb3VudBgBIAEoCzIlLmhvbG1zLnR5cGVzLnByaW1p",
"dGl2ZS5Nb25ldGFyeUFtb3VudBIdChVob3N0X3JlZmVyZW5jZV9udW1iZXIY",
"AiABKAkSUAoGcmVzdWx0GAMgASgOMkAuaG9sbXMudHlwZXMubW9uZXkuY2Fy",
"ZHMudHJhbnNhY3Rpb25zLlByb2Nlc3NvclRyYW5zYWN0aW9uUmVzdWx0Ei0K",
"CXBvc3RlZF9hdBgEIAEoCzIaLmdvb2dsZS5wcm90b2J1Zi5UaW1lc3RhbXBC",
"QVoYbW9uZXkvY2FyZHMvdHJhbnNhY3Rpb25zqgIkSE9MTVMuVHlwZXMuTW9u",
"ZXkuQ2FyZHMuVHJhbnNhY3Rpb25zYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResultReflection.Descriptor, global::HOLMS.Types.Primitive.MonetaryAmountReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Money.Cards.Transactions.PaymentCardAuthorizationReversal), global::HOLMS.Types.Money.Cards.Transactions.PaymentCardAuthorizationReversal.Parser, new[]{ "ReversalAmount", "HostReferenceNumber", "Result", "PostedAt" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class PaymentCardAuthorizationReversal : pb::IMessage<PaymentCardAuthorizationReversal> {
private static readonly pb::MessageParser<PaymentCardAuthorizationReversal> _parser = new pb::MessageParser<PaymentCardAuthorizationReversal>(() => new PaymentCardAuthorizationReversal());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<PaymentCardAuthorizationReversal> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::HOLMS.Types.Money.Cards.Transactions.PaymentCardAuthorizationReversalReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PaymentCardAuthorizationReversal() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PaymentCardAuthorizationReversal(PaymentCardAuthorizationReversal other) : this() {
ReversalAmount = other.reversalAmount_ != null ? other.ReversalAmount.Clone() : null;
hostReferenceNumber_ = other.hostReferenceNumber_;
result_ = other.result_;
PostedAt = other.postedAt_ != null ? other.PostedAt.Clone() : null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public PaymentCardAuthorizationReversal Clone() {
return new PaymentCardAuthorizationReversal(this);
}
/// <summary>Field number for the "reversal_amount" field.</summary>
public const int ReversalAmountFieldNumber = 1;
private global::HOLMS.Types.Primitive.MonetaryAmount reversalAmount_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::HOLMS.Types.Primitive.MonetaryAmount ReversalAmount {
get { return reversalAmount_; }
set {
reversalAmount_ = value;
}
}
/// <summary>Field number for the "host_reference_number" field.</summary>
public const int HostReferenceNumberFieldNumber = 2;
private string hostReferenceNumber_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string HostReferenceNumber {
get { return hostReferenceNumber_; }
set {
hostReferenceNumber_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "result" field.</summary>
public const int ResultFieldNumber = 3;
private global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult result_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult Result {
get { return result_; }
set {
result_ = value;
}
}
/// <summary>Field number for the "posted_at" field.</summary>
public const int PostedAtFieldNumber = 4;
private global::Google.Protobuf.WellKnownTypes.Timestamp postedAt_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Protobuf.WellKnownTypes.Timestamp PostedAt {
get { return postedAt_; }
set {
postedAt_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as PaymentCardAuthorizationReversal);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(PaymentCardAuthorizationReversal other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(ReversalAmount, other.ReversalAmount)) return false;
if (HostReferenceNumber != other.HostReferenceNumber) return false;
if (Result != other.Result) return false;
if (!object.Equals(PostedAt, other.PostedAt)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (reversalAmount_ != null) hash ^= ReversalAmount.GetHashCode();
if (HostReferenceNumber.Length != 0) hash ^= HostReferenceNumber.GetHashCode();
if (Result != 0) hash ^= Result.GetHashCode();
if (postedAt_ != null) hash ^= PostedAt.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (reversalAmount_ != null) {
output.WriteRawTag(10);
output.WriteMessage(ReversalAmount);
}
if (HostReferenceNumber.Length != 0) {
output.WriteRawTag(18);
output.WriteString(HostReferenceNumber);
}
if (Result != 0) {
output.WriteRawTag(24);
output.WriteEnum((int) Result);
}
if (postedAt_ != null) {
output.WriteRawTag(34);
output.WriteMessage(PostedAt);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (reversalAmount_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(ReversalAmount);
}
if (HostReferenceNumber.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(HostReferenceNumber);
}
if (Result != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Result);
}
if (postedAt_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(PostedAt);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(PaymentCardAuthorizationReversal other) {
if (other == null) {
return;
}
if (other.reversalAmount_ != null) {
if (reversalAmount_ == null) {
reversalAmount_ = new global::HOLMS.Types.Primitive.MonetaryAmount();
}
ReversalAmount.MergeFrom(other.ReversalAmount);
}
if (other.HostReferenceNumber.Length != 0) {
HostReferenceNumber = other.HostReferenceNumber;
}
if (other.Result != 0) {
Result = other.Result;
}
if (other.postedAt_ != null) {
if (postedAt_ == null) {
postedAt_ = new global::Google.Protobuf.WellKnownTypes.Timestamp();
}
PostedAt.MergeFrom(other.PostedAt);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (reversalAmount_ == null) {
reversalAmount_ = new global::HOLMS.Types.Primitive.MonetaryAmount();
}
input.ReadMessage(reversalAmount_);
break;
}
case 18: {
HostReferenceNumber = input.ReadString();
break;
}
case 24: {
result_ = (global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult) input.ReadEnum();
break;
}
case 34: {
if (postedAt_ == null) {
postedAt_ = new global::Google.Protobuf.WellKnownTypes.Timestamp();
}
input.ReadMessage(postedAt_);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Linq;
using System.Text;
using Validation;
namespace System.Collections.Immutable
{
/// <summary>
/// An immutable queue.
/// </summary>
/// <typeparam name="T">The type of elements stored in the queue.</typeparam>
[DebuggerDisplay("IsEmpty = {IsEmpty}")]
[DebuggerTypeProxy(typeof(ImmutableQueue<>.DebuggerProxy))]
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "Ignored")]
[SuppressMessage("Microsoft.Naming", "CA1711:IdentifiersShouldNotHaveIncorrectSuffix", Justification = "Ignored")]
public sealed class ImmutableQueue<T> : IImmutableQueue<T>
{
/// <summary>
/// The singleton empty queue.
/// </summary>
/// <remarks>
/// Additional instances representing the empty queue may exist on deserialized instances;
/// this field is simply the canonical empty instance returned by the <see cref="Empty"/> property.
/// </remarks>
private static readonly ImmutableQueue<T> EmptyField = new ImmutableQueue<T>(ImmutableStack<T>.Empty, ImmutableStack<T>.Empty);
/// <summary>
/// The end of the queue that enqueued elements are pushed onto.
/// </summary>
private readonly ImmutableStack<T> backwards;
/// <summary>
/// The end of the queue from which elements are dequeued.
/// </summary>
private readonly ImmutableStack<T> forwards;
/// <summary>
/// Backing field for the <see cref="BackwardsReversed"/> property.
/// </summary>
private ImmutableStack<T> backwardsReversed;
/// <summary>
/// Initializes a new instance of the <see cref="ImmutableQueue{T}"/> class.
/// </summary>
/// <param name="forward">The forward stack.</param>
/// <param name="backward">The backward stack.</param>
private ImmutableQueue(ImmutableStack<T> forward, ImmutableStack<T> backward)
{
Requires.NotNull(forward, "forward");
Requires.NotNull(backward, "backward");
this.forwards = forward;
this.backwards = backward;
this.backwardsReversed = null;
}
/// <summary>
/// Gets the empty queue.
/// </summary>
public ImmutableQueue<T> Clear()
{
Contract.Ensures(Contract.Result<ImmutableQueue<T>>().IsEmpty);
Contract.Assume(EmptyField.IsEmpty);
return Empty;
}
/// <summary>
/// Gets a value indicating whether this instance is empty.
/// </summary>
/// <value>
/// <c>true</c> if this instance is empty; otherwise, <c>false</c>.
/// </value>
public bool IsEmpty
{
get { return this.forwards.IsEmpty && this.backwards.IsEmpty; }
}
/// <summary>
/// Gets the empty queue.
/// </summary>
public static ImmutableQueue<T> Empty
{
get
{
Contract.Ensures(Contract.Result<ImmutableQueue<T>>().IsEmpty);
Contract.Assume(EmptyField.IsEmpty);
return EmptyField;
}
}
/// <summary>
/// Gets an empty queue.
/// </summary>
IImmutableQueue<T> IImmutableQueue<T>.Clear()
{
Contract.Assume(EmptyField.IsEmpty);
return this.Clear();
}
/// <summary>
/// Gets the reversed <see cref="backwards"/> stack.
/// </summary>
private ImmutableStack<T> BackwardsReversed
{
get
{
Contract.Ensures(Contract.Result<ImmutableStack<T>>() != null);
// Although this is a lazy-init pattern, no lock is required because
// this instance is immutable otherwise, and a double-assignment from multiple
// threads is harmless.
if (this.backwardsReversed == null)
{
this.backwardsReversed = this.backwards.Reverse();
}
return this.backwardsReversed;
}
}
/// <summary>
/// Gets the element at the front of the queue.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the stack is empty.</exception>
[Pure]
public T Peek()
{
if (this.IsEmpty)
{
throw new InvalidOperationException(Strings.InvalidEmptyOperation);
}
return this.forwards.Peek();
}
/// <summary>
/// Adds an element to the back of the queue.
/// </summary>
/// <param name="value">The value.</param>
/// <returns>
/// The new queue.
/// </returns>
[Pure]
public ImmutableQueue<T> Enqueue(T value)
{
Contract.Ensures(!Contract.Result<ImmutableQueue<T>>().IsEmpty);
if (this.IsEmpty)
{
return new ImmutableQueue<T>(ImmutableStack<T>.Empty.Push(value), ImmutableStack<T>.Empty);
}
else
{
return new ImmutableQueue<T>(this.forwards, this.backwards.Push(value));
}
}
/// <summary>
/// Adds an element to the back of the queue.
/// </summary>
/// <param name="value">The value.</param>
/// <returns>
/// The new queue.
/// </returns>
[Pure]
IImmutableQueue<T> IImmutableQueue<T>.Enqueue(T value)
{
return this.Enqueue(value);
}
/// <summary>
/// Returns a queue that is missing the front element.
/// </summary>
/// <returns>A queue; never <c>null</c>.</returns>
/// <exception cref="InvalidOperationException">Thrown when the queue is empty.</exception>
[Pure]
public ImmutableQueue<T> Dequeue()
{
if (this.IsEmpty)
{
throw new InvalidOperationException(Strings.InvalidEmptyOperation);
}
ImmutableStack<T> f = this.forwards.Pop();
if (!f.IsEmpty)
{
return new ImmutableQueue<T>(f, this.backwards);
}
else if (this.backwards.IsEmpty)
{
return ImmutableQueue<T>.Empty;
}
else
{
return new ImmutableQueue<T>(this.BackwardsReversed, ImmutableStack<T>.Empty);
}
}
/// <summary>
/// Retrieves the item at the head of the queue, and returns a queue with the head element removed.
/// </summary>
/// <param name="value">Receives the value from the head of the queue.</param>
/// <returns>The new queue with the head element removed.</returns>
/// <exception cref="InvalidOperationException">Thrown when the queue is empty.</exception>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", MessageId = "0#")]
[Pure]
public ImmutableQueue<T> Dequeue(out T value)
{
value = this.Peek();
return this.Dequeue();
}
/// <summary>
/// Returns a queue that is missing the front element.
/// </summary>
/// <returns>A queue; never <c>null</c>.</returns>
/// <exception cref="InvalidOperationException">Thrown when the queue is empty.</exception>
[Pure]
IImmutableQueue<T> IImmutableQueue<T>.Dequeue()
{
return this.Dequeue();
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// An <see cref="Enumerator"/> that can be used to iterate through the collection.
/// </returns>
[Pure]
public Enumerator GetEnumerator()
{
return new Enumerator(this);
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
/// </returns>
[Pure]
IEnumerator<T> IEnumerable<T>.GetEnumerator()
{
return new EnumeratorObject(this);
}
/// <summary>
/// Returns an enumerator that iterates through a collection.
/// </summary>
/// <returns>
/// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
/// </returns>
[Pure]
IEnumerator IEnumerable.GetEnumerator()
{
return new EnumeratorObject(this);
}
/// <summary>
/// A memory allocation-free enumerator of <see cref="ImmutableQueue{T}"/>.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Advanced)]
public struct Enumerator
{
/// <summary>
/// The original queue being enumerated.
/// </summary>
private readonly ImmutableQueue<T> originalQueue;
/// <summary>
/// The remaining forwards stack of the queue being enumerated.
/// </summary>
private ImmutableStack<T> remainingForwardsStack;
/// <summary>
/// The remaining backwards stack of the queue being enumerated.
/// Its order is reversed when the field is first initialized.
/// </summary>
private ImmutableStack<T> remainingBackwardsStack;
/// <summary>
/// Initializes a new instance of the <see cref="Enumerator"/> struct.
/// </summary>
/// <param name="queue">The queue to enumerate.</param>
internal Enumerator(ImmutableQueue<T> queue)
{
this.originalQueue = queue;
// The first call to MoveNext will initialize these.
this.remainingForwardsStack = null;
this.remainingBackwardsStack = null;
}
/// <summary>
/// The current element.
/// </summary>
public T Current
{
get
{
if (this.remainingForwardsStack == null)
{
// The initial call to MoveNext has not yet been made.
throw new InvalidOperationException();
}
if (!this.remainingForwardsStack.IsEmpty)
{
return this.remainingForwardsStack.Peek();
}
else if (!this.remainingBackwardsStack.IsEmpty)
{
return this.remainingBackwardsStack.Peek();
}
else
{
// We've advanced beyond the end of the queue.
throw new InvalidOperationException();
}
}
}
/// <summary>
/// Advances enumeration to the next element.
/// </summary>
/// <returns>A value indicating whether there is another element in the enumeration.</returns>
public bool MoveNext()
{
if (this.remainingForwardsStack == null)
{
// This is the initial step.
// Empty queues have no forwards or backwards elements; both stacks are simply empty.
this.remainingForwardsStack = this.originalQueue.forwards;
this.remainingBackwardsStack = this.originalQueue.BackwardsReversed;
}
else if (!this.remainingForwardsStack.IsEmpty)
{
this.remainingForwardsStack = this.remainingForwardsStack.Pop();
}
else if (!this.remainingBackwardsStack.IsEmpty)
{
this.remainingBackwardsStack = this.remainingBackwardsStack.Pop();
}
return !this.remainingForwardsStack.IsEmpty || !this.remainingBackwardsStack.IsEmpty;
}
}
/// <summary>
/// A memory allocation-free enumerator of <see cref="ImmutableQueue{T}"/>.
/// </summary>
private class EnumeratorObject : IEnumerator<T>
{
/// <summary>
/// The original queue being enumerated.
/// </summary>
private readonly ImmutableQueue<T> originalQueue;
/// <summary>
/// The remaining forwards stack of the queue being enumerated.
/// </summary>
private ImmutableStack<T> remainingForwardsStack;
/// <summary>
/// The remaining backwards stack of the queue being enumerated.
/// Its order is reversed when the field is first initialized.
/// </summary>
private ImmutableStack<T> remainingBackwardsStack;
/// <summary>
/// A value indicating whether this enumerator has been disposed.
/// </summary>
private bool disposed;
/// <summary>
/// Initializes a new instance of the <see cref="EnumeratorObject"/> class.
/// </summary>
/// <param name="queue">The queue to enumerate.</param>
internal EnumeratorObject(ImmutableQueue<T> queue)
{
this.originalQueue = queue;
}
/// <summary>
/// The current element.
/// </summary>
public T Current
{
get
{
this.ThrowIfDisposed();
if (this.remainingForwardsStack == null)
{
// The initial call to MoveNext has not yet been made.
throw new InvalidOperationException();
}
if (!this.remainingForwardsStack.IsEmpty)
{
return this.remainingForwardsStack.Peek();
}
else if (!this.remainingBackwardsStack.IsEmpty)
{
return this.remainingBackwardsStack.Peek();
}
else
{
// We've advanced beyond the end of the queue.
throw new InvalidOperationException();
}
}
}
/// <summary>
/// The current element.
/// </summary>
object IEnumerator.Current
{
get { return this.Current; }
}
/// <summary>
/// Advances enumeration to the next element.
/// </summary>
/// <returns>A value indicating whether there is another element in the enumeration.</returns>
public bool MoveNext()
{
this.ThrowIfDisposed();
if (this.remainingForwardsStack == null)
{
// This is the initial step.
// Empty queues have no forwards or backwards elements; both stacks are simply empty.
this.remainingForwardsStack = this.originalQueue.forwards;
this.remainingBackwardsStack = this.originalQueue.BackwardsReversed;
}
else if (!this.remainingForwardsStack.IsEmpty)
{
this.remainingForwardsStack = this.remainingForwardsStack.Pop();
}
else if (!this.remainingBackwardsStack.IsEmpty)
{
this.remainingBackwardsStack = this.remainingBackwardsStack.Pop();
}
return !this.remainingForwardsStack.IsEmpty || !this.remainingBackwardsStack.IsEmpty;
}
/// <summary>
/// Restarts enumeration.
/// </summary>
public void Reset()
{
this.ThrowIfDisposed();
this.remainingBackwardsStack = null;
this.remainingForwardsStack = null;
}
/// <summary>
/// Disposes this instance.
/// </summary>
public void Dispose()
{
this.disposed = true;
}
/// <summary>
/// Throws an <see cref="ObjectDisposedException"/> if this
/// enumerator has already been disposed.
/// </summary>
private void ThrowIfDisposed()
{
if (this.disposed)
{
throw new ObjectDisposedException(this.GetType().FullName);
}
}
}
/// <summary>
/// A simple view of the immutable collection that the debugger can show to the developer.
/// </summary>
[ExcludeFromCodeCoverage]
private class DebuggerProxy
{
/// <summary>
/// The collection to be enumerated.
/// </summary>
private readonly ImmutableQueue<T> queue;
/// <summary>
/// The simple view of the collection.
/// </summary>
private T[] contents;
/// <summary>
/// Initializes a new instance of the <see cref="DebuggerProxy"/> class.
/// </summary>
/// <param name="queue">The collection to display in the debugger</param>
public DebuggerProxy(ImmutableQueue<T> queue)
{
this.queue = queue;
}
/// <summary>
/// Gets a simple debugger-viewable collection.
/// </summary>
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public T[] Contents
{
get
{
if (this.contents == null)
{
this.contents = this.queue.ToArray();
}
return this.contents;
}
}
}
}
}
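// ---------------------------------------------------------------------------
// Illustrative usage sketch (added for clarity; not part of the original file).
// It shows the observable behaviour of the two-stack ImmutableQueue<T> above:
// the first element enqueued onto an empty queue lands on the "forwards" stack,
// later elements are pushed onto the "backwards" stack, and Dequeue lazily
// reverses the backwards stack once the forwards stack runs out. The namespace
// and class names below are hypothetical and exist only for this example.
// ---------------------------------------------------------------------------
namespace System.Collections.Immutable.Examples
{
    internal static class ImmutableQueueUsageExample
    {
        internal static void Demo()
        {
            ImmutableQueue<int> empty = ImmutableQueue<int>.Empty;

            // Each Enqueue returns a new queue; 'empty' itself is never modified.
            ImmutableQueue<int> queue = empty.Enqueue(1).Enqueue(2).Enqueue(3);

            int head;
            ImmutableQueue<int> rest = queue.Dequeue(out head); // head == 1

            System.Diagnostics.Debug.Assert(empty.IsEmpty);
            System.Diagnostics.Debug.Assert(queue.Peek() == 1); // original queue is unchanged
            System.Diagnostics.Debug.Assert(head == 1);
            System.Diagnostics.Debug.Assert(rest.Peek() == 2);  // backwards stack reversed into forwards
        }
    }
}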
namespace Azure.Analytics.Synapse.Spark
{
public partial class SparkBatchClient
{
protected SparkBatchClient() { }
public SparkBatchClient(System.Uri endpoint, string sparkPoolName, Azure.Core.TokenCredential credential) { }
public SparkBatchClient(System.Uri endpoint, string sparkPoolName, Azure.Core.TokenCredential credential, Azure.Analytics.Synapse.Spark.SparkClientOptions options) { }
public virtual Azure.Response CancelSparkBatchJob(int batchId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CancelSparkBatchJobAsync(int batchId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob> GetSparkBatchJob(int batchId, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob>> GetSparkBatchJobAsync(int batchId, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJobCollection> GetSparkBatchJobs(int? from = default(int?), int? size = default(int?), bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJobCollection>> GetSparkBatchJobsAsync(int? from = default(int?), int? size = default(int?), bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Analytics.Synapse.Spark.SparkBatchOperation StartCreateSparkBatchJob(Azure.Analytics.Synapse.Spark.Models.SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Analytics.Synapse.Spark.SparkBatchOperation> StartCreateSparkBatchJobAsync(Azure.Analytics.Synapse.Spark.Models.SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class SparkBatchOperation : Azure.Operation<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob>
{
internal SparkBatchOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Analytics.Synapse.Spark.Models.SparkBatchJob Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken) { throw null; }
}
public partial class SparkClientOptions : Azure.Core.ClientOptions
{
public SparkClientOptions(Azure.Analytics.Synapse.Spark.SparkClientOptions.ServiceVersion serviceVersion = Azure.Analytics.Synapse.Spark.SparkClientOptions.ServiceVersion.V2019_11_01_preview) { }
public enum ServiceVersion
{
V2019_11_01_preview = 1,
}
}
public partial class SparkSessionClient
{
protected SparkSessionClient() { }
public SparkSessionClient(System.Uri endpoint, string sparkPoolName, Azure.Core.TokenCredential credential) { }
public SparkSessionClient(System.Uri endpoint, string sparkPoolName, Azure.Core.TokenCredential credential, Azure.Analytics.Synapse.Spark.SparkClientOptions options) { }
public virtual Azure.Response CancelSparkSession(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CancelSparkSessionAsync(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatementCancellationResult> CancelSparkStatement(int sessionId, int statementId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatementCancellationResult>> CancelSparkStatementAsync(int sessionId, int statementId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSession> GetSparkSession(int sessionId, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSession>> GetSparkSessionAsync(int sessionId, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSessionCollection> GetSparkSessions(int? from = default(int?), int? size = default(int?), bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSessionCollection>> GetSparkSessionsAsync(int? from = default(int?), int? size = default(int?), bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatement> GetSparkStatement(int sessionId, int statementId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatement>> GetSparkStatementAsync(int sessionId, int statementId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatementCollection> GetSparkStatements(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatementCollection>> GetSparkStatementsAsync(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response ResetSparkSessionTimeout(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> ResetSparkSessionTimeoutAsync(int sessionId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Analytics.Synapse.Spark.SparkSessionOperation StartCreateSparkSession(Azure.Analytics.Synapse.Spark.Models.SparkSessionOptions sparkSessionOptions, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Analytics.Synapse.Spark.SparkSessionOperation> StartCreateSparkSessionAsync(Azure.Analytics.Synapse.Spark.Models.SparkSessionOptions sparkSessionOptions, bool? detailed = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Analytics.Synapse.Spark.SparkStatementOperation StartCreateSparkStatement(int sessionId, Azure.Analytics.Synapse.Spark.Models.SparkStatementOptions sparkStatementOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Analytics.Synapse.Spark.SparkStatementOperation> StartCreateSparkStatementAsync(int sessionId, Azure.Analytics.Synapse.Spark.Models.SparkStatementOptions sparkStatementOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class SparkSessionOperation : Azure.Operation<Azure.Analytics.Synapse.Spark.Models.SparkSession>
{
internal SparkSessionOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Analytics.Synapse.Spark.Models.SparkSession Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSession>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkSession>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken) { throw null; }
}
public partial class SparkStatementOperation : Azure.Operation<Azure.Analytics.Synapse.Spark.Models.SparkStatement>
{
internal SparkStatementOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Analytics.Synapse.Spark.Models.SparkStatement Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatement>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkStatement>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken) { throw null; }
}
}
namespace Azure.Analytics.Synapse.Spark.Models
{
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct PluginCurrentState : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.PluginCurrentState>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public PluginCurrentState(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Cleanup { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Ended { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Monitoring { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Preparation { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Queued { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState ResourceAcquisition { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.PluginCurrentState Submission { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.PluginCurrentState other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.PluginCurrentState left, Azure.Analytics.Synapse.Spark.Models.PluginCurrentState right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.PluginCurrentState (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.PluginCurrentState left, Azure.Analytics.Synapse.Spark.Models.PluginCurrentState right) { throw null; }
public override string ToString() { throw null; }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SchedulerCurrentState : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SchedulerCurrentState(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState Ended { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState Queued { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState Scheduled { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState left, Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState left, Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState right) { throw null; }
public override string ToString() { throw null; }
}
public partial class SparkBatchJob
{
internal SparkBatchJob() { }
public string AppId { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> AppInfo { get { throw null; } }
public string ArtifactId { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Analytics.Synapse.Spark.Models.SparkServiceError> Errors { get { throw null; } }
public int Id { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkJobType? JobType { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkBatchJobState LivyInfo { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> LogLines { get { throw null; } }
public string Name { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkServicePlugin Plugin { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType? Result { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkScheduler Scheduler { get { throw null; } }
public string SparkPoolName { get { throw null; } }
public string State { get { throw null; } }
public string SubmitterId { get { throw null; } }
public string SubmitterName { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> Tags { get { throw null; } }
public string WorkspaceName { get { throw null; } }
}
public partial class SparkBatchJobCollection
{
internal SparkBatchJobCollection() { }
public int From { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob> Sessions { get { throw null; } }
public int Total { get { throw null; } }
}
public partial class SparkBatchJobOptions
{
public SparkBatchJobOptions(string name, string file) { }
public System.Collections.Generic.IList<string> Archives { get { throw null; } }
public System.Collections.Generic.IList<string> Arguments { get { throw null; } }
public string ArtifactId { get { throw null; } set { } }
public string ClassName { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, string> Configuration { get { throw null; } }
public int? DriverCores { get { throw null; } set { } }
public string DriverMemory { get { throw null; } set { } }
public int? ExecutorCores { get { throw null; } set { } }
public int? ExecutorCount { get { throw null; } set { } }
public string ExecutorMemory { get { throw null; } set { } }
public string File { get { throw null; } }
public System.Collections.Generic.IList<string> Files { get { throw null; } }
public System.Collections.Generic.IList<string> Jars { get { throw null; } }
public string Name { get { throw null; } }
public System.Collections.Generic.IList<string> PythonFiles { get { throw null; } }
public System.Collections.Generic.IDictionary<string, string> Tags { get { throw null; } }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SparkBatchJobResultType : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SparkBatchJobResultType(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType Cancelled { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType Failed { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType Succeeded { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType Uncertain { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType left, Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType left, Azure.Analytics.Synapse.Spark.Models.SparkBatchJobResultType right) { throw null; }
public override string ToString() { throw null; }
}
public partial class SparkBatchJobState
{
internal SparkBatchJobState() { }
public string CurrentState { get { throw null; } }
public System.DateTimeOffset? DeadAt { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkRequest JobCreationRequest { get { throw null; } }
public System.DateTimeOffset? NotStartedAt { get { throw null; } }
public System.DateTimeOffset? RecoveringAt { get { throw null; } }
public System.DateTimeOffset? RunningAt { get { throw null; } }
public System.DateTimeOffset? StartingAt { get { throw null; } }
public System.DateTimeOffset? SuccessAt { get { throw null; } }
public System.DateTimeOffset? TerminatedAt { get { throw null; } }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SparkErrorSource : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SparkErrorSource>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SparkErrorSource(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SparkErrorSource DependencyError { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkErrorSource SystemError { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkErrorSource UnknownError { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkErrorSource UserError { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SparkErrorSource other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SparkErrorSource left, Azure.Analytics.Synapse.Spark.Models.SparkErrorSource right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SparkErrorSource (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SparkErrorSource left, Azure.Analytics.Synapse.Spark.Models.SparkErrorSource right) { throw null; }
public override string ToString() { throw null; }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SparkJobType : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SparkJobType>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SparkJobType(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SparkJobType SparkBatch { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkJobType SparkSession { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SparkJobType other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SparkJobType left, Azure.Analytics.Synapse.Spark.Models.SparkJobType right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SparkJobType (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SparkJobType left, Azure.Analytics.Synapse.Spark.Models.SparkJobType right) { throw null; }
public override string ToString() { throw null; }
}
public partial class SparkRequest
{
internal SparkRequest() { }
public System.Collections.Generic.IReadOnlyList<string> Archives { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Arguments { get { throw null; } }
public string ClassName { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> Configuration { get { throw null; } }
public int? DriverCores { get { throw null; } }
public string DriverMemory { get { throw null; } }
public int? ExecutorCores { get { throw null; } }
public int? ExecutorCount { get { throw null; } }
public string ExecutorMemory { get { throw null; } }
public string File { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Files { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Jars { get { throw null; } }
public string Name { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> PythonFiles { get { throw null; } }
}
public partial class SparkScheduler
{
internal SparkScheduler() { }
public System.DateTimeOffset? CancellationRequestedAt { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SchedulerCurrentState? CurrentState { get { throw null; } }
public System.DateTimeOffset? EndedAt { get { throw null; } }
public System.DateTimeOffset? ScheduledAt { get { throw null; } }
public System.DateTimeOffset? SubmittedAt { get { throw null; } }
}
public partial class SparkServiceError
{
internal SparkServiceError() { }
public string ErrorCode { get { throw null; } }
public string Message { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkErrorSource? Source { get { throw null; } }
}
public partial class SparkServicePlugin
{
internal SparkServicePlugin() { }
public System.DateTimeOffset? CleanupStartedAt { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.PluginCurrentState? CurrentState { get { throw null; } }
public System.DateTimeOffset? MonitoringStartedAt { get { throw null; } }
public System.DateTimeOffset? PreparationStartedAt { get { throw null; } }
public System.DateTimeOffset? ResourceAcquisitionStartedAt { get { throw null; } }
public System.DateTimeOffset? SubmissionStartedAt { get { throw null; } }
}
public partial class SparkSession
{
internal SparkSession() { }
public string AppId { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> AppInfo { get { throw null; } }
public string ArtifactId { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Analytics.Synapse.Spark.Models.SparkServiceError> Errors { get { throw null; } }
public int Id { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkJobType? JobType { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkSessionState LivyInfo { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> LogLines { get { throw null; } }
public string Name { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkServicePlugin Plugin { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType? Result { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkScheduler Scheduler { get { throw null; } }
public string SparkPoolName { get { throw null; } }
public string State { get { throw null; } }
public string SubmitterId { get { throw null; } }
public string SubmitterName { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> Tags { get { throw null; } }
public string WorkspaceName { get { throw null; } }
}
public partial class SparkSessionCollection
{
internal SparkSessionCollection() { }
public int From { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Analytics.Synapse.Spark.Models.SparkSession> Sessions { get { throw null; } }
public int Total { get { throw null; } }
}
public partial class SparkSessionOptions
{
public SparkSessionOptions(string name) { }
public System.Collections.Generic.IList<string> Archives { get { throw null; } }
public System.Collections.Generic.IList<string> Arguments { get { throw null; } }
public string ArtifactId { get { throw null; } set { } }
public string ClassName { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, string> Configuration { get { throw null; } }
public int? DriverCores { get { throw null; } set { } }
public string DriverMemory { get { throw null; } set { } }
public int? ExecutorCores { get { throw null; } set { } }
public int? ExecutorCount { get { throw null; } set { } }
public string ExecutorMemory { get { throw null; } set { } }
public string File { get { throw null; } set { } }
public System.Collections.Generic.IList<string> Files { get { throw null; } }
public System.Collections.Generic.IList<string> Jars { get { throw null; } }
public string Name { get { throw null; } }
public System.Collections.Generic.IList<string> PythonFiles { get { throw null; } }
public System.Collections.Generic.IDictionary<string, string> Tags { get { throw null; } }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SparkSessionResultType : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SparkSessionResultType(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType Cancelled { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType Failed { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType Succeeded { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType Uncertain { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType left, Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType left, Azure.Analytics.Synapse.Spark.Models.SparkSessionResultType right) { throw null; }
public override string ToString() { throw null; }
}
public partial class SparkSessionState
{
internal SparkSessionState() { }
public System.DateTimeOffset? BusyAt { get { throw null; } }
public string CurrentState { get { throw null; } }
public System.DateTimeOffset? DeadAt { get { throw null; } }
public System.DateTimeOffset? ErrorAt { get { throw null; } }
public System.DateTimeOffset? IdleAt { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkRequest JobCreationRequest { get { throw null; } }
public System.DateTimeOffset? NotStartedAt { get { throw null; } }
public System.DateTimeOffset? RecoveringAt { get { throw null; } }
public System.DateTimeOffset? ShuttingDownAt { get { throw null; } }
public System.DateTimeOffset? StartingAt { get { throw null; } }
public System.DateTimeOffset? TerminatedAt { get { throw null; } }
}
public partial class SparkStatement
{
internal SparkStatement() { }
public string Code { get { throw null; } }
public int Id { get { throw null; } }
public Azure.Analytics.Synapse.Spark.Models.SparkStatementOutput Output { get { throw null; } }
public string State { get { throw null; } }
}
public partial class SparkStatementCancellationResult
{
internal SparkStatementCancellationResult() { }
public string Message { get { throw null; } }
}
public partial class SparkStatementCollection
{
internal SparkStatementCollection() { }
public System.Collections.Generic.IReadOnlyList<Azure.Analytics.Synapse.Spark.Models.SparkStatement> Statements { get { throw null; } }
public int Total { get { throw null; } }
}
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct SparkStatementLanguageType : System.IEquatable<Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public SparkStatementLanguageType(string value) { throw null; }
public static Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType DotNetSpark { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType PySpark { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType Spark { get { throw null; } }
public static Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType Sql { get { throw null; } }
public bool Equals(Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType left, Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType right) { throw null; }
public static implicit operator Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType (string value) { throw null; }
public static bool operator !=(Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType left, Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType right) { throw null; }
public override string ToString() { throw null; }
}
public partial class SparkStatementOptions
{
public SparkStatementOptions() { }
public string Code { get { throw null; } set { } }
public Azure.Analytics.Synapse.Spark.Models.SparkStatementLanguageType? Kind { get { throw null; } set { } }
}
public partial class SparkStatementOutput
{
internal SparkStatementOutput() { }
public object Data { get { throw null; } }
public string ErrorName { get { throw null; } }
public string ErrorValue { get { throw null; } }
public int ExecutionCount { get { throw null; } }
public string Status { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Traceback { get { throw null; } }
}
}
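// ---------------------------------------------------------------------------
// Illustrative usage sketch (added for clarity; not part of the generated API
// listing above). It shows one plausible way to compose the SparkBatchClient
// surface: submit a batch job and wait for the long-running operation to finish.
// The endpoint URI, pool name, file path and class name are placeholders, and
// the credential assumes the separate Azure.Identity package is referenced.
// ---------------------------------------------------------------------------
namespace Azure.Analytics.Synapse.Spark.Examples
{
    internal static class SparkBatchClientUsageExample
    {
        internal static async System.Threading.Tasks.Task SubmitAndAwaitAsync()
        {
            var client = new Azure.Analytics.Synapse.Spark.SparkBatchClient(
                new System.Uri("https://myworkspace.dev.azuresynapse.net"), // hypothetical workspace endpoint
                "mySparkPool",                                              // hypothetical Spark pool name
                new Azure.Identity.DefaultAzureCredential());               // assumes Azure.Identity

            var options = new Azure.Analytics.Synapse.Spark.Models.SparkBatchJobOptions(
                "wordcount",                                                   // job name (placeholder)
                "abfss://jobs@myaccount.dfs.core.windows.net/wordcount.jar"); // main definition file (placeholder)
            options.ClassName = "Sample.WordCount";

            // StartCreateSparkBatchJobAsync returns a long-running SparkBatchOperation;
            // WaitForCompletionAsync polls until the job reaches a terminal state.
            Azure.Analytics.Synapse.Spark.SparkBatchOperation operation =
                await client.StartCreateSparkBatchJobAsync(options);
            Azure.Response<Azure.Analytics.Synapse.Spark.Models.SparkBatchJob> completed =
                await operation.WaitForCompletionAsync();

            System.Console.WriteLine(
                "Batch job " + completed.Value.Id + " finished in state '" + completed.Value.State + "'.");
        }
    }
}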
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Xml;
using System.Linq;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Xml.Serialization;
using DOKuStar.Data.Xml;
using RightDocs.Common;
namespace ExportExtensionCommon
{
[TestClass]
public class SIEEBaseTest
{
public SIEEBaseTest()
{
SIEEMessageBox.Suppress = true;
// Initialize Factory manager with test export extension
SIEEFactoryManager.Add(new Test_SIEEFactory());
}
#region Empty field handling
/// The purpose of this test is to ensure that export fields receive the correct values.
/// 1. SIEE_Fields mapped to OCC fields receive their values from the OCC fields
/// 2. SIEE_Fields not mapped keep their values from the schema (actually should be null)
///
/// The implementation of the test is rather complex because it has to do what OCC does.
/// (But I think it's instructive at the same time.)
///
/// The test creates a schema with three fields and exports a document with three fields.
/// One field of the schema is mapped to the document. The other two fields in the schema
/// are not mapped. One has a value of null the other of "".
[TestMethod]
[TestCategory("SIEE Base")]
public void t01_EmptyFieldHandling()
{
// First we create the runtime document that is to be exported.
DataPool pool = createDataPool();
Document doc = pool.RootNode.Documents[0];
doc.Fields["field1"].Value = "field1value"; // should show up
doc.Fields.Add(new Field(pool, "field2", "field2value")); // should be ignored
doc.Fields.Add(new Field(pool, "field3", "field3value")); // should be ignored
// Create an xml document from the data pool
XmlDocument data = pool.RootNode.InnerXmlNode.OwnerDocument;
EECWriterSettings adapterSettings = createWriterSettings(new SIEEFieldlist() {
{ new SIEEField() { Name = "field1", ExternalId = "" } },
{ new SIEEField() { Name = "field2", ExternalId = "" } },
{ new SIEEField() { Name = "field3", Value = "default value dor field3" } }
});
SIEEWriterExport adapterExport = new SIEEWriterExport();
adapterExport.FieldMapping4UnitTest.Add("field1"); // we just want to simulate one mapped field
adapterExport.Configure(adapterSettings);
SIEEFieldlist lastFieldList = null;
Test_SIEEExport.ExportFunc = (settings, document, name, fl) =>
{
lastFieldList = fl;
};
adapterExport.transform(data, null); // do the export
// The test export actually has not exported anything but stored the field list internally.
// We execute the assertions on this result field list.
SIEEFieldlist fieldlist = lastFieldList;
Assert.AreEqual("field1value", fieldlist.Where(n => n.Name == "field1").First().Value, "field1 != null");
Assert.IsNull(fieldlist.Where(n => n.Name == "field2").First().Value, "field2 == null");
Assert.AreEqual("default value dor field3", fieldlist.Where(n => n.Name == "field3").First().Value, "field3 has default value");
}
#endregion
#region Serialize Schema to XML
[TestMethod]
[TestCategory("SIEE Base")]
public void t02_SerializeSchemaToXML()
{
SIEEFieldlist s1 = new SIEEFieldlist();
s1.Add(new SIEEField("1", "ex1", "value1"));
s1.Add(new SIEEField("2", "ex2", "value2"));
s1.Add(new SIEEField("3", "ex3", "value3"));
SIEEFieldlist s2 = new SIEEFieldlist();
s2.Add(new SIEEField("1.1", "ex1.1", "value1.1"));
s2.Add(new SIEEField("1.2", "ex1.2", "value1.2"));
s1.Add(new SIEETableField("4", "ex4", "value4", s2));
string ser = Serializer.SerializeToXmlString(s1, System.Text.Encoding.Unicode);
SIEEFieldlist deser = (SIEEFieldlist)Serializer.DeserializeFromXmlString(ser, typeof(SIEEFieldlist), System.Text.Encoding.Unicode);
SIEEField f1 = deser.GetFieldByName("1");
Assert.AreEqual("value1", f1.Value);
SIEEField f2 = deser.GetFieldByName("2");
Assert.AreEqual("2", f2.Name);
SIEEField f3 = deser.GetFieldByName("3");
Assert.AreEqual("ex3", f3.ExternalId);
SIEETableField f4 = deser.GetFieldByName("4") as SIEETableField;
Assert.AreEqual("ex4", f4.ExternalId);
SIEEField f5 = f4.Columns[0];
Assert.AreEqual("value1.1", f5.Value);
}
#endregion
#region Work with Schema
[TestMethod]
[TestCategory("SIEE Base")]
public void t03_WorkWithSchema()
{
SIEEFieldlist schema = new SIEEFieldlist();
Assert.IsNotNull(schema);
schema.Add(new SIEEField("1", "ex1", "value1"));
schema.Add(new SIEEField("2", "ex2", "value2"));
schema.Add(new SIEEField("3", "ex3", "value3"));
bool b = schema.Exists("2");
Assert.IsTrue(b);
SIEEField f = schema.GetFieldByName("2");
Assert.AreEqual("2", f.Name);
f = schema.GetFieldByName("x");
Assert.IsNull(f);
schema.MakeFieldnamesOCCCompliant();
}
#endregion
#region Document name composer (Basic)
[TestMethod]
[TestCategory("SIEE Base")]
public void t04_DocumentNameComposer_Basic()
{
SIEEFieldlist fl = new SIEEFieldlist();
var td = new[]
{
new {n=01, spec=@"abc", result=@"abc"},
new {n=02, spec=@"a\\bc", result=@"a\bc"},
new {n=03, spec=@"a\bc", result=@"a\bc"},
new {n=04, spec=@"a\<bc", result=@"a<bc",},
new {n=05, spec=@"abc\", result=@"abc\"},
new {n=10, spec=@"abc_<:Field_1>_def", result=@"abc_Hello_def"},
new {n=11, spec=@"abc_<unknown>_def", result=@"abc_<unknown>_def"},
new {n=12, spec=@"<:Field_1> <:Field_2>", result=@"Hello World"},
new {n=13, spec=@"<:Field_1>", result=@"Hello"},
new {n=20, spec=@"x<:Field_1", result=@"x<:Field_1"},
new {n=21, spec=@"<:Field_1", result=@"<:Field_1"},
new {n=22, spec=@"<<:Field_1>", result=@"<<:Field_1>"},
new {n=30, spec=@"abc\>de", result=@"abc\>de"},
new {n=31, spec=@"abc\\de", result=@"abc\de"},
};
List<KeyValuePair<string, string>> valueList = new List<KeyValuePair<string, string>>();
valueList.Add(new KeyValuePair<string, string>("Field_1", "Hello"));
valueList.Add(new KeyValuePair<string, string>("Field_2", "World"));
NameSpecParser nsp = new NameSpecParser("", "", valueList);
int doOnly = 0;
for (int i = 0; i != td.Length; i++)
{
if (doOnly != 0 && td[i].n != doOnly) continue;
string result = nsp.Convert(td[i].spec);
Assert.AreEqual(td[i].result, result, td[i].n + ": Value");
}
Assert.AreEqual(0, doOnly, "Not all batches executed");
}
#endregion
#region Document name find numbers
[TestMethod]
[TestCategory("SIEE Base")]
public void t05_DocumentNameFindNumbers()
{
var td = new[]
{
new {n=01, from=0, to=100, exception=false },
new {n=02, from=900, to=1100, exception=false },
new {n=03, from=16777216, to=16777216, exception=true },
new {n=04, from=16777215, to=16777215, exception=false },
};
DocumentNameFindNumber dnfn = new DocumentNameFindNumber(DNFN_probe);
int doOnly = 0;
for (int i = 0; i != td.Length; i++)
{
if (doOnly != 0 && td[i].n != doOnly) continue;
for (int n = td[i].from; n <= td[i].to; n++)
{
lastExistingNumber = n;
bool gotException = false;
int result = 0;
try { result = dnfn.GetNextFileName("Somename"); }
catch { gotException = true; }
Assert.AreEqual(td[i].exception, gotException, td[i].n + "->" + n + ": Exception");
if (gotException) continue;
Assert.AreEqual(lastExistingNumber + 1, result, td[i].n + "->" + n + ": Value");
}
}
Assert.AreEqual(0, doOnly, "Not all batches executed");
}
private int lastExistingNumber;
private bool DNFN_probe(string filename, int number)
{
return number <= lastExistingNumber;
}
#endregion
#region Name spec parser
[TestMethod]
[TestCategory("SIEE Base")]
public void t06_TestNameSpecParser()
{
NameSpecParser nsp = new NameSpecParser();
// test data and test loop
var td = new[]
{
new { n=01, spec="abc", result= "abc"},
new { n=02, spec="a<BatchId>b", result = "a_BatchId_b"},
new { n=03, spec="a<DocumentNumber>b", result = "a_DocumentNumber_b"},
new { n=04, spec="a<Guid>b", result = "a_Guid_b"},
new { n=05, spec="a<Host>b", result = "a_Host_b"},
new { n=06, spec="a<Date>b", result = "a_Date_b"},
new { n=07, spec="a<Time>b", result = "a_Time_b"},
new { n=06, spec="a<UniqueId>b", result = "a_UniqueId_b"},
new { n=07, spec="a<:Field>b", result = "a:Field:b"},
new { n=09, spec="a<some thing>b", result = "a<some thing>b"},
new { n=03, spec="a<PageNumber>b", result = "a_PageNumber_b"},
new { n=20, spec=@"a\\b\", result = @"a\b\"},
new { n=21, spec=@"a\<b<", result = @"a<b<"},
new { n=22, spec=@"a>>b>", result = @"a>>b>"},
new { n=23, spec=@"a<1<2\34>b", result = @"a<1<2\34>b"},
new { n=24, spec=@"a<b", result = @"a<b"},
new { n=24, spec=@"ab<", result = @"ab<"},
new { n=25, spec=@"a\b", result = @"a\b"},
new { n=26, spec=@"<>", result = @"<>"},
};
int doOnly = 0;
// Test the parsing function
for (int i = 0; i != td.Length; i++)
{
if (doOnly != 0 && td[i].n != doOnly) continue;
List<NameSpecParser.SubstituteItem> r = nsp.Parse(td[i].spec);
setSubstitutionValues(r);
string finalString = nsp.ComposeResultString(r);
Assert.AreEqual(td[i].result, finalString, "case: " + td[i].n);
}
Assert.AreEqual(0, doOnly, "Not all tests executed");
// Test the substitution process
nsp.BatchId = "42";
nsp.DocumentNumber = "0042";
nsp.ValueList = new List<KeyValuePair<string, string>>();
nsp.ValueList.Add(new KeyValuePair<string, string>("myField", "myFieldValue"));
Assert.AreEqual("-42-", nsp.Convert("-<BatchId>-"));
Assert.AreEqual("-0042-", nsp.Convert("-<DocumentNumber>-"));
Assert.AreEqual(true, (DateTime.Parse(nsp.Convert("<Date>")) - DateTime.Now).Days < 1);
Assert.AreEqual(true, int.Parse(nsp.Convert("<Time>")) - int.Parse(DateTime.Now.ToString("HHmmss")) > -5);
Assert.AreEqual(System.Environment.MachineName, nsp.Convert("<Host>"));
Guid newGuid;
Assert.AreEqual(true, Guid.TryParse(nsp.Convert("<Guid>"), out newGuid));
Assert.AreEqual(true, nsp.Convert("<UniqueId>").Length == 11);
Assert.AreEqual("-myFieldValue-", nsp.Convert("-<:myField>-"));
Assert.AreEqual("-noField-", nsp.Convert("-<:noField>-"));
nsp.ValueList.Add(new KeyValuePair<string, string>("name", "SomeValue"));
Assert.AreEqual("SomeValue", nsp.Convert("<:name>"));
}
private void setSubstitutionValues(List<NameSpecParser.SubstituteItem> r)
{
string result = string.Empty;
foreach (NameSpecParser.SubstituteItem s in r)
{
if (s.SubstitutionType == NameSpecParser.SubstitutionType.Const)
{
s.FinalValue = s.Parameter;
continue;
}
if (s.SubstitutionType == NameSpecParser.SubstitutionType.Field)
{
s.FinalValue = ":" + s.Parameter + ":";
continue;
}
s.FinalValue = "_" + s.SubstitutionType.ToString() + "_";
}
}
#endregion
#region Tree view
[TestMethod] [TestCategory("SIEE Base")]
public void t07_TreeViewViewModel()
{
SIEETreeView Folders = new SIEETreeView(null);
TVIViewModel tviVM;
TVIViewModel tviVMa;
string testPath = Path.GetTempPath();
testPath = testPath.Substring(0, testPath.Length - 1); // Remove trailing "\"
findFolderBruteForce(testPath);
string startPath = testPath.Split('\\').First() + @"\"; // e.g. "C:\"
FilesystemFolder startFsf = new FilesystemFolder(null, new DirectoryInfo(startPath));
// Select a folder and verify results
Folders.AddItem(new TVIViewModel(startFsf, null, true));
Assert.AreEqual(startPath, Folders[0].GetDisplayNamePath(), "t0");
tviVM = Folders.FindNodeInTree(testPath);
verifySelectedNode(tviVM, testPath, "t1");
// Serialize and reinstantiate
List<string> serializedPath = tviVM.GetSerializedPath();
Folders.Clear();
Folders.AddItem(new TVIViewModel(startFsf, null, true));
tviVM = Folders.InitializeTree(serializedPath, typeof(FilesystemFolder));
verifySelectedNode(tviVM, testPath, "t2");
tviVMa = Folders.FindNodeInTree(testPath);
verifySelectedNode(tviVMa, testPath, "t3");
}
private void verifySelectedNode(TVIViewModel node, string path, string testName)
{
if (node == null) throw new Exception("verifySelectedNode, testName=" + testName);
string tail = path.Split('\\').Last();
FilesystemFolder fsf = node.Tvim as FilesystemFolder;
Assert.AreEqual(tail.ToLower(), node.DisplayName.ToLower());
Assert.AreEqual(tail.ToLower(), fsf.DisplayName.ToLower());
Assert.AreEqual(path.Split('\\').Length, fsf.Depth + 1);
Assert.AreEqual(fsf.FolderPath.ToLower(), path.ToLower());
}
private void findFolderBruteForce(string path)
{
DirectoryInfo di = null;
foreach(string elem in path.Split('\\'))
{
if (di == null)
{
di = new DirectoryInfo(elem + "\\");
continue;
}
var x = di.GetDirectories();
di = di.GetDirectories().Where(n => n.Name.ToLower() == elem.ToLower()).FirstOrDefault();
if (di == null)
throw new Exception("Subfolder not found. Path=" + path + " folder=" + elem);
}
}
#endregion
#region FilesystemFolder
/// A test model used to exercise the tree view model. It traverses the file system directory structure.
public class FilesystemFolder : TVIModel
{
public FilesystemFolder() { } // for xml serializer
public FilesystemFolder(FilesystemFolder parent, DirectoryInfo di)
{
DirInfo = di;
if (parent == null)
FolderPath = di.Name;
else
{
FolderPath = parent.FolderPath + (parent.Depth == 0 ? "" : @"\") + di.Name;
}
}
public FilesystemFolder(TVIModel parent, string name)
{
foreach (DirectoryInfo di in ((FilesystemFolder)parent).DirInfo.GetDirectories())
if (di.Name.ToLower() == name.ToLower())
{
DirInfo = di;
FolderPath = Path.Combine(((FilesystemFolder)parent).FolderPath, di.Name);
break;
}
}
#region Properties
private DirectoryInfo dirInfo;
[XmlIgnore]
public DirectoryInfo DirInfo
{
get { return dirInfo; }
set { dirInfo = value; DisplayName = Id = dirInfo.Name; }
}
public string FolderPath { get; set; }
#endregion
#region Functions
public override List<TVIModel> GetChildren()
{
List<TVIModel> result = new List<TVIModel>();
foreach (DirectoryInfo di in DirInfo.GetDirectories())
result.Add(new FilesystemFolder(this, di));
return result;
}
public override string GetPathConcatenationString() { return @"\"; }
public override string GetTypeName() { return "Folder"; }
public override TVIModel Clone()
{
return this.MemberwiseClone() as FilesystemFolder;
}
public override string GetPath(List<TVIModel> path, Pathtype pt)
{
string result = string.Empty;
for (int i = 0; i != path.Count; i++)
{
result += pt == Pathtype.DisplayName ? path[i].DisplayName : path[i].Id;
if (i > 0) result += GetPathConcatenationString();
}
return result;
}
public override bool IsSame(string id)
{
string Id1 = Id.ToLower();
string id1 = id.ToLower();
return (Id1 == id1 || Id1 + @"\" == id1 || Id1 == id1 + @"\");
}
#endregion
}
#endregion
#region SIEESerializer
[TestMethod]
[TestCategory("SIEE Base")]
public void t08_SIEESerializer()
{
// Create fieldlist
SIEEFieldlist fieldlist = new SIEEFieldlist();
fieldlist.Add(new SIEEField { Name = "Field_1", ExternalId = "Ext_1" });
fieldlist.Add(new SIEEField { Name = "Field_2", ExternalId = "Ext_2" });
SIEETableField tf = new SIEETableField { Name = "Table", ExternalId = "Ext_Table" };
tf.Columns.Add(new SIEEField { Name = "TabField_1", ExternalId = "TabExt_1" });
tf.Columns.Add(new SIEEField { Name = "TabField_2", ExternalId = "TabExt_2" });
fieldlist.Add(tf);
// Serialize
string s1 = SIEESerializer.ObjectToString(fieldlist);
// Deserialize
SIEEFieldlist f1 = (SIEEFieldlist)SIEESerializer.StringToObject(s1);
// Serialize the newly created field list
string s2 = SIEESerializer.ObjectToString(f1);
// final compare
string txt1 = fieldlist.ToString(data: false);
string txt2 = f1.ToString(data: false);
Assert.AreEqual(s1, s2);
}
#endregion
#region SIEEAnnotation handling
[TestMethod]
[TestCategory("SIEE Base")]
public void t09_SIEEAnnotation()
{
// Create a data pool
DataPool pool = createDataPool();
pool.RootNode.Documents[0].Fields["field1"].Value = "field1value";
// Create an xml document from the data pool
XmlDocument data;
data = pool.RootNode.InnerXmlNode.OwnerDocument;
// We use a dedicated SIEE_Adapter for this test. We must first register it in the FactoryManager.
SIEEFactory factory = new Test_SIEEFactory();
SIEEFactoryManager.Add(factory);
// We use a default SIEE_Adapter_Settings object and set the Schema
EECWriterSettings adapterSettings = createWriterSettings(new SIEEFieldlist() {
{ new SIEEField() { Name = "field1", ExternalId = "" } },
{ new SIEEField() { Name = "field2", ExternalId = "" } },
});
SIEEWriterExport adapterExport = new SIEEWriterExport();
adapterExport.Configure(adapterSettings);
Test_SIEEExport.ExportFunc = (settings, doc, name, fieldlist) =>
{
int val = 0;
if (doc.SIEEAnnotation != null) val = int.Parse(doc.SIEEAnnotation);
if (val <= 3) doc.NewSIEEAnnotation = (val+1).ToString();
throw new Exception("Some exception");
};
int count = 1;
pool = new DataPool(adapterExport.transform(data, null));
t09_testAnnotation(pool, count++);
pool = new DataPool(adapterExport.transform(pool.RootNode.InnerXmlNode.OwnerDocument, null));
t09_testAnnotation(pool, count++);
pool = new DataPool(adapterExport.transform(pool.RootNode.InnerXmlNode.OwnerDocument, null));
t09_testAnnotation(pool, count);
pool = new DataPool(adapterExport.transform(pool.RootNode.InnerXmlNode.OwnerDocument, null));
t09_testAnnotation(pool, count);
}
private void t09_testAnnotation(DataPool pool, int val)
{
Document doc = pool.RootNode.Documents[0];
string annotation = doc.Annotations["SIEEAnnotation" + (val - 1).ToString("D4")].Value;
Assert.AreEqual(val.ToString(), annotation);
}
#endregion
#region List field handling
[TestMethod]
[TestCategory("SIEE Base")]
public void t10_ListFieldHandling()
{
// First we create the runtime document that is to be exported.
DataPool pool = createDataPool();
Document doc = pool.RootNode.Documents[0];
doc.Fields["field1"].Value = "field1value";
doc.Fields.Add(new Field(pool, "field2", "field2value"));
doc.Fields.Add(new Field(pool, "field3", "field3value"));
doc.Fields.Add(new Field(pool, "field4", "field4value"));
doc.Fields.Add(new Field(pool, "field5", "field5value"));
doc.Fields.Add(new Field(pool, "field6", "field6value"));
addFieldList(pool, doc.Fields["field1"], 2); // to ba ignored
// field2 --> no list
addFieldList(pool, doc.Fields["field3"], 2); // fewer subfields
addFieldList(pool, doc.Fields["field4"], 4); // exact
addFieldList(pool, doc.Fields["field5"], 6); // more sub fields
addFieldList(pool, doc.Fields["field6"], 42); // no limits
// Create an xml document from the data pool
XmlDocument data;
data = pool.RootNode.InnerXmlNode.OwnerDocument;
EECWriterSettings adapterSettings = createWriterSettings(new SIEEFieldlist() {
{ new SIEEField() { Name = "field1", } },
{ new SIEEField() { Name = "field2", Cardinality = 2 } },
{ new SIEEField() { Name = "field3", Cardinality = 3 } },
{ new SIEEField() { Name = "field4", Cardinality = 4 } },
{ new SIEEField() { Name = "field5", Cardinality = 5 } },
{ new SIEEField() { Name = "field6", Cardinality = -1 } },
});
SIEEWriterExport adapterExport = new SIEEWriterExport();
adapterExport.FieldMapping4UnitTest.Add("field1");
adapterExport.FieldMapping4UnitTest.Add("field2");
adapterExport.FieldMapping4UnitTest.Add("field3");
adapterExport.FieldMapping4UnitTest.Add("field4");
adapterExport.FieldMapping4UnitTest.Add("field5");
adapterExport.FieldMapping4UnitTest.Add("field6");
adapterExport.Configure(adapterSettings);
SIEEFieldlist lastFieldList = null;
Test_SIEEExport.ExportFunc = (settings, document, name, fl) =>
{
lastFieldList = fl;
};
adapterExport.transform(data, null); // do the export
// The test export actually has not exported anything but stored the field list internally.
// We execute the assertions on this result field list.
SIEEFieldlist fieldlist = lastFieldList;
verfiyValueList(lastFieldList, "field1", "field1value", 0);
verfiyValueList(lastFieldList, "field2", "field2value", 0);
verfiyValueList(lastFieldList, "field3", "field3value", 2);
verfiyValueList(lastFieldList, "field4", "field4value", 4);
verfiyValueList(lastFieldList, "field5", "field5value", 5);
verfiyValueList(lastFieldList, "field6", "field6value", 42);
}
private void addFieldList(DataPool pool, IField f, int count)
{
for (int i = 0; i < count; i++)
f.Fields.Add(new Field(pool, "ignore", f.Name + "_" + i.ToString()));
}
private void verifyValueList(SIEEFieldlist fieldlist, string fieldname, string value, int count)
{
SIEEField field = fieldlist.GetFieldByName(fieldname);
Assert.AreEqual(value, field.Value);
Assert.AreEqual(count, field.ValueList.Count);
}
#endregion
#region Target document id handling
[TestMethod]
[TestCategory("SIEE Base")]
public void t11_TargetDocumentId()
{
// Create xml document
XmlDocument data = createDataPool().RootNode.InnerXmlNode.OwnerDocument;
// We use a dedicated SIEE_Adapter for this test. We must first register it in the FactoryManager.
SIEEFactory factory = new Test_SIEEFactory();
SIEEFactoryManager.Add(factory);
// We use a default SIEE_Adapter_Settings object and set the Schema
EECWriterSettings adapterSettings = createWriterSettings(new SIEEFieldlist());
SIEEWriterExport adapterExport = new SIEEWriterExport();
adapterExport.Configure(adapterSettings);
Test_SIEEExport.ExportFunc = (settings, doc, name, fieldlist) =>
{
doc.TargetDocumentId = "4711";
};
DataPool pool = new DataPool(adapterExport.transform(data, null));
Assert.AreEqual("4711", pool.RootNode.Documents[0].Annotations["TargetDocumentId"].Value);
Assert.AreEqual("SIEE_Adapter", pool.RootNode.Documents[0].Annotations["TargetType"].Value);
}
#endregion
#region Utilities
private DataPool createDataPool()
{
DataPool pool = new DataPool();
Document d = new Document(pool, "someDocument");
Field f = new Field(pool, "field1");
d.Fields.Add(f);
Source scr = new Source(pool, "Some not existing path");
d.Sources.Add(scr);
d.NamedSources["pdf"] = scr;
d.Sources.Add(new Source(pool, "Some other not existing path"));
pool.RootNode.Documents.Add(d);
pool.RootNode.Fields.Add(new Field(pool, "cc_BatchId", "someBatch"));
pool.RootNode.Fields.Add(new Field(pool, "cc_ProfileName", "someProfile"));
return pool;
}
#endregion
#region Test factory
public class Test_SIEEFactory : SIEEFactory
{
public override SIEESettings CreateSettings() { return new Test_SIEESettings(); }
public override SIEEExport CreateExport() { return new Test_SIEEExport(); }
public override SIEEUserControl CreateWpfControl() { return null; } // not needed
public override SIEEViewModel CreateViewModel(SIEESettings settings) { return null; } // not needed
}
public class Test_SIEEExport : SIEEExport
{
public delegate void ExportDocumentFunction(SIEESettings settings, SIEEDocument document, string name, SIEEFieldlist fieldlist);
public static ExportDocumentFunction ExportFunc;
public override void ExportDocument(SIEESettings settings, SIEEDocument document, string name, SIEEFieldlist fieldlist)
{
ExportFunc(settings, document, name, fieldlist);
}
}
[Serializable]
public class Test_SIEESettings : SIEESettings { } // just there to index the SIEE_FactoryManager
private EECWriterSettings createWriterSettings(SIEEFieldlist schema)
{
EECWriterSettings adapterSettings = new EECWriterSettings();
adapterSettings.SerializedSchema = SIEESerializer.ObjectToString(schema);
Test_SIEESettings myTestSettings = new Test_SIEESettings();
adapterSettings.SettingsTypename = myTestSettings.GetType().ToString();
string xmlString = Serializer.SerializeToXmlString(myTestSettings, System.Text.Encoding.Unicode);
adapterSettings.SerializedSettings = SIEESerializer.ObjectToString(xmlString);
adapterSettings.FieldsMapper = new CustomFieldsMapper(); // (Empty), does nothing but must be there
return adapterSettings;
}
#endregion
}
}
| |
using UnityEditorInternal;
using UnityEngine;
using UnityEngine.PostProcessing;
namespace UnityEditor.PostProcessing
{
public class ParadeMonitor : PostProcessingMonitor
{
static GUIContent s_MonitorTitle = new GUIContent("Parade");
ComputeShader m_ComputeShader;
ComputeBuffer m_Buffer;
Material m_Material;
RenderTexture m_WaveformTexture;
Rect m_MonitorAreaRect;
public ParadeMonitor()
{
m_ComputeShader = EditorResources.Load<ComputeShader>("Monitors/WaveformCompute.compute");
}
public override void Dispose()
{
GraphicsUtils.Destroy(m_Material);
GraphicsUtils.Destroy(m_WaveformTexture);
if (m_Buffer != null)
m_Buffer.Release();
m_Material = null;
m_WaveformTexture = null;
m_Buffer = null;
}
public override bool IsSupported()
{
return m_ComputeShader != null && GraphicsUtils.supportsDX11;
}
public override GUIContent GetMonitorTitle()
{
return s_MonitorTitle;
}
public override void OnMonitorSettings()
{
EditorGUI.BeginChangeCheck();
bool refreshOnPlay = m_MonitorSettings.refreshOnPlay;
float exposure = m_MonitorSettings.paradeExposure;
refreshOnPlay = GUILayout.Toggle(refreshOnPlay, new GUIContent(FxStyles.playIcon, "Keep refreshing the parade in play mode; this may impact performance."), FxStyles.preButton);
exposure = GUILayout.HorizontalSlider(exposure, 0.05f, 0.3f, FxStyles.preSlider, FxStyles.preSliderThumb, GUILayout.Width(40f));
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(m_BaseEditor.serializedObject.targetObject, "Parade Settings Changed");
m_MonitorSettings.refreshOnPlay = refreshOnPlay;
m_MonitorSettings.paradeExposure = exposure;
InternalEditorUtility.RepaintAllViews();
}
}
public override void OnMonitorGUI(Rect r)
{
if (Event.current.type == EventType.Repaint)
{
// If m_MonitorAreaRect isn't set yet, the preview was just opened, so refresh the render to get the waveform data
if (Mathf.Approximately(m_MonitorAreaRect.width, 0) && Mathf.Approximately(m_MonitorAreaRect.height, 0))
InternalEditorUtility.RepaintAllViews();
// Sizing
float width = m_WaveformTexture != null
? Mathf.Min(m_WaveformTexture.width, r.width - 65f)
: r.width;
float height = m_WaveformTexture != null
? Mathf.Min(m_WaveformTexture.height, r.height - 45f)
: r.height;
m_MonitorAreaRect = new Rect(
Mathf.Floor(r.x + r.width / 2f - width / 2f),
Mathf.Floor(r.y + r.height / 2f - height / 2f - 5f),
width, height
);
if (m_WaveformTexture != null)
{
m_Material.SetFloat("_Exposure", m_MonitorSettings.paradeExposure);
var oldActive = RenderTexture.active;
Graphics.Blit(null, m_WaveformTexture, m_Material, 0);
RenderTexture.active = oldActive;
Graphics.DrawTexture(m_MonitorAreaRect, m_WaveformTexture);
var color = Color.white;
const float kTickSize = 5f;
// Rect, lines & ticks points
// A O B P C Q D
// N E
// M F
// L G
// K T J S I R H
var A = new Vector3(m_MonitorAreaRect.x, m_MonitorAreaRect.y);
var D = new Vector3(A.x + m_MonitorAreaRect.width + 1f, m_MonitorAreaRect.y);
var H = new Vector3(D.x, D.y + m_MonitorAreaRect.height + 1f);
var K = new Vector3(A.x, H.y);
var F = new Vector3(D.x, D.y + (H.y - D.y) / 2f);
var M = new Vector3(A.x, A.y + (K.y - A.y) / 2f);
var B = new Vector3(A.x + (D.x - A.x) / 3f, A.y);
var C = new Vector3(A.x + (D.x - A.x) * 2f / 3f, A.y);
var I = new Vector3(K.x + (H.x - K.x) * 2f / 3f, K.y);
var J = new Vector3(K.x + (H.x - K.x) / 3f, K.y);
var N = new Vector3(A.x, A.y + (M.y - A.y) / 2f);
var L = new Vector3(A.x, M.y + (K.y - M.y) / 2f);
var E = new Vector3(D.x, D.y + (F.y - D.y) / 2f);
var G = new Vector3(D.x, F.y + (H.y - F.y) / 2f);
var O = new Vector3(A.x + (B.x - A.x) / 2f, A.y);
var P = new Vector3(B.x + (C.x - B.x) / 2f, B.y);
var Q = new Vector3(C.x + (D.x - C.x) / 2f, C.y);
var R = new Vector3(I.x + (H.x - I.x) / 2f, I.y);
var S = new Vector3(J.x + (I.x - J.x) / 2f, J.y);
var T = new Vector3(K.x + (J.x - K.x) / 2f, K.y);
// Borders
Handles.color = color;
Handles.DrawLine(A, D);
Handles.DrawLine(D, H);
Handles.DrawLine(H, K);
Handles.DrawLine(K, new Vector3(A.x, A.y - 1f));
Handles.DrawLine(B, J);
Handles.DrawLine(C, I);
// Vertical ticks
Handles.DrawLine(A, new Vector3(A.x - kTickSize, A.y));
Handles.DrawLine(N, new Vector3(N.x - kTickSize, N.y));
Handles.DrawLine(M, new Vector3(M.x - kTickSize, M.y));
Handles.DrawLine(L, new Vector3(L.x - kTickSize, L.y));
Handles.DrawLine(K, new Vector3(K.x - kTickSize, K.y));
Handles.DrawLine(D, new Vector3(D.x + kTickSize, D.y));
Handles.DrawLine(E, new Vector3(E.x + kTickSize, E.y));
Handles.DrawLine(F, new Vector3(F.x + kTickSize, F.y));
Handles.DrawLine(G, new Vector3(G.x + kTickSize, G.y));
Handles.DrawLine(H, new Vector3(H.x + kTickSize, H.y));
// Horizontal ticks
Handles.DrawLine(A, new Vector3(A.x, A.y - kTickSize));
Handles.DrawLine(B, new Vector3(B.x, B.y - kTickSize));
Handles.DrawLine(C, new Vector3(C.x, C.y - kTickSize));
Handles.DrawLine(D, new Vector3(D.x, D.y - kTickSize));
Handles.DrawLine(O, new Vector3(O.x, O.y - kTickSize));
Handles.DrawLine(P, new Vector3(P.x, P.y - kTickSize));
Handles.DrawLine(Q, new Vector3(Q.x, Q.y - kTickSize));
Handles.DrawLine(H, new Vector3(H.x, H.y + kTickSize));
Handles.DrawLine(I, new Vector3(I.x, I.y + kTickSize));
Handles.DrawLine(J, new Vector3(J.x, J.y + kTickSize));
Handles.DrawLine(K, new Vector3(K.x, K.y + kTickSize));
Handles.DrawLine(R, new Vector3(R.x, R.y + kTickSize));
Handles.DrawLine(S, new Vector3(S.x, S.y + kTickSize));
Handles.DrawLine(T, new Vector3(T.x, T.y + kTickSize));
// Labels
GUI.color = color;
GUI.Label(new Rect(A.x - kTickSize - 34f, A.y - 15f, 30f, 30f), "1.0", FxStyles.tickStyleRight);
GUI.Label(new Rect(M.x - kTickSize - 34f, M.y - 15f, 30f, 30f), "0.5", FxStyles.tickStyleRight);
GUI.Label(new Rect(K.x - kTickSize - 34f, K.y - 15f, 30f, 30f), "0.0", FxStyles.tickStyleRight);
GUI.Label(new Rect(D.x + kTickSize + 4f, D.y - 15f, 30f, 30f), "1.0", FxStyles.tickStyleLeft);
GUI.Label(new Rect(F.x + kTickSize + 4f, F.y - 15f, 30f, 30f), "0.5", FxStyles.tickStyleLeft);
GUI.Label(new Rect(H.x + kTickSize + 4f, H.y - 15f, 30f, 30f), "0.0", FxStyles.tickStyleLeft);
}
}
}
public override void OnFrameData(RenderTexture source)
{
if (Application.isPlaying && !m_MonitorSettings.refreshOnPlay)
return;
if (Mathf.Approximately(m_MonitorAreaRect.width, 0) || Mathf.Approximately(m_MonitorAreaRect.height, 0))
return;
float ratio = ((float)source.width / (float)source.height) / 3f;
int h = 384;
int w = Mathf.FloorToInt(h * ratio);
var rt = RenderTexture.GetTemporary(w, h, 0, source.format);
Graphics.Blit(source, rt);
ComputeWaveform(rt);
m_BaseEditor.Repaint();
RenderTexture.ReleaseTemporary(rt);
}
void CreateBuffer(int width, int height)
{
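// Descriptive note: one buffer element per source pixel; the stride (sizeof(uint) << 2 = 16 bytes)
// leaves room for four uint counters per element, presumably one per colour channel.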
m_Buffer = new ComputeBuffer(width * height, sizeof(uint) << 2);
}
void ComputeWaveform(RenderTexture source)
{
if (m_Buffer == null)
{
CreateBuffer(source.width, source.height);
}
else if (m_Buffer.count != (source.width * source.height))
{
m_Buffer.Release();
CreateBuffer(source.width, source.height);
}
var channels = m_MonitorSettings.waveformY
? new Vector4(0f, 0f, 0f, 1f)
: new Vector4(m_MonitorSettings.waveformR ? 1f : 0f, m_MonitorSettings.waveformG ? 1f : 0f, m_MonitorSettings.waveformB ? 1f : 0f, 0f);
var cs = m_ComputeShader;
int kernel = cs.FindKernel("KWaveformClear");
cs.SetBuffer(kernel, "_Waveform", m_Buffer);
cs.Dispatch(kernel, source.width, 1, 1);
kernel = cs.FindKernel("KWaveform");
cs.SetBuffer(kernel, "_Waveform", m_Buffer);
cs.SetTexture(kernel, "_Source", source);
cs.SetInt("_IsLinear", GraphicsUtils.isLinearColorSpace ? 1 : 0);
cs.SetVector("_Channels", channels);
cs.Dispatch(kernel, source.width, 1, 1);
if (m_WaveformTexture == null || m_WaveformTexture.width != (source.width * 3) || m_WaveformTexture.height != source.height)
{
GraphicsUtils.Destroy(m_WaveformTexture);
m_WaveformTexture = new RenderTexture(source.width * 3, source.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear)
{
hideFlags = HideFlags.DontSave,
wrapMode = TextureWrapMode.Clamp,
filterMode = FilterMode.Bilinear
};
}
if (m_Material == null)
m_Material = new Material(Shader.Find("Hidden/Post FX/Monitors/Parade Render")) { hideFlags = HideFlags.DontSave };
m_Material.SetBuffer("_Waveform", m_Buffer);
m_Material.SetVector("_Size", new Vector2(m_WaveformTexture.width, m_WaveformTexture.height));
m_Material.SetVector("_Channels", channels);
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
using System;
using System.Collections.Generic;
using SharpBox2D.Callbacks;
using SharpBox2D.Collision;
using SharpBox2D.Collision.Shapes;
using SharpBox2D.Common;
using SharpBox2D.Dynamics;
using SharpBox2D.Dynamics.Contacts;
using SharpBox2D.Dynamics.Joints;
using SharpBox2D.Particle;
namespace SharpBox2D.TestBed.Framework
{
/**
* @author Daniel Murphy
*/
public abstract class TestbedTest :
ContactListener, DestructionListener, ParticleDestructionListener
{
public static readonly int MAX_CONTACT_POINTS = 4048;
public static readonly float ZOOM_SCALE_DIFF = .05f;
public static readonly int TEXT_LINE_SPACE = 13;
public static readonly int TEXT_SECTION_SPACE = 3;
public static readonly int MOUSE_JOINT_BUTTON = 1;
public static readonly int BOMB_SPAWN_BUTTON = 10;
protected static readonly long GROUND_BODY_TAG = 1897450239847L;
protected static readonly long BOMB_TAG = 98989788987L;
protected static readonly long MOUSE_JOINT_TAG = 4567893364789L;
public ContactPoint[] points = new ContactPoint[MAX_CONTACT_POINTS];
/**
* Only visible for compatibility. Should use {@link #getWorld()} instead.
*/
protected World m_world;
protected Body groundBody;
private MouseJoint mouseJoint;
private Body bomb;
private Vec2 bombMousePoint = new Vec2();
private Vec2 bombSpawnPoint = new Vec2();
private bool bombSpawning = false;
protected bool mouseTracing;
private Vec2 mouseTracerPosition = new Vec2();
private Vec2 mouseTracerVelocity = new Vec2();
private Vec2 mouseWorld = new Vec2();
private int pointCount;
private int stepCount;
private TestbedModel model;
protected DestructionListener destructionListener;
protected ParticleDestructionListener particleDestructionListener;
private string title = null;
protected int m_textLine;
private LinkedList<string> textList = new LinkedList<string>();
private TestbedCamera camera;
//private JbSerializer serializer;
//private JbDeserializer deserializer;
private Transform identity = new Transform();
public TestbedTest()
{
identity.setIdentity();
for (int i = 0; i < MAX_CONTACT_POINTS; i++)
{
points[i] = new ContactPoint();
}
//serializer = new PbSerializer(this, new SignerAdapter(this) {
//
// public long getTag(Body argBody) {
// if (isSaveLoadEnabled()) {
// if (argBody == groundBody) {
// return GROUND_BODY_TAG;
// } else if (argBody == bomb) {
// return BOMB_TAG;
// }
// }
// return base.getTag(argBody);
// }
//
// public long getTag(Joint argJoint) {
// if (isSaveLoadEnabled()) {
// if (argJoint == mouseJoint) {
// return MOUSE_JOINT_TAG;
// }
// }
// return base.getTag(argJoint);
// }
//});
//deserializer = new PbDeserializer(this, new ListenerAdapter(this) {
//
// public void processBody(Body argBody, long argTag) {
// if (isSaveLoadEnabled()) {
// if (argTag == GROUND_BODY_TAG) {
// groundBody = argBody;
// return;
// } else if (argTag == BOMB_TAG) {
// bomb = argBody;
// return;
// }
// }
// base.processBody(argBody, argTag);
// }
//
// public void processJoint(Joint argJoint, long argTag) {
// if (isSaveLoadEnabled()) {
// if (argTag == MOUSE_JOINT_TAG) {
// mouseJoint = (MouseJoint) argJoint;
// return;
// }
// }
// base.processJoint(argJoint, argTag);
// }
//});
camera = new TestbedCamera(getDefaultCameraPos(), getDefaultCameraScale(), ZOOM_SCALE_DIFF);
}
public void sayGoodbye(int index)
{
particleDestroyed(index);
}
public void sayGoodbye(ParticleGroup group)
{
particleGroupDestroyed(group);
}
public void sayGoodbye(Fixture fixture)
{
fixtureDestroyed(fixture);
}
public void sayGoodbye(Joint joint)
{
if (mouseJoint == joint)
{
mouseJoint = null;
}
else
{
jointDestroyed(joint);
}
}
public void init(TestbedModel model)
{
this.model = model;
Vec2 gravity = new Vec2(0, -10f);
m_world = model.getWorldCreator().createWorld(gravity);
m_world.setParticleGravityScale(0.4f);
m_world.setParticleDensity(1.2f);
bomb = null;
mouseJoint = null;
mouseTracing = false;
mouseTracerPosition.setZero();
mouseTracerVelocity.setZero();
BodyDef bodyDef = new BodyDef();
groundBody = m_world.createBody(bodyDef);
init(m_world, false);
}
public void init(World world, bool deserialized)
{
m_world = world;
pointCount = 0;
stepCount = 0;
bombSpawning = false;
model.getDebugDraw().setViewportTransform(camera.getTransform());
world.setDestructionListener(destructionListener);
world.setParticleDestructionListener(particleDestructionListener);
world.setContactListener(this);
world.setDebugDraw(model.getDebugDraw());
title = getTestName();
initTest(deserialized);
}
//protected JbSerializer getSerializer() {
// return serializer;
//}
//protected JbDeserializer getDeserializer() {
// return deserializer;
//}
/**
* Gets the current world
*/
public World getWorld()
{
return m_world;
}
/**
* Gets the testbed model
*/
public TestbedModel getModel()
{
return model;
}
/**
* Gets the contact points for the current test
*/
public ContactPoint[] getContactPoints()
{
return points;
}
/**
* Gets the ground body of the world, used for some joints
*/
public Body getGroundBody()
{
return groundBody;
}
/**
* Gets the debug draw for the testbed
*/
public DebugDraw getDebugDraw()
{
return model.getDebugDraw();
}
/**
* Gets the world position of the mouse
*/
public Vec2 getWorldMouse()
{
return mouseWorld;
}
public int getStepCount()
{
return stepCount;
}
/**
* The number of contact points we're storing
*/
public int getPointCount()
{
return pointCount;
}
public TestbedCamera getCamera()
{
return camera;
}
/**
* Gets the 'bomb' body if it's present
*/
public Body getBomb()
{
return bomb;
}
/**
* Override for a different default camera position
*/
public virtual Vec2 getDefaultCameraPos()
{
return new Vec2(0, 20);
}
/**
* Override for a different default camera scale
*/
public virtual float getDefaultCameraScale()
{
return 10;
}
public bool isMouseTracing()
{
return mouseTracing;
}
public Vec2 getMouseTracerPosition()
{
return mouseTracerPosition;
}
public Vec2 getMouseTracerVelocity()
{
return mouseTracerVelocity;
}
/**
* Gets the filename of the current test. The default implementation uses the lowercased test
* name with spaces replaced by underscores.
*/
public string getFilename()
{
return getTestName().ToLower().Replace(" ", "_") + ".box2d";
}
/** @deprecated use {@link #getCamera()} */
public void setCamera(Vec2 argPos)
{
camera.setCamera(argPos);
}
/** @deprecated use {@link #getCamera()} */
public void setCamera(Vec2 argPos, float scale)
{
camera.setCamera(argPos, scale);
}
/**
* Initializes the current test.
*
* @param deserialized if the test was deserialized from a file. If so, all physics objects are
* already added.
*/
public abstract void initTest(bool deserialized);
/**
* The name of the test
*/
public abstract string getTestName();
/**
* Adds a text line to the reporting area
*/
public virtual void addTextLine(string line)
{
textList.AddLast(line);
}
/**
* called when the tests exits
*/
public virtual void exit()
{
}
private Color4f color1 = new Color4f(.3f, .95f, .3f);
private Color4f color2 = new Color4f(.3f, .3f, .95f);
private Color4f color3 = new Color4f(.9f, .9f, .9f);
private Color4f color4 = new Color4f(.6f, .61f, 1);
private Color4f color5 = new Color4f(.9f, .9f, .3f);
private Color4f mouseColor = new Color4f(0f, 1f, 0f);
private Vec2 p1 = new Vec2();
private Vec2 p2 = new Vec2();
private Vec2 tangent = new Vec2();
private List<string> statsList = new List<string>();
private Vec2 acceleration = new Vec2();
private CircleShape pshape = new CircleShape();
private ParticleVelocityQueryCallback pcallback = new ParticleVelocityQueryCallback();
private AABB paabb = new AABB();
public virtual void step(TestbedSettings settings)
{
float hz = settings.getSetting(TestbedSettings.Hz).value;
float timeStep = hz > 0f ? 1f/hz : 0;
if (settings.singleStep && !settings.pause)
{
settings.pause = true;
}
DebugDraw debugDraw = model.getDebugDraw();
m_textLine = 20;
if (title != null)
{
debugDraw.drawString(camera.getTransform().getExtents().x, 15, title, Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
}
if (settings.pause)
{
if (settings.singleStep)
{
settings.singleStep = false;
}
else
{
timeStep = 0;
}
debugDraw.drawString(5, m_textLine, "****PAUSED****", Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
}
DebugDrawFlags flags = 0;
flags |= settings.getSetting(TestbedSettings.DrawShapes).enabled ? DebugDrawFlags.Shapes : 0;
flags |= settings.getSetting(TestbedSettings.DrawJoints).enabled ? DebugDrawFlags.Joints : 0;
flags |= settings.getSetting(TestbedSettings.DrawAABBs).enabled ? DebugDrawFlags.AABB : 0;
flags |=
settings.getSetting(TestbedSettings.DrawCOMs).enabled ? DebugDrawFlags.CenterOfMass : 0;
flags |= settings.getSetting(TestbedSettings.DrawTree).enabled ? DebugDrawFlags.DynamicTree : 0;
flags |=
settings.getSetting(TestbedSettings.DrawWireframe).enabled
? DebugDrawFlags.Wireframe
: 0;
debugDraw.setFlags(flags);
m_world.setAllowSleep(settings.getSetting(TestbedSettings.AllowSleep).enabled);
m_world.setWarmStarting(settings.getSetting(TestbedSettings.WarmStarting).enabled);
m_world.setSubStepping(settings.getSetting(TestbedSettings.SubStepping).enabled);
m_world.setContinuousPhysics(settings.getSetting(TestbedSettings.ContinuousCollision).enabled);
pointCount = 0;
m_world.step(timeStep, settings.getSetting(TestbedSettings.VelocityIterations).value,
settings.getSetting(TestbedSettings.PositionIterations).value);
m_world.drawDebugData();
if (timeStep > 0f)
{
++stepCount;
}
debugDraw.drawString(5, m_textLine, "Engine Info", color4);
m_textLine += TEXT_LINE_SPACE;
debugDraw.drawString(5, m_textLine, "Framerate: " + (int) model.getCalculatedFps(),
Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
if (settings.getSetting(TestbedSettings.DrawStats).enabled)
{
int particleCount = m_world.getParticleCount();
int groupCount = m_world.getParticleGroupCount();
debugDraw.drawString(
5,
m_textLine,
"bodies/contacts/joints/proxies/particles/groups = " + m_world.getBodyCount() + "/"
+ m_world.getContactCount() + "/" + m_world.getJointCount() + "/"
+ m_world.getProxyCount() + "/" + particleCount + "/" + groupCount, Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
debugDraw.drawString(5, m_textLine, "World mouse position: " + mouseWorld.ToString(),
Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
statsList.Clear();
Dynamics.Profile profile = getWorld().getProfile();
profile.toDebugStrings(statsList);
foreach (string s in statsList)
{
debugDraw.drawString(5, m_textLine, s, Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
}
m_textLine += TEXT_SECTION_SPACE;
}
if (settings.getSetting(TestbedSettings.DrawHelp).enabled)
{
debugDraw.drawString(5, m_textLine, "Help", color4);
m_textLine += TEXT_LINE_SPACE;
List<string> help = model.getImplSpecificHelp();
foreach (string s in help)
{
debugDraw.drawString(5, m_textLine, s, Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
}
m_textLine += TEXT_SECTION_SPACE;
}
if (textList.Count != 0)
{
debugDraw.drawString(5, m_textLine, "Test Info", color4);
m_textLine += TEXT_LINE_SPACE;
foreach (string s in textList)
{
debugDraw.drawString(5, m_textLine, s, Color4f.WHITE);
m_textLine += TEXT_LINE_SPACE;
}
textList.Clear();
}
if (mouseTracing && mouseJoint == null)
{
float delay = 0.1f;
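// Descriptive note: the tracer is a simple spring-damper pulled toward the mouse,
// acceleration = 2/delay * ((mouseWorld - mouseTracerPosition)/delay - mouseTracerVelocity),
// after which velocity and position are integrated explicitly with the current time step.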
acceleration.x =
2/delay*(1/delay*(mouseWorld.x - mouseTracerPosition.x) - mouseTracerVelocity.x);
acceleration.y =
2/delay*(1/delay*(mouseWorld.y - mouseTracerPosition.y) - mouseTracerVelocity.y);
mouseTracerVelocity.x += timeStep*acceleration.x;
mouseTracerVelocity.y += timeStep*acceleration.y;
mouseTracerPosition.x += timeStep*mouseTracerVelocity.x;
mouseTracerPosition.y += timeStep*mouseTracerVelocity.y;
pshape.m_p.set(mouseTracerPosition);
pshape.m_radius = 2;
pcallback.init(m_world, pshape, mouseTracerVelocity);
pshape.computeAABB(paabb, identity, 0);
m_world.queryAABB(pcallback, paabb);
}
if (mouseJoint != null)
{
mouseJoint.getAnchorB(ref p1);
Vec2 p2 = mouseJoint.getTarget();
debugDraw.drawSegment(p1, p2, mouseColor);
}
if (bombSpawning)
{
debugDraw.drawSegment(bombSpawnPoint, bombMousePoint, Color4f.WHITE);
}
if (settings.getSetting(TestbedSettings.DrawContactPoints).enabled)
{
float k_impulseScale = 0.1f;
float axisScale = 0.3f;
for (int i = 0; i < pointCount; i++)
{
ContactPoint point = points[i];
if (point.state == Collision.Collision.PointState.ADD_STATE)
{
debugDraw.drawPoint(point.position, 10f, color1);
}
else if (point.state == Collision.Collision.PointState.PERSIST_STATE)
{
debugDraw.drawPoint(point.position, 5f, color2);
}
if (settings.getSetting(TestbedSettings.DrawContactNormals).enabled)
{
p1.set(point.position);
p2.set(point.normal);
p2.mulLocal(axisScale);
p2.addLocal(p1);
debugDraw.drawSegment(p1, p2, color3);
}
else if (settings.getSetting(TestbedSettings.DrawContactImpulses).enabled)
{
p1.set(point.position);
p2.set(point.normal);
p2.mulLocal(k_impulseScale);
p2.mulLocal(point.normalImpulse);
p2.addLocal(p1);
debugDraw.drawSegment(p1, p2, color5);
}
if (settings.getSetting(TestbedSettings.DrawFrictionImpulses).enabled)
{
Vec2.crossToOutUnsafe(point.normal, 1, ref tangent);
p1.set(point.position);
p2.set(tangent);
p2.mulLocal(k_impulseScale);
p2.mulLocal(point.tangentImpulse);
p2.addLocal(p1);
debugDraw.drawSegment(p1, p2, color5);
}
}
}
}
/************ INPUT ************/
/**
* Called for mouse-up
*/
public virtual void mouseUp(Vec2 p, int button)
{
mouseTracing = false;
if (button == MOUSE_JOINT_BUTTON)
{
destroyMouseJoint();
}
completeBombSpawn(p);
}
public virtual void keyPressed(char keyChar, int keyCode)
{
}
public virtual void keyReleased(char keyChar, int keyCode)
{
}
public virtual void mouseDown(Vec2 p, int button)
{
mouseWorld.set(p);
mouseTracing = true;
mouseTracerVelocity.setZero();
mouseTracerPosition.set(p);
if (button == BOMB_SPAWN_BUTTON)
{
beginBombSpawn(p);
}
if (button == MOUSE_JOINT_BUTTON)
{
spawnMouseJoint(p);
}
}
public virtual void mouseMove(Vec2 p)
{
mouseWorld.set(p);
}
public virtual void mouseDrag(Vec2 p, int button)
{
mouseWorld.set(p);
if (button == MOUSE_JOINT_BUTTON)
{
updateMouseJoint(p);
}
if (button == BOMB_SPAWN_BUTTON)
{
bombMousePoint.set(p);
}
}
/************ MOUSE JOINT ************/
private AABB queryAABB = new AABB();
private TestQueryCallback callback = new TestQueryCallback();
private void spawnMouseJoint(Vec2 p)
{
if (mouseJoint != null)
{
return;
}
queryAABB.lowerBound.set(p.x - .001f, p.y - .001f);
queryAABB.upperBound.set(p.x + .001f, p.y + .001f);
callback.point.set(p);
callback.fixture = null;
m_world.queryAABB(callback, queryAABB);
if (callback.fixture != null)
{
Body body = callback.fixture.getBody();
MouseJointDef def = new MouseJointDef();
def.bodyA = groundBody;
def.bodyB = body;
def.collideConnected = true;
def.target.set(p);
def.maxForce = 1000f*body.getMass();
mouseJoint = (MouseJoint) m_world.createJoint(def);
body.setAwake(true);
}
}
private void updateMouseJoint(Vec2 target)
{
if (mouseJoint != null)
{
mouseJoint.setTarget(target);
}
}
private void destroyMouseJoint()
{
if (mouseJoint != null)
{
m_world.destroyJoint(mouseJoint);
mouseJoint = null;
}
}
/********** BOMB ************/
private Vec2 p = new Vec2();
private Vec2 v = new Vec2();
public void lanchBomb()
{
p.set((float) _random.NextDouble()*30 - 15, 30f);
v.set(p);
v.mulLocal(-5f);
launchBomb(p, v);
}
private AABB aabb = new AABB();
private void launchBomb(Vec2 position, Vec2 velocity)
{
if (bomb != null)
{
m_world.destroyBody(bomb);
bomb = null;
}
// todo optimize this
BodyDef bd = new BodyDef();
bd.type = BodyType.DYNAMIC;
bd.position.set(position);
bd.bullet = true;
bomb = m_world.createBody(bd);
bomb.setLinearVelocity(velocity);
CircleShape circle = new CircleShape();
circle.m_radius = 0.3f;
FixtureDef fd = new FixtureDef();
fd.shape = circle;
fd.density = 20f;
fd.restitution = 0;
Vec2 minV = new Vec2(position);
Vec2 maxV = new Vec2(position);
minV.subLocal(new Vec2(.3f, .3f));
maxV.addLocal(new Vec2(.3f, .3f));
aabb.lowerBound.set(minV);
aabb.upperBound.set(maxV);
bomb.createFixture(fd);
}
private void beginBombSpawn(Vec2 worldPt)
{
bombSpawnPoint.set(worldPt);
bombMousePoint.set(worldPt);
bombSpawning = true;
}
private Vec2 vel = new Vec2();
private void completeBombSpawn(Vec2 p)
{
if (bombSpawning == false)
{
return;
}
float multiplier = 30f;
vel.set(bombSpawnPoint);
vel.subLocal(p);
vel.mulLocal(multiplier);
launchBomb(bombSpawnPoint, vel);
bombSpawning = false;
}
/************ SERIALIZATION *************/
/**
* Override to enable saving and loading. Remember to also override the {@link ObjectListener} and
* {@link ObjectSigner} methods if you need them.
*
* @return true if saving and loading is enabled for this test
*/
public virtual bool isSaveLoadEnabled()
{
return false;
}
public virtual long getTag(Body body)
{
return default(long);
}
public virtual long getTag(Fixture fixture)
{
return default(long);
}
public virtual long getTag(Joint joint)
{
return default(long);
}
public long getTag(Shape shape)
{
return default(long);
}
public long getTag(World world)
{
return default(long);
}
public virtual void processBody(Body body, long tag)
{
}
public virtual void processFixture(Fixture fixture, long tag)
{
}
public virtual void processJoint(Joint joint, long tag)
{
}
public virtual void processShape(Shape shape, long tag)
{
}
public virtual void processWorld(World world, long tag)
{
}
public virtual bool isUnsupported(InvalidOperationException exception)
{
return true;
}
public virtual void fixtureDestroyed(Fixture fixture)
{
}
public virtual void jointDestroyed(Joint joint)
{
}
public virtual void beginContact(Contact contact)
{
}
public virtual void endContact(Contact contact)
{
}
public virtual void particleDestroyed(int particle)
{
}
public virtual void particleGroupDestroyed(ParticleGroup group)
{
}
public virtual void postSolve(Contact contact, ContactImpulse impulse)
{
}
private Collision.Collision.PointState[] state1 = new Collision.Collision.PointState[Settings.maxManifoldPoints];
private Collision.Collision.PointState[] state2 = new Collision.Collision.PointState[Settings.maxManifoldPoints];
private WorldManifold worldManifold = new WorldManifold();
private Random _random = new Random();
public virtual void preSolve(Contact contact, Manifold oldManifold)
{
Manifold manifold = contact.getManifold();
if (manifold.pointCount == 0)
{
return;
}
Fixture fixtureA = contact.getFixtureA();
Fixture fixtureB = contact.getFixtureB();
Collision.Collision.getPointStates(state1, state2, oldManifold, manifold);
contact.getWorldManifold(worldManifold);
for (int i = 0; i < manifold.pointCount && pointCount < MAX_CONTACT_POINTS; i++)
{
ContactPoint cp = points[pointCount];
cp.fixtureA = fixtureA;
cp.fixtureB = fixtureB;
cp.position.set(worldManifold.points[i]);
cp.normal.set(worldManifold.normal);
cp.state = state2[i];
cp.normalImpulse = manifold.points[i].normalImpulse;
cp.tangentImpulse = manifold.points[i].tangentImpulse;
cp.separation = worldManifold.separations[i];
++pointCount;
}
}
}
internal class TestQueryCallback : QueryCallback
{
public Vec2 point;
public Fixture fixture;
public TestQueryCallback()
{
point = new Vec2();
fixture = null;
}
public bool reportFixture(Fixture argFixture)
{
Body body = argFixture.getBody();
if (body.getType() == BodyType.DYNAMIC)
{
bool inside = argFixture.testPoint(point);
if (inside)
{
fixture = argFixture;
return false;
}
}
return true;
}
}
internal class ParticleVelocityQueryCallback : ParticleQueryCallback
{
private World world;
private Shape shape;
private Vec2 velocity;
private Transform xf = new Transform();
public ParticleVelocityQueryCallback()
{
xf.setIdentity();
}
public void init(World world, Shape shape, Vec2 velocity)
{
this.world = world;
this.shape = shape;
this.velocity = velocity;
}
public bool reportParticle(int index)
{
Vec2 p = world.getParticlePositionBuffer()[index];
if (shape.testPoint(xf, p))
{
Vec2 v = world.getParticleVelocityBuffer()[index];
v.set(velocity);
}
return true;
}
}
//class SignerAdapter : ObjectSigner {
// private ObjectSigner signer;
// public SignerAdapter(ObjectSigner argSigner) {
// signer = argSigner;
// }
// public long getTag(World argWorld) {
// return signer.getTag(argWorld);
// }
// public long getTag(Body argBody) {
// return signer.getTag(argBody);
// }
// public long getTag(Shape argShape) {
// return signer.getTag(argShape);
// }
// public long getTag(Fixture argFixture) {
// return signer.getTag(argFixture);
// }
// public long getTag(Joint argJoint) {
// return signer.getTag(argJoint);
// }
//}
//class ListenerAdapter : ObjectListener {
// private ObjectListener listener;
// public ListenerAdapter(ObjectListener argListener) {
// listener = argListener;
// }
// public void processWorld(World argWorld, long argTag) {
// listener.processWorld(argWorld, argTag);
// }
// public void processBody(Body argBody, long argTag) {
// listener.processBody(argBody, argTag);
// }
// public void processFixture(Fixture argFixture, long argTag) {
// listener.processFixture(argFixture, argTag);
// }
// public void processShape(Shape argShape, long argTag) {
// listener.processShape(argShape, argTag);
// }
// public void processJoint(Joint argJoint, long argTag) {
// listener.processJoint(argJoint, argTag);
// }
}
| |
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace System.ServiceModel.Channels
{
using System.Globalization;
using System.Collections.Generic;
using System.EnterpriseServices;
using System.Runtime;
using System.Runtime.InteropServices;
using System.Threading;
using System.ServiceModel.Diagnostics;
using System.Transactions;
class MsmqSubqueueLockingQueue : MsmqQueue, ILockingQueue
{
string lockQueueName;
MsmqQueue mainQueueForMove;
MsmqQueue lockQueueForMove;
MsmqQueue lockQueueForReceive;
IOThreadTimer lockCollectionTimer;
TimeSpan lockCollectionInterval = TimeSpan.FromMinutes(5);
object timerLock = new object();
bool disposed;
string hostname;
bool validHostName;
private const string LockSubqueuePrefix = "lock_";
public MsmqSubqueueLockingQueue(string formatName, string hostname, int accessMode)
: base(formatName, accessMode)
{
// The hostname will be empty for MsmqIntegrationBinding
if (string.Compare(hostname, string.Empty, StringComparison.OrdinalIgnoreCase) == 0)
{
this.validHostName = MsmqSubqueueLockingQueue.TryGetHostName(formatName, out hostname);
}
else
{
this.validHostName = true;
}
this.disposed = false;
this.lockQueueName = this.formatName + ";" + MsmqSubqueueLockingQueue.GenerateLockQueueName();
this.lockQueueForReceive = new MsmqQueue(this.lockQueueName, UnsafeNativeMethods.MQ_RECEIVE_ACCESS, UnsafeNativeMethods.MQ_DENY_RECEIVE_SHARE);
this.lockQueueForMove = new MsmqQueue(this.lockQueueName, UnsafeNativeMethods.MQ_MOVE_ACCESS);
this.mainQueueForMove = new MsmqQueue(this.formatName, UnsafeNativeMethods.MQ_MOVE_ACCESS);
this.lockCollectionTimer = new IOThreadTimer(new Action<object>(OnCollectionTimer), null, false);
if (string.Compare(hostname, "localhost", StringComparison.OrdinalIgnoreCase) == 0)
{
this.hostname = null;
}
else
{
this.hostname = hostname;
}
}
private static string GenerateLockQueueName()
{
string lockGuid = Guid.NewGuid().ToString();
return MsmqSubqueueLockingQueue.LockSubqueuePrefix + lockGuid.Substring(lockGuid.Length - 8, 8);
}
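// Illustrative note: for a (hypothetical) main queue format name such as
// "DIRECT=OS:.\private$\orders", the lock subqueue format name built from this
// suffix would look like "DIRECT=OS:.\private$\orders;lock_1a2b3c4d", i.e. the
// main format name, a ';' separator and the generated "lock_" subqueue name.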
public MsmqQueue LockQueueForReceive
{
get
{
return this.lockQueueForReceive;
}
}
internal override MsmqQueueHandle OpenQueue()
{
if (!this.validHostName)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MsmqException(SR.GetString(SR.MsmqOpenError,
MsmqError.GetErrorString(UnsafeNativeMethods.MQ_ERROR_UNSUPPORTED_FORMATNAME_OPERATION)),
UnsafeNativeMethods.MQ_ERROR_UNSUPPORTED_FORMATNAME_OPERATION));
}
this.EnsureLockQueuesOpen();
this.mainQueueForMove.EnsureOpen();
// first time collection
this.OnCollectionTimer(null);
return base.OpenQueue();
}
internal void EnsureLockQueuesOpen()
{
int attempts = 0;
// Handle lock queue name collisions; if we fail three times in a row, it is probably not a name
// collision that is causing the open to fail.
while (true)
{
try
{
this.lockQueueForReceive.EnsureOpen();
break;
}
catch (MsmqException ex)
{
if (attempts >= 3)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(ex);
}
MsmqDiagnostics.ExpectedException(ex);
}
this.lockQueueForReceive.Dispose();
this.lockQueueForMove.Dispose();
this.lockQueueName = this.formatName + ";" + MsmqSubqueueLockingQueue.GenerateLockQueueName();
this.lockQueueForReceive = new MsmqQueue(this.lockQueueName, UnsafeNativeMethods.MQ_RECEIVE_ACCESS, UnsafeNativeMethods.MQ_DENY_RECEIVE_SHARE);
this.lockQueueForMove = new MsmqQueue(this.lockQueueName, UnsafeNativeMethods.MQ_MOVE_ACCESS);
attempts++;
}
this.lockQueueForMove.EnsureOpen();
}
public override ReceiveResult TryReceive(NativeMsmqMessage message, TimeSpan timeout, MsmqTransactionMode transactionMode)
{
// we ignore transaction mode for receive context receives
TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
bool receivedMessage = false;
long lookupId = 0;
// peek for new message, move it to the lock queue and then receive the full message
// if move fails because another thread moved it ahead of us then peek again
while (!receivedMessage)
{
ReceiveResult result;
MoveReceiveResult moveResult;
using (MsmqMessageLookupId emptyMessage = new MsmqMessageLookupId())
{
result = this.TryPeek(emptyMessage, timeoutHelper.RemainingTime());
if (result != ReceiveResult.MessageReceived)
{
return result;
}
lookupId = emptyMessage.lookupId.Value;
}
try
{
moveResult = this.TryMoveMessage(lookupId, this.lockQueueForMove, MsmqTransactionMode.None);
if (moveResult == MoveReceiveResult.Succeeded)
{
receivedMessage = true;
}
}
catch (MsmqException ex)
{
MsmqDiagnostics.ExpectedException(ex);
}
}
MoveReceiveResult lookupIdReceiveResult;
try
{
lookupIdReceiveResult = this.lockQueueForReceive.TryReceiveByLookupId(lookupId, message, MsmqTransactionMode.None, UnsafeNativeMethods.MQ_LOOKUP_PEEK_CURRENT);
}
catch (MsmqException ex)
{
this.UnlockMessage(lookupId, TimeSpan.Zero);
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(ex);
}
if (lookupIdReceiveResult == MoveReceiveResult.Succeeded)
{
return ReceiveResult.MessageReceived;
}
else
{
this.UnlockMessage(lookupId, TimeSpan.Zero);
return ReceiveResult.OperationCancelled;
}
}
public void DeleteMessage(long lookupId, TimeSpan timeout)
{
// Operations on the lock subqueue need to be protected from racing with close.
MoveReceiveResult receiveResult;
IPostRollbackErrorStrategy postRollBack = new SimplePostRollbackErrorStrategy(lookupId);
do
{
using (MsmqEmptyMessage emptyMessage = new MsmqEmptyMessage())
{
receiveResult = this.lockQueueForReceive.TryReceiveByLookupId(lookupId, emptyMessage, MsmqTransactionMode.CurrentOrNone);
}
if (receiveResult != MsmqQueue.MoveReceiveResult.MessageLockedUnderTransaction)
break;
// We could have failed because of a race with transaction.abort() for the transaction
// that had this message locked previously. We will retry in these cases.
} while (postRollBack.AnotherTryNeeded());
// We could have failed because of
// a) failure in the underlying queue manager
// b) expiration of the native message timer
// c) a race with Channel.Close()
// ..not much we can do in any of these cases
if (receiveResult != MoveReceiveResult.Succeeded)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MsmqException(SR.GetString(SR.MsmqReceiveContextMessageNotReceived, lookupId.ToString(CultureInfo.InvariantCulture))));
}
}
public void UnlockMessage(long lookupId, TimeSpan timeout)
{
MoveReceiveResult moveResult;
IPostRollbackErrorStrategy postRollBack = new SimplePostRollbackErrorStrategy(lookupId);
do
{
moveResult = this.lockQueueForReceive.TryMoveMessage(lookupId, this.mainQueueForMove, MsmqTransactionMode.None);
if (moveResult != MsmqQueue.MoveReceiveResult.MessageLockedUnderTransaction)
break;
// We could have failed because of a race with transaction.abort() for the transaction
// that had this message locked previously. We will retry in these cases.
} while (postRollBack.AnotherTryNeeded());
if (moveResult != MoveReceiveResult.Succeeded)
{
// We could have failed because of
// a) failure in the underlying queue manager
// b) expiration of the native message timer
// c) a race with Channel.Close()
// ..not much we can do in any of these cases
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MsmqException(SR.GetString(SR.MsmqReceiveContextMessageNotMoved, lookupId.ToString(CultureInfo.InvariantCulture))));
}
}
public override void CloseQueue()
{
lock (this.timerLock)
{
if (!this.disposed)
{
this.disposed = true;
this.lockCollectionTimer.Cancel();
this.lockCollectionTimer = null;
}
}
this.CollectLocks(this.lockQueueForReceive);
this.mainQueueForMove.CloseQueue();
this.lockQueueForMove.CloseQueue();
this.lockQueueForReceive.CloseQueue();
base.CloseQueue();
}
private void OnCollectionTimer(object state)
{
lock (this.timerLock)
{
if (this.disposed)
{
return;
}
List<string> subqueues;
if (TryEnumerateSubqueues(out subqueues))
{
foreach (string subqueueName in subqueues)
{
if (subqueueName.StartsWith(MsmqSubqueueLockingQueue.LockSubqueuePrefix, StringComparison.OrdinalIgnoreCase))
{
MsmqQueue collectQueue;
if (TryOpenLockQueueForCollection(subqueueName, out collectQueue))
{
this.CollectLocks(collectQueue);
}
}
}
}
this.lockCollectionTimer.Set(this.lockCollectionInterval);
}
}
private bool TryOpenLockQueueForCollection(string subqueueName, out MsmqQueue lockQueue)
{
lockQueue = null;
string formatName = this.formatName + ";" + subqueueName;
int accessMode = UnsafeNativeMethods.MQ_RECEIVE_ACCESS;
int shareMode = UnsafeNativeMethods.MQ_DENY_RECEIVE_SHARE;
try
{
int error = 0;
if (MsmqQueue.IsQueueOpenable(formatName, accessMode, shareMode, out error))
{
lockQueue = new MsmqQueue(formatName, accessMode, shareMode);
lockQueue.EnsureOpen();
}
else
{
// The lock subqueue is either being actively used by a channel or is not available.
// So, we do not have to collect this lock queue.
if (error == UnsafeNativeMethods.MQ_ERROR_SHARING_VIOLATION ||
error == UnsafeNativeMethods.MQ_ERROR_QUEUE_NOT_FOUND)
{
return false;
}
else
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MsmqException(SR.GetString(SR.MsmqOpenError, MsmqError.GetErrorString(error)), error));
}
}
}
catch (MsmqException)
{
// The error has already been logged. Since this function is to decide whether to collect
// the lock queue, we return false.
return false;
}
return true;
}
private void CollectLocks(MsmqQueue lockQueue)
{
ReceiveResult result = ReceiveResult.MessageReceived;
while (result == ReceiveResult.MessageReceived)
{
using (MsmqMessageLookupId message = new MsmqMessageLookupId())
{
try
{
result = lockQueue.TryPeek(message, TimeSpan.FromSeconds(0));
if (result == ReceiveResult.MessageReceived)
{
lockQueue.TryMoveMessage(message.lookupId.Value, this.mainQueueForMove, MsmqTransactionMode.None);
}
}
catch (MsmqException ex)
{
// we will retry the collection in the next cleanup round
MsmqDiagnostics.ExpectedException(ex);
result = ReceiveResult.Unknown;
}
}
}
}
private bool TryEnumerateSubqueues(out List<String> subqueues)
{
subqueues = new List<string>();
int[] ids = new int[1];
UnsafeNativeMethods.MQMSGPROPS props = new UnsafeNativeMethods.MQMSGPROPS();
UnsafeNativeMethods.MQPROPVARIANT prop = new UnsafeNativeMethods.MQPROPVARIANT();
UnsafeNativeMethods.MQPROPVARIANT retProp;
GCHandle propsHandle = GCHandle.Alloc(null, GCHandleType.Pinned);
GCHandle nativePropertyIdsHandle = GCHandle.Alloc(null, GCHandleType.Pinned);
GCHandle propHandle = GCHandle.Alloc(null, GCHandleType.Pinned);
props.status = IntPtr.Zero;
props.count = 1;
ids[0] = UnsafeNativeMethods.PROPID_MGMT_QUEUE_SUBQUEUE_NAMES;
prop.vt = UnsafeNativeMethods.VT_NULL;
try
{
// pin
propsHandle.Target = props;
nativePropertyIdsHandle.Target = ids;
propHandle.Target = prop;
props.variants = propHandle.AddrOfPinnedObject();
props.ids = nativePropertyIdsHandle.AddrOfPinnedObject();
if (UnsafeNativeMethods.MQMgmtGetInfo(this.hostname, "queue=" + this.formatName, propsHandle.AddrOfPinnedObject()) == 0)
{
retProp = (UnsafeNativeMethods.MQPROPVARIANT)Marshal.PtrToStructure(props.variants, typeof(UnsafeNativeMethods.MQPROPVARIANT));
IntPtr[] stringArrays = new IntPtr[retProp.stringArraysValue.count];
Marshal.Copy(retProp.stringArraysValue.stringArrays, stringArrays, 0, retProp.stringArraysValue.count);
for (int i = 0; i < retProp.stringArraysValue.count; i++)
{
subqueues.Add(Marshal.PtrToStringUni(stringArrays[i]));
UnsafeNativeMethods.MQFreeMemory(stringArrays[i]);
}
UnsafeNativeMethods.MQFreeMemory(retProp.stringArraysValue.stringArrays);
}
else
{
return false;
}
}
finally
{
// unpin
nativePropertyIdsHandle.Target = null;
propsHandle.Target = null;
propHandle.Target = null;
}
return true;
}
private class MsmqMessageLookupId : NativeMsmqMessage
{
public LongProperty lookupId;
public MsmqMessageLookupId()
: base(1)
{
this.lookupId = new LongProperty(this, UnsafeNativeMethods.PROPID_M_LOOKUPID);
}
}
private static bool TryGetHostName(string formatName, out string hostName)
{
string directFormatNamePrefix = "DIRECT=";
string tcpProtocolPrefix = "TCP:";
string osProtocolPrefix = "OS:";
hostName = null;
if (formatName.StartsWith(directFormatNamePrefix, StringComparison.OrdinalIgnoreCase))
{
// The direct format name of the form DIRECT=OS:.\sampleq is parsed here
string formatNameWithProtocol = formatName.Substring(directFormatNamePrefix.Length,
formatName.Length - directFormatNamePrefix.Length);
int addressStartPos = formatNameWithProtocol.IndexOf(':') + 1;
string address = formatNameWithProtocol.Substring(addressStartPos,
formatNameWithProtocol.IndexOf('\\') - addressStartPos);
if (formatNameWithProtocol.StartsWith(tcpProtocolPrefix, StringComparison.OrdinalIgnoreCase))
{
// formatNameWithProtocol is TCP:<tcp-address>\<queue-type>\<queue-name>
hostName = address;
return true;
}
else if (formatNameWithProtocol.StartsWith(osProtocolPrefix, StringComparison.OrdinalIgnoreCase))
{
if (address.Equals("."))
{
// formatNameWithProtocol is OS:.\<queue-type>\<queue-name>
hostName = "localhost";
}
else
{
// formatNameWithProtocol is OS:<machine-name>\<queue-type>\<queue-name>
hostName = address;
}
return true;
}
else
{
// Other protocols not supported. IPX is valid only on NT, w2k
// HTTP/HTTPS: can be used only to send messages. If support changes in future,
// use Dns.GetHostEntry to obtain the IP address
return false;
}
}
else
{
// Other format names are not supported
return false;
}
}
}
}
| |
using System;
using System.Collections;
using System.Reflection;
using System.Reflection.Emit;
namespace Python.Runtime
{
/// <summary>
/// The DelegateManager class manages the creation of true managed
/// delegate instances that dispatch calls to Python methods.
/// </summary>
internal class DelegateManager
{
private Hashtable cache;
private Type basetype;
private Type listtype;
private Type voidtype;
private Type typetype;
private Type ptrtype;
private CodeGenerator codeGenerator;
public DelegateManager()
{
basetype = typeof(Dispatcher);
listtype = typeof(ArrayList);
voidtype = typeof(void);
typetype = typeof(Type);
ptrtype = typeof(IntPtr);
cache = new Hashtable();
codeGenerator = new CodeGenerator();
}
/// <summary>
/// Given a true delegate instance, return the PyObject handle of the
/// Python object implementing the delegate (or IntPtr.Zero if the
/// delegate is not implemented in Python code).
/// </summary>
public IntPtr GetPythonHandle(Delegate d)
{
if (d?.Target is Dispatcher)
{
var disp = (Dispatcher)d.Target;
return disp.target;
}
return IntPtr.Zero;
}
/// <summary>
/// GetDispatcher is responsible for creating a class that provides
/// an appropriate managed callback method for a given delegate type.
/// </summary>
private Type GetDispatcher(Type dtype)
{
// If a dispatcher type for the given delegate type has already
// been generated, get it from the cache. The cache maps delegate
// types to generated dispatcher types. A possible optimization
// for the future would be to generate dispatcher types based on
// unique signatures rather than delegate types, since multiple
// delegate types with the same sig could use the same dispatcher.
object item = cache[dtype];
if (item != null)
{
return (Type)item;
}
string name = $"__{dtype.FullName}Dispatcher";
name = name.Replace('.', '_');
name = name.Replace('+', '_');
TypeBuilder tb = codeGenerator.DefineType(name, basetype);
// Generate a constructor for the generated type that calls the
// appropriate constructor of the Dispatcher base type.
MethodAttributes ma = MethodAttributes.Public |
MethodAttributes.HideBySig |
MethodAttributes.SpecialName |
MethodAttributes.RTSpecialName;
var cc = CallingConventions.Standard;
Type[] args = { ptrtype, typetype };
ConstructorBuilder cb = tb.DefineConstructor(ma, cc, args);
ConstructorInfo ci = basetype.GetConstructor(args);
ILGenerator il = cb.GetILGenerator();
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Ldarg_1);
il.Emit(OpCodes.Ldarg_2);
il.Emit(OpCodes.Call, ci);
il.Emit(OpCodes.Ret);
// Method generation: we generate a method named "Invoke" on the
// dispatcher type, whose signature matches the delegate type for
// which it is generated. The method body simply packages the
// arguments and hands them to the Dispatch() method, which deals
// with converting the arguments, calling the Python method and
// converting the result of the call.
MethodInfo method = dtype.GetMethod("Invoke");
ParameterInfo[] pi = method.GetParameters();
var signature = new Type[pi.Length];
for (var i = 0; i < pi.Length; i++)
{
signature[i] = pi[i].ParameterType;
}
MethodBuilder mb = tb.DefineMethod("Invoke", MethodAttributes.Public, method.ReturnType, signature);
ConstructorInfo ctor = listtype.GetConstructor(Type.EmptyTypes);
MethodInfo dispatch = basetype.GetMethod("Dispatch");
MethodInfo add = listtype.GetMethod("Add");
il = mb.GetILGenerator();
il.DeclareLocal(listtype);
il.Emit(OpCodes.Newobj, ctor);
il.Emit(OpCodes.Stloc_0);
for (var c = 0; c < signature.Length; c++)
{
Type t = signature[c];
il.Emit(OpCodes.Ldloc_0);
il.Emit(OpCodes.Ldarg_S, (byte)(c + 1));
if (t.IsValueType)
{
il.Emit(OpCodes.Box, t);
}
il.Emit(OpCodes.Callvirt, add);
il.Emit(OpCodes.Pop);
}
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Ldloc_0);
il.Emit(OpCodes.Call, dispatch);
if (method.ReturnType == voidtype)
{
il.Emit(OpCodes.Pop);
}
else if (method.ReturnType.IsValueType)
{
il.Emit(OpCodes.Unbox_Any, method.ReturnType);
}
il.Emit(OpCodes.Ret);
Type disp = tb.CreateType();
cache[dtype] = disp;
return disp;
}
/// <summary>
/// Given a delegate type and a callable Python object, GetDelegate
/// returns an instance of the delegate type. The delegate instance
/// returned will dispatch calls to the given Python object.
/// </summary>
internal Delegate GetDelegate(Type dtype, IntPtr callable)
{
Type dispatcher = GetDispatcher(dtype);
object[] args = { callable, dtype };
object o = Activator.CreateInstance(dispatcher, args);
return Delegate.CreateDelegate(dtype, o, "Invoke");
}
}
/* When a delegate instance is created that has a Python implementation,
the delegate manager generates a custom subclass of Dispatcher and
instantiates it, passing the IntPtr of the Python callable.
The "real" delegate is created using CreateDelegate, passing the
instance of the generated type and the name of the (generated)
implementing method (Invoke).
The true delegate instance holds the only reference to the dispatcher
instance, which ensures that when the delegate dies, the finalizer
of the referenced instance will be able to decref the Python
callable.
A possible alternate strategy would be to create custom subclasses
of the required delegate type, storing the IntPtr in it directly.
This would be slightly cleaner, but I'm not sure if delegates are
too "special" for this to work. It would be more work, so for now
the 80/20 rule applies :) */
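/* Illustrative usage sketch (hypothetical call site; the variable names and the choice of
   EventHandler as the delegate type are assumptions, not taken from this file):
       var manager = new DelegateManager();
       var handler = (EventHandler)manager.GetDelegate(typeof(EventHandler), pyCallable);
       handler(sender, EventArgs.Empty);   // routed through Dispatcher.Dispatch to the Python callable
   GetDelegate is internal, so a real call site would live inside Python.Runtime itself. */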
public class Dispatcher
{
public IntPtr target;
public Type dtype;
public Dispatcher(IntPtr target, Type dtype)
{
Runtime.XIncref(target);
this.target = target;
this.dtype = dtype;
}
~Dispatcher()
{
// We need to keep this finalizer disabled until there is a valid implementation.
// The current implementation can produce low-probability, intermittent bugs.
return;
// Note: the managed GC thread can run and try to free one of
// these *after* the Python runtime has been finalized!
if (Runtime.Py_IsInitialized() > 0)
{
IntPtr gs = PythonEngine.AcquireLock();
Runtime.XDecref(target);
PythonEngine.ReleaseLock(gs);
}
}
public object Dispatch(ArrayList args)
{
IntPtr gs = PythonEngine.AcquireLock();
object ob = null;
try
{
ob = TrueDispatch(args);
}
catch (Exception)
{
// Release the GIL before rethrowing; a bare throw preserves the original stack trace.
PythonEngine.ReleaseLock(gs);
throw;
}
PythonEngine.ReleaseLock(gs);
return ob;
}
public object TrueDispatch(ArrayList args)
{
MethodInfo method = dtype.GetMethod("Invoke");
ParameterInfo[] pi = method.GetParameters();
IntPtr pyargs = Runtime.PyTuple_New(pi.Length);
Type rtype = method.ReturnType;
for (var i = 0; i < pi.Length; i++)
{
// Here we own the reference to the Python value, and we
// give the ownership to the arg tuple.
IntPtr arg = Converter.ToPython(args[i], pi[i].ParameterType);
Runtime.PyTuple_SetItem(pyargs, i, arg);
}
IntPtr op = Runtime.PyObject_Call(target, pyargs, IntPtr.Zero);
Runtime.XDecref(pyargs);
if (op == IntPtr.Zero)
{
var e = new PythonException();
throw e;
}
if (rtype == typeof(void))
{
return null;
}
object result = null;
if (!Converter.ToManaged(op, rtype, out result, false))
{
Runtime.XDecref(op);
throw new ConversionException($"could not convert Python result to {rtype}");
}
Runtime.XDecref(op);
return result;
}
}
public class ConversionException : Exception
{
public ConversionException()
{
}
public ConversionException(string msg) : base(msg)
{
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="ArcFile.cs" company="None">
// Copyright (c) Brandon Wallace and Jesse Calhoun. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace TQVaultAE.Data
{
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using TQVaultAE.Config;
using TQVaultAE.Domain.Contracts.Providers;
using TQVaultAE.Domain.Contracts.Services;
using TQVaultAE.Domain.Entities;
using TQVaultAE.Logs;
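// Illustrative usage sketch (hypothetical caller; assumes the logger and ITQDataService come
// from dependency injection, and that ArcFile can be constructed from a file path, which is an
// assumption about TQVaultAE.Domain.Entities.ArcFile rather than something shown in this file):
//   var provider = new ArcFileProvider(logger, tqDataService);
//   var arc = new ArcFile(@"C:\Games\TitanQuest\Resources\Items.arc");
//   if (provider.Read(arc))
//       provider.ExtractArcFile(arc, @"C:\Temp\ItemsExtracted");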
/// <summary>
/// Reads and decodes a Titan Quest ARC file.
/// </summary>
public class ArcFileProvider : IArcFileProvider
{
private readonly ILogger Log = null;
private readonly ITQDataService TQData;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="log">Logger used for diagnostic output.</param>
/// <param name="tQData">Service used to normalize record paths.</param>
public ArcFileProvider(ILogger<ArcFileProvider> log, ITQDataService tQData)
{
this.Log = log;
this.TQData = tQData;
}
/// <summary>
/// Gets the sorted list of directoryEntries.
/// </summary>
/// <returns>string array holding the sorted list</returns>
public string[] GetKeyTable(ArcFile file)
{
if (file.Keys == null || file.Keys.Length == 0)
this.BuildKeyTable(file);
return (string[])file.Keys.Clone();
}
#region ArcFile Public Methods
/// <summary>
/// Reads the ARC file table of contents to determine if the file is readable.
/// </summary>
/// <returns>True if able to read the ToC</returns>
public bool Read(ArcFile file)
{
try
{
if (!file.FileHasBeenRead)
this.ReadARCToC(file);
return file.DirectoryEntries != null;
}
catch (IOException exception)
{
Log.ErrorException(exception);
return false;
}
}
/// <summary>
/// Writes a record to a file.
/// </summary>
/// <param name="baseFolder">string holding the base folder path</param>
/// <param name="record">Record we are writing</param>
/// <param name="destinationFileName">Filename for the new file.</param>
public void Write(ArcFile file, string baseFolder, string record, string destinationFileName)
{
try
{
if (!file.FileHasBeenRead)
this.ReadARCToC(file);
string dataID = string.Concat(Path.GetFileNameWithoutExtension(file.FileName), "\\", record);
byte[] data = this.GetData(file, dataID);
if (data == null)
return;
string destination = baseFolder;
if (!destination.EndsWith("\\", StringComparison.OrdinalIgnoreCase))
destination = string.Concat(destination, "\\");
destination = string.Concat(destination, destinationFileName);
// If there is a sub directory in the arc file then we need to create it.
if (!Directory.Exists(Path.GetDirectoryName(destination)))
Directory.CreateDirectory(Path.GetDirectoryName(destination));
using (FileStream outStream = new FileStream(destination, FileMode.Create, FileAccess.Write))
{
outStream.Write(data, 0, data.Length);
}
}
catch (IOException exception)
{
Log.ErrorException(exception);
return;
}
}
/// <summary>
/// Reads data from an ARC file and puts it into a byte array (or null if not found).
/// </summary>
/// <param name="dataId">The string ID for the data which we are retrieving.</param>
/// <returns>Returns byte array of the data corresponding to the string ID.</returns>
public byte[] GetData(ArcFile file, string dataId)
{
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("ARCFile.GetData({0})", dataId);
if (!file.FileHasBeenRead)
this.ReadARCToC(file);
if (file.DirectoryEntries == null)
{
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("Error - Could not read {0}", file.FileName);
// could not read the file
return null;
}
// First normalize the filename
dataId = TQData.NormalizeRecordPath(dataId);
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("Normalized dataID = {0}", dataId);
// Find our file in the toc.
// First strip off the leading folder since it is just the ARC name
int firstPathDelim = dataId.IndexOf('\\');
if (firstPathDelim != -1)
dataId = dataId.Substring(firstPathDelim + 1);
// Now see if this file is in the toc.
ArcDirEntry directoryEntry;
if (file.DirectoryEntries.ContainsKey(dataId))
directoryEntry = file.DirectoryEntries[dataId];
else
{
// record not found
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("Error - {0} not found.", dataId);
return null;
}
// Now open the ARC file and read in the record.
using (FileStream arcFile = new FileStream(file.FileName, FileMode.Open, FileAccess.Read))
{
// Allocate memory for the uncompressed data
byte[] data = new byte[directoryEntry.RealSize];
// Now process each part of this record
int startPosition = 0;
// First see if the data was just stored without compression.
if ((directoryEntry.StorageType == 1) && (directoryEntry.CompressedSize == directoryEntry.RealSize))
{
if (TQDebug.ArcFileDebugLevel > 1)
{
Log.LogDebug("Offset={0} Size={1}"
, directoryEntry.FileOffset
, directoryEntry.RealSize
);
}
arcFile.Seek(directoryEntry.FileOffset, SeekOrigin.Begin);
arcFile.Read(data, 0, directoryEntry.RealSize);
}
else
{
// The data was compressed so we attempt to decompress it.
foreach (ArcPartEntry partEntry in directoryEntry.Parts)
{
// seek to the part we want
arcFile.Seek(partEntry.FileOffset, SeekOrigin.Begin);
// Ignore the zlib compression method.
arcFile.ReadByte();
// Ignore the zlib compression flags.
arcFile.ReadByte();
// Create a deflate stream.
using (DeflateStream deflate = new DeflateStream(arcFile, CompressionMode.Decompress, true))
{
int bytesRead;
int partLength = 0;
while ((bytesRead = deflate.Read(data, startPosition, data.Length - startPosition)) > 0)
{
startPosition += bytesRead;
partLength += bytesRead;
// break out of the read loop if we have processed this part completely.
if (partLength >= partEntry.RealSize)
break;
}
}
}
}
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("Exiting ARCFile.GetData()");
return data;
}
}
/// <summary>
/// Extracts the decoded ARC file contents into a folder.
/// </summary>
/// <param name="destination">Destination folder for the files.</param>
/// <returns>true if successful, false on error.</returns>
public bool ExtractArcFile(ArcFile file, string destination)
{
try
{
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("ARCFile.ExtractArcFile()");
if (!file.FileHasBeenRead)
this.ReadARCToC(file);
foreach (ArcDirEntry dirEntry in file.DirectoryEntries.Values)
{
string dataID = string.Concat(Path.GetFileNameWithoutExtension(file.FileName), "\\", dirEntry.FileName);
if (TQDebug.ArcFileDebugLevel > 1)
{
Log.LogDebug($"Directory Filename = {dirEntry.FileName}");
Log.LogDebug($"dataID = {dataID}");
}
byte[] data = this.GetData(file, dataID);
string filename = destination;
if (!filename.EndsWith("\\", StringComparison.Ordinal))
filename = string.Concat(filename, "\\");
filename = string.Concat(filename, dirEntry.FileName);
// If there is a sub directory in the arc file then we need to create it.
if (!Directory.Exists(Path.GetDirectoryName(filename)))
Directory.CreateDirectory(Path.GetDirectoryName(filename));
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug($"Creating File - {filename}");
using (FileStream outStream = new FileStream(filename, FileMode.Create, FileAccess.Write))
{
outStream.Write(data, 0, data.Length);
}
}
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("Exiting ARCFile.ExtractArcFile()");
return true;
}
catch (IOException exception)
{
Log.LogError(exception, "ARCFile.ExtractArcFile() - Error reading arcfile");
return false;
}
}
#endregion ArcFile Public Methods
#region ArcFile Private Methods
/// <summary>
/// Builds a sorted list of entries in the directoryEntries dictionary. Used to build a tree structure of the names.
/// </summary>
private void BuildKeyTable(ArcFile file)
{
if (file.DirectoryEntries == null || file.DirectoryEntries.Count == 0)
return;
int index = 0;
file.Keys = new string[file.DirectoryEntries.Count];
foreach (string filename in file.DirectoryEntries.Keys)
{
file.Keys[index] = filename;
index++;
}
Array.Sort(file.Keys);
}
/// <summary>
/// Read the table of contents of the ARC file
/// </summary>
public void ReadARCToC(ArcFile file)
{
// Format of an ARC file
// 0x08 - 4 bytes = # of files
// 0x0C - 4 bytes = # of parts
// 0x18 - 4 bytes = offset to directory structure
//
// Format of directory structure
// 4-byte int = offset in file where this part begins
// 4-byte int = size of compressed part
// 4-byte int = size of uncompressed part
// these triplets repeat for each part in the arc file
// After these triplets are a bunch of null-terminated strings
// which are the sub filenames.
// After the subfilenames comes the subfile data:
// 4-byte int = 3 == indicates start of subfile item (maybe compressed flag??)
// 1 == maybe uncompressed flag??
// 4-byte int = offset in file where first part of this subfile begins
// 4-byte int = compressed size of this file
// 4-byte int = uncompressed size of this file
// 4-byte crap
// 4-byte crap
// 4-byte crap
// 4-byte int = numParts this file uses
// 4-byte int = part# of first part for this file (starting at 0).
// 4-byte int = length of filename string
// 4-byte int = offset in directory structure for filename
file.FileHasBeenRead = true;
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("ARCFile.ReadARCToC({0})", file.FileName);
try
{
using (FileStream arcFile = new FileStream(file.FileName, FileMode.Open, FileAccess.Read))
{
using (BinaryReader reader = new BinaryReader(arcFile))
{
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("File Length={0}", arcFile.Length);
// check the file header
if (reader.ReadByte() != 0x41)
return;
if (reader.ReadByte() != 0x52)
return;
if (reader.ReadByte() != 0x43)
return;
if (arcFile.Length < 0x21)
return;
reader.BaseStream.Seek(0x08, SeekOrigin.Begin);
int numEntries = reader.ReadInt32();
int numParts = reader.ReadInt32();
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("numEntries={0}, numParts={1}", numEntries, numParts);
ArcPartEntry[] parts = new ArcPartEntry[numParts];
ArcDirEntry[] records = new ArcDirEntry[numEntries];
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Seeking to tocOffset location");
reader.BaseStream.Seek(0x18, SeekOrigin.Begin);
int tocOffset = reader.ReadInt32();
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("tocOffset = {0}", tocOffset);
// Make sure all 3 entries exist for the toc entry.
if (arcFile.Length < (tocOffset + 12))
return;
// Read in all of the part data
reader.BaseStream.Seek(tocOffset, SeekOrigin.Begin);
int i;
for (i = 0; i < numParts; ++i)
{
parts[i] = new ArcPartEntry();
parts[i].FileOffset = reader.ReadInt32();
parts[i].CompressedSize = reader.ReadInt32();
parts[i].RealSize = reader.ReadInt32();
if (TQDebug.ArcFileDebugLevel > 2)
{
Log.LogDebug("parts[{0}]", i);
Log.LogDebug(" fileOffset={0}, compressedSize={1}, realSize={2}"
, parts[i].FileOffset
, parts[i].CompressedSize
, parts[i].RealSize
);
}
}
// Now record this offset so we can come back and read in the filenames
// after we have read in the file records
int fileNamesOffset = (int)arcFile.Position;
// Now seek to the location where the file record data is
// This offset is from the end of the file.
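// Each record read in the loop below consists of 11 four-byte integers, hence 44 bytes per entry.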
int fileRecordOffset = 44 * numEntries;
if (TQDebug.ArcFileDebugLevel > 1)
{
Log.LogDebug("fileNamesOffset = {0}. Seeking to {1} to read file record data."
, fileNamesOffset
, fileRecordOffset
);
}
arcFile.Seek(-1 * fileRecordOffset, SeekOrigin.End);
for (i = 0; i < numEntries; ++i)
{
records[i] = new ArcDirEntry();
// storageType = 3 - compressed / 1- non compressed
int storageType = reader.ReadInt32();
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("StorageType={0}", storageType);
// Added by VillageIdiot to support stored types
records[i].StorageType = storageType;
records[i].FileOffset = reader.ReadInt32();
records[i].CompressedSize = reader.ReadInt32();
records[i].RealSize = reader.ReadInt32();
int crap = reader.ReadInt32(); // crap
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Crap2={0}", crap);
crap = reader.ReadInt32(); // crap
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Crap3={0}", crap);
crap = reader.ReadInt32(); // crap
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Crap4={0}", crap);
int numberOfParts = reader.ReadInt32();
if (numberOfParts < 1)
{
records[i].Parts = null;
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("File {0} is not compressed.", i);
}
else
records[i].Parts = new ArcPartEntry[numberOfParts];
int firstPart = reader.ReadInt32();
crap = reader.ReadInt32(); // filename length
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Filename Length={0}", crap);
crap = reader.ReadInt32(); // filename offset
if (TQDebug.ArcFileDebugLevel > 2)
{
Log.LogDebug("Filename Offset={0}", crap);
Log.LogDebug("record[{0}]", i);
Log.LogDebug(" offset={0} compressedSize={1} realSize={2}",
records[i].FileOffset,
records[i].CompressedSize,
records[i].RealSize);
if (storageType != 1 && records[i].IsActive)
{
Log.LogDebug(" numParts={0} firstPart={1} lastPart={2}",
records[i].Parts.Length,
firstPart,
firstPart + records[i].Parts.Length - 1);
}
else
Log.LogDebug(" INACTIVE firstPart={0}", firstPart);
}
if (storageType != 1 && records[i].IsActive)
{
for (int ip = 0; ip < records[i].Parts.Length; ++ip)
records[i].Parts[ip] = parts[ip + firstPart];
}
}
// Now read in the record names
arcFile.Seek(fileNamesOffset, SeekOrigin.Begin);
byte[] buffer = new byte[2048];
ASCIIEncoding ascii = new ASCIIEncoding();
for (i = 0; i < numEntries; ++i)
{
// only Active files have a filename entry
if (records[i].IsActive)
{
// For each string, read bytes until I hit a 0x00 byte.
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Reading entry name {0:n0}", i);
int bufferSize = 0;
while ((buffer[bufferSize++] = reader.ReadByte()) != 0x00)
{
if (buffer[bufferSize - 1] == 0x03)
{
// File is null?
arcFile.Seek(-1, SeekOrigin.Current); // backup
bufferSize--;
buffer[bufferSize] = 0x00;
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Null file - inactive?");
break;
}
if (bufferSize >= buffer.Length)
{
Log.LogDebug("ARCFile.ReadARCToC() Error - Buffer size of 2048 has been exceeded.");
if (TQDebug.ArcFileDebugLevel > 2)
{
var content = buffer.Select(b => string.Format(CultureInfo.InvariantCulture, "0x{0:X}", b)).ToArray();
Log.LogDebug($"Buffer contents:{Environment.NewLine}{string.Join(string.Empty, content)}{Environment.NewLine}{string.Empty}");
}
}
}
if (TQDebug.ArcFileDebugLevel > 2)
{
Log.LogDebug("Read {0:n0} bytes for name. Converting to string.", bufferSize);
}
string newfile;
if (bufferSize >= 1)
{
// Now convert the buffer to a string
char[] chars = new char[ascii.GetCharCount(buffer, 0, bufferSize - 1)];
ascii.GetChars(buffer, 0, bufferSize - 1, chars, 0);
newfile = new string(chars);
}
else
newfile = string.Format(CultureInfo.InvariantCulture, "Null File {0}", i);
records[i].FileName = TQData.NormalizeRecordPath(newfile);
if (TQDebug.ArcFileDebugLevel > 2)
Log.LogDebug("Name {0:n0} = '{1}'", i, records[i].FileName);
}
}
// Now convert the array of records into a Dictionary.
Dictionary<string, ArcDirEntry> dictionary = new Dictionary<string, ArcDirEntry>(numEntries);
if (TQDebug.ArcFileDebugLevel > 1)
Log.LogDebug("Creating Dictionary");
for (i = 0; i < numEntries; ++i)
{
if (records[i].IsActive)
dictionary.Add(records[i].FileName, records[i]);
}
file.DirectoryEntries = dictionary;
if (TQDebug.ArcFileDebugLevel > 0)
Log.LogDebug("Exiting ARCFile.ReadARCToC()");
}
}
}
catch (IOException exception)
{
Log.LogError(exception, "ARCFile.ReadARCToC() - Error reading arcfile");
}
}
#endregion ArcFile Private Methods
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.2.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.DataLake.Analytics
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Azure.Management.DataLake;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Serialization;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
/// <summary>
/// Creates an Azure Data Lake Analytics job client.
/// </summary>
public partial class DataLakeAnalyticsJobManagementClient : ServiceClient<DataLakeAnalyticsJobManagementClient>, IDataLakeAnalyticsJobManagementClient, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
internal string BaseUri {get; set;}
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
public ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// Client Api Version.
/// </summary>
public string ApiVersion { get; private set; }
/// <summary>
/// Gets the DNS suffix used as the base for all Azure Data Lake Analytics Job
/// service requests.
/// </summary>
public string AdlaJobDnsSuffix { get; set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IPipelineOperations.
/// </summary>
public virtual IPipelineOperations Pipeline { get; private set; }
/// <summary>
/// Gets the IRecurrenceOperations.
/// </summary>
public virtual IRecurrenceOperations Recurrence { get; private set; }
/// <summary>
/// Gets the IJobOperations.
/// </summary>
public virtual IJobOperations Job { get; private set; }
/// <summary>
/// Initializes a new instance of the DataLakeAnalyticsJobManagementClient class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected DataLakeAnalyticsJobManagementClient(params DelegatingHandler[] handlers) : base(handlers)
{
Initialize();
}
/// <summary>
/// Initializes a new instance of the DataLakeAnalyticsJobManagementClient class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected DataLakeAnalyticsJobManagementClient(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
Initialize();
}
/// <summary>
/// Initializes a new instance of the DataLakeAnalyticsJobManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal DataLakeAnalyticsJobManagementClient(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the DataLakeAnalyticsJobManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal DataLakeAnalyticsJobManagementClient(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
Pipeline = new PipelineOperations(this);
Recurrence = new RecurrenceOperations(this);
Job = new JobOperations(this);
BaseUri = "https://{accountName}.{adlaJobDnsSuffix}";
ApiVersion = "2016-11-01";
AdlaJobDnsSuffix = "azuredatalakeanalytics.net";
AcceptLanguage = "en-US";
LongRunningOperationRetryTimeout = 30;
GenerateClientRequestId = true;
SerializationSettings = new JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented,
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter<JobProperties>("type"));
DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter<JobProperties>("type"));
SerializationSettings.Converters.Add(new PolymorphicSerializeJsonConverter<CreateJobProperties>("type"));
DeserializationSettings.Converters.Add(new PolymorphicDeserializeJsonConverter<CreateJobProperties>("type"));
CustomInitialize();
DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Xml;
using Xamarin.Forms.Internals;
using Xamarin.Forms.Xaml.Internals;
namespace Xamarin.Forms.Xaml
{
internal class CreateValuesVisitor : IXamlNodeVisitor
{
public CreateValuesVisitor(HydratationContext context)
{
Context = context;
}
Dictionary<INode, object> Values
{
get { return Context.Values; }
}
HydratationContext Context { get; }
public bool VisitChildrenFirst
{
get { return true; }
}
public bool StopOnDataTemplate
{
get { return true; }
}
public bool StopOnResourceDictionary
{
get { return false; }
}
public void Visit(ValueNode node, INode parentNode)
{
Values[node] = node.Value;
XmlName propertyName;
if (ApplyPropertiesVisitor.TryGetPropertyName(node, parentNode, out propertyName))
{
if (propertyName.NamespaceURI == "http://schemas.openxmlformats.org/markup-compatibility/2006" &&
propertyName.LocalName == "Ignorable")
{
(parentNode.IgnorablePrefixes ?? (parentNode.IgnorablePrefixes = new List<string>())).AddRange(
(node.Value as string).Split(','));
}
}
}
public void Visit(MarkupNode node, INode parentNode)
{
}
public void Visit(ElementNode node, INode parentNode)
{
object value = null;
if (node.SkipPrefix(node.NamespaceResolver.LookupPrefix(node.NamespaceURI)))
return;
XamlParseException xpe;
var type = XamlParser.GetElementType(node.XmlType, node, Context.RootElement?.GetType().GetTypeInfo().Assembly,
out xpe);
if (xpe != null)
throw xpe;
Context.Types[node] = type;
string ctorargname;
if (IsXaml2009LanguagePrimitive(node))
value = CreateLanguagePrimitive(type, node);
else if (node.Properties.ContainsKey(XmlName.xArguments) || node.Properties.ContainsKey(XmlName.xFactoryMethod))
value = CreateFromFactory(type, node);
else if (
type.GetTypeInfo()
.DeclaredConstructors.Any(
ci =>
ci.IsPublic && ci.GetParameters().Length != 0 &&
ci.GetParameters().All(pi => pi.CustomAttributes.Any(attr => attr.AttributeType == typeof (ParameterAttribute)))) &&
ValidateCtorArguments(type, node, out ctorargname))
value = CreateFromParameterizedConstructor(type, node);
else if (!type.GetTypeInfo().DeclaredConstructors.Any(ci => ci.IsPublic && ci.GetParameters().Length == 0) &&
!ValidateCtorArguments(type, node, out ctorargname))
{
throw new XamlParseException(
String.Format("The Property {0} is required to create a {1} object.", ctorargname, type.FullName), node);
}
else
{
//this is a trick as the DataTemplate parameterless ctor is internal, and we can't CreateInstance(..., false) on WP7
try
{
if (type == typeof (DataTemplate))
value = new DataTemplate();
if (type == typeof (ControlTemplate))
value = new ControlTemplate();
if (value == null && node.CollectionItems.Any() && node.CollectionItems.First() is ValueNode)
{
var serviceProvider = new XamlServiceProvider(node, Context);
var converted = ((ValueNode)node.CollectionItems.First()).Value.ConvertTo(type, () => type.GetTypeInfo(),
serviceProvider);
if (converted != null && converted.GetType() == type)
value = converted;
}
if (value == null)
value = Activator.CreateInstance(type);
}
catch (TargetInvocationException e)
{
if (e.InnerException is XamlParseException || e.InnerException is XmlException)
throw e.InnerException;
throw;
}
}
Values[node] = value;
var typeExtension = value as TypeExtension;
if (typeExtension != null)
{
var serviceProvider = new XamlServiceProvider(node, Context);
var visitor = new ApplyPropertiesVisitor(Context);
foreach (var cnode in node.Properties.Values.ToList())
cnode.Accept(visitor, node);
foreach (var cnode in node.CollectionItems)
cnode.Accept(visitor, node);
value = typeExtension.ProvideValue(serviceProvider);
node.Properties.Clear();
node.CollectionItems.Clear();
Values[node] = value;
}
if (value is BindableObject)
NameScope.SetNameScope(value as BindableObject, node.Namescope);
}
public void Visit(RootNode node, INode parentNode)
{
var rnode = (XamlLoader.RuntimeRootNode)node;
Values[node] = rnode.Root;
Context.Types[node] = rnode.Root.GetType();
var bindableRoot = rnode.Root as BindableObject;
if (bindableRoot != null)
NameScope.SetNameScope(bindableRoot, node.Namescope);
}
public void Visit(ListNode node, INode parentNode)
{
//this is a gross hack to keep ListNode alive. ListNode must go in favor of Properties
XmlName name;
if (ApplyPropertiesVisitor.TryGetPropertyName(node, parentNode, out name))
node.XmlName = name;
}
bool ValidateCtorArguments(Type nodeType, IElementNode node, out string missingArgName)
{
missingArgName = null;
var ctorInfo =
nodeType.GetTypeInfo()
.DeclaredConstructors.FirstOrDefault(
ci =>
ci.GetParameters().Length != 0 && ci.IsPublic &&
ci.GetParameters().All(pi => pi.CustomAttributes.Any(attr => attr.AttributeType == typeof (ParameterAttribute))));
if (ctorInfo == null)
return true;
foreach (var parameter in ctorInfo.GetParameters())
{
var propname =
parameter.CustomAttributes.First(ca => ca.AttributeType.FullName == "Xamarin.Forms.ParameterAttribute")
.ConstructorArguments.First()
.Value as string;
if (!node.Properties.ContainsKey(new XmlName("", propname)))
{
missingArgName = propname;
return false;
}
}
return true;
}
public object CreateFromParameterizedConstructor(Type nodeType, IElementNode node)
{
var ctorInfo =
nodeType.GetTypeInfo()
.DeclaredConstructors.FirstOrDefault(
ci =>
ci.GetParameters().Length != 0 && ci.IsPublic &&
ci.GetParameters().All(pi => pi.CustomAttributes.Any(attr => attr.AttributeType == typeof (ParameterAttribute))));
object[] arguments = CreateArgumentsArray(node, ctorInfo);
return ctorInfo.Invoke(arguments);
}
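// Example of the x:FactoryMethod / x:Arguments markup handled by CreateFromFactory below
// (illustrative, hypothetical XAML - local:Foo and its static Create method are invented for
// illustration and are not real types):
//   <local:Foo x:FactoryMethod="Create">
//     <x:Arguments><x:String>hello</x:String></x:Arguments>
//   </local:Foo>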
public object CreateFromFactory(Type nodeType, IElementNode node)
{
object[] arguments = CreateArgumentsArray(node);
if (!node.Properties.ContainsKey(XmlName.xFactoryMethod))
{
//non-default ctor
return Activator.CreateInstance(nodeType, arguments);
}
var factoryMethod = ((string)((ValueNode)node.Properties[XmlName.xFactoryMethod]).Value);
Type[] types = arguments == null ? new Type[0] : arguments.Select(a => a.GetType()).ToArray();
var mi = nodeType.GetRuntimeMethod(factoryMethod, types);
if (mi == null || !mi.IsStatic)
{
throw new MissingMemberException(String.Format("No static method found for {0}::{1} ({2})", nodeType.FullName,
factoryMethod, string.Join(", ", types.Select(t => t.FullName))));
}
return mi.Invoke(null, arguments);
}
public object[] CreateArgumentsArray(IElementNode enode)
{
if (!enode.Properties.ContainsKey(XmlName.xArguments))
return null;
var node = enode.Properties[XmlName.xArguments];
var elementNode = node as ElementNode;
if (elementNode != null)
{
var array = new object[1];
array[0] = Values[elementNode];
return array;
}
var listnode = node as ListNode;
if (listnode != null)
{
var array = new object[listnode.CollectionItems.Count];
for (var i = 0; i < listnode.CollectionItems.Count; i++)
array[i] = Values[(ElementNode)listnode.CollectionItems[i]];
return array;
}
return null;
}
public object[] CreateArgumentsArray(IElementNode enode, ConstructorInfo ctorInfo)
{
var n = ctorInfo.GetParameters().Length;
var array = new object[n];
for (var i = 0; i < n; i++)
{
var parameter = ctorInfo.GetParameters()[i];
var propname =
parameter.CustomAttributes.First(attr => attr.AttributeType == typeof (ParameterAttribute))
.ConstructorArguments.First()
.Value as string;
var name = new XmlName("", propname);
INode node;
if (!enode.Properties.TryGetValue(name, out node))
{
throw new XamlParseException(
String.Format("The Property {0} is required to create a {1} object.", propname, ctorInfo.DeclaringType.FullName),
enode as IXmlLineInfo);
}
if (!enode.SkipProperties.Contains(name))
enode.SkipProperties.Add(name);
var value = Context.Values[node];
var serviceProvider = new XamlServiceProvider(enode, Context);
var convertedValue = value.ConvertTo(parameter.ParameterType, () => parameter, serviceProvider);
array[i] = convertedValue;
}
return array;
}
static bool IsXaml2009LanguagePrimitive(IElementNode node)
{
return node.NamespaceURI == "http://schemas.microsoft.com/winfx/2009/xaml";
}
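// Example of the x: language primitives handled by CreateLanguagePrimitive below (illustrative
// markup, not from this file): <x:Int32>42</x:Int32> or <x:String>Hello</x:String>, where the
// x prefix maps to http://schemas.microsoft.com/winfx/2009/xaml.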
static object CreateLanguagePrimitive(Type nodeType, IElementNode node)
{
object value = null;
if (nodeType == typeof (string))
value = String.Empty;
else if (nodeType == typeof (Uri))
value = null;
else
value = Activator.CreateInstance(nodeType);
if (node.CollectionItems.Count == 1 && node.CollectionItems[0] is ValueNode &&
((ValueNode)node.CollectionItems[0]).Value is string)
{
var valuestring = ((ValueNode)node.CollectionItems[0]).Value as string;
if (nodeType == typeof (bool))
{
bool outbool;
if (bool.TryParse(valuestring, out outbool))
value = outbool;
}
else if (nodeType == typeof (char))
{
char retval;
if (char.TryParse(valuestring, out retval))
value = retval;
}
else if (nodeType == typeof (string))
value = valuestring;
else if (nodeType == typeof (decimal))
{
decimal retval;
if (decimal.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (float))
{
float retval;
if (float.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (double))
{
double retval;
if (double.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (byte))
{
byte retval;
if (byte.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (short))
{
short retval;
if (short.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (int))
{
int retval;
if (int.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (long))
{
long retval;
if (long.TryParse(valuestring, NumberStyles.Number, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (TimeSpan))
{
TimeSpan retval;
if (TimeSpan.TryParse(valuestring, CultureInfo.InvariantCulture, out retval))
value = retval;
}
else if (nodeType == typeof (Uri))
{
Uri retval;
if (Uri.TryCreate(valuestring, UriKind.RelativeOrAbsolute, out retval))
value = retval;
}
}
return value;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.Serialization;
namespace DDay.iCal
{
/// <summary>
/// A class that represents an RFC 5545 VEVENT component.
/// </summary>
/// <note>
/// TODO: Add support for the following properties:
/// <list type="bullet">
/// <item>Add support for the Organizer and Attendee properties</item>
/// <item>Add support for the Class property</item>
/// <item>Add support for the Geo property</item>
/// <item>Add support for the Priority property</item>
/// <item>Add support for the Related property</item>
/// <item>Create a TextCollection DataType for 'text' items separated by commas</item>
/// </list>
/// </note>
#if !SILVERLIGHT
[Serializable]
#endif
public class Event :
RecurringComponent,
IEvent
{
#region Public Properties
/// <summary>
/// The start date/time of the event.
/// <note>
/// If the duration has not been set, but
/// the start/end time of the event is available,
/// the duration is automatically determined.
/// Likewise, if the end date/time has not been
/// set, but a start and duration are available,
/// the end date/time will be extrapolated.
/// </note>
/// </summary>
public override IDateTime DTStart
{
get
{
return base.DTStart;
}
set
{
base.DTStart = value;
ExtrapolateTimes();
}
}
/// <summary>
/// The end date/time of the event.
/// <note>
/// If the duration has not been set, but
/// the start/end time of the event is available,
/// the duration is automatically determined.
/// Likewise, if an end time and duration are available,
/// but a start time has not been set, the start time
/// will be extrapolated.
/// </note>
/// </summary>
virtual public IDateTime DTEnd
{
get { return Properties.Get<IDateTime>("DTEND"); }
set
{
if (!object.Equals(DTEnd, value))
{
Properties.Set("DTEND", value);
ExtrapolateTimes();
}
}
}
/// <summary>
/// The duration of the event.
/// <note>
/// If a start time and duration are available,
/// the end time is automatically determined.
/// Likewise, if the end time and duration are
/// available but a start time is not, the start
/// time will be extrapolated from the available
/// information.
/// </note>
/// </summary>
// NOTE: Duration is not supported by all systems,
// (i.e. iPhone) and cannot co-exist with DTEnd.
// RFC 5545 states:
//
// ; either 'dtend' or 'duration' may appear in
// ; a 'eventprop', but 'dtend' and 'duration'
// ; MUST NOT occur in the same 'eventprop'
//
// Therefore, Duration is not serialized, as DTEnd
// should always be extrapolated from the duration.
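// Illustrative RFC 5545 fragment (only one of the two forms may appear in a given VEVENT):
//   DTSTART:20230704T090000Z
//   DURATION:PT1H              ; or, alternatively, DTEND:20230704T100000Z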
virtual public TimeSpan Duration
{
get { return Properties.Get<TimeSpan>("DURATION"); }
set
{
if (!object.Equals(Duration, value))
{
Properties.Set("DURATION", value);
ExtrapolateTimes();
}
}
}
/// <summary>
/// An alias to the DTEnd field (i.e. end date/time).
/// </summary>
virtual public IDateTime End
{
get { return DTEnd; }
set { DTEnd = value; }
}
/// <summary>
/// Returns true if the event is an all-day event.
/// </summary>
virtual public bool IsAllDay
{
get { return !Start.HasTime; }
set
{
// Set whether or not the start date/time
// has a time value.
if (Start != null)
Start.HasTime = !value;
if (End != null)
End.HasTime = !value;
if (value &&
Start != null &&
End != null &&
object.Equals(Start.Date, End.Date))
{
Duration = default(TimeSpan);
End = Start.AddDays(1);
}
}
}
/// <summary>
/// The geographic location (lat/long) of the event.
/// </summary>
public IGeographicLocation GeographicLocation
{
get { return Properties.Get<IGeographicLocation>("GEO"); }
set { Properties.Set("GEO", value); }
}
/// <summary>
/// The location of the event.
/// </summary>
public string Location
{
get { return Properties.Get<string>("LOCATION"); }
set { Properties.Set("LOCATION", value); }
}
/// <summary>
/// Resources that will be used during the event.
/// <example>Conference room #2</example>
/// <example>Projector</example>
/// </summary>
public IList<string> Resources
{
get { return Properties.GetMany<string>("RESOURCES"); }
set { Properties.Set("RESOURCES", value); }
}
/// <summary>
/// The status of the event.
/// </summary>
public EventStatus Status
{
get { return Properties.Get<EventStatus>("STATUS"); }
set { Properties.Set("STATUS", value); }
}
/// <summary>
/// The transparency of the event. In other words,
/// whether or not the period of time this event
/// occupies can contain other events (transparent),
/// or if the time cannot be scheduled for anything
/// else (opaque).
/// </summary>
public TransparencyType Transparency
{
get { return Properties.Get<TransparencyType>("TRANSP"); }
set { Properties.Set("TRANSP", value); }
}
#endregion
#region Private Fields
EventEvaluator m_Evaluator;
#endregion
#region Constructors
/// <summary>
/// Constructs an Event object. This constructor takes no parameters; the parent
/// (usually an iCalendar object) is set when the event is added to a calendar.
/// </summary>
public Event() : base()
{
Initialize();
}
private void Initialize()
{
this.Name = Components.EVENT;
m_Evaluator = new EventEvaluator(this);
SetService(m_Evaluator);
}
#endregion
#region Public Methods
/// <summary>
/// Use this method to determine if an event occurs on a given date.
/// <note type="caution">
/// This method should be called only after the <see cref="Evaluate"/>
/// method has calculated the dates for which this event occurs.
/// </note>
/// </summary>
/// <param name="DateTime">The date to test.</param>
/// <returns>True if the event occurs on the <paramref name="DateTime"/> provided, False otherwise.</returns>
virtual public bool OccursOn(IDateTime DateTime)
{
foreach (IPeriod p in m_Evaluator.Periods)
// NOTE: removed UTC from date checks, since a date is a date.
if (p.StartTime.Date == DateTime.Date || // It's the start date OR
(p.StartTime.Date <= DateTime.Date && // It's after the start date AND
(p.EndTime.HasTime && p.EndTime.Date >= DateTime.Date || // an end time was specified, and it's after the test date
(!p.EndTime.HasTime && p.EndTime.Date > DateTime.Date)))) // an end time was not specified, and it's before the end date
// NOTE: fixed bug as follows:
// DTSTART;VALUE=DATE:20060704
// DTEND;VALUE=DATE:20060705
// Event.OccursOn(new iCalDateTime(2006, 7, 5)); // Evals to true; should be false
return true;
return false;
}
/// <summary>
/// Use this method to determine if an event begins at a given date and time.
/// </summary>
/// <param name="DateTime">The date and time to test.</param>
/// <returns>True if the event begins at the given date and time</returns>
virtual public bool OccursAt(IDateTime DateTime)
{
foreach (IPeriod p in m_Evaluator.Periods)
if (p.StartTime.Equals(DateTime))
return true;
return false;
}
/// <summary>
/// Determines whether or not the <see cref="Event"/> is actively displayed
/// as an upcoming or occurred event.
/// </summary>
/// <returns>True if the event has not been cancelled, False otherwise.</returns>
virtual public bool IsActive()
{
return (Status != EventStatus.Cancelled);
}
#endregion
#region Overrides
protected override bool EvaluationIncludesReferenceDate
{
get
{
return true;
}
}
protected override void OnDeserializing(StreamingContext context)
{
base.OnDeserializing(context);
Initialize();
}
protected override void OnDeserialized(StreamingContext context)
{
base.OnDeserialized(context);
ExtrapolateTimes();
}
#endregion
#region Private Methods
private void ExtrapolateTimes()
{
// if (DTEnd == null && DTStart != null && Duration != default(TimeSpan))
// DTEnd = DTStart.Add(Duration);
// else if (Duration == default(TimeSpan) && DTStart != null && DTEnd != null)
// Duration = DTEnd.Subtract(DTStart);
// else if (DTStart == null && Duration != default(TimeSpan) && DTEnd != null)
// DTStart = DTEnd.Subtract(Duration);
}
#endregion
}
}
| |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using Orleans.Runtime.Configuration;
using Orleans.Messaging;
namespace Orleans.Runtime.Messaging
{
internal class MessageCenter : ISiloMessageCenter, IDisposable
{
private Gateway Gateway { get; set; }
private IncomingMessageAcceptor ima;
private static readonly TraceLogger log = TraceLogger.GetLogger("Orleans.Messaging.MessageCenter");
private Action<Message> rerouteHandler;
private Action<List<GrainId>> clientDropHandler;
// ReSharper disable UnaccessedField.Local
private IntValueStatistic sendQueueLengthCounter;
private IntValueStatistic receiveQueueLengthCounter;
// ReSharper restore UnaccessedField.Local
internal IOutboundMessageQueue OutboundQueue { get; set; }
internal IInboundMessageQueue InboundQueue { get; set; }
internal SocketManager SocketManager;
internal bool IsBlockingApplicationMessages { get; private set; }
internal ISiloPerformanceMetrics Metrics { get; private set; }
public bool IsProxying { get { return Gateway != null; } }
public bool TryDeliverToProxy(Message msg)
{
return msg.TargetGrain.IsClient && Gateway != null && Gateway.TryDeliverToProxy(msg);
}
// This is determined by the IMA but needed by the OMS, and so is kept here in the message center itself.
public SiloAddress MyAddress { get; private set; }
public IMessagingConfiguration MessagingConfiguration { get; private set; }
public MessageCenter(IPEndPoint here, int generation, IMessagingConfiguration config, ISiloPerformanceMetrics metrics = null)
{
Initialize(here, generation, config, metrics);
}
private void Initialize(IPEndPoint here, int generation, IMessagingConfiguration config, ISiloPerformanceMetrics metrics = null)
{
if(log.IsVerbose3) log.Verbose3("Starting initialization.");
SocketManager = new SocketManager(config);
ima = new IncomingMessageAcceptor(this, here, SocketDirection.SiloToSilo);
MyAddress = SiloAddress.New((IPEndPoint)ima.AcceptingSocket.LocalEndPoint, generation);
MessagingConfiguration = config;
InboundQueue = new InboundMessageQueue();
OutboundQueue = new OutboundMessageQueue(this, config);
Gateway = null;
Metrics = metrics;
sendQueueLengthCounter = IntValueStatistic.FindOrCreate(StatisticNames.MESSAGE_CENTER_SEND_QUEUE_LENGTH, () => SendQueueLength);
receiveQueueLengthCounter = IntValueStatistic.FindOrCreate(StatisticNames.MESSAGE_CENTER_RECEIVE_QUEUE_LENGTH, () => ReceiveQueueLength);
if (log.IsVerbose3) log.Verbose3("Completed initialization.");
}
public void InstallGateway(IPEndPoint gatewayAddress)
{
Gateway = new Gateway(this, gatewayAddress);
}
public void RecordProxiedGrain(GrainId grainId, Guid clientId)
{
if (Gateway != null)
Gateway.RecordProxiedGrain(grainId, clientId);
}
public void RecordUnproxiedGrain(GrainId grainId)
{
if (Gateway != null)
Gateway.RecordUnproxiedGrain(grainId);
}
public void Start()
{
IsBlockingApplicationMessages = false;
ima.Start();
OutboundQueue.Start();
}
public void StartGateway()
{
if (Gateway != null)
Gateway.Start();
}
public void PrepareToStop()
{
}
public void Stop()
{
IsBlockingApplicationMessages = true;
try
{
ima.Stop();
}
catch (Exception exc)
{
log.Error(ErrorCode.Runtime_Error_100108, "Stop failed.", exc);
}
StopAcceptingClientMessages();
try
{
OutboundQueue.Stop();
}
catch (Exception exc)
{
log.Error(ErrorCode.Runtime_Error_100110, "Stop failed.", exc);
}
try
{
SocketManager.Stop();
}
catch (Exception exc)
{
log.Error(ErrorCode.Runtime_Error_100111, "Stop failed.", exc);
}
}
public void StopAcceptingClientMessages()
{
if (log.IsVerbose) log.Verbose("StopClientMessages");
if (Gateway == null) return;
try
{
Gateway.Stop();
}
catch (Exception exc) { log.Error(ErrorCode.Runtime_Error_100109, "Stop failed.", exc); }
Gateway = null;
}
public Action<Message> RerouteHandler
{
set
{
if (rerouteHandler != null)
throw new InvalidOperationException("MessageCenter RerouteHandler already set");
rerouteHandler = value;
}
}
public void RerouteMessage(Message message)
{
if (rerouteHandler != null)
rerouteHandler(message);
else
SendMessage(message);
}
public Action<Message> SniffIncomingMessage
{
set
{
ima.SniffIncomingMessage = value;
}
}
public Func<SiloAddress, bool> SiloDeadOracle { get; set; }
public void SendMessage(Message msg)
{
// Note that if we identify or add other grains that are required for proper stopping, we will need to treat them as we do the membership table grain here.
if (IsBlockingApplicationMessages && (msg.Category == Message.Categories.Application) && (msg.Result != Message.ResponseTypes.Rejection)
&& (msg.TargetGrain != Constants.SystemMembershipTableId))
{
// Drop the message on the floor if it's an application message that isn't a rejection
}
else
{
if (msg.SendingSilo == null)
msg.SendingSilo = MyAddress;
OutboundQueue.SendMessage(msg);
}
}
public Action<List<GrainId>> ClientDropHandler
{
set
{
if (clientDropHandler != null)
throw new InvalidOperationException("MessageCenter ClientDropHandler already set");
clientDropHandler = value;
}
}
internal void RecordClientDrop(List<GrainId> client)
{
if (clientDropHandler != null && client != null)
clientDropHandler(client);
}
internal void SendRejection(Message msg, Message.RejectionTypes rejectionType, string reason)
{
MessagingStatisticsGroup.OnRejectedMessage(msg);
if (string.IsNullOrEmpty(reason)) reason = String.Format("Rejection from silo {0} - Unknown reason.", MyAddress);
Message error = msg.CreateRejectionResponse(rejectionType, reason);
// rejection msgs are always originated in the local silo, they are never remote.
InboundQueue.PostMessage(error);
}
public Message WaitMessage(Message.Categories type, CancellationToken ct)
{
return InboundQueue.WaitMessage(type);
}
public void Dispose()
{
if (ima != null)
{
ima.Dispose();
ima = null;
}
OutboundQueue.Dispose();
GC.SuppressFinalize(this);
}
public int SendQueueLength { get { return OutboundQueue.Count; } }
public int ReceiveQueueLength { get { return InboundQueue.Count; } }
/// <summary>
/// Indicates that application messages should be blocked from being sent or received.
/// This method is used by the "fast stop" process.
/// <para>
/// Specifically, all outbound application messages are dropped, except for rejections and messages to the membership table grain.
/// Inbound application requests are rejected, and other inbound application messages are dropped.
/// </para>
/// </summary>
public void BlockApplicationMessages()
{
if(log.IsVerbose) log.Verbose("BlockApplicationMessages");
IsBlockingApplicationMessages = true;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
namespace Microsoft.Azure.Management.DataLake.Analytics
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure.OData;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for AccountsOperations.
/// </summary>
public static partial class AccountsOperationsExtensions
{
/// <summary>
/// Tests whether the specified Azure Storage account is linked to the given Data
/// Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to test
/// Azure storage account existence.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure Storage account for which to test for existence.
/// </param>
public static bool StorageAccountExists(this IAccountsOperations operations, string resourceGroupName, string accountName, string storageAccountName)
{
return Task.Factory.StartNew(s => ((IAccountsOperations)s).StorageAccountExistsAsync(resourceGroupName, accountName, storageAccountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Tests whether the specified Azure Storage account is linked to the given Data
/// Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to test
/// Azure storage account existence.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure Storage account for which to test for existence.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<bool> StorageAccountExistsAsync(this IAccountsOperations operations, string resourceGroupName, string accountName, string storageAccountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.StorageAccountExistsWithHttpMessagesAsync(resourceGroupName, accountName, storageAccountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Tests the existence of the specified Azure Storage container associated with the
/// given Data Lake Analytics and Azure Storage accounts.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account for which to retrieve
/// blob container.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure storage account from which to test the
/// blob container's existence.
/// </param>
/// <param name='containerName'>
/// The name of the Azure storage container to test for existence.
/// </param>
public static bool StorageContainerExists(this IAccountsOperations operations, string resourceGroupName, string accountName, string storageAccountName, string containerName)
{
return Task.Factory.StartNew(s => ((IAccountsOperations)s).StorageContainerExistsAsync(resourceGroupName, accountName, storageAccountName, containerName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Tests the existence of the specified Azure Storage container associated with the
/// given Data Lake Analytics and Azure Storage accounts.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account for which to retrieve
/// the blob container.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure storage account from which to test the
/// blob container's existence.
/// </param>
/// <param name='containerName'>
/// The name of the Azure storage container to test for existence.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<bool> StorageContainerExistsAsync(this IAccountsOperations operations, string resourceGroupName, string accountName, string storageAccountName, string containerName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.StorageContainerExistsWithHttpMessagesAsync(resourceGroupName, accountName, storageAccountName, containerName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Tests whether the specified Data Lake Store account is linked to the
/// specified Data Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to test
/// the existence of the Data Lake Store account.
/// </param>
/// <param name='dataLakeStoreAccountName'>
/// The name of the Data Lake Store account to test for existence.
/// </param>
public static bool DataLakeStoreAccountExists(this IAccountsOperations operations, string resourceGroupName, string accountName, string dataLakeStoreAccountName)
{
return Task.Factory.StartNew(s => ((IAccountsOperations)s).DataLakeStoreAccountExistsAsync(resourceGroupName, accountName, dataLakeStoreAccountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Tests whether the specified Data Lake Store account is linked to the
/// specified Data Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to test
/// the existence of the Data Lake Store account.
/// </param>
/// <param name='dataLakeStoreAccountName'>
/// The name of the Data Lake Store account to test for existence.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<bool> DataLakeStoreAccountExistsAsync(this IAccountsOperations operations, string resourceGroupName, string accountName, string dataLakeStoreAccountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.DataLakeStoreAccountExistsWithHttpMessagesAsync(resourceGroupName, accountName, dataLakeStoreAccountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Tests for the existence of the specified Data Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account to test existence of.
/// </param>
public static bool Exists(this IAccountsOperations operations, string resourceGroupName, string accountName)
{
return Task.Factory.StartNew(s => ((IAccountsOperations)s).ExistsAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Tests for the existence of the specified Data Lake Analytics account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account to test existence of.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<bool> ExistsAsync(this IAccountsOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ExistsWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
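// Usage sketch (not part of the generated SDK file above): how these extension methods
// might be called, given an IAccountsOperations instance (for example, the accounts
// operations property of a configured management client). The resource group, account,
// storage account, and container names below are placeholders.
namespace Microsoft.Azure.Management.DataLake.Analytics.Samples
{
    using System;
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.DataLake.Analytics;

    internal static class AccountsOperationsUsageSketch
    {
        // Blocking wrappers: convenient for scripts and console tools.
        internal static void CheckSync(IAccountsOperations accounts)
        {
            bool accountExists = accounts.Exists("myResourceGroup", "myAdlaAccount");
            bool storageLinked = accounts.StorageAccountExists("myResourceGroup", "myAdlaAccount", "myStorageAccount");
            Console.WriteLine("Account exists: {0}, storage linked: {1}", accountExists, storageLinked);
        }

        // Async variants: the same checks without blocking a thread pool thread.
        internal static async Task CheckAsync(IAccountsOperations accounts)
        {
            bool containerExists = await accounts.StorageContainerExistsAsync(
                "myResourceGroup", "myAdlaAccount", "myStorageAccount", "mycontainer");
            bool storeLinked = await accounts.DataLakeStoreAccountExistsAsync(
                "myResourceGroup", "myAdlaAccount", "myDataLakeStore");
            Console.WriteLine("Container exists: {0}, store linked: {1}", containerExists, storeLinked);
        }
    }
}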
| |
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using Orleans.Runtime.Configuration;
namespace Orleans.Runtime
{
/// <summary>
/// Identifies activations that have been idle long enough to be deactivated.
/// </summary>
internal class ActivationCollector : IActivationCollector
{
internal Action<GrainId> Debug_OnDecideToCollectActivation;
private readonly TimeSpan quantum;
private readonly TimeSpan shortestAgeLimit;
private readonly ConcurrentDictionary<DateTime, Bucket> buckets;
private readonly object nextTicketLock;
private DateTime nextTicket;
private static readonly List<ActivationData> nothing = new List<ActivationData> { Capacity = 0 };
private readonly Logger logger;
public ActivationCollector(ClusterConfiguration config)
{
if (TimeSpan.Zero == config.Globals.CollectionQuantum)
{
throw new ArgumentException("Globals.CollectionQuantum cannot be zero.", "config");
}
quantum = config.Globals.CollectionQuantum;
shortestAgeLimit = config.Globals.Application.ShortestCollectionAgeLimit;
buckets = new ConcurrentDictionary<DateTime, Bucket>();
nextTicket = MakeTicketFromDateTime(DateTime.UtcNow);
nextTicketLock = new object();
logger = LogManager.GetLogger("ActivationCollector", LoggerType.Runtime);
}
public TimeSpan Quantum { get { return quantum; } }
private int ApproximateCount
{
get
{
int sum = 0;
foreach (var bucket in buckets.Values)
{
sum += bucket.ApproximateCount;
}
return sum;
}
}
// Return the number of activations that were used (touched) in the last recencyPeriod.
public int GetNumRecentlyUsed(TimeSpan recencyPeriod)
{
if (TimeSpan.Zero == shortestAgeLimit)
{
// Collection has been disabled for some types.
return ApproximateCount;
}
var now = DateTime.UtcNow;
int sum = 0;
foreach (var bucket in buckets)
{
// Ticket is the date time when this bucket should be collected (last touched time plus age limit)
// For now we take the shortest age limit as an approximation of the per-type age limit.
DateTime ticket = bucket.Key;
var timeTillCollection = ticket - now;
var timeSinceLastUsed = shortestAgeLimit - timeTillCollection;
if (timeSinceLastUsed <= recencyPeriod)
{
sum += bucket.Value.ApproximateCount;
}
}
return sum;
}
public void ScheduleCollection(ActivationData item)
{
lock (item)
{
if (item.IsExemptFromCollection)
{
return;
}
TimeSpan timeout = item.CollectionAgeLimit;
if (TimeSpan.Zero == timeout)
{
// either the CollectionAgeLimit hasn't been initialized (will be rectified later) or it's been disabled.
return;
}
DateTime ticket = MakeTicketFromTimeSpan(timeout);
if (default(DateTime) != item.CollectionTicket)
{
throw new InvalidOperationException("Call CancelCollection before calling ScheduleCollection.");
}
Add(item, ticket);
}
}
public bool TryCancelCollection(ActivationData item)
{
if (item.IsExemptFromCollection) return false;
lock (item)
{
DateTime ticket = item.CollectionTicket;
if (default(DateTime) == ticket) return false;
if (IsExpired(ticket)) return false;
// first, we attempt to remove the ticket.
Bucket bucket;
if (!buckets.TryGetValue(ticket, out bucket) || !bucket.TryRemove(item)) return false;
}
return true;
}
public bool TryRescheduleCollection(ActivationData item)
{
if (item.IsExemptFromCollection) return false;
lock (item)
{
if (TryRescheduleCollection_Impl(item, item.CollectionAgeLimit)) return true;
item.ResetCollectionTicket();
return false;
}
}
private bool TryRescheduleCollection_Impl(ActivationData item, TimeSpan timeout)
{
// note: we expect the activation lock to be held.
if (default(DateTime) == item.CollectionTicket) return false;
ThrowIfTicketIsInvalid(item.CollectionTicket);
if (IsExpired(item.CollectionTicket)) return false;
DateTime oldTicket = item.CollectionTicket;
DateTime newTicket = MakeTicketFromTimeSpan(timeout);
// if the ticket value doesn't change, then the source and destination bucket are the same and there's nothing to do.
if (newTicket.Equals(oldTicket)) return true;
Bucket bucket;
if (!buckets.TryGetValue(oldTicket, out bucket) || !bucket.TryRemove(item))
{
// fail: item is not associated with currentKey.
return false;
}
// it shouldn't be possible for Add to throw an exception here, as only one concurrent competitor should be able to reach this point in the method.
item.ResetCollectionTicket();
Add(item, newTicket);
return true;
}
private bool DequeueQuantum(out IEnumerable<ActivationData> items, DateTime now)
{
DateTime key;
lock (nextTicketLock)
{
if (nextTicket > now)
{
items = null;
return false;
}
key = nextTicket;
nextTicket += quantum;
}
Bucket bucket;
if (!buckets.TryRemove(key, out bucket))
{
items = nothing;
return true;
}
items = bucket.CancelAll();
return true;
}
public override string ToString()
{
var now = DateTime.UtcNow;
return string.Format("<#Activations={0}, #Buckets={1}, buckets={2}>",
ApproximateCount,
buckets.Count,
Utils.EnumerableToString(
buckets.Values.OrderBy(bucket => bucket.Key), bucket => Utils.TimeSpanToString(bucket.Key - now) + "->" + bucket.ApproximateCount + " items"));
}
/// <summary>
/// Scans for activations that are due for collection.
/// </summary>
/// <returns>A list of activations that are due for collection.</returns>
public List<ActivationData> ScanStale()
{
var now = DateTime.UtcNow;
List<ActivationData> result = null;
IEnumerable<ActivationData> activations;
while (DequeueQuantum(out activations, now))
{
// at this point, all tickets associated with activations are cancelled and any attempts to reschedule will fail silently. if the activation is to be reactivated, it's our job to clear the activation's copy of the ticket.
foreach (var activation in activations)
{
lock (activation)
{
activation.ResetCollectionTicket();
if (activation.State != ActivationState.Valid)
{
// Do nothing: don't collect, don't reschedule.
// The activation can't be in Created or Activating, since we only ScheduleCollection after successful activation.
// If the activation is already in Deactivating or Invalid state, it's already being collected or was collected
// (both mean a bug, this activation should not be in the collector)
// So in any state except for Valid we should just not collect and not reschedule.
logger.Warn(ErrorCode.Catalog_ActivationCollector_BadState_1,
"ActivationCollector found an activation in a non Valid state. All activation inside the ActivationCollector should be in Valid state. Activation: {0}",
activation.ToDetailedString());
}
else if (activation.ShouldBeKeptAlive)
{
// Consider: need to reschedule to what is the remaining time for ShouldBeKeptAlive, not the full CollectionAgeLimit.
ScheduleCollection(activation);
}
else if (!activation.IsInactive)
{
// This is essentially a bug; an active activation should not be in the last bucket.
logger.Warn(ErrorCode.Catalog_ActivationCollector_BadState_2,
"ActivationCollector found an active activation in its last bucket. This is a violation of ActivationCollector invariants. " +
"For now going to defer its collection. Activation: {0}",
activation.ToDetailedString());
ScheduleCollection(activation);
}
else if (!activation.IsStale(now))
{
// This is essentially a bug; a non-stale activation should not be in the last bucket.
logger.Warn(ErrorCode.Catalog_ActivationCollector_BadState_3,
"ActivationCollector found a non-stale activation in its last bucket. This is a violation of ActivationCollector invariants. Now: {0}. " +
"For now going to defer its collection. Activation: {1}",
LogFormatter.PrintDate(now),
activation.ToDetailedString());
ScheduleCollection(activation);
}
else
{
// Atomically set Deactivating state, to disallow any new requests or new timer ticks to be dispatched on this activation.
activation.PrepareForDeactivation();
DecideToCollectActivation(activation, ref result);
}
}
}
}
return result ?? nothing;
}
/// <summary>
/// Scans for activations that have been idle for the specified age limit.
/// </summary>
/// <param name="ageLimit">The age limit.</param>
/// <returns></returns>
public List<ActivationData> ScanAll(TimeSpan ageLimit)
{
List<ActivationData> result = null;
var now = DateTime.UtcNow;
int bucketCount = buckets.Count;
int i = 0;
foreach (var bucket in buckets.Values)
{
if (i >= bucketCount) break;
int notToExceed = bucket.ApproximateCount;
int j = 0;
foreach (var activation in bucket)
{
// theoretically, we could iterate forever on the ConcurrentDictionary. we limit ourselves to an approximation of the bucket's Count property to limit the number of iterations we perform.
if (j >= notToExceed) break;
lock (activation)
{
if (activation.State != ActivationState.Valid)
{
// Do nothing: don't collect, don't reschedule.
}
else if (activation.ShouldBeKeptAlive)
{
// do nothing
}
else if (!activation.IsInactive)
{
// do nothing
}
else
{
if (activation.GetIdleness(now) >= ageLimit)
{
if (bucket.TryCancel(activation))
{
// we removed the activation from the collector. it's our responsibility to deactivate it.
activation.PrepareForDeactivation();
DecideToCollectActivation(activation, ref result);
}
// someone else has already deactivated the activation, so there's nothing to do.
}
else
{
// activation is not idle long enough for collection. do nothing.
}
}
}
++j;
}
++i;
}
return result ?? nothing;
}
private void DecideToCollectActivation(ActivationData activation, ref List<ActivationData> condemned)
{
if (null == condemned)
{
condemned = new List<ActivationData> { activation };
}
else
{
condemned.Add(activation);
}
this.Debug_OnDecideToCollectActivation?.Invoke(activation.Grain);
}
private static void ThrowIfTicketIsInvalid(DateTime ticket, TimeSpan quantum)
{
ThrowIfDefault(ticket, "ticket");
if (0 != ticket.Ticks % quantum.Ticks)
{
throw new ArgumentException(string.Format("invalid ticket ({0})", ticket));
}
}
private void ThrowIfTicketIsInvalid(DateTime ticket)
{
ThrowIfTicketIsInvalid(ticket, quantum);
}
private void ThrowIfExemptFromCollection(ActivationData activation, string name)
{
if (activation.IsExemptFromCollection)
{
throw new ArgumentException(string.Format("{0} should not refer to a system target or system grain.", name), name);
}
}
private bool IsExpired(DateTime ticket)
{
return ticket < nextTicket;
}
private DateTime MakeTicketFromDateTime(DateTime timestamp)
{
// round the timestamp up to the next quantum. e.g. if the quantum is 1 minute and the timestamp is 3:45:22, then the ticket will be 3:46. note that TimeSpan.Ticks and DateTime.Ticks both return a long.
DateTime ticket = new DateTime(((timestamp.Ticks - 1) / quantum.Ticks + 1) * quantum.Ticks);
if (ticket < nextTicket)
{
throw new ArgumentException(string.Format("The earliest collection that can be scheduled from now is for {0}", new DateTime(nextTicket.Ticks - quantum.Ticks + 1)));
}
return ticket;
}
private DateTime MakeTicketFromTimeSpan(TimeSpan timeout)
{
if (timeout < quantum)
{
throw new ArgumentException(String.Format("timeout must be at least {0}, but it is {1}", quantum, timeout), "timeout");
}
return MakeTicketFromDateTime(DateTime.UtcNow + timeout);
}
private void Add(ActivationData item, DateTime ticket)
{
// note: we expect the activation lock to be held.
item.ResetCollectionCancelledFlag();
Bucket bucket =
buckets.GetOrAdd(
ticket,
key =>
new Bucket(key, quantum));
bucket.Add(item);
item.SetCollectionTicket(ticket);
}
static private void ThrowIfDefault<T>(T value, string name) where T : IEquatable<T>
{
if (value.Equals(default(T)))
{
throw new ArgumentException(string.Format("default({0}) is not allowed in this context.", typeof(T).Name), name);
}
}
private class Bucket : IEnumerable<ActivationData>
{
private readonly DateTime key;
private readonly ConcurrentDictionary<ActivationId, ActivationData> items;
public DateTime Key { get { return key; } }
public int ApproximateCount { get { return items.Count; } }
public Bucket(DateTime key, TimeSpan quantum)
{
ThrowIfTicketIsInvalid(key, quantum);
this.key = key;
items = new ConcurrentDictionary<ActivationId, ActivationData>();
}
public void Add(ActivationData item)
{
if (!items.TryAdd(item.ActivationId, item))
{
throw new InvalidOperationException("item is already associated with this bucket");
}
}
public bool TryRemove(ActivationData item)
{
if (!TryCancel(item)) return false;
// actual removal is a memory optimization and isn't technically necessary to cancel the timeout.
ActivationData unused;
return items.TryRemove(item.ActivationId, out unused);
}
public bool TryCancel(ActivationData item)
{
if (!item.TrySetCollectionCancelledFlag()) return false;
// we need to null out the ActivationData reference in the bucket in order to ensure that the memory gets collected. if we've succeeded in setting the cancellation flag, then we should have won the right to do this, so we throw an exception if we fail.
if (items.TryUpdate(item.ActivationId, null, item)) return true;
throw new InvalidOperationException("unexpected failure to cancel deactivation");
}
public IEnumerable<ActivationData> CancelAll()
{
List<ActivationData> result = null;
foreach (var pair in items)
{
// attempt to cancel the item. if we succeed, it wasn't already cancelled and we can return it. otherwise, we silently ignore it.
if (pair.Value.TrySetCollectionCancelledFlag())
{
if (result == null)
{
// we only need to ensure there's enough space left for this element and any potential entries.
result = new List<ActivationData>();
}
result.Add(pair.Value);
}
}
return result ?? nothing;
}
public IEnumerator<ActivationData> GetEnumerator()
{
return items.Values.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
}
}
}
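// Illustrative sketch (not part of Orleans): the collection "ticket" used above is just the
// activation's expiry time rounded up to the next quantum boundary, so all activations that
// expire within the same quantum land in the same bucket. This standalone snippet reproduces
// the rounding performed by MakeTicketFromDateTime for a 1-minute quantum.
using System;

internal static class TicketRoundingSketch
{
    // Rounds a timestamp up to the next multiple of the quantum (same arithmetic as MakeTicketFromDateTime).
    internal static DateTime MakeTicket(DateTime timestamp, TimeSpan quantum)
    {
        return new DateTime(((timestamp.Ticks - 1) / quantum.Ticks + 1) * quantum.Ticks);
    }

    internal static void Main()
    {
        TimeSpan quantum = TimeSpan.FromMinutes(1);
        DateTime timestamp = new DateTime(2017, 1, 1, 3, 45, 22);
        // 3:45:22 rounds up to 3:46:00, matching the example in the comment above.
        Console.WriteLine(MakeTicket(timestamp, quantum));
    }
}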
| |
namespace ContosoUniversity.Migrations
{
using ContosoUniversity.Models;
using ContosoUniversity.DAL;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Linq;
internal sealed class Configuration : DbMigrationsConfiguration<SchoolContext>
{
public Configuration()
{
AutomaticMigrationsEnabled = false;
}
protected override void Seed(SchoolContext context)
{
var students = new List<Student>
{
new Student { FirstMidName = "Carson", LastName = "Alexander",
EnrollmentDate = DateTime.Parse("2010-09-01") },
new Student { FirstMidName = "Meredith", LastName = "Alonso",
EnrollmentDate = DateTime.Parse("2012-09-01") },
new Student { FirstMidName = "Arturo", LastName = "Anand",
EnrollmentDate = DateTime.Parse("2013-09-01") },
new Student { FirstMidName = "Gytis", LastName = "Barzdukas",
EnrollmentDate = DateTime.Parse("2012-09-01") },
new Student { FirstMidName = "Yan", LastName = "Li",
EnrollmentDate = DateTime.Parse("2012-09-01") },
new Student { FirstMidName = "Peggy", LastName = "Justice",
EnrollmentDate = DateTime.Parse("2011-09-01") },
new Student { FirstMidName = "Laura", LastName = "Norman",
EnrollmentDate = DateTime.Parse("2013-09-01") },
new Student { FirstMidName = "Nino", LastName = "Olivetto",
EnrollmentDate = DateTime.Parse("2005-09-01") }
};
students.ForEach(s => context.Students.AddOrUpdate(p => p.LastName, s));
context.SaveChanges();
var instructors = new List<Instructor>
{
new Instructor { FirstMidName = "Kim", LastName = "Abercrombie",
HireDate = DateTime.Parse("1995-03-11") },
new Instructor { FirstMidName = "Fadi", LastName = "Fakhouri",
HireDate = DateTime.Parse("2002-07-06") },
new Instructor { FirstMidName = "Roger", LastName = "Harui",
HireDate = DateTime.Parse("1998-07-01") },
new Instructor { FirstMidName = "Candace", LastName = "Kapoor",
HireDate = DateTime.Parse("2001-01-15") },
new Instructor { FirstMidName = "Roger", LastName = "Zheng",
HireDate = DateTime.Parse("2004-02-12") }
};
instructors.ForEach(s => context.Instructors.AddOrUpdate(p => p.LastName, s));
context.SaveChanges();
var departments = new List<Department>
{
new Department { Name = "English", Budget = 350000,
StartDate = DateTime.Parse("2007-09-01"),
InstructorID = instructors.Single( i => i.LastName == "Abercrombie").ID },
new Department { Name = "Mathematics", Budget = 100000,
StartDate = DateTime.Parse("2007-09-01"),
InstructorID = instructors.Single( i => i.LastName == "Fakhouri").ID },
new Department { Name = "Engineering", Budget = 350000,
StartDate = DateTime.Parse("2007-09-01"),
InstructorID = instructors.Single( i => i.LastName == "Harui").ID },
new Department { Name = "Economics", Budget = 100000,
StartDate = DateTime.Parse("2007-09-01"),
InstructorID = instructors.Single( i => i.LastName == "Kapoor").ID }
};
departments.ForEach(s => context.Departments.AddOrUpdate(p => p.Name, s));
context.SaveChanges();
var courses = new List<Course>
{
new Course {CourseID = 1050, Title = "Chemistry", Credits = 3,
DepartmentID = departments.Single( s => s.Name == "Engineering").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 4022, Title = "Microeconomics", Credits = 3,
DepartmentID = departments.Single( s => s.Name == "Economics").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 4041, Title = "Macroeconomics", Credits = 3,
DepartmentID = departments.Single( s => s.Name == "Economics").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 1045, Title = "Calculus", Credits = 4,
DepartmentID = departments.Single( s => s.Name == "Mathematics").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 3141, Title = "Trigonometry", Credits = 4,
DepartmentID = departments.Single( s => s.Name == "Mathematics").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 2021, Title = "Composition", Credits = 3,
DepartmentID = departments.Single( s => s.Name == "English").DepartmentID,
Instructors = new List<Instructor>()
},
new Course {CourseID = 2042, Title = "Literature", Credits = 4,
DepartmentID = departments.Single( s => s.Name == "English").DepartmentID,
Instructors = new List<Instructor>()
},
};
courses.ForEach(s => context.Courses.AddOrUpdate(p => p.CourseID, s));
context.SaveChanges();
var officeAssignments = new List<OfficeAssignment>
{
new OfficeAssignment {
InstructorID = instructors.Single( i => i.LastName == "Fakhouri").ID,
Location = "Smith 17" },
new OfficeAssignment {
InstructorID = instructors.Single( i => i.LastName == "Harui").ID,
Location = "Gowan 27" },
new OfficeAssignment {
InstructorID = instructors.Single( i => i.LastName == "Kapoor").ID,
Location = "Thompson 304" },
};
officeAssignments.ForEach(s => context.OfficeAssignments.AddOrUpdate(p => p.InstructorID, s));
context.SaveChanges();
AddOrUpdateInstructor(context, "Chemistry", "Kapoor");
AddOrUpdateInstructor(context, "Chemistry", "Harui");
AddOrUpdateInstructor(context, "Microeconomics", "Zheng");
AddOrUpdateInstructor(context, "Macroeconomics", "Zheng");
AddOrUpdateInstructor(context, "Calculus", "Fakhouri");
AddOrUpdateInstructor(context, "Trigonometry", "Harui");
AddOrUpdateInstructor(context, "Composition", "Abercrombie");
AddOrUpdateInstructor(context, "Literature", "Abercrombie");
context.SaveChanges();
var enrollments = new List<Enrollment>
{
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alexander").ID,
CourseID = courses.Single(c => c.Title == "Chemistry" ).CourseID,
Grade = Grade.A
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alexander").ID,
CourseID = courses.Single(c => c.Title == "Microeconomics" ).CourseID,
Grade = Grade.C
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alexander").ID,
CourseID = courses.Single(c => c.Title == "Macroeconomics" ).CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alonso").ID,
CourseID = courses.Single(c => c.Title == "Calculus" ).CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alonso").ID,
CourseID = courses.Single(c => c.Title == "Trigonometry" ).CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Alonso").ID,
CourseID = courses.Single(c => c.Title == "Composition" ).CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Anand").ID,
CourseID = courses.Single(c => c.Title == "Chemistry" ).CourseID
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Anand").ID,
CourseID = courses.Single(c => c.Title == "Microeconomics").CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Barzdukas").ID,
CourseID = courses.Single(c => c.Title == "Chemistry").CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Li").ID,
CourseID = courses.Single(c => c.Title == "Composition").CourseID,
Grade = Grade.B
},
new Enrollment {
StudentID = students.Single(s => s.LastName == "Justice").ID,
CourseID = courses.Single(c => c.Title == "Literature").CourseID,
Grade = Grade.B
}
};
foreach (Enrollment e in enrollments)
{
var enrollmentInDataBase = context.Enrollments.Where(
s =>
s.Student.ID == e.StudentID &&
s.Course.CourseID == e.CourseID).SingleOrDefault();
if (enrollmentInDataBase == null)
{
context.Enrollments.Add(e);
}
}
context.SaveChanges();
}
void AddOrUpdateInstructor(SchoolContext context, string courseTitle, string instructorName)
{
var crs = context.Courses.SingleOrDefault(c => c.Title == courseTitle);
var inst = crs.Instructors.SingleOrDefault(i => i.LastName == instructorName);
if (inst == null)
crs.Instructors.Add(context.Instructors.Single(i => i.LastName == instructorName));
}
}
}
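// Illustrative sketch (not the Entity Framework implementation): the Seed method above relies
// on DbSet.AddOrUpdate(keySelector, entities) being an idempotent "upsert" keyed on a natural
// property (LastName, Name, CourseID), so re-running Update-Database does not duplicate rows.
// The in-memory helper below shows the same idea against a plain list; all names are
// illustrative only.
using System;
using System.Collections.Generic;
using System.Linq;

internal static class AddOrUpdateSketch
{
    private sealed class Student
    {
        public string LastName { get; set; }
        public DateTime EnrollmentDate { get; set; }
    }

    // Inserts the entity when no row matches the key; otherwise updates the matching row in place.
    private static void AddOrUpdate(List<Student> table, Func<Student, string> key, Student entity)
    {
        Student existing = table.SingleOrDefault(s => key(s) == key(entity));
        if (existing == null)
        {
            table.Add(entity);
        }
        else
        {
            existing.EnrollmentDate = entity.EnrollmentDate;
        }
    }

    internal static void Main()
    {
        var students = new List<Student>();
        var alexander = new Student { LastName = "Alexander", EnrollmentDate = DateTime.Parse("2010-09-01") };
        AddOrUpdate(students, s => s.LastName, alexander);
        AddOrUpdate(students, s => s.LastName, alexander); // second run updates, does not duplicate
        Console.WriteLine(students.Count); // prints 1
    }
}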
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Data.Common;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Concurrent;
namespace System.Data.ProviderBase
{
internal sealed class DbConnectionPool
{
private enum State
{
Initializing,
Running,
ShuttingDown,
}
private sealed class PendingGetConnection
{
public PendingGetConnection(long dueTime, DbConnection owner, TaskCompletionSource<DbConnectionInternal> completion, DbConnectionOptions userOptions)
{
DueTime = dueTime;
Owner = owner;
Completion = completion;
UserOptions = userOptions;
}
public long DueTime { get; private set; }
public DbConnection Owner { get; private set; }
public TaskCompletionSource<DbConnectionInternal> Completion { get; private set; }
public DbConnectionOptions UserOptions { get; private set; }
}
private sealed class PoolWaitHandles
{
private readonly Semaphore _poolSemaphore;
private readonly ManualResetEvent _errorEvent;
// Using a Mutex requires ThreadAffinity because SQL CLR can swap
// the underlying Win32 thread associated with a managed thread in preemptive mode.
// Using an AutoResetEvent does not have that complication.
private readonly Semaphore _creationSemaphore;
private readonly WaitHandle[] _handlesWithCreate;
private readonly WaitHandle[] _handlesWithoutCreate;
internal PoolWaitHandles()
{
_poolSemaphore = new Semaphore(0, MAX_Q_SIZE);
_errorEvent = new ManualResetEvent(false);
_creationSemaphore = new Semaphore(1, 1);
_handlesWithCreate = new WaitHandle[] { _poolSemaphore, _errorEvent, _creationSemaphore };
_handlesWithoutCreate = new WaitHandle[] { _poolSemaphore, _errorEvent };
}
internal Semaphore CreationSemaphore
{
get { return _creationSemaphore; }
}
internal ManualResetEvent ErrorEvent
{
get { return _errorEvent; }
}
internal Semaphore PoolSemaphore
{
get { return _poolSemaphore; }
}
internal WaitHandle[] GetHandles(bool withCreate)
{
return withCreate ? _handlesWithCreate : _handlesWithoutCreate;
}
}
private const int MAX_Q_SIZE = (int)0x00100000;
// The order of these is important; we want the WaitAny call to be signaled
// for a free object before a creation signal. Only the index of the first
// signaled object is returned from the WaitAny call.
private const int SEMAPHORE_HANDLE = (int)0x0;
private const int ERROR_HANDLE = (int)0x1;
private const int CREATION_HANDLE = (int)0x2;
private const int BOGUS_HANDLE = (int)0x3;
private const int ERROR_WAIT_DEFAULT = 5 * 1000; // 5 seconds
// we do want a testable, repeatable set of generated random numbers
private static readonly Random s_random = new Random(5101977); // Value obtained from Dave Driver
private readonly int _cleanupWait;
private readonly DbConnectionPoolIdentity _identity;
private readonly DbConnectionFactory _connectionFactory;
private readonly DbConnectionPoolGroup _connectionPoolGroup;
private readonly DbConnectionPoolGroupOptions _connectionPoolGroupOptions;
private DbConnectionPoolProviderInfo _connectionPoolProviderInfo;
private State _state;
private readonly ConcurrentStack<DbConnectionInternal> _stackOld = new ConcurrentStack<DbConnectionInternal>();
private readonly ConcurrentStack<DbConnectionInternal> _stackNew = new ConcurrentStack<DbConnectionInternal>();
private readonly ConcurrentQueue<PendingGetConnection> _pendingOpens = new ConcurrentQueue<PendingGetConnection>();
private int _pendingOpensWaiting = 0;
private readonly WaitCallback _poolCreateRequest;
private int _waitCount;
private readonly PoolWaitHandles _waitHandles;
private Exception _resError;
private volatile bool _errorOccurred;
private int _errorWait;
private Timer _errorTimer;
private Timer _cleanupTimer;
private readonly List<DbConnectionInternal> _objectList;
private int _totalObjects;
// only created by DbConnectionPoolGroup.GetConnectionPool
internal DbConnectionPool(
DbConnectionFactory connectionFactory,
DbConnectionPoolGroup connectionPoolGroup,
DbConnectionPoolIdentity identity,
DbConnectionPoolProviderInfo connectionPoolProviderInfo)
{
Debug.Assert(null != connectionPoolGroup, "null connectionPoolGroup");
if ((null != identity) && identity.IsRestricted)
{
throw ADP.InternalError(ADP.InternalErrorCode.AttemptingToPoolOnRestrictedToken);
}
_state = State.Initializing;
lock (s_random)
{ // Random.Next is not thread-safe
_cleanupWait = s_random.Next(12, 24) * 10 * 1000; // 2-4 minutes in 10 sec intervals
}
_connectionFactory = connectionFactory;
_connectionPoolGroup = connectionPoolGroup;
_connectionPoolGroupOptions = connectionPoolGroup.PoolGroupOptions;
_connectionPoolProviderInfo = connectionPoolProviderInfo;
_identity = identity;
_waitHandles = new PoolWaitHandles();
_errorWait = ERROR_WAIT_DEFAULT;
_errorTimer = null; // No error yet.
_objectList = new List<DbConnectionInternal>(MaxPoolSize);
_poolCreateRequest = new WaitCallback(PoolCreateRequest); // used by CleanupCallback
_state = State.Running;
//_cleanupTimer & QueuePoolCreateRequest is delayed until DbConnectionPoolGroup calls
// StartBackgroundCallbacks after pool is actually in the collection
}
private int CreationTimeout
{
get { return PoolGroupOptions.CreationTimeout; }
}
internal int Count
{
get { return _totalObjects; }
}
internal DbConnectionFactory ConnectionFactory
{
get { return _connectionFactory; }
}
internal bool ErrorOccurred
{
get { return _errorOccurred; }
}
internal TimeSpan LoadBalanceTimeout
{
get { return PoolGroupOptions.LoadBalanceTimeout; }
}
private bool NeedToReplenish
{
get
{
if (State.Running != _state) // Don't allow connection create when not running.
return false;
int totalObjects = Count;
if (totalObjects >= MaxPoolSize)
return false;
if (totalObjects < MinPoolSize)
return true;
int freeObjects = (_stackNew.Count + _stackOld.Count);
int waitingRequests = _waitCount;
bool needToReplenish = (freeObjects < waitingRequests) || ((freeObjects == waitingRequests) && (totalObjects > 1));
return needToReplenish;
}
}
internal DbConnectionPoolIdentity Identity
{
get { return _identity; }
}
internal bool IsRunning
{
get { return State.Running == _state; }
}
private int MaxPoolSize
{
get { return PoolGroupOptions.MaxPoolSize; }
}
private int MinPoolSize
{
get { return PoolGroupOptions.MinPoolSize; }
}
internal DbConnectionPoolGroup PoolGroup
{
get { return _connectionPoolGroup; }
}
internal DbConnectionPoolGroupOptions PoolGroupOptions
{
get { return _connectionPoolGroupOptions; }
}
internal DbConnectionPoolProviderInfo ProviderInfo
{
get { return _connectionPoolProviderInfo; }
}
internal bool UseLoadBalancing
{
get { return PoolGroupOptions.UseLoadBalancing; }
}
private bool UsingIntegrateSecurity
{
get { return (null != _identity && DbConnectionPoolIdentity.NoIdentity != _identity); }
}
private void CleanupCallback(Object state)
{
// Called when the cleanup-timer ticks over.
// This is the automatic pruning method. Every period, we will
// perform a two-step process:
//
// First, for each free object above MinPoolSize, we will obtain a
// semaphore representing one object and destroy one from old stack.
// We will continue this until we either reach MinPoolSize, we are
// unable to obtain a free object, or we have exhausted all the
// objects on the old stack.
//
// Second we move all free objects on the new stack to the old stack.
// So, every period the objects on the old stack are destroyed and
// the objects on the new stack are pushed to the old stack. All
// objects that are currently out and in use are not on either stack.
//
// With this logic, objects are pruned from the pool if unused for
// at least one period but not more than two periods.
// Destroy free objects that put us above MinPoolSize from old stack.
while (Count > MinPoolSize)
{ // While above MinPoolSize...
if (_waitHandles.PoolSemaphore.WaitOne(0))
{
// We obtained an object from the semaphore.
DbConnectionInternal obj;
if (_stackOld.TryPop(out obj))
{
Debug.Assert(obj != null, "null connection is not expected");
// If we obtained one from the old stack, destroy it.
DestroyObject(obj);
}
else
{
// Else we exhausted the old stack (the object the
// semaphore represents is on the new stack), so break.
_waitHandles.PoolSemaphore.Release(1);
break;
}
}
else
{
break;
}
}
// Push to the old-stack. For each free object, move object from
// new stack to old stack.
if (_waitHandles.PoolSemaphore.WaitOne(0))
{
for (;;)
{
DbConnectionInternal obj;
if (!_stackNew.TryPop(out obj))
break;
Debug.Assert(obj != null, "null connection is not expected");
Debug.Assert(!obj.IsEmancipated, "pooled object not in pool");
Debug.Assert(obj.CanBePooled, "pooled object is not poolable");
_stackOld.Push(obj);
}
_waitHandles.PoolSemaphore.Release(1);
}
// Queue up a request to bring us up to MinPoolSize
QueuePoolCreateRequest();
}
internal void Clear()
{
DbConnectionInternal obj;
// First, quickly doom everything.
lock (_objectList)
{
int count = _objectList.Count;
for (int i = 0; i < count; ++i)
{
obj = _objectList[i];
if (null != obj)
{
obj.DoNotPoolThisConnection();
}
}
}
// Second, dispose of all the free connections.
while (_stackNew.TryPop(out obj))
{
Debug.Assert(obj != null, "null connection is not expected");
DestroyObject(obj);
}
while (_stackOld.TryPop(out obj))
{
Debug.Assert(obj != null, "null connection is not expected");
DestroyObject(obj);
}
// Finally, reclaim everything that's emancipated (which, because
// it's been doomed, will cause it to be disposed of as well)
ReclaimEmancipatedObjects();
}
private Timer CreateCleanupTimer()
{
return (new Timer(new TimerCallback(this.CleanupCallback), null, _cleanupWait, _cleanupWait));
}
private DbConnectionInternal CreateObject(DbConnection owningObject, DbConnectionOptions userOptions, DbConnectionInternal oldConnection)
{
DbConnectionInternal newObj = null;
try
{
newObj = _connectionFactory.CreatePooledConnection(this, owningObject, _connectionPoolGroup.ConnectionOptions, _connectionPoolGroup.PoolKey, userOptions);
if (null == newObj)
{
throw ADP.InternalError(ADP.InternalErrorCode.CreateObjectReturnedNull); // CreateObject succeeded, but null object
}
if (!newObj.CanBePooled)
{
throw ADP.InternalError(ADP.InternalErrorCode.NewObjectCannotBePooled); // CreateObject succeeded, but non-poolable object
}
newObj.PrePush(null);
lock (_objectList)
{
if ((oldConnection != null) && (oldConnection.Pool == this))
{
_objectList.Remove(oldConnection);
}
_objectList.Add(newObj);
_totalObjects = _objectList.Count;
}
// If the old connection belonged to another pool, we need to remove it from that
if (oldConnection != null)
{
var oldConnectionPool = oldConnection.Pool;
if (oldConnectionPool != null && oldConnectionPool != this)
{
Debug.Assert(oldConnectionPool._state == State.ShuttingDown, "Old connection's pool should be shutting down");
lock (oldConnectionPool._objectList)
{
oldConnectionPool._objectList.Remove(oldConnection);
oldConnectionPool._totalObjects = oldConnectionPool._objectList.Count;
}
}
}
// Reset the error wait:
_errorWait = ERROR_WAIT_DEFAULT;
}
catch (Exception e)
{
if (!ADP.IsCatchableExceptionType(e))
{
throw;
}
newObj = null; // set to null, so we do not return bad new object
// Failed to create instance
_resError = e;
// Make sure the timer starts even if ThreadAbort occurs after setting the ErrorEvent.
// timer allocation has to be done out of CER block
Timer t = new Timer(new TimerCallback(this.ErrorCallback), null, Timeout.Infinite, Timeout.Infinite);
bool timerIsNotDisposed;
try { }
finally
{
_waitHandles.ErrorEvent.Set();
_errorOccurred = true;
// Enable the timer.
// Note that the timer is created to allow periodic invocation. If ThreadAbort occurs in the middle of ErrorCallback,
// the timer will restart. Otherwise, the timer callback (ErrorCallback) destroys the timer after resetting the error to avoid second callback.
_errorTimer = t;
timerIsNotDisposed = t.Change(_errorWait, _errorWait);
}
Debug.Assert(timerIsNotDisposed, "ErrorCallback timer has been disposed");
if (30000 < _errorWait)
{
_errorWait = 60000;
}
else
{
_errorWait *= 2;
}
throw;
}
return newObj;
}
private void DeactivateObject(DbConnectionInternal obj)
{
obj.DeactivateConnection();
bool returnToGeneralPool = false;
bool destroyObject = false;
if (obj.IsConnectionDoomed)
{
// the object is not fit for reuse -- just dispose of it.
destroyObject = true;
}
else
{
// NOTE: constructor should ensure that current state cannot be State.Initializing, so it can only
// be State.Running or State.ShuttingDown
Debug.Assert(_state == State.Running || _state == State.ShuttingDown);
lock (obj)
{
// A connection with a delegated transaction cannot currently
// be returned to a different customer until the transaction
// actually completes, so we send it into Stasis -- the SysTx
// transaction object will ensure that it is owned (not lost),
// and it will be certain to put it back into the pool.
if (_state == State.ShuttingDown)
{
// connection is being closed and the pool has been marked as shutting
// down, so destroy this object.
destroyObject = true;
}
else
{
if (obj.CanBePooled)
{
// We must put this connection into the transacted pool
// while inside a lock to prevent a race condition with
// the transaction asynchronously completing on a second
// thread.
// return to general pool
returnToGeneralPool = true;
}
else
{
// object is not fit for reuse -- just dispose of it
destroyObject = true;
}
}
}
}
if (returnToGeneralPool)
{
// Only push the connection into the general pool if we didn't
// already push it onto the transacted pool, put it into stasis,
// or want to destroy it.
Debug.Assert(destroyObject == false);
PutNewObject(obj);
}
else if (destroyObject)
{
DestroyObject(obj);
QueuePoolCreateRequest();
}
//-------------------------------------------------------------------------------------
// postcondition
// ensure that the connection was processed
Debug.Assert(
returnToGeneralPool == true || destroyObject == true);
}
internal void DestroyObject(DbConnectionInternal obj)
{
// A connection with a delegated transaction cannot be disposed of
// until the delegated transaction has actually completed. Instead,
// we simply leave it alone; when the transaction completes, it will
// come back through PutObjectFromTransactedPool, which will call us
// again.
bool removed = false;
lock (_objectList)
{
removed = _objectList.Remove(obj);
Debug.Assert(removed, "attempt to DestroyObject not in list");
_totalObjects = _objectList.Count;
}
if (removed)
{
}
obj.Dispose();
}
private void ErrorCallback(Object state)
{
_errorOccurred = false;
_waitHandles.ErrorEvent.Reset();
// the error state is cleaned, destroy the timer to avoid periodic invocation
Timer t = _errorTimer;
_errorTimer = null;
if (t != null)
{
t.Dispose(); // Cancel timer request.
}
}
// TODO: move this to src/Common and integrate with SqlClient
// Note: Odbc connections are not passing through this code
private Exception TryCloneCachedException()
{
return _resError;
}
private void WaitForPendingOpen()
{
PendingGetConnection next;
do
{
bool started = false;
try
{
try { }
finally
{
started = Interlocked.CompareExchange(ref _pendingOpensWaiting, 1, 0) == 0;
}
if (!started)
{
return;
}
while (_pendingOpens.TryDequeue(out next))
{
if (next.Completion.Task.IsCompleted)
{
continue;
}
uint delay;
if (next.DueTime == Timeout.Infinite)
{
delay = unchecked((uint)Timeout.Infinite);
}
else
{
delay = (uint)Math.Max(ADP.TimerRemainingMilliseconds(next.DueTime), 0);
}
DbConnectionInternal connection = null;
bool timeout = false;
Exception caughtException = null;
try
{
bool allowCreate = true;
bool onlyOneCheckConnection = false;
timeout = !TryGetConnection(next.Owner, delay, allowCreate, onlyOneCheckConnection, next.UserOptions, out connection);
}
catch (Exception e)
{
caughtException = e;
}
if (caughtException != null)
{
next.Completion.TrySetException(caughtException);
}
else if (timeout)
{
next.Completion.TrySetException(ADP.ExceptionWithStackTrace(ADP.PooledOpenTimeout()));
}
else
{
Debug.Assert(connection != null, "connection should never be null in success case");
if (!next.Completion.TrySetResult(connection))
{
// if the completion was cancelled, let's try to get this connection back for the next try
PutObject(connection, next.Owner);
}
}
}
}
finally
{
if (started)
{
Interlocked.Exchange(ref _pendingOpensWaiting, 0);
}
}
} while (_pendingOpens.TryPeek(out next));
}
internal bool TryGetConnection(DbConnection owningObject, TaskCompletionSource<DbConnectionInternal> retry, DbConnectionOptions userOptions, out DbConnectionInternal connection)
{
uint waitForMultipleObjectsTimeout = 0;
bool allowCreate = false;
if (retry == null)
{
waitForMultipleObjectsTimeout = (uint)CreationTimeout;
// Set the wait timeout to INFINITE (-1) if the SQL connection timeout is 0 (== infinite)
if (waitForMultipleObjectsTimeout == 0)
waitForMultipleObjectsTimeout = unchecked((uint)Timeout.Infinite);
allowCreate = true;
}
if (_state != State.Running)
{
connection = null;
return true;
}
bool onlyOneCheckConnection = true;
if (TryGetConnection(owningObject, waitForMultipleObjectsTimeout, allowCreate, onlyOneCheckConnection, userOptions, out connection))
{
return true;
}
else if (retry == null)
{
// timed out on a sync call
return true;
}
var pendingGetConnection =
new PendingGetConnection(
CreationTimeout == 0 ? Timeout.Infinite : ADP.TimerCurrent() + ADP.TimerFromSeconds(CreationTimeout / 1000),
owningObject,
retry,
userOptions);
_pendingOpens.Enqueue(pendingGetConnection);
// it is better to StartNew too many times than not enough
if (_pendingOpensWaiting == 0)
{
Thread waitOpenThread = new Thread(WaitForPendingOpen);
waitOpenThread.IsBackground = true;
waitOpenThread.Start();
}
connection = null;
return false;
}
private bool TryGetConnection(DbConnection owningObject, uint waitForMultipleObjectsTimeout, bool allowCreate, bool onlyOneCheckConnection, DbConnectionOptions userOptions, out DbConnectionInternal connection)
{
DbConnectionInternal obj = null;
if (null == obj)
{
Interlocked.Increment(ref _waitCount);
do
{
int waitResult = BOGUS_HANDLE;
try
{
try
{
}
finally
{
waitResult = WaitHandle.WaitAny(_waitHandles.GetHandles(allowCreate), unchecked((int)waitForMultipleObjectsTimeout));
}
// From the WaitAny docs: "If more than one object became signaled during
// the call, this is the array index of the signaled object with the
// smallest index value of all the signaled objects." This is important
// so that the free object signal will be returned before a creation
// signal.
switch (waitResult)
{
case WaitHandle.WaitTimeout:
Interlocked.Decrement(ref _waitCount);
connection = null;
return false;
case ERROR_HANDLE:
// Throw the error that PoolCreateRequest stashed.
Interlocked.Decrement(ref _waitCount);
throw TryCloneCachedException();
case CREATION_HANDLE:
try
{
obj = UserCreateRequest(owningObject, userOptions);
}
catch
{
if (null == obj)
{
Interlocked.Decrement(ref _waitCount);
}
throw;
}
finally
{
// Ensure that we release this waiter, regardless
// of any exceptions that may be thrown.
if (null != obj)
{
Interlocked.Decrement(ref _waitCount);
}
}
if (null == obj)
{
// If we were not able to create an object, check to see if
// we reached MaxPoolSize. If so, we will no longer wait on
// the CreationHandle, but instead wait for a free object or
// the timeout.
if (Count >= MaxPoolSize && 0 != MaxPoolSize)
{
if (!ReclaimEmancipatedObjects())
{
// modify handle array not to wait on creation mutex anymore
Debug.Assert(2 == CREATION_HANDLE, "creation handle changed value");
allowCreate = false;
}
}
}
break;
case SEMAPHORE_HANDLE:
//
// guaranteed available inventory
//
Interlocked.Decrement(ref _waitCount);
obj = GetFromGeneralPool();
if ((obj != null) && (!obj.IsConnectionAlive()))
{
DestroyObject(obj);
obj = null; // Setting to null in case creating a new object fails
if (onlyOneCheckConnection)
{
if (_waitHandles.CreationSemaphore.WaitOne(unchecked((int)waitForMultipleObjectsTimeout)))
{
try
{
obj = UserCreateRequest(owningObject, userOptions);
}
finally
{
_waitHandles.CreationSemaphore.Release(1);
}
}
else
{
// Timeout waiting for creation semaphore - return null
connection = null;
return false;
}
}
}
break;
default:
Interlocked.Decrement(ref _waitCount);
throw ADP.InternalError(ADP.InternalErrorCode.UnexpectedWaitAnyResult);
}
}
finally
{
if (CREATION_HANDLE == waitResult)
{
_waitHandles.CreationSemaphore.Release(1);
}
}
} while (null == obj);
}
if (null != obj)
{
PrepareConnection(owningObject, obj);
}
connection = obj;
return true;
}
private void PrepareConnection(DbConnection owningObject, DbConnectionInternal obj)
{
lock (obj)
{ // Protect against Clear and ReclaimEmancipatedObjects, which call IsEmancipated, which is affected by PrePush and PostPop
obj.PostPop(owningObject);
}
try
{
obj.ActivateConnection();
}
catch
{
// if Activate throws an exception
// put it back in the pool or have it properly disposed of
this.PutObject(obj, owningObject);
throw;
}
}
/// <summary>
/// Creates a new connection to replace an existing connection
/// </summary>
/// <param name="owningObject">Outer connection that currently owns <paramref name="oldConnection"/></param>
/// <param name="userOptions">Options used to create the new connection</param>
/// <param name="oldConnection">Inner connection that will be replaced</param>
/// <returns>A new inner connection that is attached to the <paramref name="owningObject"/></returns>
internal DbConnectionInternal ReplaceConnection(DbConnection owningObject, DbConnectionOptions userOptions, DbConnectionInternal oldConnection)
{
DbConnectionInternal newConnection = UserCreateRequest(owningObject, userOptions, oldConnection);
if (newConnection != null)
{
PrepareConnection(owningObject, newConnection);
oldConnection.PrepareForReplaceConnection();
oldConnection.DeactivateConnection();
oldConnection.Dispose();
}
return newConnection;
}
private DbConnectionInternal GetFromGeneralPool()
{
DbConnectionInternal obj = null;
if (!_stackNew.TryPop(out obj))
{
if (!_stackOld.TryPop(out obj))
{
obj = null;
}
else
{
Debug.Assert(obj != null, "null connection is not expected");
}
}
else
{
Debug.Assert(obj != null, "null connection is not expected");
}
// When another thread is clearing this pool,
// it will remove all connections in this pool which causes the
// following assert to fire, which really mucks up stress against
// checked bits.
if (null != obj)
{
}
return (obj);
}
private void PoolCreateRequest(object state)
{
// called by pooler to ensure pool requests are currently being satisfied -
// creation mutex has not been obtained
if (State.Running == _state)
{
// in case WaitForPendingOpen ever failed with no subsequent OpenAsync calls,
// start it back up again
if (!_pendingOpens.IsEmpty && _pendingOpensWaiting == 0)
{
Thread waitOpenThread = new Thread(WaitForPendingOpen);
waitOpenThread.IsBackground = true;
waitOpenThread.Start();
}
// Before creating any new objects, reclaim any released objects that were
// not closed.
ReclaimEmancipatedObjects();
if (!ErrorOccurred)
{
if (NeedToReplenish)
{
// Check to see if pool was created using integrated security and if so, make
// sure the identity of current user matches that of user that created pool.
// If it doesn't match, do not create any objects on the ThreadPool thread,
// since either Open will fail or we will open a object for this pool that does
// not belong in this pool. The side effect of this is that if using integrated
// security min pool size cannot be guaranteed.
if (UsingIntegrateSecurity && !_identity.Equals(DbConnectionPoolIdentity.GetCurrent()))
{
return;
}
int waitResult = BOGUS_HANDLE;
try
{
try { }
finally
{
waitResult = WaitHandle.WaitAny(_waitHandles.GetHandles(withCreate: true), CreationTimeout);
}
if (CREATION_HANDLE == waitResult)
{
DbConnectionInternal newObj;
// Check ErrorOccurred again after obtaining mutex
if (!ErrorOccurred)
{
while (NeedToReplenish)
{
// Don't specify any user options because there is no outer connection associated with the new connection
newObj = CreateObject(owningObject: null, userOptions: null, oldConnection: null);
// We do not need to check error flag here, since we know if
// CreateObject returned null, we are in error case.
if (null != newObj)
{
PutNewObject(newObj);
}
else
{
break;
}
}
}
}
else if (WaitHandle.WaitTimeout == waitResult)
{
// do not wait forever and potentially block this worker thread
// instead wait for a period of time and just requeue to try again
QueuePoolCreateRequest();
}
}
finally
{
if (CREATION_HANDLE == waitResult)
{
// reuse waitResult and ignore its value
_waitHandles.CreationSemaphore.Release(1);
}
}
}
}
}
}
internal void PutNewObject(DbConnectionInternal obj)
{
Debug.Assert(null != obj, "why are we adding a null object to the pool?");
// Debug.Assert(obj.CanBePooled, "non-poolable object in pool");
_stackNew.Push(obj);
_waitHandles.PoolSemaphore.Release(1);
}
internal void PutObject(DbConnectionInternal obj, object owningObject)
{
Debug.Assert(null != obj, "null obj?");
// Once a connection is closing (which is the state that we're in at
// this point in time) you cannot delegate a transaction to or enlist
// a transaction in it, so we can correctly presume that if there was
// not a delegated or enlisted transaction to start with, that there
// will not be a delegated or enlisted transaction once we leave the
// lock.
lock (obj)
{
// Calling PrePush prevents the object from being reclaimed
// once we leave the lock, because it sets _pooledCount such
// that it won't appear to be out of the pool. What that
// means, is that we're now responsible for this connection:
// it won't get reclaimed if we drop the ball somewhere.
obj.PrePush(owningObject);
}
DeactivateObject(obj);
}
private void QueuePoolCreateRequest()
{
if (State.Running == _state)
{
// Make sure we're at quota by posting a callback to the threadpool.
ThreadPool.QueueUserWorkItem(_poolCreateRequest);
}
}
private bool ReclaimEmancipatedObjects()
{
bool emancipatedObjectFound = false;
List<DbConnectionInternal> reclaimedObjects = new List<DbConnectionInternal>();
int count;
lock (_objectList)
{
count = _objectList.Count;
for (int i = 0; i < count; ++i)
{
DbConnectionInternal obj = _objectList[i];
if (null != obj)
{
bool locked = false;
try
{
Monitor.TryEnter(obj, ref locked);
if (locked)
{ // avoid race condition with PrePush/PostPop and IsEmancipated
if (obj.IsEmancipated)
{
// Inside the lock, we want to do as little
// as possible, so we simply mark the object
// as being in the pool, but hand it off to
// an out of pool list to be deactivated,
// etc.
obj.PrePush(null);
reclaimedObjects.Add(obj);
}
}
}
finally
{
if (locked)
Monitor.Exit(obj);
}
}
}
}
// NOTE: we don't want to call DeactivateObject while we're locked,
// because it can make roundtrips to the server and this will block
// object creation in the pooler. Instead, we queue things we need
// to do up, and process them outside the lock.
count = reclaimedObjects.Count;
for (int i = 0; i < count; ++i)
{
DbConnectionInternal obj = reclaimedObjects[i];
emancipatedObjectFound = true;
DeactivateObject(obj);
}
return emancipatedObjectFound;
}
internal void Startup()
{
_cleanupTimer = CreateCleanupTimer();
if (NeedToReplenish)
{
QueuePoolCreateRequest();
}
}
internal void Shutdown()
{
_state = State.ShuttingDown;
// deactivate timer callbacks
Timer t = _cleanupTimer;
_cleanupTimer = null;
if (null != t)
{
t.Dispose();
}
}
private DbConnectionInternal UserCreateRequest(DbConnection owningObject, DbConnectionOptions userOptions, DbConnectionInternal oldConnection = null)
{
// called by user when they were not able to obtain a free object but
// instead obtained creation mutex
DbConnectionInternal obj = null;
if (ErrorOccurred)
{
throw TryCloneCachedException();
}
else
{
if ((oldConnection != null) || (Count < MaxPoolSize) || (0 == MaxPoolSize))
{
// If we have an odd number of total objects, reclaim any dead objects.
// If we did not find any objects to reclaim, create a new one.
if ((oldConnection != null) || (Count & 0x1) == 0x1 || !ReclaimEmancipatedObjects())
obj = CreateObject(owningObject, userOptions, oldConnection);
}
return obj;
}
}
}
}
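// Illustrative sketch (not the provider implementation): CleanupCallback above uses two stacks
// so that an idle pooled connection survives at least one full cleanup period but at most two.
// Each tick destroys whatever is still on the "old" stack and then demotes everything on the
// "new" stack to "old". The standalone generic version below shows only that rotation; it
// ignores MinPoolSize and the semaphore accounting that the real pool layers on top.
using System;
using System.Collections.Concurrent;

internal sealed class TwoStageIdleCache<T> where T : class
{
    private readonly ConcurrentStack<T> _stackNew = new ConcurrentStack<T>();
    private readonly ConcurrentStack<T> _stackOld = new ConcurrentStack<T>();

    // Called when an item is returned to the cache.
    public void Put(T item)
    {
        _stackNew.Push(item);
    }

    // Called when an item is requested; prefers recently returned items.
    public T Take()
    {
        T item;
        if (_stackNew.TryPop(out item) || _stackOld.TryPop(out item))
        {
            return item;
        }
        return null;
    }

    // Called once per cleanup period: drop aged items, then age the rest.
    public void Cleanup(Action<T> destroy)
    {
        T item;
        while (_stackOld.TryPop(out item)) destroy(item);        // idle for at least one full period
        while (_stackNew.TryPop(out item)) _stackOld.Push(item); // becomes "old" for the next period
    }
}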
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using MVCing.Areas.HelpPage.ModelDescriptions;
using MVCing.Areas.HelpPage.Models;
namespace MVCing.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
}
/// <summary>
        /// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
}
/// <summary>
/// Gets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
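            // config.Properties is a ConcurrentDictionary, so GetOrAdd lazily creates a single
            // shared HelpPageSampleGenerator per HttpConfiguration instance.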
return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
typeof(HelpPageSampleGenerator),
k => new HelpPageSampleGenerator());
}
/// <summary>
/// Sets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
config.Properties.AddOrUpdate(
typeof(HelpPageSampleGenerator),
k => sampleGenerator,
(k, o) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
typeof(ModelDescriptionGenerator),
k => InitializeModelDescriptionGenerator(config));
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
        /// A <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
object model;
string modelId = ApiModelPrefix + apiDescriptionId;
if (!config.Properties.TryGetValue(modelId, out model))
{
Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
if (apiDescription != null)
{
model = GenerateApiModel(apiDescription, config);
config.Properties.TryAdd(modelId, model);
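                    // TryAdd may lose a race with a concurrent request for the same API; the locally
                    // generated model is still returned, and an equivalent one stays in the cache.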
}
}
return (HelpPageApiModel)model;
}
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
HelpPageApiModel apiModel = new HelpPageApiModel()
{
ApiDescription = apiDescription,
};
ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
GenerateUriParameters(apiModel, modelGenerator);
GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
GenerateResourceDescription(apiModel, modelGenerator);
GenerateSamples(apiModel, sampleGenerator);
return apiModel;
}
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromUri)
{
HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
Type parameterType = null;
ModelDescription typeDescription = null;
ComplexTypeModelDescription complexTypeDescription = null;
if (parameterDescriptor != null)
{
parameterType = parameterDescriptor.ParameterType;
typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
complexTypeDescription = typeDescription as ComplexTypeModelDescription;
}
// Example:
// [TypeConverter(typeof(PointConverter))]
// public class Point
// {
// public Point(int x, int y)
// {
// X = x;
// Y = y;
// }
// public int X { get; set; }
// public int Y { get; set; }
// }
// Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
//
// public class Point
// {
// public int X { get; set; }
// public int Y { get; set; }
// }
// Regular complex class Point will have properties X and Y added to UriParameters collection.
if (complexTypeDescription != null
&& !IsBindableWithTypeConverter(parameterType))
{
foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
{
apiModel.UriParameters.Add(uriParameter);
}
}
else if (parameterDescriptor != null)
{
ParameterDescription uriParameter =
AddParameterDescription(apiModel, apiParameter, typeDescription);
if (!parameterDescriptor.IsOptional)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
}
object defaultValue = parameterDescriptor.DefaultValue;
if (defaultValue != null)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
}
}
else
{
Debug.Assert(parameterDescriptor == null);
// If parameterDescriptor is null, this is an undeclared route parameter which only occurs
// when source is FromUri. Ignored in request model and among resource parameters but listed
// as a simple string here.
ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
AddParameterDescription(apiModel, apiParameter, modelDescription);
}
}
}
}
private static bool IsBindableWithTypeConverter(Type parameterType)
{
if (parameterType == null)
{
return false;
}
return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
ParameterDescription parameterDescription = new ParameterDescription
{
Name = apiParameter.Name,
Documentation = apiParameter.Documentation,
TypeDescription = typeDescription,
};
apiModel.UriParameters.Add(parameterDescription);
return parameterDescription;
}
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromBody)
{
Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
apiModel.RequestDocumentation = apiParameter.Documentation;
}
else if (apiParameter.ParameterDescriptor != null &&
apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
{
Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
if (parameterType != null)
{
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
}
}
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
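            // ResponseType reflects an explicit response-type override reported by ApiExplorer;
            // otherwise fall back to the action's declared return type.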
Type responseType = response.ResponseType ?? response.DeclaredType;
if (responseType != null && responseType != typeof(void))
{
apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
try
{
foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
{
apiModel.SampleRequests.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
{
apiModel.SampleResponses.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
}
catch (Exception e)
{
apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
"An exception has occurred while generating the sample. Exception message: {0}",
HelpPageSampleGenerator.UnwrapException(e).Message));
}
}
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
p => p.Source == ApiParameterSource.FromBody ||
(p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
if (parameterDescription == null)
{
resourceType = null;
return false;
}
resourceType = parameterDescription.ParameterDescriptor.ParameterType;
if (resourceType == typeof(HttpRequestMessage))
{
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
}
if (resourceType == null)
{
parameterDescription = null;
return false;
}
return true;
}
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
foreach (ApiDescription api in apis)
{
ApiParameterDescription parameterDescription;
Type parameterType;
if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
{
modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
return modelGenerator;
}
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
InvalidSample invalidSample = sample as InvalidSample;
if (invalidSample != null)
{
apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
}
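A minimal usage sketch for the extension methods above, assuming the standard Web API HelpPage setup. HelpPageConfigSketch, the Values controller, and the sample values are hypothetical placeholders; in the stock template this wiring lives in HelpPageConfig.Register.
using System;
using System.Collections.Generic;
using System.Net.Http.Headers;
using System.Web.Http;
namespace MVCing.Areas.HelpPage
{
    public static class HelpPageConfigSketch
    {
        public static void Register(HttpConfiguration config)
        {
            // Canned values the sample generator can fall back to when building request/response samples.
            config.SetSampleObjects(new Dictionary<Type, object>
            {
                { typeof(string), "sample string" },
                { typeof(DateTime), new DateTime(2020, 1, 1) }
            });
            // Override the JSON response sample for a hypothetical Values.Get action.
            config.SetSampleResponse(
                "{ \"id\": 1, \"name\": \"example\" }",
                new MediaTypeHeaderValue("application/json"),
                "Values",
                "Get");
            // Tell the help page that a hypothetical Values.Post action returns a string in its response message.
            config.SetActualResponseType(typeof(string), "Values", "Post");
        }
    }
}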